diff --git a/.saplings b/.saplings deleted file mode 100644 index dcb1736e5..000000000 --- a/.saplings +++ /dev/null @@ -1,29 +0,0 @@ -pants = { - 'name': 'pants', - 'paths': [ - '3rdparty/python', - 'build-support/bin/ci.sh', - 'build-support/pylint', - 'build-support/python', - 'build-support/.gitignore', - 'build-support/virtualenv', - 'src/python/twitter/pants', - 'tests/python/twitter/pants', - '.gitignore', - '.reviewboardrc', - '.saplings', - '.travis.yml', - 'BUILD', - 'pants', - 'pants.bootstrap', - 'pants.ini', - 'pantsbuild_migration.py', - 'rbt' - ], -} - -splits = [ - pants -] - - diff --git a/README b/README index 8371c3d5f..6018b1fab 100644 --- a/README +++ b/README @@ -11,14 +11,12 @@ The build tool is custom and hosted in the repository itself. == Usage == To build all jvm code and run all tests: -$ ./pants goal test tests/{java,scala}/com/twitter/common:: +$ ./pants goal test {src,tests}/java/com/twitter/common:: +$ ./pants goal test {src,tests}/scala/com/twitter/common:: Likewise for python commons: $ ./pants tests/python/twitter/common/:all -And for the pants build tool itself: -$ ./pants tests/python/twitter/pants:all - To get help on pants: $ ./pants help diff --git a/pants.bootstrap b/pants.bootstrap deleted file mode 100755 index a2def421f..000000000 --- a/pants.bootstrap +++ /dev/null @@ -1,80 +0,0 @@ -#!/bin/bash -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -MY_DIR=$(dirname $0) -VIRTUAL_PYTHON=$MY_DIR/.python/bootstrap - -PANTS_EXE=$MY_DIR/src/python/twitter/pants/bin/pants_exe.py -PANTS_BOOTSTRAP_ARGS=${PANTS_BOOTSTRAP_ARGS:-''} - -function with_virtualenv() { - source $VIRTUAL_PYTHON/bin/activate - ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future PYTHONPATH=$MY_DIR/src/python "$@" - result=$? - deactivate - return $((result)) -} - -function build_pants() { - with_virtualenv python $PYTHON_ARGS $PANTS_EXE build $PANTS_BOOTSTRAP_ARGS \ - src/python/twitter/pants:pants -} - -function run_pants_bare() { - with_virtualenv python $PYTHON_ARGS $PANTS_EXE "$@" -} - -if [ ! -e $VIRTUAL_PYTHON/bin/python ]; then - $MY_DIR/build-support/python/setup.sh - rc=$? - if [[ $rc != 0 ]]; then - echo "Unable to bootstrap a sane Python virtual environment for pants!" - rm -rf $VIRTUAL_PYTHON - exit $rc - fi - source $VIRTUAL_PYTHON/bin/activate - deactivate -fi - -if [ -z "${PANTS_DEV}" -a ! -e $MY_DIR/pants.pex ]; then - build_pants - if [ ! -e $MY_DIR/dist/pants.pex ]; then - echo "Unable to build pants! Cannot continue!" - exit 1 - else - mv $MY_DIR/dist/pants.pex $MY_DIR/pants.pex - cp $MY_DIR/pants.pex $VIRTUAL_PYTHON/pants.pex - fi -fi - -if [ ! 
-z "${PANTS_DEV}" ]; then - # TODO(John Sirois): ideally this would just use pants.pex to run the pants target in py mode: - # ./pants.pex py src/python/twitter/pants "$@" - echo "*** running pants in dev mode from $PANTS_EXE ***" 1>&2 - run_pants_bare "$@" -else - ${MY_DIR}/pants.pex "$@" - return_code=$? - if [[ $return_code != 0 ]]; then - pants_version=$(unzip -q -c pants.pex PEX-INFO | \ - with_virtualenv python -c 'import json,sys; \ - print json.loads(sys.stdin.read()).get("build_properties", \ - {"revision": None})["revision"]') - echo "pants.pex version $pants_version" - exit $return_code - fi -fi diff --git a/pantsbuild_migration.py b/pantsbuild_migration.py deleted file mode 100644 index 670bfeab6..000000000 --- a/pantsbuild_migration.py +++ /dev/null @@ -1,275 +0,0 @@ -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import (nested_scopes, generators, division, absolute_import, with_statement, - print_function, unicode_literals) - -import os -import re -import sys - - -PANTS_ROOT = os.path.dirname(os.path.realpath(__file__)) -SRC_ROOT = os.path.join(PANTS_ROOT, 'src', 'python') -TESTS_ROOT = os.path.join(PANTS_ROOT, 'tests', 'python') - - -KNOWN_STD_LIBS = set(["abc", "anydbm", "argparse", "array", "asynchat", "asyncore", "atexit", "base64", - "BaseHTTPServer", "bisect", "bz2", "calendar", "cgitb", "cmd", "codecs", - "collections", "commands", "compileall", "ConfigParser", "contextlib", "Cookie", - "copy", "cPickle", "cProfile", "cStringIO", "csv", "datetime", "dbhash", "dbm", - "decimal", "difflib", "dircache", "dis", "doctest", "dumbdbm", "EasyDialogs", - "errno", "exceptions", "filecmp", "fileinput", "fnmatch", "fractions", - "functools", "gc", "gdbm", "getopt", "getpass", "gettext", "glob", "grp", "gzip", - "hashlib", "heapq", "hmac", "imaplib", "imp", "inspect", "itertools", "json", - "linecache", "locale", "logging", "mailbox", "math", "mhlib", "mmap", - "multiprocessing", "operator", "optparse", "os", "pdb", "pickle", "pipes", - "pkgutil", "platform", "plistlib", "pprint", "profile", "pstats", "pwd", "pyclbr", - "pydoc", "Queue", "random", "re", "readline", "resource", "rlcompleter", - "robotparser", "sched", "select", "shelve", "shlex", "shutil", "signal", - "SimpleXMLRPCServer", "site", "sitecustomize", "smtpd", "smtplib", "socket", - "SocketServer", "sqlite3", "string", "StringIO", "struct", "subprocess", "sys", - "sysconfig", "tabnanny", "tarfile", "tempfile", "textwrap", "threading", "time", - "timeit", "trace", "traceback", "unittest", "urllib", "urllib2", "urlparse", - "usercustomize", "uuid", "warnings", "weakref", "webbrowser", "whichdb", "xml", - "xmlrpclib", "zipfile", "zipimport", "zlib", 'builtins', '__builtin__']) - -OLD_PANTS_PACKAGE = 'twitter.pants' -NEW_PANTS_PACKAGE = 'pants' - -IMPORT_RE = re.compile(r'import\s+(.*)') -FROM_IMPORT_RE = re.compile(r'from\s+(.*)\s+import\s+(.*)') - -AUTHOR_RE = re.compile(r'__author__\s*=\s*.+') - -def has_continuation(line): - return line.endswith('\\') - -HEADER_COMMENT = [ - '# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).', - '# Licensed under the Apache License, Version 2.0 (see LICENSE).' 
-] - -FUTURE_IMPORTS = [ - 'from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,', - ' print_function, unicode_literals)' -] - -class Import(object): - def __init__(self, symbol): - self._symbol = symbol.strip() - if self._symbol.startswith(OLD_PANTS_PACKAGE): - self._symbol = self._symbol[8:] - - def package(self): - return self._symbol - - def sort_key(self): - return 'AAA' + self._symbol - - def __str__(self): - return 'import %s' % self._symbol - - -class FromImport(object): - def __init__(self, frm, symbols): - self._from = frm.strip() - if self._from.startswith(OLD_PANTS_PACKAGE): - self._from = NEW_PANTS_PACKAGE + self._from[len(OLD_PANTS_PACKAGE):] - self._symbols = filter(None, [filter(lambda c: c not in '()', s.strip()).strip() for s in symbols]) - - def package(self): - return self._from - - def sort_key(self): - return 'ZZZ' + self._from - - def __str__(self): - return 'from %s import %s' % (self._from, ', '.join(sorted(self._symbols))) - - -class BuildFile(object): - def __init__(self, path): - self._path = path - self._body = [] - - def process(self): - self.load() - self.parse_header() - self.save() - - def load(self): - with open(self._path, 'r') as infile: - self._old_lines = [line.rstrip() for line in infile.read().splitlines()] - - def parse_header(self): - # Find first non-header-comment line. - try: - p = next(i for i, line in enumerate(self._old_lines) if line and not line.startswith('#')) - except StopIteration: - return # File is empty (possibly except for a comment). - def _translate(line): - return line.replace('twitter/pants', 'pants').replace('twitter.pants', 'pants').replace( - 'src/python/twitter/common/', 'src/python/pants/BUILD.commons:twitter.common.' - ) - self._body = map(_translate, self._old_lines[p:]) - # Remove any trailing empty lines. - while not self._body[-1]: - self._body = self._body[0:-1] - - def save(self): - with open(self._path, 'w') as outfile: - if self._body: - for line in HEADER_COMMENT: - outfile.write(line) - outfile.write('\n') - outfile.write('\n') - for line in self._body: - outfile.write(line) - outfile.write('\n') - - -class PantsSourceFile(object): - def __init__(self, path): - self._path = path - absdir = os.path.dirname(os.path.abspath(path)) - if absdir.startswith(SRC_ROOT): - root = SRC_ROOT - elif absdir.startswith(TESTS_ROOT): - root = TESTS_ROOT - else: - raise Exception('File not in src or tests roots: %s' % path) - self._package = os.path.relpath(absdir, root).replace(os.path.sep, '.') - self._old_lines = [] - self._stdlib_imports = [] - self._thirdparty_imports = [] - self._pants_imports = [] - self._body = [] - - def process(self): - self.load() - self.parse_header() - self.save() - - def is_empty(self): - return not (self._stdlib_imports or self._thirdparty_imports or self._pants_imports or self._body) - - def load(self): - with open(self._path, 'r') as infile: - self._old_lines = [line.rstrip() for line in infile.read().splitlines()] - - def parse_header(self): - # Strip __author__. - lines = filter(lambda x: not AUTHOR_RE.match(x), self._old_lines) - - # Find first non-header-comment line. - try: - p = next(i for i, line in enumerate(lines) if line and not line.startswith('#')) - except StopIteration: - return # File is empty (possibly except for a comment). 
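
A note for readers of this deleted migration script: the `Import`/`FromImport` machinery above reduces to a single prefix rewrite from `twitter.pants` to `pants` (the `'AAA'`/`'ZZZ'` sort keys merely order plain imports ahead of from-imports). A minimal standalone sketch of that rewrite rule, with an illustrative function name that is not part of the script:

```python
OLD_PANTS_PACKAGE = 'twitter.pants'
NEW_PANTS_PACKAGE = 'pants'


def rewrite_package(symbol):
    """Rewrite a dotted module path from the old package root to the new one."""
    symbol = symbol.strip()
    if symbol == OLD_PANTS_PACKAGE or symbol.startswith(OLD_PANTS_PACKAGE + '.'):
        return NEW_PANTS_PACKAGE + symbol[len(OLD_PANTS_PACKAGE):]
    return symbol  # Non-pants imports (stdlib, thirdparty) pass through untouched.


assert rewrite_package('twitter.pants.base.target') == 'pants.base.target'
assert rewrite_package('twitter.common.log') == 'twitter.common.log'
```
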
- - content_lines = lines[p:] - - def add_import(imp): - s = imp.package() - if s.split('.', 1)[0] in KNOWN_STD_LIBS: - self._stdlib_imports.append(imp) - elif s.startswith(NEW_PANTS_PACKAGE): - self._pants_imports.append(imp) - else: - self._thirdparty_imports.append(imp) - - def is_import(line): - m = IMPORT_RE.match(line) - if m: - add_import(Import(m.group(1))) - return True - else: - return False - - def is_from_import(line): - def absify(imp): - if imp == '.': - return self._package - elif imp.startswith('.'): - return '%s.' % self._package + imp[1:] - else: - return imp - m = FROM_IMPORT_RE.match(line) - if m: - if not m.group(1) == '__future__': - add_import(FromImport(absify(m.group(1)), m.group(2).split(','))) - return True - else: - return False - - # Parse imports. - lines_iter = iter(content_lines) - line = '' - line_parts = [] - try: - while not line or is_import(line) or is_from_import(line): - line_parts = [lines_iter.next()] - while has_continuation(line_parts[-1]): - line_parts.append(lines_iter.next()) - line = ' '.join([x[:-1].strip() for x in line_parts[:-1]] + [line_parts[-1].strip()]) - if line.startswith('from ') and '(' in line: - line_parts = [line] - next_line = '' - while not ')' in next_line: - next_line = lines_iter.next().strip() - line_parts.append(next_line) - line = ' '.join(line_parts) - except StopIteration: - line_parts = [] - - def _translate(line): - return line.replace('twitter/pants', 'pants').replace('twitter.pants', 'pants') - self._body = map(_translate, [''] + line_parts + list(lines_iter)) - - # Remove any trailing empty lines. - while self._body and not self._body[-1]: - self._body = self._body[0:-1] - - def save(self): - sorted_stdlib_imports = map(str, sorted(self._stdlib_imports, key=lambda x: x.sort_key())) - sorted_thirdparty_imports = map(str, sorted(self._thirdparty_imports, key=lambda x: x.sort_key())) - sorted_pants_imports = map(str, sorted(self._pants_imports, key=lambda x: x.sort_key())) - with open(self._path, 'w') as outfile: - if not self.is_empty(): - for lines in [HEADER_COMMENT, FUTURE_IMPORTS, sorted_stdlib_imports, - sorted_thirdparty_imports, sorted_pants_imports]: - for line in lines: - outfile.write(line) - outfile.write('\n') - if lines: - outfile.write('\n') - for line in self._body: - outfile.write(line) - outfile.write('\n') - - -def handle_path(path): - if os.path.isfile(path): - if path.endswith('.py') and not path.endswith('pantsbuild_migration.py'): - print('PROCESSING: %s' % path) - srcfile = PantsSourceFile(path) - srcfile.process() - elif os.path.basename(path).startswith('BUILD'): - print('PROCESSING: %s' % path) - srcfile = BuildFile(path) - srcfile.process() - elif path.endswith('.rst') or path.endswith('.sh') or path.endswith('pants.bootstrap'): - print('PROCESSING: %s' % path) - with open(path, 'r') as infile: - content = infile.read() - new_content = content.replace('twitter.pants', 'pants').replace('twitter/pants', 'pants') - with open(path, 'w') as outfile: - outfile.write(new_content) - elif os.path.isdir(path): - for p in os.listdir(path): - handle_path(os.path.join(path, p)) - -if __name__ == '__main__': - path = sys.argv[1] - handle_path(path) diff --git a/src/python/twitter/common/python/README.md b/src/python/twitter/common/python/README.md index 3124076e8..dcf9bdddf 100644 --- a/src/python/twitter/common/python/README.md +++ b/src/python/twitter/common/python/README.md @@ -1,7 +1,7 @@ Pex.pex: Usage ============== 
-[PEX](https://github.com/twitter/commons/blob/master/src/python/twitter/pants/python/README.md) files are single-file lightweight virtual Python environments. +[PEX](http://pantsbuild.github.io/python-readme.html) files are single-file lightweight virtual Python environments. pex.pex is a utility that: * creates PEX files diff --git a/src/python/twitter/pants/.gitignore b/src/python/twitter/pants/.gitignore deleted file mode 100644 index ca2f74eb7..000000000 --- a/src/python/twitter/pants/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!/bin diff --git a/src/python/twitter/pants/README.md b/src/python/twitter/pants/README.md deleted file mode 100644 index 38e65722b..000000000 --- a/src/python/twitter/pants/README.md +++ /dev/null @@ -1 +0,0 @@ -[Pants documentation](http://pantsbuild.github.io/) has moved... \ No newline at end of file diff --git a/src/python/twitter/pants/__init__.py b/src/python/twitter/pants/__init__.py deleted file mode 100644 index 3e2ce18e7..000000000 --- a/src/python/twitter/pants/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__import__('pkg_resources').declare_namespace(__name__) diff --git a/src/python/twitter/pants/authentication/__init__.py b/src/python/twitter/pants/authentication/__init__.py deleted file mode 100644 index a5b9ce203..000000000 --- a/src/python/twitter/pants/authentication/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== diff --git a/src/python/twitter/pants/authentication/netrc_util.py b/src/python/twitter/pants/authentication/netrc_util.py deleted file mode 100644 index f98c3bdb4..000000000 --- a/src/python/twitter/pants/authentication/netrc_util.py +++ /dev/null @@ -1,57 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import collections -import os - -from netrc import netrc as NetrcDb, NetrcParseError - -from twitter.pants.tasks.task_error import TaskError - - -class Netrc(object): - - def __init__(self): - self._login = collections.defaultdict(lambda: None) - self._password = collections.defaultdict(lambda: None) - - def getusername(self, repository): - self._ensure_loaded() - return self._login[repository] - - def getpassword(self, repository): - self._ensure_loaded() - return self._password[repository] - - def _ensure_loaded(self): - if not self._login and not self._password: - db = os.path.expanduser('~/.netrc') - if not os.path.exists(db): - raise TaskError('A ~/.netrc file is required to authenticate') - try: - db = NetrcDb(db) - for host, value in db.hosts.items(): - auth = db.authenticators(host) - if auth: - login, _, password = auth - self._login[host] = login - self._password[host] = password - if len(self._login) == 0: - raise TaskError('Found no usable authentication blocks for twitter in ~/.netrc') - except NetrcParseError as e: - raise TaskError('Problem parsing ~/.netrc: %s' % e) diff --git a/src/python/twitter/pants/base/__init__.py b/src/python/twitter/pants/base/__init__.py deleted file mode 100644 index 24684ba62..000000000 --- a/src/python/twitter/pants/base/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== diff --git a/src/python/twitter/pants/base/abbreviate_target_ids.py b/src/python/twitter/pants/base/abbreviate_target_ids.py deleted file mode 100644 index 49baa6e7b..000000000 --- a/src/python/twitter/pants/base/abbreviate_target_ids.py +++ /dev/null @@ -1,107 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'Ryan Williams' - -def abbreviate_target_ids(arr): - """Map a list of target IDs to shortened versions. - - This method takes a list of strings (e.g. target IDs) and maps them to shortened versions of - themselves. - - The original strings should consist of '.'-delimited segments, and the abbreviated versions are - subsequences of these segments such that each string's subsequence is unique from others in @arr. - - For example: :: - - input: [ - 'com.twitter.pants.a.b', - 'com.twitter.pants.a.c', - 'com.twitter.pants.d' - ] - - might return: :: - - { - 'com.twitter.pants.a.b': 'b', - 'com.twitter.pants.a.c': 'c', - 'com.twitter.pants.d': 'd' - } - - This can be useful for debugging purposes, removing a lot of boilerplate from printed lists of - target IDs. - - :param arr: List of strings representing target IDs. - """ - split_keys = [tuple(a.split('.')) for a in arr] - - split_keys_by_subseq = {} - - def subseq_map(arr, subseq_fn=None, result_cmp_fn=None): - def subseq_map_rec(remaining_arr, subseq, indent=''): - if not remaining_arr: - if subseq_fn: - subseq_fn(arr, subseq) - return subseq - - next_segment = remaining_arr.pop() - next_subseq = tuple([next_segment] + list(subseq)) - - skip_value = subseq_map_rec(remaining_arr, subseq, indent + '\t') - - add_value = subseq_map_rec(remaining_arr, next_subseq, indent + '\t') - - remaining_arr.append(next_segment) - - if result_cmp_fn: - if not subseq: - # Empty subsequence should always lose. 
- return add_value - if result_cmp_fn(skip_value, add_value): - return skip_value - return add_value - - return None - - val = subseq_map_rec(list(arr), tuple()) - return val - - def add_subseq(arr, subseq): - if subseq not in split_keys_by_subseq: - split_keys_by_subseq[subseq] = set() - if split_key not in split_keys_by_subseq[subseq]: - split_keys_by_subseq[subseq].add(arr) - - for split_key in split_keys: - subseq_map(split_key, add_subseq) - - def return_min_subseqs(subseq1, subseq2): - collisions1 = split_keys_by_subseq[subseq1] - collisions2 = split_keys_by_subseq[subseq2] - return (len(collisions1) < len(collisions2) - or (len(collisions1) == len(collisions2) - and len(subseq1) <= len(subseq2))) - - min_subseq_by_key = {} - - for split_key in split_keys: - min_subseq = subseq_map(split_key, result_cmp_fn=return_min_subseqs) - if not min_subseq: - raise Exception("No min subseq found for %s: %s" % (str(split_key), str(min_subseq))) - min_subseq_by_key['.'.join(str(segment) for segment in split_key)] = '.'.join(min_subseq) - - return min_subseq_by_key - diff --git a/src/python/twitter/pants/base/address.py b/src/python/twitter/pants/base/address.py deleted file mode 100644 index aad72ae91..000000000 --- a/src/python/twitter/pants/base/address.py +++ /dev/null @@ -1,108 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from twitter.common.lang import Compatibility -from twitter.pants.base.build_file import BuildFile - - -class Address(object): - """A target address. - - An address is a unique name representing a - :class:`twitter.pants.base.target.Target`. Its composed of the - :class:`twitter.pants.base.build_file.BuildFile` plus target name. - - While not their only use, a noteworthy use of addresses is specifying - target dependencies. For example: - - :: - - some_target(name='mytarget', - dependencies=[pants('path/to/buildfile:targetname')] - ) - - Where ``path/to/buildfile:targetname`` is the dependent target address. - """ - - @classmethod - def parse(cls, root_dir, spec, is_relative=True): - """Parses the given spec into an Address. - - An address spec can be one of: - 1.) the (relative) path of a BUILD file - 2.) the (relative) path of a directory containing a BUILD file child - 3.) either of 1 or 2 with a ':[target name]' suffix - 4.) a bare ':[target name]' indicating the BUILD file to use is the one in the current directory - - If the spec does not have a target name suffix the target name is taken to be the same name - as the BUILD file's parent directory. In this way the containing directory name - becomes the 'default' target name for a BUILD file. 
- - If there is no BUILD file at the path pointed to, or if there is but the specified target name - is not defined in the BUILD file, an IOError is raised. - """ - - if spec.startswith(':'): - spec = '.' + spec - parts = spec.split(':', 1) - path = parts[0] - if is_relative: - path = os.path.relpath(os.path.abspath(path), root_dir) - buildfile = BuildFile(root_dir, path) - - name = os.path.basename(os.path.dirname(buildfile.relpath)) if len(parts) == 1 else parts[1] - return Address(buildfile, name) - - def __init__(self, buildfile, target_name): - """ - :param BuildFile buildfile: A BuildFile defined in the repo. - :param string target_name: The name of a target defined in buildfile. - """ - assert isinstance(buildfile, BuildFile) - assert isinstance(target_name, Compatibility.string) - self.buildfile = buildfile - self.target_name = target_name - - def reference(self, referencing_buildfile_path=None): - """How to reference this address in a BUILD file.""" - dirname = os.path.dirname(self.buildfile.relpath) - if referencing_buildfile_path and dirname == os.path.dirname(referencing_buildfile_path): - return ':%s' % self.target_name - elif os.path.basename(dirname) != self.target_name: - return '%s:%s' % (dirname, self.target_name) - else: - return dirname - - def __eq__(self, other): - result = other and ( - type(other) == Address) and ( - self.buildfile.canonical_relpath == other.buildfile.canonical_relpath) and ( - self.target_name == other.target_name) - return result - - def __hash__(self): - value = 17 - value *= 37 + hash(self.buildfile.canonical_relpath) - value *= 37 + hash(self.target_name) - return value - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return "%s:%s" % (self.buildfile, self.target_name) diff --git a/src/python/twitter/pants/base/build_environment.py b/src/python/twitter/pants/base/build_environment.py deleted file mode 100644 index b52e03b0e..000000000 --- a/src/python/twitter/pants/base/build_environment.py +++ /dev/null @@ -1,81 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os -import sys - -from twitter.common import log - -from twitter.pants.version import VERSION as _VERSION - -from .build_root import BuildRoot - - -def get_version(): - return _VERSION - - -def get_buildroot(): - """Returns the pants ROOT_DIR, calculating it if needed.""" - try: - return BuildRoot().path - except BuildRoot.NotFoundError as e: - print(e.message, file=sys.stderr) - sys.exit(1) - - -def set_buildroot(path): - """Sets the pants ROOT_DIR. - - Generally only useful for tests. 
- """ - BuildRoot().path = path - - -from twitter.pants.scm import Scm - - -_SCM = None - - -def get_scm(): - """Returns the pants Scm if any.""" - # TODO(John Sirois): Extract a module/class to carry the bootstrap logic. - global _SCM - if not _SCM: - # We know about git, so attempt an auto-configure - git_dir = os.path.join(get_buildroot(), '.git') - if os.path.isdir(git_dir): - from twitter.pants.scm.git import Git - git = Git(worktree=get_buildroot()) - try: - log.info('Detected git repository on branch %s' % git.branch_name) - set_scm(git) - except git.LocalException: - pass - return _SCM - - -def set_scm(scm): - """Sets the pants Scm.""" - if scm is not None: - if not isinstance(scm, Scm): - raise ValueError('The scm must be an instance of Scm, given %s' % scm) - global _SCM - _SCM = scm - diff --git a/src/python/twitter/pants/base/build_file.py b/src/python/twitter/pants/base/build_file.py deleted file mode 100644 index 0532a6c5a..000000000 --- a/src/python/twitter/pants/base/build_file.py +++ /dev/null @@ -1,165 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import marshal -import os -import re - -from glob import glob1 - -from twitter.common.collections import OrderedSet -from twitter.common.python.interpreter import PythonIdentity - - -class BuildFile(object): - _CANONICAL_NAME = 'BUILD' - _PATTERN = re.compile('^%s(\.[a-z]+)?$' % _CANONICAL_NAME) - - @staticmethod - def _is_buildfile_name(name): - return BuildFile._PATTERN.match(name) - - @staticmethod - def scan_buildfiles(root_dir, base_path=None): - """Looks for all BUILD files under base_path""" - - buildfiles = [] - for root, dirs, files in os.walk(base_path if base_path else root_dir): - for filename in files: - if BuildFile._is_buildfile_name(filename): - buildfile_relpath = os.path.relpath(os.path.join(root, filename), root_dir) - buildfiles.append(BuildFile(root_dir, buildfile_relpath)) - return OrderedSet(sorted(buildfiles, key=lambda buildfile: buildfile.full_path)) - - def __init__(self, root_dir, relpath, must_exist=True): - """Creates a BuildFile object representing the BUILD file set at the specified path. 
- - root_dir: The base directory of the project - relpath: The path relative to root_dir where the BUILD file is found - this can either point - directly at the BUILD file or else to a directory which contains BUILD files - must_exist: If True, the specified BUILD file must exist or else an IOError is thrown - raises IOError if the specified path does not house a BUILD file and must_exist is True - """ - - path = os.path.abspath(os.path.join(root_dir, relpath)) - buildfile = os.path.join(path, BuildFile._CANONICAL_NAME) if os.path.isdir(path) else path - - if os.path.isdir(buildfile): - raise IOError("%s is a directory" % buildfile) - - if must_exist: - if not os.path.exists(buildfile): - raise IOError("BUILD file does not exist at: %s" % buildfile) - - if not BuildFile._is_buildfile_name(os.path.basename(buildfile)): - raise IOError("%s is not a BUILD file" % buildfile) - - if not os.path.exists(buildfile): - raise IOError("BUILD file does not exist at: %s" % buildfile) - - self.root_dir = os.path.realpath(root_dir) - self.full_path = os.path.realpath(buildfile) - - self.name = os.path.basename(self.full_path) - self.parent_path = os.path.dirname(self.full_path) - - self._bytecode_path = os.path.join(self.parent_path, '.%s.%s.pyc' % ( - self.name, PythonIdentity.get())) - - self.relpath = os.path.relpath(self.full_path, self.root_dir) - self.canonical_relpath = os.path.join(os.path.dirname(self.relpath), BuildFile._CANONICAL_NAME) - - def exists(self): - """Returns True if this BuildFile corresponds to a real BUILD file on disk.""" - return os.path.exists(self.full_path) - - def descendants(self): - """Returns all BUILD files in descendant directories of this BUILD file's parent directory.""" - - descendants = BuildFile.scan_buildfiles(self.root_dir, self.parent_path) - for sibling in self.family(): - descendants.discard(sibling) - return descendants - - def ancestors(self): - """Returns all BUILD files in ancestor directories of this BUILD file's parent directory.""" - - def find_parent(dir): - parent = os.path.dirname(dir) - buildfile = os.path.join(parent, BuildFile._CANONICAL_NAME) - if os.path.exists(buildfile) and not os.path.isdir(buildfile): - return parent, BuildFile(self.root_dir, os.path.relpath(buildfile, self.root_dir)) - else: - return parent, None - - parent_buildfiles = OrderedSet() - - parentdir = os.path.dirname(self.full_path) - visited = set() - while parentdir not in visited and self.root_dir != parentdir: - visited.add(parentdir) - parentdir, buildfile = find_parent(parentdir) - if buildfile: - parent_buildfiles.update(buildfile.family()) - - return parent_buildfiles - - def siblings(self): - """Returns an iterator over all the BUILD files co-located with this BUILD file not including - this BUILD file itself""" - - for build in glob1(self.parent_path, 'BUILD*'): - if self.name != build and BuildFile._is_buildfile_name(build): - siblingpath = os.path.join(os.path.dirname(self.relpath), build) - if not os.path.isdir(os.path.join(self.root_dir, siblingpath)): - yield BuildFile(self.root_dir, siblingpath) - - def family(self): - """Returns an iterator over all the BUILD files co-located with this BUILD file including this - BUILD file itself. 
The family forms a single logical BUILD file composed of the canonical BUILD - file and optional sibling build files each with their own extension, eg: BUILD.extras.""" - - yield self - for sibling in self.siblings(): - yield sibling - - def code(self): - """Returns the code object for this BUILD file.""" - if (os.path.exists(self._bytecode_path) and - os.path.getmtime(self.full_path) <= os.path.getmtime(self._bytecode_path)): - with open(self._bytecode_path, 'rb') as bytecode: - return marshal.load(bytecode) - else: - with open(self.full_path, 'rb') as source: - code = compile(source.read(), self.full_path, 'exec') - with open(self._bytecode_path, 'wb') as bytecode: - marshal.dump(code, bytecode) - return code - - def __eq__(self, other): - result = other and ( - type(other) == BuildFile) and ( - self.full_path == other.full_path) - return result - - def __hash__(self): - return hash(self.full_path) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return self.relpath diff --git a/src/python/twitter/pants/base/build_file_aliases.py b/src/python/twitter/pants/base/build_file_aliases.py deleted file mode 100644 index d61db7969..000000000 --- a/src/python/twitter/pants/base/build_file_aliases.py +++ /dev/null @@ -1,83 +0,0 @@ -# ================================================================================================= -# Copyright 2011 Twitter, Inc. -# ------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================= - -from twitter.pants.targets.annotation_processor import AnnotationProcessor -from twitter.pants.targets.artifact import Artifact -from twitter.pants.targets.benchmark import Benchmark -from twitter.pants.targets.credentials import Credentials -from twitter.pants.targets.doc import Page, Wiki -from twitter.pants.targets.exclude import Exclude -from twitter.pants.targets.jar_dependency import JarDependency -from twitter.pants.targets.jar_library import JarLibrary -from twitter.pants.targets.java_agent import JavaAgent -from twitter.pants.targets.java_antlr_library import JavaAntlrLibrary -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_protobuf_library import JavaProtobufLibrary -from twitter.pants.targets.java_tests import JavaTests -from twitter.pants.targets.java_thrift_library import JavaThriftLibrary -from twitter.pants.targets.jvm_binary import Bundle, JvmApp, JvmBinary -from twitter.pants.targets.pants_target import Pants -from twitter.pants.targets.python_antlr_library import PythonAntlrLibrary -from twitter.pants.targets.python_artifact import PythonArtifact -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_egg import PythonEgg -from twitter.pants.targets.python_library import PythonLibrary -from twitter.pants.targets.python_requirement import PythonRequirement -from twitter.pants.targets.python_tests import PythonTests, PythonTestSuite -from twitter.pants.targets.python_thrift_library import PythonThriftLibrary -from twitter.pants.targets.repository import Repository -from twitter.pants.targets.resources import Resources -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.targets.scala_tests import ScalaTests -from twitter.pants.targets.scalac_plugin import ScalacPlugin -from twitter.pants.targets.sources import SourceRoot - - -# aliases -annotation_processor = AnnotationProcessor -artifact = Artifact -benchmark = Benchmark -bundle = Bundle -credentials = Credentials -dependencies = jar_library = JarLibrary -egg = PythonEgg -exclude = Exclude -fancy_pants = Pants -jar = JarDependency -java_agent = JavaAgent -java_library = JavaLibrary -java_antlr_library = JavaAntlrLibrary -java_protobuf_library = JavaProtobufLibrary -junit_tests = java_tests = JavaTests -java_thrift_library = JavaThriftLibrary -jvm_binary = JvmBinary -jvm_app = JvmApp -page = Page -python_artifact = setup_py = PythonArtifact -python_binary = PythonBinary -python_library = PythonLibrary -python_antlr_library = PythonAntlrLibrary -python_requirement = PythonRequirement -python_thrift_library = PythonThriftLibrary -python_tests = PythonTests -python_test_suite = PythonTestSuite -repo = Repository -resources = Resources -scala_library = ScalaLibrary -scala_specs = scala_tests = ScalaTests -scalac_plugin = ScalacPlugin -source_root = SourceRoot -wiki = Wiki diff --git a/src/python/twitter/pants/base/build_file_context.py b/src/python/twitter/pants/base/build_file_context.py deleted file mode 100644 index aea99ba08..000000000 --- a/src/python/twitter/pants/base/build_file_context.py +++ /dev/null @@ -1,31 +0,0 @@ -# ================================================================================================= -# Copyright 2011 Twitter, Inc. 
-# ------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================= - -from twitter.pants.goal import Context, Goal, Group, Phase -from twitter.pants.targets.pants_target import Pants -from twitter.pants.tasks import Task, TaskError - -pants = Pants -goal = Goal -group = Group -phase = Phase - -from .build_file_aliases import * -from .build_file_helpers import * -from .config import Config - -# TODO(John Sirois): XXX kill -from .build_environment import * diff --git a/src/python/twitter/pants/base/build_file_helpers.py b/src/python/twitter/pants/base/build_file_helpers.py deleted file mode 100644 index d34257550..000000000 --- a/src/python/twitter/pants/base/build_file_helpers.py +++ /dev/null @@ -1,65 +0,0 @@ -# ================================================================================================= -# Copyright 2011 Twitter, Inc. -# ------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================= - -import os - -from twitter.pants.targets.annotation_processor import AnnotationProcessor -from twitter.pants.targets.doc import Page -from twitter.pants.targets.java_agent import JavaAgent -from twitter.pants.targets.java_antlr_library import JavaAntlrLibrary -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_protobuf_library import JavaProtobufLibrary -from twitter.pants.targets.java_tests import JavaTests -from twitter.pants.targets.java_thrift_library import JavaThriftLibrary -from twitter.pants.targets.jvm_binary import JvmBinary -from twitter.pants.targets.python_antlr_library import PythonAntlrLibrary -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_library import PythonLibrary -from twitter.pants.targets.python_tests import PythonTests, PythonTestSuite -from twitter.pants.targets.python_thrift_library import PythonThriftLibrary -from twitter.pants.targets.resources import Resources -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.targets.scala_tests import ScalaTests -from twitter.pants.targets.sources import SourceRoot - - -def maven_layout(basedir=None): - """Sets up typical maven project source roots for all built-in pants target types. - - Shortcut for ``source_root('src/main/java', *java targets*)``, - ``source_root('src/main/python', *python targets*)``, ... - - :param string basedir: Instead of using this BUILD file's directory as - the base of the source tree, use a subdirectory. E.g., instead of - expecting to find java files in ``src/main/java``, expect them in - ``**basedir**/src/main/java``. - """ - - def root(path, *types): - SourceRoot.register(os.path.join(basedir, path) if basedir else path, *types) - - root('src/main/antlr', JavaAntlrLibrary, Page, PythonAntlrLibrary) - root('src/main/java', AnnotationProcessor, JavaAgent, JavaLibrary, JvmBinary, Page) - root('src/main/protobuf', JavaProtobufLibrary, Page) - root('src/main/python', Page, PythonBinary, PythonLibrary) - root('src/main/resources', Page, Resources) - root('src/main/scala', JvmBinary, Page, ScalaLibrary) - root('src/main/thrift', JavaThriftLibrary, Page, PythonThriftLibrary) - - root('src/test/java', JavaLibrary, JavaTests, Page) - root('src/test/python', Page, PythonLibrary, PythonTests, PythonTestSuite) - root('src/test/resources', Page, Resources) - root('src/test/scala', JavaTests, Page, ScalaLibrary, ScalaTests) diff --git a/src/python/twitter/pants/base/build_invalidator.py b/src/python/twitter/pants/base/build_invalidator.py deleted file mode 100644 index 0efb61549..000000000 --- a/src/python/twitter/pants/base/build_invalidator.py +++ /dev/null @@ -1,246 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import errno -import hashlib -import itertools -import os - -from abc import abstractmethod -from collections import namedtuple - -from twitter.common.dirutil import safe_mkdir -from twitter.common.lang import Compatibility, Interface - -from twitter.pants.base.hash_utils import hash_all -from twitter.pants.fs.fs import safe_filename -from twitter.pants.base.target import Target - - -# A CacheKey represents some version of a set of targets. -# - id identifies the set of targets. -# - hash is a fingerprint of all invalidating inputs to the build step, i.e., it uniquely -# determines a given version of the artifacts created when building the target set. -# - num_sources is the number of source files used to build this version of the target set. -# Needed only for display. -# - sources is an (optional) list of the source files used to compute this key. -# Needed only for display. - -CacheKey = namedtuple('CacheKey', ['id', 'hash', 'num_sources', 'sources']) - - -class SourceScope(Interface): - """Selects sources of a given scope from targets.""" - - @abstractmethod - def select(self, target): - """Selects source files from the given target and returns them as absolute paths.""" - - @abstractmethod - def valid(self, target): - """Returns True if the given target can be used with this SourceScope.""" - - -class NoSources(SourceScope): - """A SourceScope where all targets are valid but no sources are ever selected.""" - - def select(self, target): - return [] - - def valid(self, target): - return True - -NO_SOURCES = NoSources() - - -class DefaultSourceScope(SourceScope): - """Selects sources from subclasses of TargetWithSources.""" - - def __init__(self, recursive, include_buildfile): - self._recursive = recursive - self._include_buildfile = include_buildfile - - def select(self, tgt): - return tgt.expand_files(self._recursive, self._include_buildfile) - - def valid(self, target): - return hasattr(target, 'expand_files') - -TARGET_SOURCES = DefaultSourceScope(recursive=False, include_buildfile=False) -TRANSITIVE_SOURCES = DefaultSourceScope(recursive=True, include_buildfile=False) - -# Bump this to invalidate all existing keys in artifact caches across all pants deployments in the world. -# Do this if you've made a change that invalidates existing artifacts, e.g., fixed a bug that -# caused bad artifacts to be cached. -GLOBAL_CACHE_KEY_GEN_VERSION = '6' - -class CacheKeyGenerator(object): - """Generates cache keys for versions of target sets.""" - - @staticmethod - def combine_cache_keys(cache_keys): - """Returns a cache key for a list of target sets that already have cache keys. - - This operation is 'idempotent' in the sense that if cache_keys contains a single key - then that key is returned. - - Note that this operation is commutative but not associative. We use the term 'combine' rather - than 'merge' or 'union' to remind the user of this. Associativity is not a necessary property, - in practice. 
- """ - if len(cache_keys) == 1: - return cache_keys[0] - else: - combined_id = Target.maybe_readable_combine_ids(cache_key.id for cache_key in cache_keys) - combined_hash = hash_all(sorted(cache_key.hash for cache_key in cache_keys)) - combined_num_sources = sum(cache_key.num_sources for cache_key in cache_keys) - combined_sources = \ - sorted(list(itertools.chain(*[cache_key.sources for cache_key in cache_keys]))) - return CacheKey(combined_id, combined_hash, combined_num_sources, combined_sources) - - def __init__(self, cache_key_gen_version=None): - """cache_key_gen_version - If provided, added to all cache keys. Allows you to invalidate all cache - keys in a single pants repo, by changing this value in config. - """ - self._cache_key_gen_version = (cache_key_gen_version or '') + '_' + GLOBAL_CACHE_KEY_GEN_VERSION - - def key_for_target(self, target, sources=TARGET_SOURCES, fingerprint_extra=None): - """Get a key representing the given target and its sources. - - A key for a set of targets can be created by calling combine_cache_keys() - on the target's individual cache keys. - - :target: The target to create a CacheKey for. - :sources: A source scope to select from the target for hashing, defaults to TARGET_SOURCES. - :fingerprint_extra: A function that accepts a sha hash and updates it with extra fprint data. - """ - if not fingerprint_extra: - if not sources or not sources.valid(target): - raise ValueError('A target needs to have at least one of sources or a ' - 'fingerprint_extra function to generate a CacheKey.') - if not sources: - sources = NO_SOURCES - - sha = hashlib.sha1() - srcs = sorted(sources.select(target)) - actual_srcs = self._sources_hash(sha, srcs) - if fingerprint_extra: - fingerprint_extra(sha) - sha.update(self._cache_key_gen_version) - return CacheKey(target.id, sha.hexdigest(), len(actual_srcs), actual_srcs) - - def key_for(self, target_id, sources): - """Get a cache key representing some id and its associated source files. - - Useful primarily in tests. Normally we use key_for_target(). - """ - sha = hashlib.sha1() - actual_srcs = self._sources_hash(sha, sources) - return CacheKey(target_id, sha.hexdigest(), len(actual_srcs), actual_srcs) - - def _walk_paths(self, paths): - """Recursively walk the given paths. - - :returns: Iterable of (relative_path, absolute_path). - """ - for path in sorted(paths): - if os.path.isdir(path): - for dir_name, _, filenames in sorted(os.walk(path)): - for filename in filenames: - filename = os.path.join(dir_name, filename) - yield os.path.relpath(filename, path), filename - else: - yield os.path.basename(path), path - - def _sources_hash(self, sha, paths): - """Update a SHA1 digest with the content of all files under the given paths. - - :returns: The files found under the given paths. - """ - files = [] - for relative_filename, filename in self._walk_paths(paths): - with open(filename, "rb") as fd: - sha.update(Compatibility.to_bytes(relative_filename)) - sha.update(fd.read()) - files.append(filename) - return files - - -# A persistent map from target set to cache key, which is a fingerprint of all -# the inputs to the current version of that target set. That cache key can then be used -# to look up build artifacts in an artifact cache. 
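
For orientation in the invalidation code being removed here: `CacheKeyGenerator._sources_hash` and `BuildInvalidator.needs_update` together reduce to folding each (relative path, content) pair into one SHA1 digest and comparing it against the last digest recorded for the target set. A minimal sketch of that core, assuming plain files on disk; the function names are illustrative, not pants APIs:

```python
import hashlib
import os


def fingerprint_sources(root, relpaths):
    """Fold each file's relative path and bytes into a single SHA1 digest."""
    sha = hashlib.sha1()
    for relpath in sorted(relpaths):  # Sort so the digest is order-independent.
        sha.update(relpath.encode('utf-8'))
        with open(os.path.join(root, relpath), 'rb') as fd:
            sha.update(fd.read())
    return sha.hexdigest()


def needs_update(stored_digest, root, relpaths):
    """True if the sources changed since stored_digest was recorded (or none was)."""
    return stored_digest != fingerprint_sources(root, relpaths)
```
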
-class BuildInvalidator(object): - """Invalidates build targets based on the SHA1 hash of source files and other inputs.""" - - def __init__(self, root): - self._root = os.path.join(root, GLOBAL_CACHE_KEY_GEN_VERSION) - safe_mkdir(self._root) - - def needs_update(self, cache_key): - """Check if the given cached item is invalid. - - :param cache_key: A CacheKey object (as returned by BuildInvalidator.key_for(). - :returns: True if the cached version of the item is out of date. - """ - return self._read_sha(cache_key) != cache_key.hash - - def update(self, cache_key): - """Makes cache_key the valid version of the corresponding target set. - - :param cache_key: A CacheKey object (typically returned by BuildInvalidator.key_for()). - """ - self._write_sha(cache_key) - - def force_invalidate_all(self): - """Force-invalidates all cached items.""" - safe_mkdir(self._root, clean=True) - - def force_invalidate(self, cache_key): - """Force-invalidate the cached item.""" - try: - os.unlink(self._sha_file(cache_key)) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - def existing_hash(self, id): - """Returns the existing hash for the specified id. - - Returns None if there is no existing hash for this id. - """ - return self._read_sha_by_id(id) - - def _sha_file(self, cache_key): - return self._sha_file_by_id(cache_key.id) - - def _sha_file_by_id(self, id): - return os.path.join(self._root, safe_filename(id, extension='.hash')) - - def _write_sha(self, cache_key): - with open(self._sha_file(cache_key), 'w') as fd: - fd.write(cache_key.hash) - - def _read_sha(self, cache_key): - return self._read_sha_by_id(cache_key.id) - - def _read_sha_by_id(self, id): - try: - with open(self._sha_file_by_id(id), 'rb') as fd: - return fd.read().strip() - except IOError as e: - if e.errno != errno.ENOENT: - raise - return None # File doesn't exist. diff --git a/src/python/twitter/pants/base/build_manual.py b/src/python/twitter/pants/base/build_manual.py deleted file mode 100644 index 43e85d505..000000000 --- a/src/python/twitter/pants/base/build_manual.py +++ /dev/null @@ -1,46 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - - -class manual(object): - """Annotate things that should appear in generated documents""" - - @staticmethod - def builddict(tags=None): - """Decorator to mark something that belongs in the BUILD Dictionary doc. - - Use it on a function to mention the function. Use it on a class to - mention the class; use it on a class' method to mention that method - within the class' doc. (Default behavior uses the constructor but - ignores methods. You want to decorate methods that are kosher for - BUILD files.) 
diff --git a/src/python/twitter/pants/base/build_manual.py b/src/python/twitter/pants/base/build_manual.py
deleted file mode 100644
index 43e85d505..000000000
--- a/src/python/twitter/pants/base/build_manual.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-
-class manual(object):
-  """Annotates things that should appear in generated documents."""
-
-  @staticmethod
-  def builddict(tags=None):
-    """Decorator to mark something that belongs in the BUILD Dictionary doc.
-
-    Use it on a function to mention the function. Use it on a class to
-    mention the class; use it on a class' method to mention that method
-    within the class' doc. (Default behavior uses the constructor but
-    ignores methods. You want to decorate methods that are kosher for
-    BUILD files.)
-
-    tags: E.g., tags=["python"] means "this thing should appear in the
-      Python section."
-    """
-    tags = tags or []
-    def builddictdecorator(funcorclass):
-      funcorclass.builddictdict = {"tags": tags}
-      return funcorclass
-    return builddictdecorator
-
-
-def get_builddict_info(funcorclass):
-  """Returns the metadata dict attached by @manual.builddict, or None if the argument doesn't
-  belong in the BUILD dictionary."""
-  if hasattr(funcorclass, "builddictdict"):
-    return getattr(funcorclass, "builddictdict")
-  else:
-    return None
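
For example, the decorator above might be applied like this (a sketch; example_rule is hypothetical):

  @manual.builddict(tags=['python'])
  def example_rule(name):
    """A hypothetical BUILD-file function surfaced in the BUILD Dictionary."""

  info = get_builddict_info(example_rule)
  assert info == {'tags': ['python']}
  assert get_builddict_info(dict) is None  # undecorated things carry no metadata
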
- """ - - class NotFoundError(Exception): - """Raised when unable to find the current workspace ROOT_DIR.""" - - def __init__(self): - self._root_dir = None - - @property - def path(self): - """Returns the ROOT_DIR for the current workspace.""" - if self._root_dir is None: - if 'PANTS_BUILD_ROOT' in os.environ: - self._root_dir = os.environ['PANTS_BUILD_ROOT'] - else: - buildroot = os.path.abspath(os.getcwd()) - while not os.path.exists(os.path.join(buildroot, 'pants.ini')): - if buildroot != os.path.dirname(buildroot): - buildroot = os.path.dirname(buildroot) - else: - raise self.NotFoundError('Could not find pants.ini!') - self._root_dir = buildroot - return self._root_dir - - @path.setter - def path(self, root_dir): - """Manually establishes the ROOT_DIR for the current workspace.""" - path = os.path.realpath(root_dir) - if not os.path.exists(path): - raise ValueError('Build root does not exist: %s' % root_dir) - self._root_dir = path - - def reset(self): - """Clears the last calculated ROOT_DIR for the current workspace.""" - self._root_dir = None - - def __str__(self): - return 'BuildRoot(%s)' % self._root_dir - - @contextmanager - def temporary(self, path): - """A contextmanager that establishes a temporary ROOT_DIR, restoring the prior ROOT_DIR on - exit.""" - if path is None: - raise ValueError('Can only temporarily establish a build root given a path.') - prior = self._root_dir - self._root_dir = path - try: - yield - finally: - self._root_dir = prior diff --git a/src/python/twitter/pants/base/config.py b/src/python/twitter/pants/base/config.py deleted file mode 100644 index f8eacb141..000000000 --- a/src/python/twitter/pants/base/config.py +++ /dev/null @@ -1,193 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -try: - import ConfigParser -except ImportError: - import configparser as ConfigParser - -import os -import getpass - -from twitter.pants.base.build_environment import get_buildroot - - -class Config(object): - """ - Encapsulates ini-style config file loading and access additionally supporting recursive variable - substitution using standard python format strings, ie: %(var_name)s will be replaced with the - value of var_name. - """ - - DEFAULT_SECTION = ConfigParser.DEFAULTSECT - - class ConfigError(Exception): - pass - - @staticmethod - def load(configpath=None, defaults=None): - """ - Loads a Config from the given path, by default the path to the pants.ini file in the current - build root directory. Any defaults supplied will act as if specified in the loaded config - file's DEFAULT section. The 'buildroot', invoking 'user' and invoking user's 'homedir' are - automatically defaulted. 
- """ - configpath = configpath or os.path.join(get_buildroot(), 'pants.ini') - parser = Config.create_parser(defaults=defaults) - with open(configpath) as ini: - parser.readfp(ini) - return Config(parser) - - @staticmethod - def create_parser(defaults=None): - """Creates a config parser that supports %([key-name])s value substitution. - - Any defaults supplied will act as if specified in the loaded config file's DEFAULT section and - be available for substitutions. - - All of the following are seeded with defaults in the config - user: the current user - homedir: the current user's home directory - buildroot: the root of this repo - pants_bootstrapdir: the global pants scratch space primarily used for caches - pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml - pants_distdir: user visible artifacts for this repo go here - pants_workdir: the scratch space used to for live builds in this repo - """ - standard_defaults = dict( - buildroot=get_buildroot(), - homedir=os.path.expanduser('~'), - user=getpass.getuser(), - pants_bootstrapdir=os.path.expanduser('~/.pants.d'), - pants_workdir=os.path.join(get_buildroot(), '.pants.d'), - pants_supportdir=os.path.join(get_buildroot(), 'build-support'), - pants_distdir=os.path.join(get_buildroot(), 'dist') - ) - if defaults: - standard_defaults.update(defaults) - return ConfigParser.SafeConfigParser(standard_defaults) - - def __init__(self, configparser): - self.configparser = configparser - - # Overrides - # - # This feature allows a second configuration file which will override - # pants.ini to be specified. The file is currently specified via an env - # variable because the cmd line flags are parsed after config is loaded. - # - # The main use of the extra file is to have different settings based on - # the environment. For example, the setting used to compile or locations - # of caches might be different between a developer's local environment - # and the environment used to build and publish artifacts (e.g. Jenkins) - # - # The files cannot reference each other's values, so make sure each one is - # internally consistent - self.overrides_path = os.environ.get('PANTS_CONFIG_OVERRIDE') - self.overrides_parser = None - if self.overrides_path is not None: - self.overrides_path = os.path.join(get_buildroot(), self.overrides_path) - self.overrides_parser = Config.create_parser() - with open(self.overrides_path) as o_ini: - self.overrides_parser.readfp(o_ini, filename=self.overrides_path) - - def getbool(self, section, option, default=None): - """Equivalent to calling get with expected type string""" - return self.get(section, option, type=bool, default=default) - - def getint(self, section, option, default=None): - """Equivalent to calling get with expected type int""" - return self.get(section, option, type=int, default=default) - - def getfloat(self, section, option, default=None): - """Equivalent to calling get with expected type float""" - return self.get(section, option, type=float, default=default) - - def getlist(self, section, option, default=None): - """Equivalent to calling get with expected type list""" - return self.get(section, option, type=list, default=default) - - def getdict(self, section, option, default=None): - """Equivalent to calling get with expected type dict""" - return self.get(section, option, type=dict, default=default) - - def getdefault(self, option, type=str, default=None): - """ - Retrieves option from the DEFAULT section if it exists and attempts to parse it as type. 
-
-  def getdefault(self, option, type=str, default=None):
-    """
-    Retrieves option from the DEFAULT section if it exists and attempts to parse it as type.
-    If there is no definition found, the default value supplied is returned.
-    """
-    return self.get(Config.DEFAULT_SECTION, option, type, default=default)
-
-  def get(self, section, option, type=str, default=None):
-    """
-    Retrieves option from the specified section if it exists and attempts to parse it as type.
-    If the specified section is missing a definition for the option, the value is looked up in the
-    DEFAULT section. If there is still no definition found, the default value supplied is
-    returned.
-    """
-    return self._getinstance(section, option, type, default=default)
-
-  def get_required(self, section, option, type=str):
-    """Retrieves option from the specified section and attempts to parse it as type.
-    If the specified section is missing a definition for the option, the value is
-    looked up in the DEFAULT section. If there is still no definition found,
-    a `ConfigError` is raised.
-
-    :param string section: Section to lookup the option in, before looking in DEFAULT.
-    :param string option: Option to retrieve.
-    :param type: Type to retrieve the option as.
-    :returns: The option as the specified type.
-    :raises: :class:`twitter.pants.base.config.Config.ConfigError` if option is not found.
-    """
-    val = self.get(section, option, type=type)
-    if val is None:
-      raise Config.ConfigError('Required option %s.%s is not defined.' % (section, option))
-    return val
-
-  def has_section(self, section):
-    """Return whether or not this config has the section."""
-    return self.configparser.has_section(section)
-
-  def _has_option(self, section, option):
-    if self.overrides_parser and self.overrides_parser.has_option(section, option):
-      return True
-    elif self.configparser.has_option(section, option):
-      return True
-    return False
-
-  def _get_value(self, section, option):
-    if self.overrides_parser and self.overrides_parser.has_option(section, option):
-      return self.overrides_parser.get(section, option)
-    return self.configparser.get(section, option)
-
-  def _getinstance(self, section, option, type, default=None):
-    if not self._has_option(section, option):
-      return default
-    raw_value = self._get_value(section, option)
-    if issubclass(type, str):
-      return raw_value
-
-    try:
-      parsed_value = eval(raw_value, {}, {})
-    except SyntaxError as e:
-      raise Config.ConfigError('No valid %s for %s.%s: %s\n%s' % (
-        type.__name__, section, option, raw_value, e))
-
-    if not isinstance(parsed_value, type):
-      raise Config.ConfigError('No valid %s for %s.%s: %s' % (
-        type.__name__, section, option, raw_value))
-
-    return parsed_value
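
To illustrate the substitution and eval behavior described above, a hypothetical pants.ini and its programmatic access (the option names here are made up):

  # pants.ini:
  #   [DEFAULT]
  #   thrift_workdir: %(pants_workdir)s/thrift
  #
  #   [example-section]
  #   args: ['-C-source', '-C1.6']

  config = Config.load()
  config.get('DEFAULT', 'thrift_workdir')    # str: returned with %(...)s substituted
  config.getlist('example-section', 'args')  # non-str types are eval'd, then isinstance-checked
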
diff --git a/src/python/twitter/pants/base/double_dag.py b/src/python/twitter/pants/base/double_dag.py
deleted file mode 100644
index 870463f20..000000000
--- a/src/python/twitter/pants/base/double_dag.py
+++ /dev/null
@@ -1,137 +0,0 @@
-__author__ = 'Ryan Williams'
-
-from twitter.pants.base.abbreviate_target_ids import abbreviate_target_ids
-
-# This file contains the implementation of a doubly-linked DAG data structure that is useful for
-# dependency analysis.
-
-class DoubleDagNode(object):
-  def __init__(self, data):
-    self.data = data
-    self.parents = set()
-    self.children = set()
-
-  def __repr__(self):
-    return "Node(%s)" % self.data.id
-
-
-class DoubleDag(object):
-  """This implementation of a doubly-linked DAG builds itself from a list of objects (of
-  theoretically unknown type) and a function for generating each object's "children". It wraps
-  each object in a "node" structure and exposes the following:
-
-    - list of all nodes in the DAG (.nodes)
-    - lookup dag node from original object (.lookup)
-    - set of leaf nodes (.leaves)
-    - a method (remove_nodes) that removes nodes and updates the set of leaves appropriately
-    - the inverse method (restore_nodes)
-
-  These are useful for computing which groups of targets to compile, and in what order.
-  """
-  def __init__(self, objects, child_fn, logger):
-    self._child_fn = child_fn
-    self._logger = logger
-
-    self.nodes = [DoubleDagNode(obj) for obj in objects]
-
-    node_ids = [node.data.id for node in self.nodes]
-    abbreviated_id_map = abbreviate_target_ids(node_ids)
-    for node in self.nodes:
-      node.short_id = abbreviated_id_map[node.data.id]
-      node.data.short_id = abbreviated_id_map[node.data.id]
-
-    self._nodes_by_data_map = {}
-    for node in self.nodes:
-      self._nodes_by_data_map[node.data] = node
-
-    self._roots = set([])
-    self.leaves = set([])
-
-    self._logger.debug("%d nodes:" % len(self.nodes))
-    for node in self.nodes:
-      self._logger.debug(node.data.id)
-    self._logger.debug('')
-
-    self._init_parent_and_child_relationships()
-
-    self._find_roots_and_leaves()
-
-    self._logger.debug("%d roots:" % len(self._roots))
-    for root in self._roots:
-      self._logger.debug(root.data.id)
-    self._logger.debug('')
-
-    self._logger.debug("%d leaves:" % len(self.leaves))
-    for leaf in self.leaves:
-      self._logger.debug(leaf.data.id)
-    self._logger.debug('')
-
-  def print_tree(self, use_short_ids=True):
-    """Prints a python dictionary representing this DAG in a format suitable for eval'ing, which
-    is useful for debugging."""
-    def short_id(node):
-      return node.short_id
-    def id(node):
-      return node.data.id
-
-    node_fn = short_id if use_short_ids else id
-    self._logger.debug("deps = {")
-    for node in self.nodes:
-      self._logger.debug(
-        """  "%s": {"num": %d, "children": [%s]},""" % (
-          node_fn(node),
-          node.data.num_sources,
-          ','.join(['"%s"' % node_fn(child) for child in node.children]))
-      )
-    self._logger.debug('}')
-    self._logger.debug('')
-
-  def lookup(self, data):
-    if data in self._nodes_by_data_map:
-      return self._nodes_by_data_map[data]
-    return None
-
-  def _init_parent_and_child_relationships(self):
-    def find_children(original_node, data):
-      for child_data in self._child_fn(data):
-        if child_data in self._nodes_by_data_map:
-          child_node = self._nodes_by_data_map[child_data]
-          original_node.children.add(child_node)
-          child_node.parents.add(original_node)
-        else:
-          raise Exception(
-            "DAG child_fn shouldn't yield data objects not in tree:\n  %s. child of: %s. original data: %s" % (
-              str(child_data),
-              str(data),
-              str(original_node.data)))
-
-    for node in self.nodes:
-      find_children(node, node.data)
-
-  def _find_roots_and_leaves(self):
-    for node in self.nodes:
-      if not node.parents:
-        self._roots.add(node)
-      if not node.children:
-        self.leaves.add(node)
-
-  def remove_nodes(self, nodes):
-    """Removes the given nodes, updates self.leaves accordingly, and returns any nodes that have
-    become leaves as a result of this removal."""
-    new_leaves = set()
-    for node in nodes:
-      if node not in self.nodes:
-        raise Exception("Attempting to remove invalid node: %s" % node.data.id)
-      for parent_node in node.parents:
-        if parent_node in nodes:
-          continue
-        parent_node.children.remove(node)
-        if not parent_node.children:
-          new_leaves.add(parent_node)
-
-    # Do these outside the loop in case 'nodes' is in fact self.leaves, so that we don't change
-    # the set we're iterating over.
-    self.leaves -= nodes
-    self.leaves.update(new_leaves)
-    return new_leaves
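
One hedged sketch of the intended consumption pattern: peel off leaves in waves, which yields a dependency-safe processing order (targets, deps_of, log, and process are assumed inputs, not part of this file):

  dag = DoubleDag(targets, deps_of, log)
  ready = set(dag.leaves)
  while ready:
    batch = set(ready)
    for node in batch:
      process(node.data)             # hypothetical per-target work
    ready = dag.remove_nodes(batch)  # returns the nodes that just became leaves
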
diff --git a/src/python/twitter/pants/base/generator.py b/src/python/twitter/pants/base/generator.py
deleted file mode 100644
index 39cab534e..000000000
--- a/src/python/twitter/pants/base/generator.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-import pprint
-import pystache
-
-from twitter.common.lang import Compatibility
-from twitter.pants.base.mustache import MustacheRenderer
-
-
-class TemplateData(dict):
-  """Encapsulates data for a mustache template as a property-addressable read-only map-like
-  struct.
-  """
-
-  def __init__(self, **kwargs):
-    dict.__init__(self, MustacheRenderer.expand(kwargs))
-
-  def extend(self, **kwargs):
-    """Returns a new TemplateData with this template's data overlaid by the key/value pairs
-    specified as keyword arguments."""
-
-    props = self.copy()
-    props.update(kwargs)
-    return TemplateData(**props)
-
-  def __setattr__(self, key, value):
-    raise AttributeError("Mutation not allowed - use %s.extend(%s = %s)" % (self, key, value))
-
-  def __getattr__(self, key):
-    if key in self:
-      return self[key]
-    else:
-      return object.__getattribute__(self, key)
-
-  def __str__(self):
-    return 'TemplateData(%s)' % pprint.pformat(self)
-
-
-class Generator(object):
-  """Generates pants intermediary output files using a configured mustache template."""
-
-  def __init__(self, template_text, **template_data):
-    # pystache does a typecheck for unicode in python 2.x but rewrites its sources to deal with
-    # unicode via str in python 3.x.
-    if Compatibility.PY2:
-      template_text = unicode(template_text)
-    self._template = pystache.parse(template_text)
-    self.template_data = template_data
-
-  def write(self, stream):
-    """Applies the template to the template data and writes the result to the given file-like
-    stream."""
-
-    stream.write(pystache.render(self._template, self.template_data))
diff --git a/src/python/twitter/pants/base/hash_utils.py b/src/python/twitter/pants/base/hash_utils.py
deleted file mode 100644
index 96debf7db..000000000
--- a/src/python/twitter/pants/base/hash_utils.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import hashlib - - -def hash_all(strs, digest=None): - """Returns a hash of the concatenation of all the strings in strs. - - If a hashlib message digest is not supplied a new sha1 message digest is used. - """ - digest = digest or hashlib.sha1() - for s in strs: - digest.update(s) - return digest.hexdigest() - - -def hash_file(path, digest=None): - """Hashes the contents of the file at the given path and returns the hash digest in hex form. - - If a hashlib message digest is not supplied a new sha1 message digest is used. - """ - digest = digest or hashlib.sha1() - with open(path, 'rb') as fd: - s = fd.read(8192) - while s: - digest.update(s) - s = fd.read(8192) - return digest.hexdigest() diff --git a/src/python/twitter/pants/base/mustache.py b/src/python/twitter/pants/base/mustache.py deleted file mode 100644 index 498d0ed3a..000000000 --- a/src/python/twitter/pants/base/mustache.py +++ /dev/null @@ -1,85 +0,0 @@ -import os -import pkgutil -import urlparse - -import pystache - - -class MustacheRenderer(object): - """Renders text using mustache templates.""" - - @staticmethod - def expand(args): - # Add foo? for each foo in the map that evaluates to true. - # Mustache needs this, especially in cases where foo is a list: there is no way to render a - # block exactly once iff a list is not empty. - # Note: if the original map contains foo?, it will take precedence over our synthetic foo?. - def convert_val(x): - # Pystache can't handle sets, so we convert to maps of key->True. - if isinstance(x, set): - return dict([(k, True) for k in x]) - elif isinstance(x, dict): - return MustacheRenderer.expand(x) - elif isinstance(x, list): - return [convert_val(e) for e in x] - else: - return x - items = [(key, convert_val(val)) for (key, val) in args.items()] - ret = dict([(key + '?', True) for (key, val) in items if val and not key.endswith('?')]) - ret.update(dict(items)) - return ret - - def __init__(self, template_dir=None, package_name=None): - """Create a renderer that finds templates by name in one of two ways. - - * If template_dir is specified, finds template foo in the file foo.mustache in that dir. - * Otherwise, if package_name is specified, finds template foo embedded in that - package under templates/foo.mustache. - * Otherwise will not find templates by name, so can only be used with an existing - template string. - """ - self._template_dir = template_dir - self._package_name = package_name - self._pystache_renderer = pystache.Renderer(search_dirs=template_dir) - - def render_name(self, template_name, args): - # TODO: Precompile and cache the templates? - if self._template_dir: - # Let pystache find the template by name. - return self._pystache_renderer.render_name(template_name, MustacheRenderer.expand(args)) - else: - # Load the named template embedded in our package. 
- template = pkgutil.get_data(self._package_name, - os.path.join('templates', template_name + '.mustache')) - return self.render(template, args) - - def render(self, template, args): - return self._pystache_renderer.render(template, MustacheRenderer.expand(args)) - - def render_callable(self, inner_template_name, arg_string, outer_args): - """Handle a mustache callable. - - In a mustache template, when foo is callable, ``{{#foo}}arg_string{{/foo}}`` is replaced - with the result of calling ``foo(arg_string)``. The callable must interpret ``arg_string``. - - This method provides an implementation of such a callable that does the following: - - #. Parses the arg_string as CGI args. - #. Adds them to the original args that the enclosing template was rendered with. - #. Renders some other template against those args. - #. Returns the resulting text. - - Use by adding - ``{ 'foo': lambda x: self._renderer.render_callable('foo_template', x, args) }`` - to the args of the outer template, which can then contain ``{{#foo}}arg_string{{/foo}}``. - """ - # First render the arg_string (mustache doesn't do this for you, and it may itself - # contain mustache constructs). - rendered_arg_string = self.render(arg_string, outer_args) - # Parse the inner args as CGI args. - inner_args = dict([(k, v[0]) for k, v in urlparse.parse_qs(rendered_arg_string).items()]) - # Order matters: lets the inner args override the outer args. - args = dict(outer_args.items() + inner_args.items()) - # Render. - return self.render_name(inner_template_name, args) - diff --git a/src/python/twitter/pants/base/parse_context.py b/src/python/twitter/pants/base/parse_context.py deleted file mode 100644 index 06198c676..000000000 --- a/src/python/twitter/pants/base/parse_context.py +++ /dev/null @@ -1,224 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import collections -import copy -import os - -from functools import partial - -from contextlib import contextmanager - -from twitter.common.dirutil.fileset import Fileset -from twitter.common.lang import Compatibility - -from .build_environment import get_buildroot -from .build_file import BuildFile -from .config import Config - - -class ParseContext(object): - """Defines the context of a parseable BUILD file target and provides a mechanism for targets to - discover their context when invoked via eval. 
- """ - - class ContextError(Exception): - """Indicates an action that requires a BUILD file parse context was attempted outside any.""" - - _active = collections.deque([]) - _parsed = set() - - _strs_to_exec = [ - "from twitter.pants.base.build_file_context import *", - "from twitter.common.quantity import Amount, Time", - ] - - @classmethod - def add_to_exec_context(cls, str_to_exec): - """This hook allows for adding symbols to the execution context in which BUILD files are - parsed. This should only be used for importing symbols that are used fairly ubiquitously in - BUILD files, and possibly for appending to sys.path to get local python code on the python - path. - - This will be phased out in favor of a more robust plugin architecture that supports import - injection and path amendment.""" - cls._strs_to_exec.append(str_to_exec) - - @classmethod - def locate(cls): - """Attempts to find the current root directory and buildfile. - - If there is an active parse context (see do_in_context), then it is returned. - """ - if not cls._active: - raise cls.ContextError('No parse context active.') - return next(reversed(cls._active)) - - @classmethod - def path(cls, relpath=None): - """Returns the active parse context path or `os.getcwd()` if there is no active context. - - If relpath is specified the path returned will be joined to it but in either case the returned - path will be absolute. - """ - base = os.getcwd() if not ParseContext._active else cls.locate().current_buildfile.parent_path - return os.path.abspath(os.path.join(base, relpath) if relpath else base) - - @classmethod - @contextmanager - def temp(cls, basedir=None): - """Activates a temporary parse context in the given basedir relative to the build root or else - in the build root dir itself if no basedir is specified. - """ - context = cls(BuildFile(get_buildroot(), basedir or 'BUILD.temp', must_exist=False)) - with cls.activate(context): - yield - - @classmethod - @contextmanager - def activate(cls, ctx): - """Activates the given ParseContext.""" - if hasattr(ctx, '_on_context_exit'): - raise cls.ContextError('Context actions registered outside this parse context arg active') - - try: - cls._active.append(ctx) - ctx._on_context_exit = [] - yield - finally: - for func, args, kwargs in ctx._on_context_exit: - func(*args, **kwargs) - del ctx._on_context_exit - cls._active.pop() - - def __init__(self, buildfile): - self.buildfile = buildfile - self._active_buildfile = buildfile - self._parsed = False - - @classmethod - def default_globals(cls, config=None): - """ - Has twitter.pants.*, but not file-specfic things like __file__ - If you want to add new imports to be available to all BUILD files, add a section to the config - similar to: - - [parse] - headers: ['from test import get_jar',] - - You may also need to add new roots to the sys.path. see _run in pants_exe.py - """ - to_exec = list(cls._strs_to_exec) - if config: - # TODO: This can be replaced once extensions are enabled with - # https://github.com/pantsbuild/pants/issues/5 - to_exec.extend(config.getlist('parse', 'headers', default=[])) - - pants_context = {} - for str_to_exec in to_exec: - ast = compile(str_to_exec, '', 'exec') - Compatibility.exec_function(ast, pants_context) - - return pants_context - - def parse(self, **globalargs): - """The entry point to parsing of a BUILD file. - - from twitter.pants.targets.sources import SourceRoot - - See locate(). 
- """ - if self.buildfile not in ParseContext._parsed: - buildfile_family = tuple(self.buildfile.family()) - - pants_context = self.default_globals(Config.load()) - - with ParseContext.activate(self): - for buildfile in buildfile_family: - self._active_buildfile = buildfile - # We may have traversed a sibling already, guard against re-parsing it. - if buildfile not in ParseContext._parsed: - ParseContext._parsed.add(buildfile) - - buildfile_dir = os.path.dirname(buildfile.full_path) - - # TODO(John Sirois): XXX imports are done here to prevent a cycles - from twitter.pants.targets.jvm_binary import Bundle - from twitter.pants.targets.sources import SourceRoot - - class RelativeBundle(Bundle): - def __init__(self, mapper=None, relative_to=None): - super(RelativeBundle, self).__init__( - base=buildfile_dir, - mapper=mapper, - relative_to=relative_to) - - # TODO(John Sirois): This is not build-dictionary friendly - rework SourceRoot to allow - # allow for doc of both register (as source_root) and source_root.here(*types). - class RelativeSourceRoot(object): - @staticmethod - def here(*allowed_target_types): - """Registers the cwd as a source root for the given target types.""" - SourceRoot.register(buildfile_dir, *allowed_target_types) - - def __init__(self, basedir, *allowed_target_types): - SourceRoot.register(os.path.join(buildfile_dir, basedir), *allowed_target_types) - - eval_globals = copy.copy(pants_context) - eval_globals.update({ - 'ROOT_DIR': buildfile.root_dir, - '__file__': buildfile.full_path, - 'globs': partial(Fileset.globs, root=buildfile_dir), - 'rglobs': partial(Fileset.rglobs, root=buildfile_dir), - 'zglobs': partial(Fileset.zglobs, root=buildfile_dir), - 'source_root': RelativeSourceRoot, - 'bundle': RelativeBundle - }) - eval_globals.update(globalargs) - Compatibility.exec_function(buildfile.code(), eval_globals) - - def on_context_exit(self, func, *args, **kwargs): - """ Registers a command to invoke just before this parse context is exited. - - It is an error to attempt to register an on_context_exit action outside an active parse - context. - """ - if not hasattr(self, '_on_context_exit'): - raise self.ContextError('Can only register context exit actions when a parse context ' - 'is active') - - if not callable(func): - raise TypeError('func must be a callable object') - - self._on_context_exit.append((func, args, kwargs)) - - def do_in_context(self, work): - """Executes the callable work in this parse context.""" - if not callable(work): - raise TypeError('work must be a callable object') - - with ParseContext.activate(self): - return work() - - def __repr__(self): - return '%s(%s)' % (type(self).__name__, self.buildfile) - - @property - def current_buildfile(self): - """ This property return the current build file being parsed from all BUILD files co-located - with this BUILD file within the family. - """ - return self._active_buildfile diff --git a/src/python/twitter/pants/base/rcfile.py b/src/python/twitter/pants/base/rcfile.py deleted file mode 100644 index ad50fed73..000000000 --- a/src/python/twitter/pants/base/rcfile.py +++ /dev/null @@ -1,101 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
diff --git a/src/python/twitter/pants/base/rcfile.py b/src/python/twitter/pants/base/rcfile.py
deleted file mode 100644
index ad50fed73..000000000
--- a/src/python/twitter/pants/base/rcfile.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-from twitter.common.lang import Compatibility
-from twitter.common.log import logger
-from twitter.pants.base.config import Config
-
-
-log = logger(name='rcfile')
-
-
-class RcFile(object):
-  """Handles rcfile-style configuration files.
-
-  Precedence is given to rcfiles that come last in the given sequence of paths.
-  The effect is as if each rcfile in paths were overlaid by the ones after it, in a walk from
-  left to right.
-  """
-
-  # TODO(John Sirois): localize handling of this flag value back into pants_exe.py once the
-  # new/old split is healed.
-  _DISABLE_PANTS_RC_OPTION = '--no-pantsrc'
-
-  @staticmethod
-  def install_disable_rc_option(parser):
-    parser.add_option(RcFile._DISABLE_PANTS_RC_OPTION, action='store_true', dest='nopantsrc',
-                      default=False, help='Specifies that pantsrc files should be ignored.')
-
-  def __init__(self, paths, default_prepend=True, process_default=False):
-    """
-    :param paths: The rcfiles to apply default subcommand options from.
-    :param default_prepend: Whether to prepend (the default) or append if default options
-      are specified with the ``options`` key.
-    :param process_default: True to process options in the [DEFAULT] section and apply them
-      regardless of goal.
-    """
-
-    self.default_prepend = default_prepend
-    self.process_default = process_default
-
-    if not paths:
-      raise ValueError('One or more rcfile paths must be specified')
-
-    if isinstance(paths, Compatibility.string):
-      paths = [paths]
-    self.paths = [os.path.expanduser(path) for path in paths]
-
-  def apply_defaults(self, commands, args):
-    """Augment arguments with defaults found for the given commands.
-
-    The returned arguments will be a new copy of the given args with possibly extra augmented
-    arguments.
-
-    Default options are applied from the following keys under a section with the name of the
-    sub-command the default options apply to:
-
-    * `options` - These options are either prepended or appended to the command line args as
-      specified in the constructor with default_prepend.
-    * `prepend-options` - These options are prepended to the command line args.
-    * `append-options` - These options are appended to the command line args.
- """ - - args = args[:] - - if RcFile._DISABLE_PANTS_RC_OPTION in args: - return args - - config = Config.create_parser() - read_from = config.read(self.paths) - if not read_from: - log.debug('no rcfile found') - return args - - log.debug('using rcfiles: %s to modify args' % ','.join(read_from)) - - def get_rcopts(command, key): - return config.get(command, key).split() if config.has_option(command, key) else [] - - commands = list(commands) - if self.process_default: - commands.insert(0, Config.DEFAULT_SECTION) - - for cmd in commands: - opts = get_rcopts(cmd, 'options') - args = (opts + args) if self.default_prepend else (args + opts) - args = get_rcopts(cmd, 'prepend-options') + args + get_rcopts(cmd, 'append-options') - return args diff --git a/src/python/twitter/pants/base/revision.py b/src/python/twitter/pants/base/revision.py deleted file mode 100644 index 3b560c21f..000000000 --- a/src/python/twitter/pants/base/revision.py +++ /dev/null @@ -1,99 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import re - -from itertools import izip_longest - - -class Revision(object): - """Represents a software revision that is comparable to another revision describing the same - software. - """ - class BadRevision(Exception): - """Indicates a problem parsing a revision.""" - - @classmethod - def _parse_atom(cls, atom): - try: - return int(atom) - except ValueError: - return atom - - @classmethod - def semver(cls, rev): - """Attempts to parse a Revision from a semantic version. - - See http://semver.org/ for the full specification. - """ - def parse_extra(delimiter, value): - if not value: - return None, None - else: - components = value.split(delimiter, 1) - return components[0], None if len(components) == 1 else components[1] - - def parse_patch(patch): - patch, pre_release = parse_extra('-', patch) - if pre_release: - pre_release, build = parse_extra('+', pre_release) - else: - patch, build = parse_extra('+', patch) - return patch, pre_release, build - - def parse_components(value): - if not value: - yield None - else: - for atom in value.split('.'): - yield cls._parse_atom(atom) - - try: - major, minor, patch = rev.split('.', 2) - patch, pre_release, build = parse_patch(patch) - components = [int(major), int(minor), int(patch)] - components.extend(parse_components(pre_release)) - components.extend(parse_components(build)) - return cls(*components) - except ValueError: - raise cls.BadRevision("Failed to parse '%s' as a semantic version number" % rev) - - @classmethod - def lenient(cls, rev): - """A lenient revision parser that tries to split the version into logical components with - heuristics inspired by PHP's version_compare. 
- """ - rev = re.sub(r'(\d)([a-zA-Z])', r'\1.\2', rev) - rev = re.sub(r'([a-zA-Z])(\d)', r'\1.\2', rev) - return cls(*map(cls._parse_atom, re.split(r'[.+_\-]', rev))) - - def __init__(self, *components): - self._components = components - - @property - def components(self): - """Returns a list of this revision's components from most major to most minor.""" - return list(self._components) - - def __cmp__(self, other): - for ours, theirs in izip_longest(self._components, other._components, fillvalue=0): - difference = cmp(ours, theirs) - if difference != 0: - return difference - return 0 - - def __repr__(self): - return '%s(%s)' % (self.__class__.__name__, ', '.join(map(repr, self._components))) diff --git a/src/python/twitter/pants/base/run_info.py b/src/python/twitter/pants/base/run_info.py deleted file mode 100644 index d1d2f3183..000000000 --- a/src/python/twitter/pants/base/run_info.py +++ /dev/null @@ -1,87 +0,0 @@ -import getpass -import os -import re -import socket -import time - -from twitter.common.dirutil import safe_mkdir_for - -from .build_environment import get_scm, get_buildroot - - -class RunInfo(object): - """A little plaintext file containing very basic info about a pants run. - - Can only be appended to, never edited. - """ - - @classmethod - def dir(cls, config): - """Returns the configured base directory run info files are stored under.""" - # TODO(John Sirois): This is centralized, but in an awkward location. Isolate RunInfo reading - # and writing in 1 package or class that could naturally know this location and synthesize - # info_file names. - return config.getdefault('info_dir', - default=os.path.join(config.getdefault('pants_workdir'), 'runs')) - - def __init__(self, info_file): - self._info_file = info_file - safe_mkdir_for(self._info_file) - self._info = {} - if os.path.exists(self._info_file): - with open(self._info_file, 'r') as infile: - info = infile.read() - for m in re.finditer("""^([^:]+):(.*)$""", info, re.MULTILINE): - self._info[m.group(1).strip()] = m.group(2).strip() - - def path(self): - return self._info_file - - def get_info(self, key): - return self._info.get(key, None) - - def __getitem__(self, key): - ret = self.get_info(key) - if ret is None: - raise KeyError(key) - return ret - - def get_as_dict(self): - return self._info.copy() - - def add_info(self, key, val): - """Adds the given info and returns a dict composed of just this added info.""" - return self.add_infos((key, val)) - - def add_infos(self, *keyvals): - """Adds the given info and returns a dict composed of just this added info.""" - infos = dict(keyvals) - with open(self._info_file, 'a') as outfile: - for key, val in infos.items(): - key = key.strip() - val = str(val).strip() - if ':' in key: - raise Exception, 'info key must not contain a colon' - outfile.write('%s: %s\n' % (key, val)) - self._info[key] = val - return infos - - def add_basic_info(self, run_id, timestamp): - """Adds basic build info and returns a dict composed of just this added info.""" - datetime = time.strftime('%A %b %d, %Y %H:%M:%S', time.localtime(timestamp)) - user = getpass.getuser() - machine = socket.gethostname() - path = get_buildroot() - return self.add_infos(('id', run_id), ('timestamp', timestamp), ('datetime', datetime), - ('user', user), ('machine', machine), ('path', path)) - - def add_scm_info(self): - """Adds SCM-related info and returns a dict composed of just this added info.""" - scm = get_scm() - if scm: - revision = scm.commit_id - tag = scm.tag_name or 'none' - branch = scm.branch_name or 
diff --git a/src/python/twitter/pants/base/run_info.py b/src/python/twitter/pants/base/run_info.py
deleted file mode 100644
index d1d2f3183..000000000
--- a/src/python/twitter/pants/base/run_info.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import getpass
-import os
-import re
-import socket
-import time
-
-from twitter.common.dirutil import safe_mkdir_for
-
-from .build_environment import get_scm, get_buildroot
-
-
-class RunInfo(object):
-  """A little plaintext file containing very basic info about a pants run.
-
-  Can only be appended to, never edited.
-  """
-
-  @classmethod
-  def dir(cls, config):
-    """Returns the configured base directory run info files are stored under."""
-    # TODO(John Sirois): This is centralized, but in an awkward location. Isolate RunInfo reading
-    # and writing in 1 package or class that could naturally know this location and synthesize
-    # info_file names.
-    return config.getdefault('info_dir',
-                             default=os.path.join(config.getdefault('pants_workdir'), 'runs'))
-
-  def __init__(self, info_file):
-    self._info_file = info_file
-    safe_mkdir_for(self._info_file)
-    self._info = {}
-    if os.path.exists(self._info_file):
-      with open(self._info_file, 'r') as infile:
-        info = infile.read()
-      for m in re.finditer("""^([^:]+):(.*)$""", info, re.MULTILINE):
-        self._info[m.group(1).strip()] = m.group(2).strip()
-
-  def path(self):
-    return self._info_file
-
-  def get_info(self, key):
-    return self._info.get(key, None)
-
-  def __getitem__(self, key):
-    ret = self.get_info(key)
-    if ret is None:
-      raise KeyError(key)
-    return ret
-
-  def get_as_dict(self):
-    return self._info.copy()
-
-  def add_info(self, key, val):
-    """Adds the given info and returns a dict composed of just this added info."""
-    return self.add_infos((key, val))
-
-  def add_infos(self, *keyvals):
-    """Adds the given info and returns a dict composed of just this added info."""
-    infos = dict(keyvals)
-    with open(self._info_file, 'a') as outfile:
-      for key, val in infos.items():
-        key = key.strip()
-        val = str(val).strip()
-        if ':' in key:
-          raise Exception('info key must not contain a colon')
-        outfile.write('%s: %s\n' % (key, val))
-        self._info[key] = val
-    return infos
-
-  def add_basic_info(self, run_id, timestamp):
-    """Adds basic build info and returns a dict composed of just this added info."""
-    datetime = time.strftime('%A %b %d, %Y %H:%M:%S', time.localtime(timestamp))
-    user = getpass.getuser()
-    machine = socket.gethostname()
-    path = get_buildroot()
-    return self.add_infos(('id', run_id), ('timestamp', timestamp), ('datetime', datetime),
-                          ('user', user), ('machine', machine), ('path', path))
-
-  def add_scm_info(self):
-    """Adds SCM-related info and returns a dict composed of just this added info."""
-    scm = get_scm()
-    if scm:
-      revision = scm.commit_id
-      tag = scm.tag_name or 'none'
-      branch = scm.branch_name or revision
-    else:
-      revision, tag, branch = 'none', 'none', 'none'
-    return self.add_infos(('revision', revision), ('tag', tag), ('branch', branch))
diff --git a/src/python/twitter/pants/base/target.py b/src/python/twitter/pants/base/target.py
deleted file mode 100644
index 9f1804adf..000000000
--- a/src/python/twitter/pants/base/target.py
+++ /dev/null
@@ -1,457 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-import collections
-import os
-import sys
-
-from twitter.common.collections import OrderedSet, maybe_list
-from twitter.common.lang import Compatibility
-
-from .address import Address
-from .build_manual import manual
-from .hash_utils import hash_all
-from .parse_context import ParseContext
-
-
-class TargetDefinitionException(Exception):
-  """Thrown on errors in target definitions."""
-
-  def __init__(self, target, msg):
-    address = getattr(target, 'address', None)
-    if address is None:
-      try:
-        location = ParseContext.locate().current_buildfile
-      except ParseContext.ContextError:
-        location = 'unknown location'
-      address = 'unknown target of type %s in %s' % (target.__class__.__name__, location)
-    super(TargetDefinitionException, self).__init__('Error with %s: %s' % (address, msg))
-
-
-class AbstractTarget(object):
-
-  @property
-  def is_concrete(self):
-    """Returns True if a target resolves to itself."""
-    targets = list(self.resolve())
-    return len(targets) == 1 and targets[0] == self
-
-  @property
-  def has_resources(self):
-    """Returns True if the target has an associated set of Resources."""
-    return hasattr(self, 'resources') and self.resources
-
-  @property
-  def is_exported(self):
-    """Returns True if the target provides an artifact exportable from the repo."""
-    # TODO(John Sirois): fixup predicate dipping down into details here.
- return self.has_label('exportable') and self.provides - - @property - def is_internal(self): - """Returns True if the target is internal to the repo (ie: it might have dependencies).""" - return self.has_label('internal') - - @property - def is_jar(self): - """Returns True if the target is a jar.""" - return False - - @property - def is_java_agent(self): - """Returns `True` if the target is a java agent.""" - return self.has_label('java_agent') - - @property - def is_jvm_app(self): - """Returns True if the target produces a java application with bundled auxiliary files.""" - return False - - @property - def is_thrift(self): - """Returns True if the target has thrift IDL sources.""" - return False - - @property - def is_jvm(self): - """Returns True if the target produces jvm bytecode.""" - return self.has_label('jvm') - - @property - def is_codegen(self): - """Returns True if the target is a codegen target.""" - return self.has_label('codegen') - - @property - def is_synthetic(self): - """Returns True if the target is a synthetic target injected by the runtime.""" - return self.has_label('synthetic') - - @property - def is_jar_library(self): - """Returns True if the target is an external jar library.""" - return self.has_label('jars') - - @property - def is_java(self): - """Returns True if the target has or generates java sources.""" - return self.has_label('java') - - @property - def is_apt(self): - """Returns True if the target exports an annotation processor.""" - return self.has_label('apt') - - @property - def is_python(self): - """Returns True if the target has python sources.""" - return self.has_label('python') - - @property - def is_scala(self): - """Returns True if the target has scala sources.""" - return self.has_label('scala') - - @property - def is_scalac_plugin(self): - """Returns True if the target builds a scalac plugin.""" - return self.has_label('scalac_plugin') - - @property - def is_test(self): - """Returns True if the target is comprised of tests.""" - return self.has_label('tests') - - def resolve(self): - """Returns an iterator over the target(s) this target represents.""" - yield self - - -@manual.builddict() -class Target(AbstractTarget): - """The baseclass for all pants targets. - - Handles registration of a target amongst all parsed targets as well as location of the target - parse context. - """ - - _targets_by_address = None - _addresses_by_buildfile = None - - @classmethod - def identify(cls, targets): - """Generates an id for a set of targets.""" - return cls.combine_ids(target.id for target in targets) - - @classmethod - def maybe_readable_identify(cls, targets): - """Generates an id for a set of targets. - - If the set is a single target, just use that target's id.""" - return cls.maybe_readable_combine_ids([target.id for target in targets]) - - @staticmethod - def combine_ids(ids): - """Generates a combined id for a set of ids.""" - return hash_all(sorted(ids)) # We sort so that the id isn't sensitive to order. - - @classmethod - def maybe_readable_combine_ids(cls, ids): - """Generates combined id for a set of ids, but if the set is a single id, just use that.""" - ids = list(ids) # We can't len a generator. - return ids[0] if len(ids) == 1 else cls.combine_ids(ids) - - @classmethod - def get_all_addresses(cls, buildfile): - """Returns all of the target addresses in the specified buildfile if already parsed; otherwise, - parses the buildfile to find all the addresses it contains and then returns them. 
- """ - def lookup(): - if buildfile in cls._addresses_by_buildfile: - return cls._addresses_by_buildfile[buildfile] - else: - return OrderedSet() - - addresses = lookup() - if addresses: - return addresses - else: - ParseContext(buildfile).parse() - return lookup() - - @classmethod - def _clear_all_addresses(cls): - cls._targets_by_address = {} - cls._addresses_by_buildfile = collections.defaultdict(OrderedSet) - - @classmethod - def get(cls, address): - """Returns the specified module target if already parsed; otherwise, parses the buildfile in the - context of its parent directory and returns the parsed target. - """ - def lookup(): - return cls._targets_by_address.get(address, None) - - target = lookup() - if target: - return target - else: - ParseContext(address.buildfile).parse() - return lookup() - - @classmethod - def resolve_all(cls, targets, *expected_types): - """Yield the resolved concrete targets checking each is a subclass of one of the expected types - if specified. - """ - if targets: - for target in maybe_list(targets, expected_type=Target): - concrete_targets = [t for t in target.resolve() if t.is_concrete] - for resolved in concrete_targets: - if expected_types and not isinstance(resolved, expected_types): - raise TypeError('%s requires types: %s and found %s' % (cls, expected_types, resolved)) - yield resolved - - def __init__(self, name, reinit_check=True, exclusives=None): - """ - :param string name: The target name. - """ - # See "get_all_exclusives" below for an explanation of the exclusives parameter. - # This check prevents double-initialization in multiple-inheritance situations. - # TODO(John Sirois): fix target inheritance - use super() to linearize or use alternatives to - # multiple inheritance. - if not reinit_check or not hasattr(self, '_initialized'): - if not isinstance(name, Compatibility.string): - self.address = '%s:%s' % (ParseContext.locate().current_buildfile, str(name)) - raise TargetDefinitionException(self, "Invalid target name: %s" % name) - self.name = name - self.description = None - - self.address = self._locate() - - # TODO(John Sirois): Transition all references to self.identifier to eliminate id builtin - # ambiguity - self.id = self._create_id() - - self._register() - - self.labels = set() - - self._initialized = True - - self.declared_exclusives = collections.defaultdict(set) - if exclusives is not None: - for k in exclusives: - self.declared_exclusives[k].add(exclusives[k]) - self.exclusives = None - - # For synthetic codegen targets this will be the original target from which - # the target was synthesized. - self._derived_from = self - - @property - def derived_from(self): - """Returns the target this target was derived from. - - If this target was not derived from another, returns itself. - """ - return self._derived_from - - @derived_from.setter - def derived_from(self, value): - """Sets the target this target was derived from. - - Various tasks may create targets not written down in any BUILD file. Often these targets are - derived from targets written down in BUILD files though in which case the derivation chain - should be maintained. 
- """ - if value and not isinstance(value, AbstractTarget): - raise ValueError('Expected derived_from to be a Target, given %s of type %s' - % (value, type(value))) - self._derived_from = value - - def get_declared_exclusives(self): - return self.declared_exclusives - - def add_to_exclusives(self, exclusives): - if exclusives is not None: - for key in exclusives: - self.exclusives[key] |= exclusives[key] - - def get_all_exclusives(self): - """ Get a map of all exclusives declarations in the transitive dependency graph. - - For a detailed description of the purpose and use of exclusives tags, - see the documentation of the CheckExclusives task. - - """ - if self.exclusives is None: - self._propagate_exclusives() - return self.exclusives - - def _propagate_exclusives(self): - if self.exclusives is None: - self.exclusives = collections.defaultdict(set) - self.add_to_exclusives(self.declared_exclusives) - # This may perform more work than necessary. - # We want to just traverse the immediate dependencies of this target, - # but for a general target, we can't do that. _propagate_exclusives is overridden - # in subclasses when possible to avoid the extra work. - self.walk(lambda t: self._propagate_exclusives_work(t)) - - def _propagate_exclusives_work(self, target): - # Note: this will cause a stack overflow if there is a cycle in - # the dependency graph, so exclusives checking should occur after - # cycle detection. - if hasattr(target, "declared_exclusives"): - self.add_to_exclusives(target.declared_exclusives) - return None - - def _post_construct(self, func, *args, **kwargs): - """Registers a command to invoke after this target's BUILD file is parsed.""" - ParseContext.locate().on_context_exit(func, *args, **kwargs) - - def _create_id(self): - """Generates a unique identifier for the BUILD target. - - The generated id is safe for use as a path name on unix systems. - """ - buildfile_relpath = os.path.dirname(self.address.buildfile.relpath) - if buildfile_relpath in ('.', ''): - return self.name - else: - return "%s.%s" % (buildfile_relpath.replace(os.sep, '.'), self.name) - - def _locate(self): - parse_context = ParseContext.locate() - return Address(parse_context.current_buildfile, self.name) - - def _register(self): - existing = self._targets_by_address.get(self.address) - if existing and existing is not self: - if existing.address.buildfile != self.address.buildfile: - raise TargetDefinitionException(self, "already defined in a sibling BUILD " - "file: %s" % existing.address.buildfile.relpath) - else: - raise TargetDefinitionException(self, "duplicate to %s" % existing) - - self._targets_by_address[self.address] = self - self._addresses_by_buildfile[self.address.buildfile].add(self.address) - - @property - def identifier(self): - """A unique identifier for the BUILD target. - - The generated id is safe for use as a path name on unix systems. - """ - return self.id - - def walk(self, work, predicate=None): - """Walk of this target's dependency graph visiting each node exactly once. - - If a predicate is supplied it will be used to test each target before handing the target to - work and descending. Work can return targets in which case these will be added to the walk - candidate set if not already walked. - - :param work: Callable that takes a :py:class:`twitter.pants.base.target.Target` - as its single argument. - :param predicate: Callable that takes a :py:class:`twitter.pants.base.target.Target` - as its single argument and returns True if the target should passed to ``work``. 
- """ - if not callable(work): - raise ValueError('work must be callable but was %s' % work) - if predicate and not callable(predicate): - raise ValueError('predicate must be callable but was %s' % predicate) - self._walk(set(), work, predicate) - - def _walk(self, walked, work, predicate=None): - for target in self.resolve(): - if target not in walked: - walked.add(target) - if not predicate or predicate(target): - additional_targets = work(target) - if hasattr(target, '_walk'): - target._walk(walked, work, predicate) - if additional_targets: - for additional_target in additional_targets: - if hasattr(additional_target, '_walk'): - additional_target._walk(walked, work, predicate) - - @manual.builddict() - def with_description(self, description): - """Set a human-readable description of this target.""" - self.description = description - return self - - def add_labels(self, *label): - self.labels.update(label) - - def remove_label(self, label): - self.labels.remove(label) - - def has_label(self, label): - return label in self.labels - - def __eq__(self, other): - return isinstance(other, Target) and self.address == other.address - - def __hash__(self): - return hash(self.address) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return "%s(%s)" % (type(self).__name__, self.address) - - @staticmethod - def has_jvm_targets(targets): - """Returns true if the given sequence of targets contains at least one jvm target as determined - by is_jvm(...) - """ - - return len(list(Target.extract_jvm_targets(targets))) > 0 - - @staticmethod - def extract_jvm_targets(targets): - """Returns an iterator over the jvm targets the given sequence of targets resolve to. The - given targets can be a mix of types and only valid jvm targets (as determined by is_jvm(...) - will be returned by the iterator. - """ - - for target in targets: - if target is None: - print('Warning! Null target!', file=sys.stderr) - continue - for real_target in target.resolve(): - if real_target.is_jvm: - yield real_target - - def has_sources(self, extension=None): - """Returns True if the target has sources. - - If an extension is supplied the target is further checked for at least 1 source with the given - extension. - """ - return (self.has_label('sources') and - (not extension or - (hasattr(self, 'sources') and - any(source.endswith(extension) for source in self.sources)))) - - -Target._clear_all_addresses() diff --git a/src/python/twitter/pants/base/timer.py b/src/python/twitter/pants/base/timer.py deleted file mode 100644 index bb3d52fed..000000000 --- a/src/python/twitter/pants/base/timer.py +++ /dev/null @@ -1,104 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import time - -from collections import namedtuple -from contextlib import contextmanager - -from twitter.common.lang import Compatibility - - -Timing = namedtuple('Timing', ['label', 'times', 'overlapping']) - - -class Timer(object): - def __init__(self): - self._timings = [] - - @contextmanager - def timing(self, label): - """Convenient timing context. - - Use like this: - - with timer.timing(label): - ... the work that will be timed ... - """ - start = self.now() - yield None - elapsed = self.now() - start - self.log(label, [elapsed]) - - def now(self): - return time.monotonic() if Compatibility.PY3 else time.time() - - def log(self, label, times, overlapping=False): - """Code that has to measure its own timings directly can log them here. - - If labels are of the form prefix:suffix, then the sum of all times of consecutively-logged - timings with the same prefix will also be logged. - - Set overlapping to True if you're logging a timing that overlaps with other, already-logged - timings. - """ - self._timings.append(Timing(label, times, overlapping)) - - def print_timings(self): - grand_total_time = 0 - - last_prefix = None - total_time_for_prefix = 0 - num_timings_with_prefix = 0 - - def maybe_print_timings_for_prefix(): - if num_timings_with_prefix > 1: - print('[%(prefix)s] total: %(total).3fs' % { - 'prefix': last_prefix, - 'total': total_time_for_prefix - }) - - for timing in self._timings: - total_time = sum(timing.times) - if not timing.overlapping: - grand_total_time += total_time - - pos = timing.label.find(':') - if pos != -1: - prefix = timing.label[0:pos] - if prefix == last_prefix and not timing.overlapping: - total_time_for_prefix += total_time - num_timings_with_prefix += 1 - else: - maybe_print_timings_for_prefix() - total_time_for_prefix = total_time - num_timings_with_prefix = 1 - last_prefix = prefix - - if len(timing.times) > 1: - print('[%(label)s(%(numsteps)d)] %(timings)s -> %(total).3fs' % { - 'label': timing.label, - 'numsteps': len(timing.times), - 'timings': ','.join('%.3fs' % time for time in timing.times), - 'total': total_time - }) - else: - print('[%(label)s] %(total).3fs' % { - 'label': timing.label, - 'total': total_time - }) - maybe_print_timings_for_prefix() - print('total: %.3fs' % grand_total_time) diff --git a/src/python/twitter/pants/base/worker_pool.py b/src/python/twitter/pants/base/worker_pool.py deleted file mode 100644 index 439d9b478..000000000 --- a/src/python/twitter/pants/base/worker_pool.py +++ /dev/null @@ -1,148 +0,0 @@ -from multiprocessing.pool import ThreadPool -import threading -from twitter.pants.reporting.report import Report - - -class Work(object): - """Represents multiple concurrent calls to the same callable.""" - def __init__(self, func, args_tuples, workunit_name=None): - # A callable. - self.func = func - - # A list of tuples of args. func will be called once per tuple, concurrently. - # The length of this list is the cardinality of the work. - self.args_tuples = args_tuples - - # If specified, each invocation will be executed in a workunit of this name. - self.workunit_name = workunit_name - - -class WorkerPool(object): - """A pool of workers. - - Workers are threads, and so are subject to GIL constraints. Submitting CPU-bound work - may not be effective. Use this class primarily for IO-bound work. 
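A usage sketch of the Timer above; consecutive labels sharing a prefix trigger the roll-up described in log() (the labels and the work being timed are illustrative):

  timer = Timer()
  with timer.timing('compile:java'):
    compile_java()   # hypothetical work being timed
  with timer.timing('compile:scala'):
    compile_scala()  # hypothetical work being timed
  timer.print_timings()  # per-label lines plus a '[compile] total: ...' summary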
- """ - - def __init__(self, parent_workunit, run_tracker, num_workers): - self._run_tracker = run_tracker - # All workers accrue work to the same root. - self._pool = ThreadPool(processes=num_workers, - initializer=self._run_tracker.register_thread, - initargs=(parent_workunit, )) - # We mustn't shutdown when there are pending workchains, as they may need to submit work - # in the future, and the pool doesn't know about this yet. - self._pending_workchains = 0 - self._pending_workchains_cond = threading.Condition() # Protects self._pending_workchains. - - self._shutdown_hooks = [] - - def add_shutdown_hook(self, hook): - self._shutdown_hooks.append(hook) - - def submit_async_work(self, work, workunit_parent=None, on_success=None, on_failure=None): - """Submit work to be executed in the background. - - - work: The work to execute. - - workunit_parent: If specified, work is accounted for under this workunit. - - on_success: If specified, a callable taking a single argument, which will be a list - of return values of each invocation, in order. Called only if all work succeeded. - - on_failure: If specified, a callable taking a single argument, which is an exception - thrown in the work. - - Don't do work in on_success: not only will it block the result handling thread, but - that thread is not a worker and doesn't have a logging context etc. Use it just to - submit further work to the pool. - """ - if work is None or len(work.args_tuples) == 0: # map_async hangs on 0-length iterables. - if on_success: - on_success([]) - else: - def do_work(*args): - self._do_work(work.func, *args, workunit_name=work.workunit_name, - workunit_parent=workunit_parent, on_failure=on_failure) - self._pool.map_async(do_work, work.args_tuples, chunksize=1, callback=on_success) - - def submit_async_work_chain(self, work_chain, workunit_parent, done_hook=None): - """Submit work to be executed in the background. - - - work_chain: An iterable of Work instances. Will be invoked serially. Each instance may - have a different cardinality. There is no output-input chaining: the argument - tuples must already be present in each work instance. If any work throws an - exception no subsequent work in the chain will be attempted. - - workunit_parent: Work is accounted for under this workunit. - - done_hook: If not None, invoked with no args after all work is done, or on error. - """ - def done(): - if done_hook: - done_hook() - with self._pending_workchains_cond: - self._pending_workchains -= 1 - self._pending_workchains_cond.notify() - - def error(e): - done() - self._run_tracker.log(Report.ERROR, '%s' % e) - - # We filter out Nones defensively. There shouldn't be any, but if a bug causes one, - # Pants might hang indefinitely without this filtering. - work_iter = iter(filter(None, work_chain)) - def submit_next(): - try: - self.submit_async_work(work_iter.next(), workunit_parent=workunit_parent, - on_success=lambda x: submit_next(), on_failure=error) - except StopIteration: - done() # The success case. - - with self._pending_workchains_cond: - self._pending_workchains += 1 - try: - submit_next() - except Exception as e: # Handles errors in the submission code. - done() - self._run_tracker.log(Report.ERROR, '%s' % e) - raise - - def submit_work_and_wait(self, work, workunit_parent=None): - """Submit work to be executed on this pool, but wait for it to complete. - - - work: The work to execute. - - workunit_parent: If specified, work is accounted for under this workunit. 
- - Returns a list of return values of each invocation, in order. Throws if any invocation does. - """ - if work is None or len(work.args_tuples) == 0: # map hangs on 0-length iterables. - return [] - else: - def do_work(*args): - return self._do_work(work.func, *args, workunit_name=work.workunit_name, - workunit_parent=workunit_parent) - # We need to specify a timeout explicitly, because otherwise python ignores SIGINT when waiting - # on a condition variable, so we won't be able to ctrl-c out. - return self._pool.map_async(do_work, work.args_tuples, chunksize=1).get(timeout=1000000000) - - def _do_work(self, func, args_tuple, workunit_name, workunit_parent, on_failure=None): - try: - if workunit_name: - with self._run_tracker.new_workunit_under_parent(name=workunit_name, parent=workunit_parent): - return func(*args_tuple) - else: - return func(*args_tuple) - except Exception as e: - if on_failure: - # Note that here the work's workunit is closed. So, e.g., it's OK to use on_failure() - # to close an ancestor workunit. - on_failure(e) - raise - - def shutdown(self): - with self._pending_workchains_cond: - while self._pending_workchains > 0: - self._pending_workchains_cond.wait() - self._pool.close() - self._pool.join() - for hook in self._shutdown_hooks: - hook() - - def abort(self): - self._pool.terminate() diff --git a/src/python/twitter/pants/base/workunit.py b/src/python/twitter/pants/base/workunit.py deleted file mode 100644 index 7e604f80b..000000000 --- a/src/python/twitter/pants/base/workunit.py +++ /dev/null @@ -1,204 +0,0 @@ -import os -import re -import time -import uuid - -from twitter.common.dirutil import safe_mkdir_for -from twitter.common.rwbuf.read_write_buffer import FileBackedRWBuf # XXX pull back into pants - - -class WorkUnit(object): - """A hierarchical unit of work, for the purpose of timing and reporting. - - A WorkUnit can be subdivided into further WorkUnits. The WorkUnit concept is deliberately - decoupled from the phase/task hierarchy. This allows some flexibility in having, say, - sub-units inside a task. E.g., there might be one WorkUnit representing an entire pants run, - and that can be subdivided into WorkUnits for each phase. Each of those can be subdivided into - WorkUnits for each task, and a task can subdivide that into further work units, if finer-grained - timing and reporting is needed. - """ - - # The outcome of a workunit. - # It can only be set to a new value <= the old one. - ABORTED = 0 - FAILURE = 1 - WARNING = 2 - SUCCESS = 3 - UNKNOWN = 4 - - @staticmethod - def choose_for_outcome(outcome, aborted_val, failure_val, warning_val, success_val, unknown_val): - """Returns one of the 5 arguments, depending on the outcome.""" - if outcome not in range(0, 5): - raise Exception('Invalid outcome: %s' % outcome) - return (aborted_val, failure_val, warning_val, success_val, unknown_val)[outcome] - - @staticmethod - def outcome_string(outcome): - """Returns a human-readable string describing the outcome.""" - return WorkUnit.choose_for_outcome(outcome, 'ABORTED', 'FAILURE', 'WARNING', 'SUCCESS', 'UNKNOWN') - - # Labels describing a workunit. Reporting code can use this to decide how to display - # information about this workunit. - # - # Note that a workunit can have multiple labels where this makes sense, e.g., TOOL, COMPILER and NAILGUN. - SETUP = 0 # Parsing build files etc. - PHASE = 1 # Executing a phase. - GOAL = 2 # Executing a goal. - GROUP = 3 # Executing a group. - - BOOTSTRAP = 4 # Invocation of code to fetch a tool. 
- TOOL = 5 # Single invocations of a tool. - MULTITOOL = 6 # Multiple consecutive invocations of the same tool. - COMPILER = 7 # Invocation of a compiler. - - TEST = 8 # Running a test. - JVM = 9 # Running a tool via the JVM. - NAILGUN = 10 # Running a tool via nailgun. - RUN = 11 # Running a binary. - REPL = 12 # Running a repl. - - def __init__(self, run_tracker, parent, name, labels=None, cmd=''): - """ - - run_tracker: The RunTracker that tracks this WorkUnit. - - parent: The containing workunit, if any. E.g., 'compile' might contain 'java', 'scala' etc., - 'scala' might contain 'compile', 'split' etc. - - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'. - - labels: An optional iterable of labels. The reporters can use this to decide how to - display information about this work. - - cmd: An optional longer string representing this work. - E.g., the cmd line of a compiler invocation. - """ - self._outcome = WorkUnit.UNKNOWN - - self.run_tracker = run_tracker - self.parent = parent - self.children = [] - - self.name = name - self.labels = set(labels or ()) - self.cmd = cmd - self.id = uuid.uuid4() - - # In seconds since the epoch. Doubles, to account for fractional seconds. - self.start_time = 0 - self.end_time = 0 - - # A workunit may have multiple outputs, which we identify by a name. - # E.g., a tool invocation may have 'stdout', 'stderr', 'debug_log' etc. - self._outputs = {} # name -> output buffer. - - # Do this last, as the parent's _self_time() might get called before we're - # done initializing ourselves. - # TODO: Ensure that a parent can't be ended before all its children are. - if self.parent: - self.parent.children.append(self) - - def has_label(self, label): - return label in self.labels - - def start(self): - """Mark the time at which this workunit started.""" - self.start_time = time.time() - - def end(self): - """Mark the time at which this workunit ended.""" - self.end_time = time.time() - for output in self._outputs.values(): - output.close() - is_tool = self.has_label(WorkUnit.TOOL) - path = self.path() - self.run_tracker.cumulative_timings.add_timing(path, self.duration(), is_tool) - self.run_tracker.self_timings.add_timing(path, self._self_time(), is_tool) - - def outcome(self): - """Returns the outcome of this workunit.""" - return self._outcome - - def set_outcome(self, outcome): - """Set the outcome of this work unit. - - We can set the outcome on a work unit directly, but that outcome will also be affected by - those of its subunits. The right thing happens: The outcome of a work unit is the - worst outcome of any of its subunits and any outcome set on it directly.""" - if outcome < self._outcome: - self._outcome = outcome - self.choose(0, 0, 0, 0, 0) # Dummy call, to validate outcome. 
- if self.parent: self.parent.set_outcome(self._outcome) - - _valid_name_re = re.compile(r'\w+') - - def output(self, name): - """Returns the output buffer for the specified output name (e.g., 'stdout').""" - m = WorkUnit._valid_name_re.match(name) - if not m or m.group(0) != name: - raise Exception('Invalid output name: %s' % name) - if name not in self._outputs: - path = os.path.join(self.run_tracker.info_dir, 'tool_outputs', '%s.%s' % (self.id, name)) - safe_mkdir_for(path) - self._outputs[name] = FileBackedRWBuf(path) - return self._outputs[name] - - def outputs(self): - """Returns the map of output name -> output buffer.""" - return self._outputs - - def choose(self, aborted_val, failure_val, warning_val, success_val, unknown_val): - """Returns one of the 5 arguments, depending on our outcome.""" - return WorkUnit.choose_for_outcome(self._outcome, - aborted_val, failure_val, warning_val, success_val, unknown_val) - - def duration(self): - """Returns the time (in fractional seconds) spent in this workunit and its children.""" - return (self.end_time or time.time()) - self.start_time - - def start_time_string(self): - """A convenient string representation of start_time.""" - return time.strftime('%H:%M:%S', time.localtime(self.start_time)) - - def start_delta_string(self): - """A convenient string representation of how long after the run started we started.""" - delta = int(self.start_time) - int(self.root().start_time) - return '%02d:%02d' % (delta / 60, delta % 60) - - def root(self): - ret = self - while ret.parent is not None: - ret = ret.parent - return ret - - def ancestors(self): - """Returns a list consisting of this workunit and those enclosing it, up to the root.""" - ret = [] - workunit = self - while workunit is not None: - ret.append(workunit) - workunit = workunit.parent - return ret - - def path(self): - """Returns a path string for this workunit, E.g., 'all:compile:jvm:scalac'.""" - return ':'.join(reversed([w.name for w in self.ancestors()])) - - def unaccounted_time(self): - """Returns non-leaf time spent in this workunit. - - This assumes that all major work should be done in leaves. - TODO: Is this assumption valid? - """ - return 0 if len(self.children) == 0 else self._self_time() - - def to_dict(self): - """Useful for providing arguments to templates.""" - ret = {} - for key in ['name', 'cmd', 'id', 'start_time', 'end_time', - 'outcome', 'start_time_string', 'start_delta_string']: - val = getattr(self, key) - ret[key] = val() if hasattr(val, '__call__') else val - ret['parent'] = self.parent.to_dict() if self.parent else None - return ret - - def _self_time(self): - """Returns the time spent in this workunit outside of any children.""" - return self.duration() - sum([child.duration() for child in self.children]) - diff --git a/src/python/twitter/pants/bin/__init__.py b/src/python/twitter/pants/bin/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/bin/pants_exe.py b/src/python/twitter/pants/bin/pants_exe.py deleted file mode 100644 index 35bc9a4cf..000000000 --- a/src/python/twitter/pants/bin/pants_exe.py +++ /dev/null @@ -1,200 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
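Tying the two classes together, a sketch of submitting Work to a WorkerPool and then rendering the parent workunit's outcome (pool, fetch, urls, and workunit are all hypothetical):

  # Submit IO-bound work; each invocation runs in a 'fetch' workunit.
  work = Work(fetch, [(url,) for url in urls], workunit_name='fetch')
  pool.submit_async_work(work, workunit_parent=workunit,
                         on_failure=lambda e: print('fetch failed: %s' % e))

  # Later, reporting code can map the outcome to display values via choose():
  status = WorkUnit.outcome_string(workunit.outcome())
  color = workunit.choose('red', 'red', 'yellow', 'green', 'grey')
  print('%s -> %s (%s, %.3fs)' % (workunit.path(), status, color, workunit.duration()))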
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import optparse -import os -import sys -import traceback - -from twitter.common.dirutil import Lock - -from twitter.pants.base.build_environment import get_buildroot, get_version -from twitter.pants.base.address import Address -from twitter.pants.base.config import Config -from twitter.pants.base.rcfile import RcFile -from twitter.pants.commands.command import Command -from twitter.pants.commands.register import register_commands -from twitter.pants.goal.initialize_reporting import initial_reporting -from twitter.pants.goal.run_tracker import RunTracker -from twitter.pants.reporting.report import Report -from twitter.pants.tasks.nailgun_task import NailgunTask - - -_HELP_ALIASES = set([ - '-h', - '--help', - 'help', -]) - -_BUILD_COMMAND = 'build' -_LOG_EXIT_OPTION = '--log-exit' -_VERSION_OPTION = '--version' - -def _do_exit(result=0, msg=None): - if msg: - print(msg, file=sys.stderr) - if _LOG_EXIT_OPTION in sys.argv and result == 0: - print("\nSUCCESS\n") - sys.exit(result) - - -def _exit_and_fail(msg=None): - _do_exit(result=1, msg=msg) - - -def _find_all_commands(): - for cmd in Command.all_commands(): - cls = Command.get_command(cmd) - yield '%s\t%s' % (cmd, cls.__doc__) - - -def _help(version, root_dir): - print('Pants %s @ PANTS_BUILD_ROOT: %s' % (version, root_dir)) - print() - print('Available subcommands:\n\t%s' % '\n\t'.join(_find_all_commands())) - print() - print("""Default subcommand flags can be stored in ~/.pantsrc using the 'options' key of a -section named for the subcommand in ini style format, ie: - [build] - options: --log-exit""") - _exit_and_fail() - - -def _add_default_options(command, args): - expanded_options = RcFile(paths=['/etc/pantsrc', '~/.pants.rc']).apply_defaults([command], args) - if expanded_options != args: - print("(using ~/.pantsrc expansion: pants %s %s)" % (command, ' '.join(expanded_options)), - file=sys.stderr) - return expanded_options - - -def _synthesize_command(root_dir, args): - register_commands() - command = args[0] - - if command in Command.all_commands(): - subcommand_args = args[1:] if len(args) > 1 else [] - return command, _add_default_options(command, subcommand_args) - - if command.startswith('-'): - _exit_and_fail('Invalid command: %s' % command) - - # assume 'build' if a command was omitted. 
-  try:
-    Address.parse(root_dir, command)
-    return _BUILD_COMMAND, _add_default_options(_BUILD_COMMAND, args)
-  except:
-    _exit_and_fail('Failed to execute pants build: %s' % traceback.format_exc())
-
-
-def _parse_command(root_dir, args):
-  command, args = _synthesize_command(root_dir, args)
-  return Command.get_command(command), args
-
-
-try:
-  import psutil
-
-  def _process_info(pid):
-    process = psutil.Process(pid)
-    return '%d (%s)' % (pid, ' '.join(process.cmdline))
-except ImportError:
-  def _process_info(pid):
-    return '%d' % pid
-
-
-def _run():
-  """
-  To add additional paths to sys.path, add a block to the config similar to the following:
-  [main]
-  roots: ['src/python/twitter/pants_internal/test/',]
-  """
-  version = get_version()
-  if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION:
-    _do_exit(version)
-
-  root_dir = get_buildroot()
-  if not os.path.exists(root_dir):
-    _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir)
-
-  if len(sys.argv) < 2 or (len(sys.argv) == 2 and sys.argv[1] in _HELP_ALIASES):
-    _help(version, root_dir)
-
-  command_class, command_args = _parse_command(root_dir, sys.argv[1:])
-
-  parser = optparse.OptionParser(version=version)
-  RcFile.install_disable_rc_option(parser)
-  parser.add_option(_LOG_EXIT_OPTION,
-                    action='store_true',
-                    default=False,
-                    dest='log_exit',
-                    help='Log an exit message on success or failure.')
-
-  config = Config.load()
-
-  # TODO: This can be replaced once extensions are enabled with
-  # https://github.com/pantsbuild/pants/issues/5
-  roots = config.getlist('parse', 'roots', default=[])
-  sys.path.extend(map(lambda root: os.path.join(root_dir, root), roots))
-
-  # XXX(wickman) This should be in the command goal, not in pants_exe.py!
-  run_tracker = RunTracker.from_config(config)
-  report = initial_reporting(config, run_tracker)
-  run_tracker.start(report)
-
-  url = run_tracker.run_info.get_info('report_url')
-  if url:
-    run_tracker.log(Report.INFO, 'See a report at: %s' % url)
-  else:
-    run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')
-
-  command = command_class(run_tracker, root_dir, parser, command_args)
-  try:
-    if command.serialized():
-      def onwait(pid):
-        print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr)
-        return True
-      runfile = os.path.join(root_dir, '.pants.run')
-      lock = Lock.acquire(runfile, onwait=onwait)
-    else:
-      lock = Lock.unlocked()
-    try:
-      result = command.run(lock)
-      _do_exit(result)
-    except KeyboardInterrupt:
-      command.cleanup()
-      raise
-    finally:
-      lock.release()
-  finally:
-    run_tracker.end()
-    # Must kill nailguns only after run_tracker.end() is called, because there may still
-    # be pending background work that needs a nailgun.
-    if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \
-        or config.get('nailgun', 'autokill', default=False):
-      NailgunTask.killall(None)
-
-
-def main():
-  try:
-    _run()
-  except KeyboardInterrupt:
-    _exit_and_fail('Interrupted by user.')
-
-
-if __name__ == '__main__':
-  main()
diff --git a/src/python/twitter/pants/binary_util.py b/src/python/twitter/pants/binary_util.py
deleted file mode 100644
index 1f53b720d..000000000
--- a/src/python/twitter/pants/binary_util.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
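To make _synthesize_command's fallback concrete: a bare target spec on the command line is treated as an implicit 'build' (the spec below is hypothetical):

  command, args = _synthesize_command(root_dir, ['src/python/mylib:bin'])
  # command == 'build'; args carry any rc-file defaults plus the original spec.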
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os -import posixpath -import subprocess - -from contextlib import closing, contextmanager - -from twitter.common import log -from twitter.common.contextutil import temporary_file -from twitter.common.dirutil import chmod_plus_x, safe_delete, safe_open -from twitter.common.lang import Compatibility - -if Compatibility.PY3: - import urllib.request as urllib_request - import urllib.error as urllib_error -else: - import urllib2 as urllib_request - import urllib2 as urllib_error - -from .base.config import Config -from .tasks.task_error import TaskError - - -_ID_BY_OS = { - 'linux': lambda release, machine: ('linux', machine), - 'darwin': lambda release, machine: ('darwin', release.split('.')[0]), -} - - -_PATH_BY_ID = { - ('linux', 'x86_64'): ['linux', 'x86_64'], - ('linux', 'amd64'): ['linux', 'x86_64'], - ('linux', 'i386'): ['linux', 'i386'], - ('darwin', '9'): ['mac', '10.5'], - ('darwin', '10'): ['mac', '10.6'], - ('darwin', '11'): ['mac', '10.7'], - ('darwin', '12'): ['mac', '10.8'], - ('darwin', '13'): ['mac', '10.9'], -} - - -def select_binary(base_path, version, name, config=None): - """Selects a binary matching the current os and architecture. - - Raises TaskError if no binary of the given version and name could be found. 
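A usage sketch of select_binary and of the safe_args context manager defined further below (the tool name, version, paths, and arg list are hypothetical):

  # Resolves to something like <pants_bootstrapdir>/bin/protoc/linux/x86_64/2.4.1/protoc,
  # fetching from pants_support_baseurl on first use.
  protoc = select_binary('bin/protoc', '2.4.1', 'protoc')

  # safe_args keeps long command lines under the configured subprocess limit:
  with safe_args(proto_files, max_args=100) as args:
    subprocess.call([protoc] + list(args))  # args may be ['@/path/to/argfile']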
- """ - # TODO(John Sirois): finish doc of the path structure expexcted under base_path - config = config or Config.load() - bootstrap_dir = config.getdefault('pants_bootstrapdir') - baseurl = config.getdefault('pants_support_baseurl') - timeout_secs = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30) - - sysname, _, release, _, machine = os.uname() - os_id = _ID_BY_OS[sysname.lower()] - if os_id: - middle_path = _PATH_BY_ID[os_id(release, machine)] - if middle_path: - binary_path = os.path.join(base_path, *(middle_path + [version, name])) - bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path) - if not os.path.exists(bootstrapped_binary_path): - url = posixpath.join(baseurl, binary_path) - log.info('Fetching %s binary from: %s' % (name, url)) - downloadpath = bootstrapped_binary_path + '~' - try: - with closing(urllib_request.urlopen(url, timeout=timeout_secs)) as binary: - with safe_open(downloadpath, 'wb') as bootstrapped_binary: - bootstrapped_binary.write(binary.read()) - - os.rename(downloadpath, bootstrapped_binary_path) - chmod_plus_x(bootstrapped_binary_path) - except (IOError, urllib_error.HTTPError, urllib_error.URLError) as e: - raise TaskError('Failed to fetch binary from %s: %s' % (url, e)) - finally: - safe_delete(downloadpath) - log.debug('Selected %s binary bootstrapped to: %s' % (name, bootstrapped_binary_path)) - return bootstrapped_binary_path - raise TaskError('No %s binary found for: %s' % (name, (sysname, release, machine))) - - -@contextmanager -def safe_args(args, - max_args=None, - config=None, - argfile=None, - delimiter='\n', - quoter=None, - delete=True): - """ - Yields args if there are less than a limit otherwise writes args to an argfile and yields an - argument list with one argument formed from the path of the argfile. - - :args The args to work with. - :max_args The maximum number of args to let though without writing an argfile. If not specified - then the maximum will be loaded from config. - :config Used to lookup the configured maximum number of args that can be passed to a subprocess; - defaults to the default config and looks for key 'max_subprocess_args' in the DEFAULTS. - :argfile The file to write args to when there are too many; defaults to a temporary file. - :delimiter The delimiter to insert between args written to the argfile, defaults to '\n' - :quoter A function that can take the argfile path and return a single argument value; - defaults to: - lambda f: '@' + f - :delete If True deletes any arg files created upon exit from this context; defaults to True. - """ - max_args = max_args or (config or Config.load()).getdefault('max_subprocess_args', int, 10) - if len(args) > max_args: - def create_argfile(fp): - fp.write(delimiter.join(args)) - fp.close() - return [quoter(fp.name) if quoter else '@%s' % fp.name] - - if argfile: - try: - with safe_open(argfile, 'w') as fp: - yield create_argfile(fp) - finally: - if delete and os.path.exists(argfile): - os.unlink(argfile) - else: - with temporary_file(cleanup=delete) as fp: - yield create_argfile(fp) - else: - yield args - - -def _mac_open(files): - subprocess.call(['open'] + list(files)) - - -def _linux_open(files): - cmd = "xdg-open" - if not _cmd_exists(cmd): - raise TaskError("The program '%s' isn't in your PATH. Please install and re-run this " - "goal." 
% cmd) - for f in list(files): - subprocess.call([cmd, f]) - - -# From: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python -def _cmd_exists(cmd): - return subprocess.call(["/usr/bin/which", cmd], shell=False, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) == 0 - -_OPENER_BY_OS = { - 'darwin': _mac_open, - 'linux': _linux_open -} - - -def ui_open(*files): - """Attempts to open the given files using the preferred native viewer or editor.""" - if files: - osname = os.uname()[0].lower() - if not osname in _OPENER_BY_OS: - print('Sorry, open currently not supported for ' + osname) - else: - _OPENER_BY_OS[osname](files) diff --git a/src/python/twitter/pants/cache/__init__.py b/src/python/twitter/pants/cache/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/cache/artifact.py b/src/python/twitter/pants/cache/artifact.py deleted file mode 100644 index 0d52d3c19..000000000 --- a/src/python/twitter/pants/cache/artifact.py +++ /dev/null @@ -1,111 +0,0 @@ -import os -import shutil -import errno -import tarfile -from twitter.common.contextutil import open_tar -from twitter.common.dirutil import safe_mkdir_for, safe_mkdir - - -class ArtifactError(Exception): - pass - - -class Artifact(object): - """Represents a set of files in an artifact.""" - def __init__(self, artifact_root): - # All files must be under this root. - self._artifact_root = artifact_root - - # The files known to be in this artifact, relative to artifact_root. - self._relpaths = set() - - def get_paths(self): - for relpath in self._relpaths: - yield os.path.join(self._artifact_root, relpath) - - def override_paths(self, paths): # Use with care. - self._relpaths = set([os.path.relpath(path, self._artifact_root) for path in paths]) - - def collect(self, paths): - """Collect the paths (which must be under artifact root) into this artifact.""" - raise NotImplementedError() - - def extract(self): - """Extract the files in this artifact to their locations under artifact root.""" - raise NotImplementedError() - - -class DirectoryArtifact(Artifact): - """An artifact stored as loose files under a directory.""" - def __init__(self, artifact_root, directory): - Artifact.__init__(self, artifact_root) - self._directory = directory - - def collect(self, paths): - for path in paths or (): - relpath = os.path.relpath(path, self._artifact_root) - dst = os.path.join(self._directory, relpath) - safe_mkdir(os.path.dirname(dst)) - if os.path.isdir(path): - shutil.copytree(path, dst) - else: - shutil.copy(path, dst) - self._relpaths.add(relpath) - - def extract(self): - for dir_name, _, filenames in os.walk(self._directory): - for filename in filenames: - filename = os.path.join(dir_name, filename) - relpath = os.path.relpath(filename, self._directory) - dst = os.path.join(self._artifact_root, relpath) - safe_mkdir_for(dst) - shutil.copy(filename, dst) - self._relpaths.add(relpath) - - -class TarballArtifact(Artifact): - """An artifact stored in a tarball.""" - def __init__(self, artifact_root, tarfile, compress): - Artifact.__init__(self, artifact_root) - self._tarfile = tarfile - self._compress = compress - - def collect(self, paths): - # In our tests, gzip is slightly less compressive than bzip2 on .class files, - # but decompression times are much faster. - mode = 'w:gz' if self._compress else 'w' - with open_tar(self._tarfile, mode, dereference=True, errorlevel=2) as tarout: - for path in paths or (): - # Adds dirs recursively. 
- relpath = os.path.relpath(path, self._artifact_root) - tarout.add(path, relpath) - self._relpaths.add(relpath) - - def extract(self): - try: - with open_tar(self._tarfile, 'r', errorlevel=2) as tarin: - # Note: We create all needed paths proactively, even though extractall() can do this for us. - # This is because we may be called concurrently on multiple artifacts that share directories, - # and there will be a race condition inside extractall(): task T1 A) sees that a directory - # doesn't exist and B) tries to create it. But in the gap between A) and B) task T2 creates - # the same directory, so T1 throws "File exists" in B). - # This actually happened, and was very hard to debug. - # Creating the paths here up front allows us to squelch that "File exists" error. - paths = [] - dirs = set() - for tarinfo in tarin.getmembers(): - paths.append(tarinfo.name) - if tarinfo.isdir(): - dirs.add(tarinfo.name) - else: - dirs.add(os.path.dirname(tarinfo.name)) - for d in dirs: - try: - os.makedirs(os.path.join(self._artifact_root, d)) - except OSError as e: - if e.errno != errno.EEXIST: - raise - tarin.extractall(self._artifact_root) - self._relpaths.update(paths) - except tarfile.ReadError as e: - raise ArtifactError(e.message) diff --git a/src/python/twitter/pants/cache/artifact_cache.py b/src/python/twitter/pants/cache/artifact_cache.py deleted file mode 100644 index 84b116279..000000000 --- a/src/python/twitter/pants/cache/artifact_cache.py +++ /dev/null @@ -1,81 +0,0 @@ -import os - -# Note throughout the distinction between the artifact_root (which is where the artifacts are -# originally built and where the cache restores them to) and the cache root path/URL (which is -# where the artifacts are cached). - - -class ArtifactCache(object): - """A map from cache key to a set of build artifacts. - - The cache key must uniquely identify the inputs (sources, compiler flags etc.) needed to - build the artifacts. Cache keys are typically obtained from a CacheKeyGenerator. - - Subclasses implement the methods below to provide this functionality. - """ - - class CacheError(Exception): - """Indicates a problem writing to or reading from the cache.""" - pass - - def __init__(self, log, artifact_root): - """Create an ArtifactCache. - - All artifacts must be under artifact_root. - """ - self.log = log - self.artifact_root = artifact_root - - def insert(self, cache_key, paths): - """Cache the output of a build. - - If there is an existing set of artifacts for this key they are deleted. - - TODO: Check that they're equal? They might not have to be if there are multiple equivalent - outputs. - - cache_key: A CacheKey object. - paths: List of absolute paths to generated dirs/files. These must be under the artifact_root. - """ - missing_files = filter(lambda f: not os.path.exists(f), paths) - try: - if missing_files: - raise ArtifactCache.CacheError('Tried to cache nonexistent files: %s' % missing_files) - self.try_insert(cache_key, paths) - except Exception as e: - self.log.error('Error while writing to artifact cache: %s. ' % e) - - def try_insert(self, cache_key, paths): - """Attempt to cache the output of a build, without error-handling. - - cache_key: A CacheKey object. - paths: List of absolute paths to generated dirs/files. These must be under the artifact_root. - """ - pass - - def has(self, cache_key): - pass - - def use_cached_files(self, cache_key): - """Use the files cached for the given key. - - Returns an appropriate Artifact instance if files were found and used, None otherwise. 
- Callers will typically only care about the truthiness of the return value. They usually - don't need to tinker with the returned instance. - - cache_key: A CacheKey object. - """ - pass - - def delete(self, cache_key): - """Delete the artifacts for the specified key. - - Deleting non-existent artifacts is a no-op. - """ - pass - - def prune(self, age_hours): - """Clean up cache files older than age_hours, if possible.""" - pass - - diff --git a/src/python/twitter/pants/cache/cache_setup.py b/src/python/twitter/pants/cache/cache_setup.py deleted file mode 100644 index c4ca9a5d3..000000000 --- a/src/python/twitter/pants/cache/cache_setup.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import urlparse -from twitter.pants.cache.pinger import Pinger -from twitter.pants.cache.combined_artifact_cache import CombinedArtifactCache -from twitter.pants.cache.local_artifact_cache import LocalArtifactCache -from twitter.pants.cache.restful_artifact_cache import RESTfulArtifactCache - - -def select_best_url(spec, pinger, log): - urls = spec.split('|') - if len(urls) == 1: - return urls[0] # No need to ping if we only have one option anyway. - netlocs = map(lambda url: urlparse.urlparse(url)[1], urls) - pingtimes = pinger.pings(netlocs) # List of pairs (host, time in ms). - log.debug('Artifact cache server ping times: %s' % - ', '.join(['%s: %3f secs' % p for p in pingtimes])) - argmin = min(xrange(len(pingtimes)), key=lambda i: pingtimes[i][1]) - best_url = urls[argmin] - if pingtimes[argmin][1] == Pinger.UNREACHABLE: - return None # No reachable artifact caches. - log.debug('Best artifact cache is %s' % best_url) - return best_url - - -def create_artifact_cache(log, artifact_root, spec, task_name, action='using'): - """Returns an artifact cache for the specified spec. - - spec can be: - - a path to a file-based cache root. - - a URL of a RESTful cache root. - - a bar-separated list of URLs, where we'll pick the one with the best ping times. - - A list of the above, for a combined cache. - """ - if not spec: - raise ValueError('Empty artifact cache spec') - if isinstance(spec, basestring): - if spec.startswith('/') or spec.startswith('~'): - path = os.path.join(spec, task_name) - log.info('%s %s local artifact cache at %s' % (task_name, action, path)) - return LocalArtifactCache(log, artifact_root, path) - elif spec.startswith('http://') or spec.startswith('https://'): - # Caches are supposed to be close, and we don't want to waste time pinging on no-op builds. - # So we ping twice with a short timeout. - pinger = Pinger(timeout=0.5, tries=2) - best_url = select_best_url(spec, pinger, log) - if best_url: - url = best_url.rstrip('/') + '/' + task_name - log.info('%s %s remote artifact cache at %s' % (task_name, action, url)) - return RESTfulArtifactCache(log, artifact_root, url) - else: - log.warn('%s has no reachable artifact cache in %s.' 
% (task_name, spec)) - return None - else: - raise ValueError('Invalid artifact cache spec: %s' % spec) - elif isinstance(spec, (list, tuple)): - caches = filter(None, [ create_artifact_cache(log, artifact_root, x, task_name, action) for x in spec ]) - return CombinedArtifactCache(caches) if caches else None diff --git a/src/python/twitter/pants/cache/combined_artifact_cache.py b/src/python/twitter/pants/cache/combined_artifact_cache.py deleted file mode 100644 index 1a741d2a9..000000000 --- a/src/python/twitter/pants/cache/combined_artifact_cache.py +++ /dev/null @@ -1,50 +0,0 @@ -from twitter.pants.cache.artifact_cache import ArtifactCache - - -class CombinedArtifactCache(ArtifactCache): - """An artifact cache that delegates to a list of other caches.""" - def __init__(self, artifact_caches, backfill=True): - """We delegate to artifact_caches, a list of ArtifactCache instances, in order. - - If backfill is true then we populate earlier caches that were missing an artifact, - if that artifact was found in a later cache. This is useful for priming a local cache - from a remote one. - """ - if not artifact_caches: - raise ValueError('Must provide at least one underlying artifact cache') - log = artifact_caches[0].log - artifact_root = artifact_caches[0].artifact_root - if any(x.artifact_root != artifact_root for x in artifact_caches): - raise ValueError('Combined artifact caches must all have the same artifact root.') - ArtifactCache.__init__(self, log, artifact_root) - self._artifact_caches = artifact_caches - self._backfill = backfill - - def insert(self, cache_key, paths): - for cache in self._artifact_caches: # Insert into all. - cache.insert(cache_key, paths) - - def has(self, cache_key): - return any(cache.has(cache_key) for cache in self._artifact_caches) - - def use_cached_files(self, cache_key): - to_backfill = [] - for cache in self._artifact_caches: - artifact = cache.use_cached_files(cache_key) - if not artifact: - if self._backfill: - to_backfill.append(cache) - else: - paths = list(artifact.get_paths()) - for cache in to_backfill: - cache.insert(cache_key, paths) - return artifact - return None - - def delete(self, cache_key): - for cache in self._artifact_caches: # Delete from all. - cache.delete(cache_key) - - def prune(self, age_hours): - for cache in self._artifact_caches: - cache.prune(age_hours) diff --git a/src/python/twitter/pants/cache/local_artifact_cache.py b/src/python/twitter/pants/cache/local_artifact_cache.py deleted file mode 100644 index 97a82800b..000000000 --- a/src/python/twitter/pants/cache/local_artifact_cache.py +++ /dev/null @@ -1,75 +0,0 @@ -import os -import shutil -import uuid - -from twitter.common.dirutil import safe_mkdir, safe_mkdir_for, safe_delete -from twitter.pants.cache.artifact import TarballArtifact, ArtifactError -from twitter.pants.cache.artifact_cache import ArtifactCache - - -class LocalArtifactCache(ArtifactCache): - """An artifact cache that stores the artifacts in local files.""" - def __init__(self, log, artifact_root, cache_root, compress=True, copy_fn=None): - """ - cache_root: The locally cached files are stored under this directory. - copy_fn: An optional function with the signature copy_fn(absolute_src_path, relative_dst_path) that - will copy cached files into the desired destination. If unspecified, a simple file copy is used. 
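Pulling the cache pieces together, a sketch of building a cache from a spec and doing one insert/restore round-trip (the log, artifact_root, paths, URLs, and cache_key are hypothetical):

  cache = create_artifact_cache(log, artifact_root, '~/.pants.d/artifact_cache', 'scalac')
  # Other spec forms: 'http://a.example|http://b.example' (fastest ping wins)
  # or ['~/.pants.d/artifact_cache', 'http://a.example'] (a CombinedArtifactCache).
  cache.insert(cache_key, [os.path.join(artifact_root, 'classes')])
  if cache.use_cached_files(cache_key):  # truthy when the artifact was restored
    print('cache hit for %s' % cache_key.id)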
- """ - ArtifactCache.__init__(self, log, artifact_root) - self._cache_root = os.path.expanduser(cache_root) - self._compress = compress - - def copy(src, rel_dst): - dst = os.path.join(self.artifact_root, rel_dst) - safe_mkdir_for(dst) - shutil.copy(src, dst) - - self._copy_fn = copy_fn or copy - safe_mkdir(self._cache_root) - - def try_insert(self, cache_key, paths): - tarfile = self._cache_file_for_key(cache_key) - safe_mkdir_for(tarfile) - # Write to a temporary name (on the same filesystem), and move it atomically, so if we - # crash in the middle we don't leave an incomplete or missing artifact. - tarfile_tmp = tarfile + '.' + str(uuid.uuid4()) + '.tmp' - if os.path.exists(tarfile_tmp): - os.unlink(tarfile_tmp) - - artifact = TarballArtifact(self.artifact_root, tarfile_tmp, self._compress) - artifact.collect(paths) - # Note: Race condition here if multiple pants runs (in different workspaces) - # try to write the same thing at the same time. However since rename is atomic, - # this should not result in corruption. It may however result in a missing artifact - # If we crash between the unlink and the rename. But that's OK. - if os.path.exists(tarfile): - os.unlink(tarfile) - os.rename(tarfile_tmp, tarfile) - - def has(self, cache_key): - return os.path.isfile(self._cache_file_for_key(cache_key)) - - def use_cached_files(self, cache_key): - try: - tarfile = self._cache_file_for_key(cache_key) - if os.path.exists(tarfile): - artifact = TarballArtifact(self.artifact_root, tarfile, self._compress) - artifact.extract() - return artifact - else: - return None - except Exception as e: - self.log.warn('Error while reading from local artifact cache: %s' % e) - return None - - def delete(self, cache_key): - safe_delete(self._cache_file_for_key(cache_key)) - - def prune(self, age_hours): - pass - - def _cache_file_for_key(self, cache_key): - # Note: it's important to use the id as well as the hash, because two different targets - # may have the same hash if both have no sources, but we may still want to differentiate them. - return os.path.join(self._cache_root, cache_key.id, cache_key.hash) + \ - '.tar.gz' if self._compress else '.tar' diff --git a/src/python/twitter/pants/cache/pinger.py b/src/python/twitter/pants/cache/pinger.py deleted file mode 100644 index 5aed0ddb3..000000000 --- a/src/python/twitter/pants/cache/pinger.py +++ /dev/null @@ -1,49 +0,0 @@ -import httplib -from multiprocessing.pool import ThreadPool -import socket -from twitter.common.contextutil import Timer - - -_global_pinger_memo = {} # netloc -> rt time in secs. - -class Pinger(object): - # Signifies that a netloc is unreachable. - UNREACHABLE = 999999 - - def __init__(self, timeout, tries): - """Try pinging the given number of times, each with the given timeout.""" - self._timeout = timeout - self._tries = tries - - def ping(self, netloc): - """Time a single roundtrip to the netloc. - - Note that we don't use actual ICMP pings, because cmd-line ping is - inflexible and platform-dependent, so shelling out to it is annoying, - and the ICMP python lib can only be called by the superuser. - """ - if netloc in _global_pinger_memo: - return _global_pinger_memo[netloc] - - host, colon, portstr = netloc.partition(':') - port = int(portstr) if portstr else None - rt_secs = Pinger.UNREACHABLE - for _ in xrange(self._tries): - try: - with Timer() as timer: - conn = httplib.HTTPConnection(host, port, timeout=self._timeout) - conn.request('HEAD', '/') # Doesn't actually matter if this exists. 
- conn.getresponse() - new_rt_secs = timer.elapsed - except Exception: - new_rt_secs = Pinger.UNREACHABLE - rt_secs = min(rt_secs, new_rt_secs) - _global_pinger_memo[netloc] = rt_secs - return rt_secs - - def pings(self, netlocs): - pool = ThreadPool(processes=len(netlocs)) - rt_secs = pool.map(self.ping, netlocs, chunksize=1) - pool.close() - pool.join() - return zip(netlocs, rt_secs) diff --git a/src/python/twitter/pants/cache/read_write_artifact_cache.py b/src/python/twitter/pants/cache/read_write_artifact_cache.py deleted file mode 100644 index 0ec424a44..000000000 --- a/src/python/twitter/pants/cache/read_write_artifact_cache.py +++ /dev/null @@ -1,56 +0,0 @@ -from twitter.pants.cache.artifact_cache import ArtifactCache - - -class ReadWriteArtifactCache(ArtifactCache): - """An artifact cache that delegates to one cache for reading and another for writing. - - The name is slightly misleading: all caches are read-write. But I couldn't think - of a better one. - """ - def __init__(self, read_artifact_cache, write_artifact_cache): - """Either cache can be None, in which case we don't read from/write to it.""" - artifact_roots = [] - logs = [] - def get_root_and_log(cache): - if cache is not None: - artifact_roots.append(cache.artifact_root) - logs.append(cache.log) - get_root_and_log(read_artifact_cache) - get_root_and_log(write_artifact_cache) - if len(artifact_roots) == 0: - # Parent will never be accessed, so this is OK. In fact, it's a good way to ensure it. - artifact_root = None - log = None - else: - artifact_root = artifact_roots[0] - log = logs[0] - if len(artifact_roots) > 1 and artifact_roots[1] != artifact_root: - raise ValueError('Read and write artifact caches must have the same artifact root.') - ArtifactCache.__init__(self, log, artifact_root) - self._read_artifact_cache = read_artifact_cache - self._write_artifact_cache = write_artifact_cache - - def insert(self, cache_key, paths): - if self._write_artifact_cache: - self._write_artifact_cache.insert(cache_key, paths) - - def has(self, cache_key): - if self._read_artifact_cache: - return self._read_artifact_cache.has(cache_key) - else: - return False - - def use_cached_files(self, cache_key): - if self._read_artifact_cache: - return self._read_artifact_cache.use_cached_files(cache_key) - else: - return None - - def delete(self, cache_key): - if self._write_artifact_cache: - self._write_artifact_cache.delete(cache_key) - - def prune(self, age_hours): - if self._write_artifact_cache: - self._write_artifact_cache.prune(age_hours) - diff --git a/src/python/twitter/pants/cache/restful_artifact_cache.py b/src/python/twitter/pants/cache/restful_artifact_cache.py deleted file mode 100644 index a0cf08168..000000000 --- a/src/python/twitter/pants/cache/restful_artifact_cache.py +++ /dev/null @@ -1,115 +0,0 @@ -import httplib -import urlparse -from twitter.common.contextutil import temporary_file_path, temporary_file -from twitter.common.quantity import Amount, Data -from twitter.pants.cache.artifact import TarballArtifact -from twitter.pants.cache.artifact_cache import ArtifactCache - - -class RESTfulArtifactCache(ArtifactCache): - """An artifact cache that stores the artifacts on a RESTful service.""" - - READ_SIZE = int(Amount(4, Data.MB).as_(Data.BYTES)) - - def __init__(self, log, artifact_root, url_base, compress=True): - """ - url_base: The prefix for urls on some RESTful service. We must be able to PUT and GET to any - path under this base. - compress: Whether to compress the artifacts before storing them. 
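A sketch of the Pinger that backs select_best_url above (the hosts are hypothetical):

  pinger = Pinger(timeout=0.5, tries=2)
  for netloc, secs in pinger.pings(['cache1.example.com:80', 'cache2.example.com:80']):
    reachable = secs != Pinger.UNREACHABLE
    print('%s: %s' % (netloc, '%.3fs' % secs if reachable else 'unreachable'))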
- """ - ArtifactCache.__init__(self, log, artifact_root) - parsed_url = urlparse.urlparse(url_base) - if parsed_url.scheme == 'http': - self._ssl = False - elif parsed_url.scheme == 'https': - self._ssl = True - else: - raise ValueError('RESTfulArtifactCache only supports HTTP and HTTPS') - self._timeout_secs = 4.0 - self._netloc = parsed_url.netloc - self._path_prefix = parsed_url.path.rstrip('/') - self.compress = compress - - def try_insert(self, cache_key, paths): - with temporary_file_path() as tarfile: - artifact = TarballArtifact(self.artifact_root, tarfile, self.compress) - artifact.collect(paths) - - with open(tarfile, 'rb') as infile: - remote_path = self._remote_path_for_key(cache_key) - if not self._request('PUT', remote_path, body=infile): - raise self.CacheError('Failed to PUT to %s. Error: 404' % self._url_string(remote_path)) - - def has(self, cache_key): - return self._request('HEAD', self._remote_path_for_key(cache_key)) is not None - - def use_cached_files(self, cache_key): - # This implementation fetches the appropriate tarball and extracts it. - remote_path = self._remote_path_for_key(cache_key) - try: - # Send an HTTP request for the tarball. - response = self._request('GET', remote_path) - if response is None: - return None - - done = False - with temporary_file() as outfile: - total_bytes = 0 - # Read the data in a loop. - while not done: - data = response.read(self.READ_SIZE) - outfile.write(data) - if len(data) < self.READ_SIZE: - done = True - total_bytes += len(data) - outfile.close() - self.log.debug('Read %d bytes from artifact cache at %s' % - (total_bytes,self._url_string(remote_path))) - - # Extract the tarfile. - artifact = TarballArtifact(self.artifact_root, outfile.name, self.compress) - artifact.extract() - return artifact - except Exception as e: - self.log.warn('Error while reading from remote artifact cache: %s' % e) - return None - - def delete(self, cache_key): - remote_path = self._remote_path_for_key(cache_key) - self._request('DELETE', remote_path) - - def prune(self, age_hours): - # Doesn't make sense for a client to prune a remote server. - # Better to run tmpwatch on the server. - pass - - def _remote_path_for_key(self, cache_key): - # Note: it's important to use the id as well as the hash, because two different targets - # may have the same hash if both have no sources, but we may still want to differentiate them. - return '%s/%s/%s%s' % (self._path_prefix, cache_key.id, cache_key.hash, - '.tar.gz' if self.compress else '.tar') - - def _connect(self): - if self._ssl: - return httplib.HTTPSConnection(self._netloc, timeout=self._timeout_secs) - else: - return httplib.HTTPConnection(self._netloc, timeout=self._timeout_secs) - - # Returns a response if we get a 200, None if we get a 404 and raises an exception otherwise. - def _request(self, method, path, body=None): - self.log.debug('Sending %s request to %s' % (method, self._url_string(path))) - # TODO(benjy): Keep connection open and reuse? - conn = self._connect() - conn.request(method, path, body=body) - response = conn.getresponse() - # Allow all 2XX responses. E.g., nginx returns 201 on PUT. HEAD may return 204. - if int(response.status / 100) == 2: - return response - elif response.status == 404: - return None - else: - raise self.CacheError('Failed to %s %s. 
Error: %d %s' % (method, self._url_string(path), - response.status, response.reason)) - - def _url_string(self, path): - return '%s://%s%s' % (('https' if self._ssl else 'http'), self._netloc, path) diff --git a/src/python/twitter/pants/commands/__init__.py b/src/python/twitter/pants/commands/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/commands/build.py b/src/python/twitter/pants/commands/build.py deleted file mode 100644 index ed86d91d4..000000000 --- a/src/python/twitter/pants/commands/build.py +++ /dev/null @@ -1,127 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import sys -import traceback - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.address import Address -from twitter.pants.base.config import Config -from twitter.pants.base.target import Target -from twitter.pants.commands.command import Command -from twitter.pants.python.interpreter_cache import PythonInterpreterCache -from twitter.pants.python.python_builder import PythonBuilder - - -class Build(Command): - """Builds a specified target.""" - - __command__ = 'build' - - def setup_parser(self, parser, args): - parser.set_usage("\n" - " %prog build (options) [spec] (build args)\n" - " %prog build (options) [spec]... -- (build args)") - parser.add_option("-t", "--timeout", dest="conn_timeout", type="int", - default=Config.load().getdefault('connection_timeout'), - help="Number of seconds to wait for http connections.") - parser.add_option('-i', '--interpreter', dest='interpreter', default=None, - help='The interpreter requirement for this chroot.') - parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', - help='Show verbose output.') - parser.disable_interspersed_args() - parser.epilog = ('Builds the specified Python target(s). 
Use ./pants goal for JVM and other ' - 'targets.') - - def __init__(self, run_tracker, root_dir, parser, argv): - Command.__init__(self, run_tracker, root_dir, parser, argv) - - if not self.args: - self.error("A spec argument is required") - - self.config = Config.load() - self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug) - self.interpreter_cache.setup() - interpreters = self.interpreter_cache.select_interpreter( - list(self.interpreter_cache.matches([self.options.interpreter] - if self.options.interpreter else ['']))) - if len(interpreters) != 1: - self.error('Unable to detect suitable interpreter.') - else: - self.debug('Selected %s' % interpreters[0]) - self.interpreter = interpreters[0] - - try: - specs_end = self.args.index('--') - if len(self.args) > specs_end: - self.build_args = self.args[specs_end+1:len(self.args)+1] - else: - self.build_args = [] - except ValueError: - specs_end = 1 - self.build_args = self.args[1:] if len(self.args) > 1 else [] - - self.targets = OrderedSet() - for spec in self.args[0:specs_end]: - try: - address = Address.parse(root_dir, spec) - except: - self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc())) - - try: - target = Target.get(address) - except: - self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc())) - - if not target: - self.error("Target %s does not exist" % address) - self.targets.update(tgt for tgt in target.resolve() if tgt.is_concrete) - - def debug(self, message): - if self.options.verbose: - print(message, file=sys.stderr) - - def execute(self): - print("Build operating on targets: %s" % self.targets) - - python_targets = OrderedSet() - for target in self.targets: - if target.is_python: - python_targets.add(target) - else: - self.error("Cannot build target %s" % target) - - if python_targets: - status = self._python_build(python_targets) - else: - status = -1 - - return status - - def _python_build(self, targets): - try: - executor = PythonBuilder(self.run_tracker, self.root_dir) - return executor.build( - targets, - self.build_args, - interpreter=self.interpreter, - conn_timeout=self.options.conn_timeout) - except: - self.error("Problem executing PythonBuilder for targets %s: %s" % (targets, - traceback.format_exc())) diff --git a/src/python/twitter/pants/commands/command.py b/src/python/twitter/pants/commands/command.py deleted file mode 100644 index a743f98e6..000000000 --- a/src/python/twitter/pants/commands/command.py +++ /dev/null @@ -1,108 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
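The spec/build-arg split that Build.__init__ performs above reduces to this (values are hypothetical):

  args = ['src/python/mylib:bin', '--', '--some-build-flag']
  specs_end = args.index('--')  # raises ValueError when '--' is absent,
                                # in which case Build treats only args[0] as a spec
  specs, build_args = args[:specs_end], args[specs_end + 1:]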
-# ================================================================================================== - -from __future__ import print_function - -from twitter.common.collections import OrderedSet -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base.target import Target - - -class Command(object): - """Baseclass for all pants subcommands.""" - - @staticmethod - def get_command(name): - return Command._commands.get(name, None) - - @staticmethod - def all_commands(): - return Command._commands.keys() - - _commands = {} - - @classmethod - def _register(cls): - """Register a command class.""" - command_name = cls.__dict__.get('__command__', None) - if command_name: - Command._commands[command_name] = cls - - @staticmethod - def scan_addresses(root_dir, base_path=None): - """Parses all targets available in BUILD files under base_path and - returns their addresses. If no base_path is specified, root_dir is - assumed to be the base_path""" - - addresses = OrderedSet() - for buildfile in BuildFile.scan_buildfiles(root_dir, base_path): - addresses.update(Target.get_all_addresses(buildfile)) - return addresses - - @classmethod - def serialized(cls): - return False - - def __init__(self, run_tracker, root_dir, parser, args): - """run_tracker: The (already opened) RunTracker to track this run with - root_dir: The root directory of the pants workspace - parser: an OptionParser - args: the subcommand arguments to parse""" - self.run_tracker = run_tracker - self.root_dir = root_dir - - # Override the OptionParser's error with more useful output - def error(message=None, show_help=True): - if message: - print(message + '\n') - if show_help: - parser.print_help() - parser.exit(status=1) - parser.error = error - self.error = error - - self.setup_parser(parser, args) - self.options, self.args = parser.parse_args(args) - self.parser = parser - - def setup_parser(self, parser, args): - """Subclasses should override and configure the OptionParser to reflect - the subcommand option and argument requirements. Upon successful - construction, subcommands will be able to access self.options and - self.args.""" - - pass - - def error(self, message=None, show_help=True): - """Reports the error message, optionally followed by pants help, and then exits.""" - - def run(self, lock): - """Subcommands that are serialized() should override if they need the ability to interact with - the global command lock. - The value returned should be an int, 0 indicating success and any other value indicating - failure.""" - return self.execute() - - def execute(self): - """Subcommands that do not require serialization should override to perform the command action. - The value returned should be an int, 0 indicating success and any other value indicating - failure.""" - raise NotImplementedError('Either run(lock) or execute() must be overridden.') - - def cleanup(self): - """Called on SIGINT (e.g., when the user hits ctrl-c). - Subcommands may override to perform cleanup before exit.""" - pass diff --git a/src/python/twitter/pants/commands/goal.py b/src/python/twitter/pants/commands/goal.py deleted file mode 100644 index 13e6ab22a..000000000 --- a/src/python/twitter/pants/commands/goal.py +++ /dev/null @@ -1,1039 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc.
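The class-level _commands dict above is the entire command-dispatch mechanism: each subclass declares a __command__ name, _register() files it away, and get_command() resolves the name back to a class (see register.py and the Help command later in this diff). A condensed, runnable sketch of the pattern (Registry and NoopCommand are illustrative names):

class Registry(object):
  _commands = {}

  @classmethod
  def _register(cls):
    # cls.__dict__ (not getattr) so subclasses don't inherit registration.
    name = cls.__dict__.get('__command__', None)
    if name:
      Registry._commands[name] = cls

  @staticmethod
  def get_command(name):
    return Registry._commands.get(name, None)

class NoopCommand(Registry):
  __command__ = 'noop'

NoopCommand._register()
assert Registry.get_command('noop') is NoopCommand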
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import inspect -import multiprocessing -import os -import re -import sys -import signal -import socket -import time -import traceback - -from contextlib import contextmanager -from optparse import Option, OptionParser - -from twitter.common import log -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_rmtree, safe_mkdir -from twitter.common.lang import Compatibility -from twitter.common.log.options import LogOptions - -from twitter.pants import binary_util -from twitter.pants.base.address import Address -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base.config import Config -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.rcfile import RcFile -from twitter.pants.base.run_info import RunInfo -from twitter.pants.base.target import Target, TargetDefinitionException -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.commands.command import Command -from twitter.pants.engine.engine import Engine -from twitter.pants.engine.group_engine import GroupEngine -from twitter.pants.goal import Context, GoalError, Phase -from twitter.pants.goal import Goal as goal, Group as group -from twitter.pants.goal.initialize_reporting import update_reporting -from twitter.pants.reporting.reporting_server import ReportingServer, ReportingServerManager -from twitter.pants.tasks import Task, TaskError -from twitter.pants.tasks.console_task import ConsoleTask -from twitter.pants.tasks.list_goals import ListGoals -from twitter.pants.tasks.targets_help import TargetsHelp - -try: - import colors -except ImportError: - turn_off_colored_logging = True -else: - turn_off_colored_logging = False - -StringIO = Compatibility.StringIO - - -def _list_goals(context, message): - """Show all installed goals.""" - context.log.error(message) - # Execute as if the user had run "./pants goals". 
- return Goal.execute(context, 'goals') - - -goal(name='goals', action=ListGoals).install().with_description('List all documented goals.') - - -goal(name='targets', action=TargetsHelp).install().with_description('List all target types.') - - -class Help(Task): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - default = None - if len(args) > 1 and (not args[1].startswith('-')): - default = args[1] - del args[1] - option_group.add_option(mkflag("goal"), dest="help_goal", default=default) - - def execute(self, targets): - goal = self.context.options.help_goal - if goal is None: - return self.list_goals('You must supply a goal name to provide help for.') - phase = Phase(goal) - if not phase.goals(): - return self.list_goals('Goal %s is unknown.' % goal) - - parser = OptionParser() - parser.set_usage('%s goal %s ([target]...)' % (sys.argv[0], goal)) - parser.epilog = phase.description - Goal.add_global_options(parser) - Phase.setup_parser(parser, [], [phase]) - parser.parse_args(['--help']) - - def list_goals(self, message): - return _list_goals(self.context, message) - -goal(name='help', action=Help).install().with_description('Provide help for the specified goal.') - - -def _set_bool(option, opt_str, value, parser): - setattr(parser.values, option.dest, not opt_str.startswith("--no")) - - -class SpecParser(object): - """Parses goal target specs; either simple target addresses or else sibling (:) or descendant - (::) selector forms - """ - - def __init__(self, root_dir): - self._root_dir = root_dir - - def _get_dir(self, spec): - path = spec.split(':', 1)[0] - if os.path.isdir(path): - return path - else: - if os.path.isfile(path): - return os.path.dirname(path) - else: - return spec - - def _parse_addresses(self, spec): - if spec.endswith('::'): - dir = self._get_dir(spec[:-len('::')]) - for buildfile in BuildFile.scan_buildfiles(self._root_dir, os.path.join(self._root_dir, dir)): - for address in Target.get_all_addresses(buildfile): - yield address - elif spec.endswith(':'): - dir = self._get_dir(spec[:-len(':')]) - for address in Target.get_all_addresses(BuildFile(self._root_dir, dir)): - yield address - else: - yield Address.parse(self._root_dir, spec) - - def parse(self, spec): - """Parses the given target spec into one or more targets. - - Returns a generator of target, address pairs in which the target may be None if the address - points to a non-existent target. - """ - for address in self._parse_addresses(spec): - target = Target.get(address) - yield target, address - - -class Goal(Command): - """Lists installed goals or else executes a named goal.""" - - __command__ = 'goal' - - GLOBAL_OPTIONS = [ - Option("-t", "--timeout", dest="conn_timeout", type='int', - default=Config.load().getdefault('connection_timeout'), - help="Number of seconds to wait for http connections."), - Option("-x", "--time", action="store_true", dest="time", default=False, - help="Times goal phases and outputs a report."), - Option("-e", "--explain", action="store_true", dest="explain", default=False, - help="Explain the execution of goals."), - Option("-k", "--kill-nailguns", action="store_true", dest="cleanup_nailguns", default=False, - help="Kill nailguns before exiting"), - Option("-d", "--logdir", dest="logdir", - help="[%default] Forks logs to files under this directory."), - Option("-l", "--level", dest="log_level", type="choice", choices=['debug', 'info', 'warn'], - help="[info] Sets the logging level to one of 'debug', 'info' or 'warn'." 
- "if set."), - Option("-q", "--quiet", action="store_true", dest="quiet", default=False, - help="Squelches all console output apart from errors."), - Option("--no-colors", dest="no_color", action="store_true", default=turn_off_colored_logging, - help="Do not colorize log messages."), - Option("-n", "--dry-run", action="store_true", dest="dry_run", default=False, - help="Print the commands that would be run, without actually running them."), - - Option("--read-from-artifact-cache", "--no-read-from-artifact-cache", action="callback", - callback=_set_bool, dest="read_from_artifact_cache", default=True, - help="Whether to read artifacts from cache instead of building them, if configured to do so."), - Option("--write-to-artifact-cache", "--no-write-to-artifact-cache", action="callback", - callback=_set_bool, dest="write_to_artifact_cache", default=True, - help="Whether to write artifacts to cache if configured to do so."), - - # NONE OF THE ARTIFACT CACHE FLAGS BELOW DO ANYTHING ANY MORE. - # TODO: Remove them once all uses of them are killed. - Option("--verify-artifact-cache", "--no-verify-artifact-cache", action="callback", - callback=_set_bool, dest="verify_artifact_cache", default=False, - help="Whether to verify that cached artifacts are identical after rebuilding them."), - - Option("--local-artifact-cache-readonly", "--no-local-artifact-cache-readonly", action="callback", - callback=_set_bool, dest="local_artifact_cache_readonly", default=False, - help="If set, we don't write to local artifact caches, even when writes are enabled."), - # Note that remote writes are disabled by default, so you have control over who's populating - # the shared cache. - Option("--remote-artifact-cache-readonly", "--no-remote-artifact-cache-readonly", action="callback", - callback=_set_bool, dest="remote_artifact_cache_readonly", default=True, - help="If set, we don't write to remote artifact caches, even when writes are enabled."), - - Option("--all", dest="target_directory", action="append", - help="DEPRECATED: Use [dir]: with no flag in a normal target position on the command " - "line. (Adds all targets found in the given directory's BUILD file. Can be " - "specified more than once.)"), - Option("--all-recursive", dest="recursive_directory", action="append", - help="DEPRECATED: Use [dir]:: with no flag in a normal target position on the command " - "line. (Adds all targets found recursively under the given directory. Can be " - "specified more than once to add more than one root target directory to scan.)"), - ] - - output = None - - @staticmethod - def add_global_options(parser): - for option in Goal.GLOBAL_OPTIONS: - parser.add_option(option) - - @staticmethod - def parse_args(args): - goals = OrderedSet() - specs = OrderedSet() - help = False - explicit_multi = False - - def is_spec(spec): - return os.sep in spec or ':' in spec - - for i, arg in enumerate(args): - help = help or 'help' == arg - if not arg.startswith('-'): - specs.add(arg) if is_spec(arg) else goals.add(arg) - elif '--' == arg: - if specs: - raise GoalError('Cannot intermix targets with goals when using --. 
Targets should ' - 'appear on the right') - explicit_multi = True - del args[i] - break - - if explicit_multi: - spec_offset = len(goals) + 1 if help else len(goals) - specs.update(arg for arg in args[spec_offset:] if not arg.startswith('-')) - - return goals, specs - - @classmethod - def execute(cls, context, *names): - parser = OptionParser() - cls.add_global_options(parser) - phases = [Phase(name) for name in names] - Phase.setup_parser(parser, [], phases) - options, _ = parser.parse_args([]) - context = Context(context.config, options, context.run_tracker, context.target_roots, - requested_goals=list(names)) - return cls._execute(context, phases, print_timing=False) - - @staticmethod - def _execute(context, phases, print_timing): - engine = GroupEngine(print_timing=print_timing) - return engine.execute(context, phases) - - # TODO(John Sirois): revisit wholesale locking when we move py support into pants new - @classmethod - def serialized(cls): - # Goal serialization is now handled in goal execution during group processing. - # The goal command doesn't need to hold the serialization lock; individual goals will - # acquire the lock if they need to be serialized. - return False - - def __init__(self, run_tracker, root_dir, parser, args): - self.targets = [] - Command.__init__(self, run_tracker, root_dir, parser, args) - - @contextmanager - def check_errors(self, banner): - errors = {} - def error(key, include_traceback=False): - exc_type, exc_value, _ = sys.exc_info() - msg = StringIO() - if include_traceback: - frame = inspect.trace()[-2] - filename = frame[1] - lineno = frame[2] - funcname = frame[3] - code = ''.join(frame[4]) if frame[4] else None - traceback.print_list([(filename, lineno, funcname, code)], file=msg) - if exc_type: - msg.write(''.join(traceback.format_exception_only(exc_type, exc_value))) - errors[key] = msg.getvalue() - sys.exc_clear() - - yield error - - if errors: - msg = StringIO() - msg.write(banner) - invalid_keys = [key for key, exc in errors.items() if not exc] - if invalid_keys: - msg.write('\n %s' % '\n '.join(invalid_keys)) - for key, exc in errors.items(): - if exc: - msg.write('\n %s =>\n %s' % (key, '\n '.join(exc.splitlines()))) - # The help message for goal is extremely verbose, and will obscure the - # actual error message, so we don't show it in this case. - self.error(msg.getvalue(), show_help=False) - - def setup_parser(self, parser, args): - self.config = Config.load() - Goal.add_global_options(parser) - - # We support attempting zero or more goals. Multiple goals must be delimited from further - # options and non goal args with a '--'. The key permutations we need to support: - # ./pants goal => goals - # ./pants goal goals => goals - # ./pants goal compile src/java/... => compile - # ./pants goal compile -x src/java/... => compile - # ./pants goal compile src/java/... -x => compile - # ./pants goal compile run -- src/java/... => compile, run - # ./pants goal compile run -- src/java/... -x => compile, run - # ./pants goal compile run -- -x src/java/... 
=> compile, run - - if not args: - args.append('goals') - - if len(args) == 1 and args[0] in set(['-h', '--help', 'help']): - def format_usage(usages): - left_colwidth = 0 - for left, right in usages: - left_colwidth = max(left_colwidth, len(left)) - lines = [] - for left, right in usages: - lines.append(' %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right)) - return '\n'.join(lines) - - usages = [ - ("%prog goal goals ([spec]...)", Phase('goals').description), - ("%prog goal help [goal] ([spec]...)", Phase('help').description), - ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."), - ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."), - ] - parser.set_usage("\n%s" % format_usage(usages)) - parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else " - "attempts to achieve the specified goal for the listed targets." """ - Note that target specs accept two special forms: - [dir]: to include all targets in the specified directory - [dir]:: to include all targets found in all BUILD files recursively under - the directory""") - - parser.print_help() - sys.exit(0) - else: - goals, specs = Goal.parse_args(args) - self.requested_goals = goals - - with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]): - # Bootstrap goals by loading any configured bootstrap BUILD files - with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error: - with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]): - for path in self.config.getlist('goals', 'bootstrap_buildfiles', default = []): - try: - buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot())) - ParseContext(buildfile).parse() - except (TypeError, ImportError, TaskError, GoalError): - error(path, include_traceback=True) - except (IOError, SyntaxError): - error(path) - # Now that we've parsed the bootstrap BUILD files, and know about the SCM system. - self.run_tracker.run_info.add_scm_info() - - # Bootstrap user goals by loading any BUILD files implied by targets. - spec_parser = SpecParser(self.root_dir) - with self.check_errors('The following targets could not be loaded:') as error: - with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]): - for spec in specs: - try: - for target, address in spec_parser.parse(spec): - if target: - self.targets.append(target) - # Force early BUILD file loading if this target is an alias that expands - # to others. - unused = list(target.resolve()) - else: - siblings = Target.get_all_addresses(address.buildfile) - prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these' - error('%s => %s?:\n %s' % (address, prompt, - '\n '.join(str(a) for a in siblings))) - except (TypeError, ImportError, TaskError, GoalError): - error(spec, include_traceback=True) - except (IOError, SyntaxError, TargetDefinitionException): - error(spec) - - self.phases = [Phase(goal) for goal in goals] - - rcfiles = self.config.getdefault('rcfiles', type=list, - default=['/etc/pantsrc', '~/.pants.rc']) - if rcfiles: - rcfile = RcFile(rcfiles, default_prepend=False, process_default=True) - - # Break down the goals specified on the command line to the full set that will be run so we - # can apply default flags to inner goal nodes. Also break down goals by Task subclass and - # register the task class hierarchy fully qualified names so we can apply defaults to - # baseclasses. 
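All of the permutations sketched above reduce to the single classification rule in parse_args: a bare word is a target spec if it contains a path separator or a colon, and a goal name otherwise. A stdlib-only illustration of that heuristic:

import os

def is_spec(word):
  # Same test as the is_spec() helper in Goal.parse_args: a path
  # separator or colon marks a target spec; anything else is a goal name.
  return os.sep in word or ':' in word

assert is_spec('src/java/com/twitter/common::')
assert is_spec('tests/python/twitter/pants:all')
assert not is_spec('compile')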
- - sections = OrderedSet() - for phase in Engine.execution_order(self.phases): - for goal in phase.goals(): - sections.add(goal.name) - for clazz in goal.task_type.mro(): - if clazz == Task: - break - sections.add('%s.%s' % (clazz.__module__, clazz.__name__)) - - augmented_args = rcfile.apply_defaults(sections, args) - if augmented_args != args: - del args[:] - args.extend(augmented_args) - sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args)) - - Phase.setup_parser(parser, args, self.phases) - - def run(self, lock): - # TODO(John Sirois): Consider moving to straight python logging. The divide between the - # context/work-unit logging and standard python logging doesn't buy us anything. - - # Enable standard python logging for code with no handle to a context/work-unit. - if self.options.log_level: - LogOptions.set_stderr_log_level((self.options.log_level or 'info').upper()) - logdir = self.options.logdir or self.config.get('goals', 'logdir', default=None) - if logdir: - safe_mkdir(logdir) - LogOptions.set_log_dir(logdir) - log.init('goals') - else: - log.init() - - # Update the reporting settings, now that we have flags etc. - def is_console_task(): - for phase in self.phases: - for goal in phase.goals(): - if issubclass(goal.task_type, ConsoleTask): - return True - return False - - is_explain = self.options.explain - update_reporting(self.options, is_console_task() or is_explain, self.run_tracker) - - if self.options.dry_run: - print('****** Dry Run ******') - - context = Context( - self.config, - self.options, - self.run_tracker, - self.targets, - requested_goals=self.requested_goals, - lock=lock) - - if self.options.recursive_directory: - context.log.warn( - '--all-recursive is deprecated, use a target spec with the form [dir]:: instead') - for dir in self.options.recursive_directory: - self.add_target_recursive(dir) - - if self.options.target_directory: - context.log.warn('--all is deprecated, use a target spec with the form [dir]: instead') - for dir in self.options.target_directory: - self.add_target_directory(dir) - - unknown = [] - for phase in self.phases: - if not phase.goals(): - unknown.append(phase) - - if unknown: - _list_goals(context, 'Unknown goal(s): %s' % ' '.join(phase.name for phase in unknown)) - return 1 - - return Goal._execute(context, self.phases, print_timing=self.options.time) - - def cleanup(self): - # TODO: Make this more selective? Only kill nailguns that affect state? E.g., checkstyle - # may not need to be killed. 
- NailgunTask.killall(log.info) - sys.exit(1) - - -# Install all default pants provided goals -from twitter.pants.targets.benchmark import Benchmark -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_tests import JavaTests as junit_tests -from twitter.pants.targets.jvm_binary import JvmBinary -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.targets.scala_tests import ScalaTests -from twitter.pants.targets.scalac_plugin import ScalacPlugin -from twitter.pants.tasks.antlr_gen import AntlrGen -from twitter.pants.tasks.benchmark_run import BenchmarkRun -from twitter.pants.tasks.binary_create import BinaryCreate -from twitter.pants.tasks.bootstrap_jvm_tools import BootstrapJvmTools -from twitter.pants.tasks.build_lint import BuildLint -from twitter.pants.tasks.builddictionary import BuildBuildDictionary -from twitter.pants.tasks.bundle_create import BundleCreate -from twitter.pants.tasks.check_exclusives import CheckExclusives -from twitter.pants.tasks.check_published_deps import CheckPublishedDeps -from twitter.pants.tasks.checkstyle import Checkstyle -from twitter.pants.tasks.detect_duplicates import DuplicateDetector -from twitter.pants.tasks.filedeps import FileDeps -from twitter.pants.tasks.ivy_resolve import IvyResolve -from twitter.pants.tasks.jar_create import JarCreate -from twitter.pants.tasks.javadoc_gen import JavadocGen -from twitter.pants.tasks.junit_run import JUnitRun -from twitter.pants.tasks.jvm_compile.java.java_compile import JavaCompile -from twitter.pants.tasks.jvm_compile.scala.scala_compile import ScalaCompile -from twitter.pants.tasks.jvm_run import JvmRun -from twitter.pants.tasks.listtargets import ListTargets -from twitter.pants.tasks.markdown_to_html import MarkdownToHtml -from twitter.pants.tasks.nailgun_task import NailgunTask -from twitter.pants.tasks.pathdeps import PathDeps -from twitter.pants.tasks.prepare_resources import PrepareResources -from twitter.pants.tasks.protobuf_gen import ProtobufGen -from twitter.pants.tasks.jar_publish import JarPublish -from twitter.pants.tasks.scala_repl import ScalaRepl -from twitter.pants.tasks.scaladoc_gen import ScaladocGen -from twitter.pants.tasks.scrooge_gen import ScroogeGen -from twitter.pants.tasks.specs_run import SpecsRun -from twitter.pants.tasks.thrift_gen import ThriftGen - - -def _cautious_rmtree(root): - real_buildroot = os.path.realpath(os.path.abspath(get_buildroot())) - real_root = os.path.realpath(os.path.abspath(root)) - if not real_root.startswith(real_buildroot): - raise TaskError('DANGER: Attempting to delete %s, which is not under the build root!' % real_root) - safe_rmtree(real_root) - -try: - import daemon - def _async_cautious_rmtree(root): - if os.path.exists(root): - new_path = root + '.deletable.%f' % time.time() - os.rename(root, new_path) - with daemon.DaemonContext(): - _cautious_rmtree(new_path) -except ImportError: - pass - -class Invalidator(ConsoleTask): - def execute(self, targets): - build_invalidator_dir = self.context.config.get('tasks', 'build_invalidator') - _cautious_rmtree(build_invalidator_dir) -goal( - name='invalidate', - action=Invalidator, - dependencies=['ng-killall'] -).install().with_description('Invalidate all targets') - - -class Cleaner(ConsoleTask): - def execute(self, targets): - _cautious_rmtree(self.context.config.getdefault('pants_workdir')) -goal( - name='clean-all', - action=Cleaner, - dependencies=['invalidate'] -).install().with_description('Cleans all build output') - - -class 
AsyncCleaner(ConsoleTask): - def execute(self, targets): - _async_cautious_rmtree(self.context.config.getdefault('pants_workdir')) -goal( - name='clean-all-async', - action=AsyncCleaner, - dependencies=['invalidate'] -).install().with_description('Cleans all build output in a background process') - - -class NailgunKillall(ConsoleTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(NailgunKillall, cls).setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("everywhere"), dest="ng_killall_everywhere", - default=False, action="store_true", - help="[%default] Kill all nailgun servers launched by pants for " - "all workspaces on the system.") - - def execute(self, targets): - NailgunTask.killall(everywhere=self.context.options.ng_killall_everywhere) - -goal( - name='ng-killall', - action=NailgunKillall -).install().with_description('Kill any running nailgun servers spawned by pants.') - - -class RunServer(ConsoleTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(RunServer, cls).setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("port"), dest="port", action="store", type="int", default=0, - help="Serve on this port. Leave unset to choose a free port automatically (recommended if " - "using pants concurrently in multiple workspaces on the same host).") - option_group.add_option(mkflag("allowed-clients"), dest="allowed_clients", - default=["127.0.0.1"], action="append", - help="Only requests from these IPs may access this server. Useful for temporarily showing " \ - "build results to a colleague. The special value ALL means any client may connect. " \ - "Use with caution, as your source code is exposed to all allowed clients!") - - def console_output(self, targets): - DONE = '__done_reporting' - - port = ReportingServerManager.get_current_server_port() - if port: - return ['Server already running at http://localhost:%d' % port] - - def run_server(reporting_queue): - def report_launch(actual_port): - reporting_queue.put( - 'Launching server with pid %d at http://localhost:%d' % (os.getpid(), actual_port)) - - def done_reporting(): - reporting_queue.put(DONE) - - try: - # We mustn't block in the child, because the multiprocessing module enforces that the - # parent either kills or joins to it. Instead we fork a grandchild that inherits the queue - # but is allowed to block indefinitely on the server loop. - if not os.fork(): - # Child process. - info_dir = RunInfo.dir(self.context.config) - # If these are specified explicitly in the config, use those. Otherwise - # they will be None, and we'll use the ones baked into this package. - template_dir = self.context.config.get('reporting', 'reports_template_dir') - assets_dir = self.context.config.get('reporting', 'reports_assets_dir') - settings = ReportingServer.Settings(info_dir=info_dir, template_dir=template_dir, - assets_dir=assets_dir, root=get_buildroot(), - allowed_clients=self.context.options.allowed_clients) - server = ReportingServer(self.context.options.port, settings) - actual_port = server.server_port() - ReportingServerManager.save_current_server_port(actual_port) - report_launch(actual_port) - done_reporting() - # Block forever here. - server.start() - except socket.error: - done_reporting() - raise - - # We do reporting on behalf of the child process (necessary, since reporting may be buffered in a - # background thread). 
We use multiprocessing.Process() to spawn the child so we can use that - # module's inter-process Queue implementation. - reporting_queue = multiprocessing.Queue() - proc = multiprocessing.Process(target=run_server, args=[reporting_queue]) - proc.daemon = True - proc.start() - s = reporting_queue.get() - ret = [] - while s != DONE: - ret.append(s) - s = reporting_queue.get() - # The child process is done reporting, and is now in the server loop, so we can proceed. - server_port = ReportingServerManager.get_current_server_port() - if server_port: - binary_util.ui_open('http://localhost:%d/run/latest' % server_port) - return ret - -goal( - name='server', - action=RunServer, - serialize=False, -).install().with_description('Run the pants reporting server.') - -class KillServer(ConsoleTask): - pidfile_re = re.compile(r'port_(\d+)\.pid') - def console_output(self, targets): - pidfiles_and_ports = ReportingServerManager.get_current_server_pidfiles_and_ports() - if not pidfiles_and_ports: - return ['No server found.'] - # There should only be one pidfile, but in case there are many, we kill them all here. - killed = [] - for pidfile, port in pidfiles_and_ports: - with open(pidfile, 'r') as infile: - pidstr = infile.read() - try: - os.unlink(pidfile) - pid = int(pidstr) - os.kill(pid, signal.SIGKILL) - killed.append('Killed server with pid %d at http://localhost:%d' % (pid, port)) - except (ValueError, OSError): - continue - return killed - -goal( - name='killserver', - action=KillServer, - serialize=False, -).install().with_description('Kill the pants reporting server.') - - -# TODO(pl): Make the dependency of every other phase on this phase less explicit -goal( - name='bootstrap-jvm-tools', - action=BootstrapJvmTools, -).install('bootstrap').with_description('Bootstrap tools needed for building') - -# TODO(John Sirois): Resolve eggs -goal( - name='ivy', - action=IvyResolve, - dependencies=['gen', 'check-exclusives', 'bootstrap'] -).install('resolve').with_description('Resolves jar dependencies and produces dependency reports.') - -goal(name='check-exclusives', - dependencies=['gen'], - action=CheckExclusives).install('check-exclusives').with_description( - 'Check exclusives declarations to verify that dependencies are consistent.') - -# TODO(John Sirois): gen attempted as the sole Goal should gen for all known gen types but -# recognize flags to narrow the gen set -goal(name='thrift', action=ThriftGen).install('gen').with_description('Generate code.') -goal(name='scrooge', - dependencies=['bootstrap'], - action=ScroogeGen).install('gen') -goal(name='protoc', action=ProtobufGen).install('gen') -goal(name='antlr', - dependencies=['bootstrap'], - action=AntlrGen).install('gen') - -goal( - name='checkstyle', - action=Checkstyle, - dependencies=['gen', 'resolve'] -).install().with_description('Run checkstyle against java source code.') - -# When chunking a group, we don't need a new chunk for targets with no sources at all -# (which do sometimes exist, e.g., when creating a BUILD file ahead of its code). -def _has_sources(target, extension): - return target.has_sources(extension) or target.has_label('sources') and not target.sources - -# Note: codegen targets shouldn't really be 'is_java' or 'is_scala', but right now they -# are so they don't cause a lot of islands while chunking. The jvm group doesn't act on them -# anyway (it acts on their synthetic counterparts) so it doesn't matter where they get chunked. -# TODO: Make chunking only take into account the targets actually acted on? 
This would require -# task types to declare formally the targets they act on. -def _is_java(target): - return (target.is_java or - (isinstance(target, (JvmBinary, junit_tests, Benchmark)) - and _has_sources(target, '.java'))) and not target.is_apt - -def _is_scala(target): - return (target.is_scala or - (isinstance(target, (JvmBinary, junit_tests, Benchmark)) - and _has_sources(target, '.scala'))) - - -goal(name='scala', - action=ScalaCompile, - group=group('jvm', _is_scala), - dependencies=['gen', 'resolve', 'check-exclusives', 'bootstrap']).install('compile').with_description( - 'Compile both generated and checked in code.' - ) - -class AptCompile(JavaCompile): pass # So they're distinct in log messages etc. - -goal(name='apt', - action=AptCompile, - group=group('jvm', lambda t: t.is_apt), - dependencies=['gen', 'resolve', 'check-exclusives', 'bootstrap']).install('compile') - -goal(name='java', - action=JavaCompile, - group=group('jvm', _is_java), - dependencies=['gen', 'resolve', 'check-exclusives', 'bootstrap']).install('compile') - - -goal(name='prepare', action=PrepareResources).install('resources') - - -# TODO(John Sirois): pydoc also -goal(name='javadoc', - action=JavadocGen, - dependencies=['compile', 'bootstrap']).install('doc').with_description('Create documentation.') -goal(name='scaladoc', - action=ScaladocGen, - dependencies=['compile', 'bootstrap']).install('doc') - - -if MarkdownToHtml.AVAILABLE: - goal(name='markdown', - action=MarkdownToHtml - ).install('markdown').with_description('Generate html from markdown docs.') - - -class ScaladocJarShim(ScaladocGen): - def __init__(self, context, output_dir=None, confs=None): - super(ScaladocJarShim, self).__init__(context, - output_dir=output_dir, - confs=confs, - active=False) - - -class JavadocJarShim(JavadocGen): - def __init__(self, context, output_dir=None, confs=None): - super(JavadocJarShim, self).__init__(context, - output_dir=output_dir, - confs=confs, - active=False) - - -goal(name='javadoc_publish', - action=JavadocJarShim).install('publish') -goal(name='scaladoc_publish', - action=ScaladocJarShim).install('publish') -goal(name='jar', - action=JarCreate, - dependencies=['compile', 'resources', 'bootstrap']).install('jar').with_description('Create one or more jars.') -goal(name='check_published_deps', - action=CheckPublishedDeps -).install('check_published_deps').with_description( - 'Find references to outdated artifacts published from this BUILD tree.') - -goal(name='jar_create_publish', - action=JarCreate, - dependencies=['compile', 'resources']).install('publish') - -goal(name='publish', - action=JarPublish).install('publish').with_description('Publish one or more artifacts.') - -goal(name='junit', - action=JUnitRun, - dependencies=['compile', 'resources', 'bootstrap']).install('test').with_description('Test compiled code.') - -goal(name='specs', - action=SpecsRun, - dependencies=['compile', 'resources', 'bootstrap']).install('test') - -goal(name='bench', - action=BenchmarkRun, - dependencies=['compile', 'resources', 'bootstrap']).install('bench') - -# TODO(John Sirois): Create pex's in binary phase -goal( - name='binary', - action=BinaryCreate, - dependencies=['jar', 'bootstrap'] -).install().with_description('Create a jvm binary jar.') -goal( - name='dup', - action=DuplicateDetector, -).install('binary') -goal( - name='bundle', - action=BundleCreate, - dependencies=['binary', 'bootstrap'] -).install().with_description('Create an application bundle from binary targets.') - -# run doesn't need the serialization 
lock. It's reasonable to run some code -# in a workspace while there's a compile going on in unrelated code. -goal( - name='detect-duplicates', - action=DuplicateDetector, - dependencies=['jar'] -).install().with_description('Detect duplicate classes and resources on the classpath.') - -goal( - name='jvm-run', - action=JvmRun, - dependencies=['compile', 'resources', 'bootstrap'], - serialize=False, -).install('run').with_description('Run a (currently JVM only) binary target.') - -goal( - name='jvm-run-dirty', - action=JvmRun, - serialize=False, -).install('run-dirty').with_description('Run a (currently JVM only) binary target, using ' + - 'only currently existing binaries, skipping compilation') - -# repl doesn't need the serialization lock. It's reasonable to have -# a repl running in a workspace while there's a compile going on in unrelated code. -goal( - name='scala-repl', - action=ScalaRepl, - dependencies=['compile', 'resources', 'bootstrap'], - serialize=False, -).install('repl').with_description( - 'Run a (currently Scala only) REPL with the classpath set according to the targets.') - -goal( - name='scala-repl-dirty', - action=ScalaRepl, - serialize=False, -).install('repl-dirty').with_description( - 'Run a (currently Scala only) REPL with the classpath set according to the targets, ' + - 'using the currently existing binaries, skipping compilation') - -goal( - name='filedeps', - action=FileDeps -).install('filedeps').with_description('Print out a list of all files the target depends on') - -goal( - name='pathdeps', - action=PathDeps -).install('pathdeps').with_description( - 'Print out a list of all paths containing build files the target depends on') - -goal( - name='list', - action=ListTargets -).install('list').with_description('List available BUILD targets.') - -goal( - name='buildlint', - action=BuildLint, - dependencies=['compile'], # To pick up missing deps. 
-).install() - -goal( - name='builddict', - action=BuildBuildDictionary, -).install() - -from twitter.pants.tasks.idea_gen import IdeaGen - -goal( - name='idea', - action=IdeaGen, - dependencies=['jar', 'bootstrap'] -).install().with_description('Create an IntelliJ IDEA project from the given targets.') - - -from twitter.pants.tasks.eclipse_gen import EclipseGen - -goal( - name='eclipse', - action=EclipseGen, - dependencies=['jar', 'bootstrap'] -).install().with_description('Create an Eclipse project from the given targets.') - - -from twitter.pants.tasks.provides import Provides - -goal( - name='provides', - action=Provides, - dependencies=['jar', 'bootstrap'] -).install().with_description('Emit the list of symbols provided by the given targets.') - - -from twitter.pants.tasks.python.setup import SetupPythonEnvironment - -goal( - name='python-setup', - action=SetupPythonEnvironment, -).install('setup').with_description( -"Set up the target's build environment.") - -from twitter.pants.tasks.paths import Path, Paths - -goal( - name='path', - action=Path, -).install().with_description('Find a dependency path from one target to another') - -goal( - name='paths', - action=Paths, -).install().with_description('Find all dependency paths from one target to another') - - -from twitter.pants.tasks.dependees import ReverseDepmap - -goal( - name='dependees', - action=ReverseDepmap -).install().with_description('Print a reverse dependency mapping for the given targets') - - -from twitter.pants.tasks.depmap import Depmap - -goal( - name='depmap', - action=Depmap -).install().with_description('Generates either a textual dependency tree or a graphviz' - ' digraph dotfile for the dependency set of a target') - - -from twitter.pants.tasks.dependencies import Dependencies - -goal( - name='dependencies', - action=Dependencies -).install().with_description('Extract textual information about the dependencies of a target') - - -from twitter.pants.tasks.filemap import Filemap - -goal( - name='filemap', - action=Filemap -).install().with_description('Outputs a mapping from source file to' - ' the target that owns the source file') - - -from twitter.pants.tasks.minimal_cover import MinimalCover - -goal( - name='minimize', - action=MinimalCover -).install().with_description('Print the minimal cover of the given targets.') - - -from twitter.pants.tasks.filter import Filter - -goal( - name='filter', - action=Filter -).install().with_description('Filter the input targets based on various criteria.') - - -from twitter.pants.tasks.sorttargets import SortTargets - -goal( - name='sort', - action=SortTargets -).install().with_description('Topologically sort the input targets.') - - -from twitter.pants.tasks.roots import ListRoots - -goal( - name='roots', - action=ListRoots, -).install('roots').with_description("Prints the source roots and associated target types defined in the repo.") diff --git a/src/python/twitter/pants/commands/help.py b/src/python/twitter/pants/commands/help.py deleted file mode 100644 index 1439eedbf..000000000 --- a/src/python/twitter/pants/commands/help.py +++ /dev/null @@ -1,46 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
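Every registration above follows the same fluent shape: construct a goal around a Task subclass, install it into a named phase (or a phase of its own), and attach help text. A sketch of that shape using the imports goal.py already has (NoopTask is hypothetical, for illustration only):

from twitter.pants.goal import Goal as goal
from twitter.pants.tasks.console_task import ConsoleTask

class NoopTask(ConsoleTask):
  # A do-nothing console task, purely to show the registration pattern.
  def console_output(self, targets):
    return []

goal(
  name='noop',
  action=NoopTask,
  dependencies=['gen'],  # phases that must run before this one
).install('misc').with_description('Do nothing, illustratively.')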
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from copy import copy - -from twitter.pants.commands.command import Command - - -class Help(Command): - """Provides help for available commands or a single specified command.""" - - __command__ = 'help' - - def setup_parser(self, parser, args): - self.parser = copy(parser) - - parser.set_usage("%prog help ([command])") - parser.epilog = """Lists available commands with no arguments; otherwise prints help for the - specified command.""" - - def __init__(self, run_tracker, root_dir, parser, argv): - Command.__init__(self, run_tracker, root_dir, parser, argv) - - if len(self.args) > 1: - self.error("The help command accepts at most 1 argument.") - self.subcommand = self.args[0] if self.args else None - - def execute(self): - subcommand_class = Command.get_command(self.subcommand) - if not subcommand_class: - self.error("'%s' is not a recognized subcommand." % self.subcommand) - command = subcommand_class(self.run_tracker, self.root_dir, self.parser, ['--help']) - return command.execute() diff --git a/src/python/twitter/pants/commands/py.py b/src/python/twitter/pants/commands/py.py deleted file mode 100644 index 6c03e5eee..000000000 --- a/src/python/twitter/pants/commands/py.py +++ /dev/null @@ -1,194 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from __future__ import print_function - -import os -import signal -import sys -import tempfile - -from twitter.common.python.pex import PEX -from twitter.common.python.pex_builder import PEXBuilder - -from twitter.pants.base.address import Address -from twitter.pants.base.config import Config -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import Target -from twitter.pants.commands.command import Command -from twitter.pants.python.interpreter_cache import PythonInterpreterCache -from twitter.pants.python.python_chroot import PythonChroot -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_requirement import PythonRequirement - - -class Py(Command): - """Python chroot manipulation.""" - - __command__ = 'py' - - def setup_parser(self, parser, args): - parser.set_usage('\n' - ' %prog py (options) [spec] args\n') - parser.disable_interspersed_args() - parser.add_option('-t', '--timeout', dest='conn_timeout', type='int', - default=Config.load().getdefault('connection_timeout'), - help='Number of seconds to wait for http connections.') - parser.add_option('--pex', dest='pex', default=False, action='store_true', - help='Dump a .pex of this chroot instead of attempting to execute it.') - parser.add_option('--ipython', dest='ipython', default=False, action='store_true', - help='Run the target environment in an IPython interpreter.') - parser.add_option('-r', '--req', dest='extra_requirements', default=[], action='append', - help='Additional Python requirements to add to this chroot.') - parser.add_option('-i', '--interpreter', dest='interpreter', default=None, - help='The interpreter requirement for this chroot.') - parser.add_option('-e', '--entry_point', dest='entry_point', default=None, - help='The entry point for the generated PEX.') - parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', - help='Show verbose output.') - parser.epilog = """Interact with the chroot of the specified target.""" - - def __init__(self, run_tracker, root_dir, parser, argv): - Command.__init__(self, run_tracker, root_dir, parser, argv) - - self.target = None - self.extra_targets = [] - self.config = Config.load() - self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug) - self.interpreter_cache.setup() - interpreters = self.interpreter_cache.select_interpreter( - list(self.interpreter_cache.matches([self.options.interpreter] - if self.options.interpreter else ['']))) - if len(interpreters) != 1: - self.error('Unable to detect suitable interpreter.') - self.interpreter = interpreters[0] - - for req in self.options.extra_requirements: - with ParseContext.temp(): - self.extra_targets.append(PythonRequirement(req, use_2to3=True)) - - # We parse each arg in the context of the cli usage: - # ./pants command (options) [spec] (build args) - # ./pants command (options) [spec]... -- (build args) - # Our command token and our options are parsed out so we see args of the form: - # [spec] (build args) - # [spec]... -- (build args) - binaries = [] - for k in range(len(self.args)): - arg = self.args.pop(0) - if arg == '--': - break - - def not_a_target(debug_msg): - self.debug('Not a target, assuming option: %s.' % debug_msg) - # We failed to parse the arg as a target or else it was in valid address format but did not - # correspond to a real target. 
Assume this is the 1st of the build args and terminate - # processing args for target addresses. - self.args.insert(0, arg) - - target = None - try: - address = Address.parse(root_dir, arg) - target = Target.get(address) - if target is None: - not_a_target(debug_msg='Unrecognized target') - break - except Exception as e: - not_a_target(debug_msg=e) - break - - for resolved in filter(lambda t: t.is_concrete, target.resolve()): - if isinstance(resolved, PythonBinary): - binaries.append(resolved) - else: - self.extra_targets.append(resolved) - - if len(binaries) == 0: - # treat as a chroot - pass - elif len(binaries) == 1: - # We found a binary and are done, the rest of the args get passed to it - self.target = binaries[0] - else: - self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % ( - arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries) - )) - - if self.target is None: - if not self.extra_targets: - self.error('No valid target specified!') - self.target = self.extra_targets.pop(0) - - def debug(self, message): - if self.options.verbose: - print(message, file=sys.stderr) - - def execute(self): - if self.options.pex and self.options.ipython: - self.error('Cannot specify both --pex and --ipython!') - - if self.options.entry_point and self.options.ipython: - self.error('Cannot specify both --entry_point and --ipython!') - - if self.options.verbose: - print('Build operating on target: %s %s' % (self.target, - 'Extra targets: %s' % ' '.join(map(str, self.extra_targets)) if self.extra_targets else '')) - - builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter, - pex_info=self.target.pexinfo if isinstance(self.target, PythonBinary) else None) - - if self.options.entry_point: - builder.set_entry_point(self.options.entry_point) - - if self.options.ipython: - if not self.config.has_section('python-ipython'): - self.error('No python-ipython sections defined in your pants.ini!') - - builder.info.entry_point = self.config.get('python-ipython', 'entry_point') - if builder.info.entry_point is None: - self.error('Must specify entry_point for IPython in the python-ipython section ' - 'of your pants.ini!') - - requirements = self.config.getlist('python-ipython', 'requirements', default=[]) - - with ParseContext.temp(): - for requirement in requirements: - self.extra_targets.append(PythonRequirement(requirement)) - - executor = PythonChroot( - self.target, - self.root_dir, - builder=builder, - interpreter=self.interpreter, - extra_targets=self.extra_targets, - conn_timeout=self.options.conn_timeout) - - executor.dump() - - if self.options.pex: - pex_name = os.path.join(self.root_dir, 'dist', '%s.pex' % self.target.name) - builder.build(pex_name) - print('Wrote %s' % pex_name) - return 0 - else: - builder.freeze() - pex = PEX(builder.path(), interpreter=self.interpreter) - po = pex.run(args=list(self.args), blocking=False) - try: - return po.wait() - except KeyboardInterrupt: - po.send_signal(signal.SIGINT) - raise diff --git a/src/python/twitter/pants/commands/register.py b/src/python/twitter/pants/commands/register.py deleted file mode 100644 index d0bd55cb3..000000000 --- a/src/python/twitter/pants/commands/register.py +++ /dev/null @@ -1,10 +0,0 @@ - -from twitter.pants.commands.build import Build -from twitter.pants.commands.goal import Goal -from twitter.pants.commands.help import Help -from twitter.pants.commands.py import Py -from twitter.pants.commands.setup_py import SetupPy - -def register_commands(): - for cmd in (Build, Goal, Help, Py, 
SetupPy): - cmd._register() diff --git a/src/python/twitter/pants/commands/setup_py.py b/src/python/twitter/pants/commands/setup_py.py deleted file mode 100644 index e90fff8a0..000000000 --- a/src/python/twitter/pants/commands/setup_py.py +++ /dev/null @@ -1,384 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import ast -from collections import defaultdict -import itertools -import os -import pprint - -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_rmtree -from twitter.common.dirutil.chroot import Chroot -from twitter.common.python.installer import InstallerBase, Packager -from twitter.pants.base.address import Address -from twitter.pants.base.config import Config -from twitter.pants.base.target import Target, TargetDefinitionException -from twitter.pants.commands.command import Command -from twitter.pants.python.antlr_builder import PythonAntlrBuilder -from twitter.pants.python.thrift_builder import PythonThriftBuilder -from twitter.pants.targets.python_antlr_library import PythonAntlrLibrary -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_requirement import PythonRequirement -from twitter.pants.targets.python_target import PythonTarget -from twitter.pants.targets.python_thrift_library import PythonThriftLibrary - - -SETUP_BOILERPLATE = """ -# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS -# Target: %(setup_target)s - -from setuptools import setup - -setup(** -%(setup_dict)s -) -""" - - -class SetupPyRunner(InstallerBase): - def __init__(self, source_dir, setup_command, **kw): - self.__setup_command = setup_command.split() - super(SetupPyRunner, self).__init__(source_dir, **kw) - - def _setup_command(self): - return self.__setup_command - - -class SetupPy(Command): - """Generate setup.py-based Python projects from python_library targets.""" - - GENERATED_TARGETS = { - PythonAntlrLibrary: PythonAntlrBuilder, - PythonThriftLibrary: PythonThriftBuilder, - } - SOURCE_ROOT = 'src' - __command__ = 'setup_py' - - @classmethod - def _combined_dependencies(cls, target): - dependencies = getattr(target, 'dependencies', OrderedSet()) - if isinstance(target, PythonTarget) and target.provides: - return dependencies | OrderedSet(target.provides.binaries.values()) - else: - return dependencies - - @classmethod - def _construct_provider_map(cls, root_target, descendant, parents, providers, depmap): - if isinstance(descendant, PythonTarget) and descendant.provides: - providers.append(descendant) - for dependency in cls._combined_dependencies(descendant): - for prv in providers: - for dep in dependency.resolve(): 
- depmap[prv].add(dep) - if dep in parents: - raise TargetDefinitionException(root_target, - '%s and %s combined have a cycle!' % (root_target, dep)) - parents.add(dep) - cls._construct_provider_map(root_target, dep, parents, providers, depmap) - parents.remove(dep) - if isinstance(descendant, PythonTarget) and descendant.provides: - assert providers[-1] == descendant - providers.pop() - - @classmethod - def construct_provider_map(cls, root_target): - """Construct a mapping of provider => minimal target set within :root_target. - - The algorithm works in the following fashion: - - 1. Recursively resolve every dependency starting at root_target (the thing - that setup_py is being called against). This includes the dependencies - of any binaries attached to the PythonArtifact using with_binaries - 2. For every PythonTarget that provides a PythonArtifact, add an - entry for it to depmap[], keyed on the artifact name, containing - an OrderedSet of all transitively resolved children - dependencies. - 3. Any concrete target with sources that is provided by another PythonArtifact - other than the one being built with setup_py will be elided. - - Downsides: - - Explicitly requested dependencies may be elided if transitively included by others, - e.g. - python_library( - ..., - dependencies = [ - pants('src/python/twitter/common/dirutil'), - pants('src/python/twitter/common/python'), - ] - ) - will result in only twitter.common.python being exported even if top-level sources - directly reference twitter.common.dirutil, which could be considered a leak. - """ - depmap = defaultdict(OrderedSet) - cls._construct_provider_map(root_target, root_target, parents=set(), providers=[], - depmap=depmap) - return depmap - - @classmethod - def minified_dependencies(cls, root_target): - """Minify the dependencies of a PythonTarget.""" - depmap = cls.construct_provider_map(root_target) - root_deps = depmap.pop(root_target, OrderedSet()) - - def elide(target): - if any(target in depset for depset in depmap.values()): - root_deps.discard(target) - - root_target.walk(elide) - return root_deps - - @classmethod - def iter_entry_points(cls, target): - """Yields the name, entry_point pairs of binary targets in this PythonArtifact.""" - for name, binary_target in target.provides.binaries.items(): - concrete_target = binary_target.get() - if not isinstance(concrete_target, PythonBinary) or concrete_target.entry_point is None: - raise TargetDefinitionException(target, - 'Cannot add a binary to a PythonArtifact if it does not contain an entry_point.') - yield name, concrete_target.entry_point - - @classmethod - def declares_namespace_package(cls, filename): - """Given a filename, walk its ast and determine if it is declaring a namespace package. - Intended only for __init__.py files though it will work for any .py.""" - with open(filename) as fp: - init_py = ast.parse(fp.read(), filename) - calls = [node for node in ast.walk(init_py) if isinstance(node, ast.Call)] - for call in calls: - if len(call.args) != 1: - continue - if isinstance(call.func, ast.Attribute) and call.func.attr != 'declare_namespace': - continue - if isinstance(call.func, ast.Name) and call.func.id != 'declare_namespace': - continue - if isinstance(call.args[0], ast.Name) and call.args[0].id == '__name__': - return True - return False - - @classmethod - def iter_generated_sources(cls, target, root, config=None): - config = config or Config.load() - # This is sort of facepalmy -- python.new will make this much better. 
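declares_namespace_package() above matches exactly one shape: a one-argument call to declare_namespace whose argument is the name __name__, as pkg_resources-style namespace packages spell it. A stdlib-only snippet showing the AST being matched:

import ast

src = 'import pkg_resources\npkg_resources.declare_namespace(__name__)\n'
call = [n for n in ast.walk(ast.parse(src)) if isinstance(n, ast.Call)][0]
assert isinstance(call.func, ast.Attribute) and call.func.attr == 'declare_namespace'
assert isinstance(call.args[0], ast.Name) and call.args[0].id == '__name__'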
- for target_type, target_builder in cls.GENERATED_TARGETS.items(): - if isinstance(target, target_type): - builder_cls = target_builder - break - else: - raise TypeError( - 'write_generated_sources could not find suitable code generator for %s' % type(target)) - - builder = builder_cls(target, root, config) - builder.generate() - for root, _, files in os.walk(builder.package_root): - for fn in files: - target_file = os.path.join(root, fn) - yield os.path.relpath(target_file, builder.package_root), target_file - - @classmethod - def nearest_subpackage(cls, package, all_packages): - """Given a package, find its nearest parent in all_packages.""" - def shared_prefix(candidate): - zipped = itertools.izip(package.split('.'), candidate.split('.')) - matching = itertools.takewhile(lambda pair: pair[0] == pair[1], zipped) - return [pair[0] for pair in matching] - shared_packages = list(filter(None, map(shared_prefix, all_packages))) - return '.'.join(max(shared_packages, key=len)) if shared_packages else package - - @classmethod - def find_packages(cls, chroot): - """Detect packages, namespace packages and resources from an existing chroot. - - Returns a tuple of: - set(packages) - set(namespace_packages) - map(package => set(files)) - """ - base = os.path.join(chroot.path(), cls.SOURCE_ROOT) - packages, namespace_packages = set(), set() - resources = defaultdict(set) - - def iter_files(): - for root, _, files in os.walk(base): - module = os.path.relpath(root, base).replace(os.path.sep, '.') - for filename in files: - yield module, filename, os.path.join(root, filename) - - # establish packages, namespace packages in first pass - for module, filename, real_filename in iter_files(): - if filename != '__init__.py': - continue - packages.add(module) - if cls.declares_namespace_package(real_filename): - namespace_packages.add(module) - - # second pass establishes non-source content (resources) - for module, filename, real_filename in iter_files(): - if filename.endswith('.py'): - if module not in packages: - # TODO(wickman) Consider changing this to a full-on error as it - # could indicate bad BUILD hygiene. - # raise cls.UndefinedSource('%s is source but does not belong to a package!' % filename) - print('WARNING! %s is source but does not belong to a package!' % real_filename) - else: - continue - submodule = cls.nearest_subpackage(module, packages) - if submodule == module: - resources[submodule].add(filename) - else: - assert module.startswith(submodule + '.') - relative_module = module[len(submodule) + 1:] - relative_filename = os.path.join(relative_module.replace('.', os.path.sep), filename) - resources[submodule].add(relative_filename) - - return packages, namespace_packages, resources - - def setup_parser(self, parser, args): - parser.set_usage("\n" - " %prog setup_py (options) [spec]\n") - parser.add_option("--run", dest="run", default=None, - help="The command to run against setup.py. Don't forget to quote " - "any additional parameters. 
If no run command is specified, " - "pants will by default generate and dump the source distribution.") - parser.add_option("--recursive", dest="recursive", default=False, action="store_true", - help="Transitively run setup_py on all provided downstream targets.") - - def __init__(self, run_tracker, root_dir, parser, argv): - Command.__init__(self, run_tracker, root_dir, parser, argv) - - if not self.args: - self.error("A spec argument is required") - - self._config = Config.load() - self._root = root_dir - - address = Address.parse(root_dir, self.args[0]) - self.target = Target.get(address) - if self.target is None: - self.error('%s is not a valid target!' % self.args[0]) - - if not self.target.provides: - self.error('Target must provide an artifact.') - - def write_contents(self, root_target, chroot): - """Write contents of the target.""" - def write_target_source(target, src): - chroot.link(os.path.join(target.target_base, src), os.path.join(self.SOURCE_ROOT, src)) - # check parent __init__.pys to see if they also need to be linked. this is to allow - # us to determine if they belong to regular packages or namespace packages. - while True: - src = os.path.dirname(src) - if not src: - # Do not allow the repository root to leak (i.e. '.' should not be a package in setup.py) - break - if os.path.exists(os.path.join(target.target_base, src, '__init__.py')): - chroot.link(os.path.join(target.target_base, src, '__init__.py'), - os.path.join(self.SOURCE_ROOT, src, '__init__.py')) - - def write_codegen_source(relpath, abspath): - chroot.link(abspath, os.path.join(self.SOURCE_ROOT, relpath)) - - def write_target(target): - if isinstance(target, tuple(self.GENERATED_TARGETS.keys())): - for relpath, abspath in self.iter_generated_sources(target, self._root, self._config): - write_codegen_source(relpath, abspath) - else: - for source in list(target.sources) + list(target.resources): - write_target_source(target, source) - - write_target(root_target) - for dependency in self.minified_dependencies(root_target): - if isinstance(dependency, PythonTarget) and not dependency.provides: - write_target(dependency) - - def write_setup(self, root_target, chroot): - """Write the setup.py of a target. 
Must be run after writing the contents to the chroot.""" - setup_keywords = root_target.provides.setup_py_keywords - - package_dir = {'': self.SOURCE_ROOT} - packages, namespace_packages, resources = self.find_packages(chroot) - - if namespace_packages: - setup_keywords['namespace_packages'] = list(sorted(namespace_packages)) - - if packages: - setup_keywords.update( - package_dir=package_dir, - packages=list(sorted(packages)), - package_data=dict((package, list(rs)) for (package, rs) in resources.items())) - - install_requires = set() - for dep in self.minified_dependencies(root_target): - if isinstance(dep, PythonRequirement): - install_requires.add(str(dep.requirement)) - elif isinstance(dep, PythonTarget) and dep.provides: - install_requires.add(dep.provides.key) - setup_keywords['install_requires'] = list(install_requires) - - for binary_name, entry_point in self.iter_entry_points(root_target): - if 'entry_points' not in setup_keywords: - setup_keywords['entry_points'] = {} - if 'console_scripts' not in setup_keywords['entry_points']: - setup_keywords['entry_points']['console_scripts'] = [] - setup_keywords['entry_points']['console_scripts'].append( - '%s = %s' % (binary_name, entry_point)) - - chroot.write(SETUP_BOILERPLATE % { - 'setup_dict': pprint.pformat(setup_keywords, indent=4), - 'setup_target': repr(root_target) - }, 'setup.py') - - # make sure that setup.py is included - chroot.write('include *.py'.encode('utf8'), 'MANIFEST.in') - - def run_one(self, target): - dist_dir = self._config.getdefault('pants_distdir') - chroot = Chroot(dist_dir, name=target.provides.name) - self.write_contents(target, chroot) - self.write_setup(target, chroot) - target_base = '%s-%s' % (target.provides.name, target.provides.version) - setup_dir = os.path.join(dist_dir, target_base) - safe_rmtree(setup_dir) - os.rename(chroot.path(), setup_dir) - - if not self.options.run: - print('Running packager against %s' % setup_dir) - setup_runner = Packager(setup_dir) - tgz_name = os.path.basename(setup_runner.sdist()) - print('Writing %s' % os.path.join(dist_dir, tgz_name)) - os.rename(setup_runner.sdist(), os.path.join(dist_dir, tgz_name)) - safe_rmtree(setup_dir) - else: - print('Running %s against %s' % (self.options.run, setup_dir)) - setup_runner = SetupPyRunner(setup_dir, self.options.run) - setup_runner.run() - - def execute(self): - if self.options.recursive: - setup_targets = OrderedSet() - def add_providing_target(target): - if isinstance(target, PythonTarget) and getattr(target, 'provides', None): - setup_targets.add(target) - return OrderedSet(target.provides.binaries.values()) - self.target.walk(add_providing_target) - else: - setup_targets = [self.target] - - for target in setup_targets: - if isinstance(target, PythonTarget) and target.provides: - self.run_one(target) diff --git a/src/python/twitter/pants/docs/.gitignore b/src/python/twitter/pants/docs/.gitignore deleted file mode 100644 index e3a60eea0..000000000 --- a/src/python/twitter/pants/docs/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# Ignore Sphinx output directory and generated reference docs. 
-_build -base.rst -targets.rst -tasks.rst -build_dictionary.rst -goals_reference.rst diff --git a/src/python/twitter/pants/docs/3rdparty.rst b/src/python/twitter/pants/docs/3rdparty.rst deleted file mode 100644 index f5b38452a..000000000 --- a/src/python/twitter/pants/docs/3rdparty.rst +++ /dev/null @@ -1,36 +0,0 @@ -######################## -Third-Party Dependencies -######################## - -Despite your best recruiting efforts, most software is still written -by people outside your organization. Your code can import -some of this *third-party* code. In the usual Pants way, a build target -*depends* on something to enable importing its code. -Special dependencies represent third-party code. - -To help all your code depend on the same version of third-party code, -it's handy to keep these special dependencies in -one place in your source tree. By convention, Pants-using source trees -use a ``3rdparty/`` directory to hold these dependencies. - -If two parts of your code depend on two versions of the same package, some tool -will pick one version to use. The behavior depends on the tool, but you can -be sure that one part of your code is *not* using the version it expects. -This is known as a *diamond dependencies problem* or -*dependency hell*; you don't want it. - -By keeping external dependencies in one place, you make it easier -for all your code to depend on the same version and avoid surprises. - -Beware: some version dependencies "hide." You depend on an external -package; that package itself depends on others and -"knows" what versions of those packages it depends on. Even though -all your code depends on the version specified in -``3rdparty/``, you might depend on something which, in turn, -depends on some other version. - -.. toctree:: - - 3rdparty_jvm - 3rdparty_py - diff --git a/src/python/twitter/pants/docs/3rdparty_jvm.rst b/src/python/twitter/pants/docs/3rdparty_jvm.rst deleted file mode 100644 index bb1ca3c59..000000000 --- a/src/python/twitter/pants/docs/3rdparty_jvm.rst +++ /dev/null @@ -1,134 +0,0 @@ -#################### -JVM 3rdparty Pattern -#################### - -In general, we use :doc:`the 3rdparty idiom <3rdparty>` to organize -dependencies on code from outside the source tree. This document -describes how to make this work for JVM (Java or Scala) code. - -Your JVM code can pull in code written elsewhere. -Pants uses `Ivy `_, a tool based on Maven's -jar-sharing. You should know the -(`Maven/Ivy groupId, artifactId, and version `_) -you want to use. - -************ -3rdparty/jvm -************ - -**The JVM part of 3rdparty is organized by org (Maven groupId).** Under there, -see if there's already a ``3rdparty/jvm/path/to/org/BUILD`` file. -If there isn't, then you want to create one. E.g., to import -``com.sun.jersey-apache-client``, look in ``3rdparty/jvm/com/sun`` -for a likely-looking ``BUILD`` file--in this example, -``3rdparty/jvm/com/sun/jersey/BUILD``. - -In the appropriate ``BUILD`` file, you want to find a -:ref:`bdict_dependencies` with a :ref:`bdict_jar` dependency: - -.. literalinclude:: ../../../../../3rdparty/jvm/com/sun/jersey/BUILD - :end-before: jersey-server - - Here, the -:ref:`bdict_dependencies` name defines a target address that other build -targets can refer to. The :ref:`bdict_jar` dependencies refer to Jars known -to your Ivy resolver. - -If there's already a ``jar`` importing the code you want but with a -*different* version, then you probably want to talk to other folks in your -organization to agree on one version.
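If you're creating such a ``BUILD`` file from scratch, a minimal sketch might look like the following (the artifact name and rev here are illustrative assumptions, not necessarily the repo's actual jersey entry)::

    dependencies(name='jersey',
      dependencies=[
        jar(org='com.sun.jersey', name='jersey-client', rev='1.12'),
      ],
    )

Other targets can then depend on it via ``pants('3rdparty/jvm/com/sun/jersey:jersey')``.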
(If there's already a ``jar`` importing -the code you want with the version you want, then great. Leave it there.) - -(You don't *need* a tree of ``BUILD`` files; you could instead have, e.g., one -``3rdparty/jvm/BUILD`` file. In a large organization, a tree can ease some -things. For example, ``git log`` quickly answers questions like "Who set up -this dependency? Who cares if I bump the version?") - -Additionally, some families of jars have different groupIds but are logically -part of the same project, or need to have their revs kept in sync. For example, -(``com.fasterxml.jackson.core``, ``com.fasterxml.jackson.dataformat``). -Sometimes it makes sense to define these in a single build file, -such as ``3rdparty/jvm/com/fasterxml/jackson/BUILD`` for the jackson family of jars. - -********************** -Your Code's BUILD File -********************** - -To set up your code to import the external jar, you'll add a -dependency to the appropriate Java target[s] in your ``BUILD`` file -and add ``import`` statements in your Java code. - -For example, your ``BUILD`` file might have - -.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/handler/BUILD - :start-after: java_library: - :end-before: src/java - -And your Java code might have:: - - import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; - -****************************************** -Troubleshooting a JVM Dependencies Problem -****************************************** - -If you're working in JVM (Java or Scala) and suspect you're pulling in -different versions of some package, you can dump your dependency "tree" -with versions with an Ivy resolve report. To generate a report for -a target such as the ``pingpong`` example:: - - $ ./pants goal resolve src/java/com/twitter/common/examples/pingpong --ivy-open - -Ivy's report shows which things depend on which versions. You can see which -package is pulling in the package-version you didn't expect. (It might not -be clear which version you want to use; but at least you'll know what's -causing the problem.) - -**If you notice a small number of wrong-version things,** then in a JVM -target, you can depend on a ``jar`` that specifies a version and -sets ``force=True`` to *force* using that version:: - - scala_library( - name = "loadtest", - dependencies = [ - pants('3rdparty/bijection:bijection-scrooge'), - # our 3rdparty/BUILD still has 6.1.4 as the default version, but - # finagle-[core|thrift] version 6.1.4 is superseded (evicted) by - # version 6.4.1 - # Force inclusion of version 6.1.4, until we're bumped to finagle 6.4.1+ - jar(org='com.twitter', name='iago', rev='0.6.3', force=True), - jar(org='com.twitter', name='finagle-core', rev='6.1.4', force=True), - jar(org='com.twitter', name='finagle-thrift', rev='6.1.4', force=True), - ], - sources = ["LoadTestRecordProcessor.scala"]) - -**If you notice that one "foreign" dependency pulls in mostly wrong things,** -tell Pants not to pull in its dependencies.
In your ``3rdparty/.../BUILD`` -file, call the ``jar``\'s ``intransitive`` method; then carefully add -hand-picked versions:: - - dependencies(name="retro-naming-factory", - dependencies=[ - jar(org='retro', name='retro-factory', rev='5.0.18').intransitive(), - # Don't use retro's expected (old, incompatible) common-logging - # version, yipe; use the same version we use everywhere else: - pants('3rdparty/common-logging'), - ]) - -**If you notice a small number of transitive dependencies to exclude,** -rather than marking the ``jar`` intransitive, you can ``exclude`` some -transitive dependencies from JVM targets:: - - java_library(name = 'loadtest', - dependencies = [ - pants('3rdparty/storm:storm'), - ], - sources = globs('*.java'), - excludes = [ - exclude('org.sonatype.sisu.inject', 'cglib') - ] - ) - - - diff --git a/src/python/twitter/pants/docs/3rdparty_py.rst b/src/python/twitter/pants/docs/3rdparty_py.rst deleted file mode 100644 index 1622ba838..000000000 --- a/src/python/twitter/pants/docs/3rdparty_py.rst +++ /dev/null @@ -1,44 +0,0 @@ -####################### -Python 3rdparty Pattern -####################### - -In general, we use :doc:`the 3rdparty idiom <3rdparty>` to organize -dependencies on code from outside the source tree. This document -describes how to make this work for Python code. - -Your Python code can pull in code written elsewhere. Pants fetches code -via a library that uses pip-style specifications (name and version-range). - -*************** -3rdparty/python -*************** - -**The Python part of 3rdparty is in 3rdparty/python/BUILD**. - -In this ``BUILD`` file, you want a ``python_requirement`` like:: - - python_requirement(name="beautifulsoup", - requirement="BeautifulSoup==3.2.0") - -.. TODO existing python sample code doesn't have a 3rdparty requirement; - cobbled this example together from non-exemplary code - -********************** -Your Code's BUILD File -********************** - -In your code's ``BUILD`` file, introduce a dependency on the ``3rdparty`` -target:: - - # src/python/scrape_html/BUILD - python_binary(name = "scrape_html", - source = "scrape_html.py", - dependencies = [ - pants('3rdparty/python:beautifulsoup'), - ] - ) - -Then in your Python code, you can ``import`` from that package:: - - # src/python/scrape_html/scrape_html.py - from BeautifulSoup import BeautifulSoup diff --git a/src/python/twitter/pants/docs/JVMProjects.rst b/src/python/twitter/pants/docs/JVMProjects.rst deleted file mode 100644 index 25f8293e0..000000000 --- a/src/python/twitter/pants/docs/JVMProjects.rst +++ /dev/null @@ -1,275 +0,0 @@ -####################### -JVM Projects with Pants -####################### - -Assuming you know the :doc:`basic Pants concepts ` and have -gone through the :doc:`first_tutorial`, you've made a great start towards -using Pants to work with Java and Scala code. This page goes into some of -the details. - -If you are accustomed to the Maven tool and contemplating moving to Pants, -you are not alone; :doc:`from_maven` has some advice. - -************************** -Relevant Goals and Targets -************************** - -When working with JVM languages, the following goals and targets are -especially relevant. - -**Deployable Bundle** *Runnable Binary, optionally with non-JVM files* - - Deployable bundles are directories, optionally archived, that contain - all files necessary to run the application. The ``bundle`` goal is - used to create these deployable bundles from either ``jvm_binary`` - or ``jvm_app`` targets.
- - Bundling a ``jvm_binary`` target is appropriate when your application - is entirely jar-based; that is, it's entirely class files and resources - packaged inside the jars themselves. If your application requires - "extra stuff" (e.g.: start scripts, config files) use a ``jvm_app`` - which allows you to include files in the bundle directory that are - supplemental to the binary jar and its dependencies. - You can learn :ref:`more about bundles `. - -**Runnable Binary** - - On its own, a ``jvm_binary`` BUILD target describes an executable ``.jar`` - (something you can run with ``java -jar``). The jar is described as - executable because it contains a manifest file that specifies the main - class as well as classpath for all dependencies. If your program - contains only jars (and resources packaged in those jars), this is - all you need to run the binary. Use ``./pants goal binary`` to - compile its code; ``./pants goal run`` to run it "in place". - -**Importable Code** - - ``java_library`` BUILD targets make Java source code ``import``\able. The - rule of thumb is that each directory of ``.java`` files has a ``BUILD`` file - with a ``java_library`` target. A JVM target that has a ``java_library`` in - its ``dependencies`` can import its code. ``scala_library`` targets are - similar, but compiled with Scala. - - To use pre-built ``.jar``\s, a JVM target can depend on a ``jar``, a - reference to published code; these ``jar``\s normally live in a - :doc:`directory called 3rdparty <3rdparty>`. - - Pants can ``publish`` a JVM library so code in other repos can use it; - if the ``*_library`` target has a ``provides`` parameter, that specifies - the repo/address at which to publish. - - An ``annotation_processor`` BUILD target defines a Java library, - one containing one or more annotation processors. - -**Tests** - - A ``junit_tests`` BUILD target holds source code for some JUnit tests; - typically, it would have one or more ``java_library`` targets as dependencies - and would import and test their code. - - A ``scala_specs`` target is similar, but has source code for Scala specs. - - The Pants ``test`` goal runs tests. - -**Generated Code** - - A ``java_thrift_library`` generates Java code from ``.thrift`` source; a JVM - target that has this target in its ``dependencies`` can ``import`` the - generated Java code. A ``java_protobuf_library`` is similar, but generates - Java code from protocol buffer source. - -************************* -BUILD for a Simple Binary -************************* - -The `Twitter Commons Java pingpong sample -`_ -code shows the BUILD file for a simple Java binary (in the ``main/`` directory): - -.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/main/BUILD - :start-after: under the License. - -This small program has just one library, a `java_library`. -The rule of thumb is that -each directory of ``.java`` or ``.scala`` files has a library target. If you -find -yourself thinking "we should move some of this code to another directory," -you probably also want to set up a ``BUILD`` file with a ``java_library`` -(or ``scala_library``) target. - -.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/handler/BUILD - :start-after: java_library: - -This library depends on other build targets and jars; if your code imports -something, that implies a ``BUILD`` dependency. -Some of the depended-upon targets come from the same repository; for example -``.../common/application``.
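To make this concrete, here is a hedged sketch of a ``java_library`` that depends on both an in-repo target and a ``3rdparty`` target; the target names are illustrative assumptions, not the actual pingpong targets::

    java_library(name='handler',
      dependencies=[
        pants('src/java/com/twitter/common/application'),
        pants('3rdparty/jvm/com/sun/jersey:jersey'),
      ],
      sources=globs('*.java'),
    )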
If we peeked at that ``BUILD`` target, we'd see it -was another ``java_library``. -Some of these dependencies are ``jar``\ s built elsewhere. - -Depending on a Jar -================== - -The `pingpong-lib` example depends on some jars. Instead of compiling -from source, Pants invokes `ivy` to fetch these jars. To reduce danger -of version conflicts, we use the :doc:`3rdparty` idiom: we keep references -to these "third-party" jars together in ``BUILD`` files under the -``3rdparty/jvm/`` directory. Thus, ``pingpong-lib`` has some dependencies like: - -.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/handler/BUILD - :start-after: java_library: - :end-before: src/java - -The ``BUILD`` files in ``3rdparty/jvm/``, e.g., -``3rdparty/jvm/com/sun/jersey/BUILD``, have targets like: - -.. literalinclude:: ../../../../../3rdparty/jvm/com/sun/jersey/BUILD - :lines: 3-4 - -Those :ref:`jar() things ` are references to public jars. - -*********************** -The Usual Commands: JVM -*********************** - -**Make sure code compiles and tests pass:** - Use the ``test`` goal with the targets you're interested in. If they are - test targets, Pants runs the tests. If they aren't test targets, Pants will - still compile them since it knows it must compile before it can test. - - ``pants goal test src/java/com/myorg/myproject tests/java/com/myorg/myproject`` - - **Run just those two troublesome tests:** (assuming they're JUnit tests; other - frameworks use other flags) - - ``pants goal test tests/java/com/myorg/myproject --test-junit-test=com.myorg.myproject.HarshTest --test-junit-test=com.myorg.myproject.HarsherTest`` - -**Packaging Binaries** - To create a jar containing just the code built by a JVM target, use the - `jar` goal:: - - pants goal jar src/java/com/myorg/myproject - - To create a "bundle" (a runnable thing and its dependencies):: - - ./pants goal bundle src/main/java/yourproject --bundle-archive=zip -v - - If your bundle is JVM, it's a zipfile which you can run by - ``unzip``\ ping it and setting your ``CLASSPATH`` to ``$BASEDIR/my_service.jar`` - (where ``$BASEDIR`` is the directory you've just unzipped). - -**Get Help** - Get the list of goals:: - - ./pants goal goals - - Get help for one goal:: - - ./pants goal help onegoal - -********* -Toolchain -********* - -Pants uses `Ivy `_ to resolve ``jar`` dependencies. -To change how Pants resolves these, use ``--ivy-*`` command-line -parameters along with ``--resolve-*`` parameters. - -Pants uses `Nailgun `_ to speed up -compiles. It's a JVM daemon that runs in the background; this saves time -for JVM startup and class loading. - -.. TODO this is a good place to mention goal ng-killall, but I don't want - folks doing it willy-nilly. Would be good to prefix the mention with - something saying symptoms when you'd want to. - -Pants uses Jmake, a dependency tracking compiler facade. - -************************** -Java7 vs Java6, Which Java -************************** - -Pants uses the java on your ``PATH`` (not ``JAVA_HOME``). -To specify a specific java version for just one pants invocation:: - - PATH=/usr/lib/jvm/java-1.7.0-openjdk7/bin:${PATH} ./pants goal ... - -If you sometimes need to compile some code in Java 6 and sometimes Java 7, -you can use a command-line arg to specify Java version:: - - --compile-javac-args='-target 7 -source 7' - -*BUT* beware: if you switch between Java versions, Pants doesn't realize when -it needs to rebuild.
If you build with version 7, change some code, then build -with version 6, Java 6 will try to understand Java 7-generated classfiles -and fail. Thus, if you've been building with one Java version and are switching -to another, you probably need to:: - - ./pants goal clean-all - -so that the next build starts from scratch. - -.. _jvm_bundles: - -**************************************** -Bundles: Deploy-able Runnable File Trees -**************************************** - -You can enjoy your web service on your development machine's ``localhost``, -but to let other people enjoy it, you probably want to copy it to a server -machine. With Pants, the easiest way to do this is to create a *bundle*: a -directory tree of ``.jar`` and helper files. If your ``jvm_app`` has -a ``bundles`` parameter, it can specify trees of files to include in the tree. - -If you want to set up a tree of static files but don't need it to be runnable, -you can define a ``jvm_app`` target with bundles (and/or resources) but whose -``jvm_binary`` has no source or main; the resulting bundle will have -the files you want (along with a couple of not-so-useful stub ``.jar`` files). - -**Generating a Bundle** - -Invoke ``./pants goal bundle`` on a JVM app or JVM binary target. - -**Contents of a Bundle** - -A bundle is basically a directory tree containing ``.jar``\s. The -``.jar`` in the top-level directory has a manifest so you can run -it with ``java -jar``:: - - $ find . - pingpong.jar - libs/ - libs/org.scala-lang-scala-library-2.9.2.jar - libs/org.sonatype.sisu.inject-cglib-2.2.1-v20090111.jar - libs/pingpong.jar - libs/src.java.com.twitter.common.examples.pingpong.pingpong-lib.jar - libs/... - -If your ``jvm_app`` has a ``bundles`` parameter, this might specify -directories of files to copy into the generated bundle. E.g., your -``jvm_app`` might have a ``bundles`` like :: - - bundles = [ bundle().add(rglobs('tools/config/*')), ] - -In this case, you'd expect files from this directory to show up in -the bundle:: - - tools/config/ - tools/config/launcher.scala - tools/config/... - -**Deploying a Bundle** - -Instead of just creating a directory tree, you can pass -``--bundle-archive`` to ``./pants goal bundle`` to generate -an archive file (a zipped tarfile or some other format) instead. -You can copy the archive somewhere, then unpack it on -the destination machine. If there are some "standard jars" that are -already on the destination machine, you might want to exclude them -from the archive. - -.. toctree:: - :maxdepth: 1 - - from_maven - diff --git a/src/python/twitter/pants/docs/Makefile b/src/python/twitter/pants/docs/Makefile deleted file mode 100644 index 52fb9949f..000000000 --- a/src/python/twitter/pants/docs/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pants.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pants.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/pants" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pants" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
- -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/src/python/twitter/pants/docs/ThriftDeps.rst b/src/python/twitter/pants/docs/ThriftDeps.rst deleted file mode 100644 index 78a425e0a..000000000 --- a/src/python/twitter/pants/docs/ThriftDeps.rst +++ /dev/null @@ -1,210 +0,0 @@ -#################################### -Using Pants with Thrift Dependencies -#################################### - -`Apache Thrift `_ is a popular framework for working with -data types and service interfaces. It uses an Interface Definition Language (IDL) to -define these types and interfaces. There are tools to generate code in "real" programming -languages from Thrift IDL files. Two programs, perhaps in different -programming languages, should be able to communicate over Thrift interfaces by using -this generated code. - -Pants knows Thrift. For each Thrift file you use, your codebase has some ``BUILD`` targets -that represent "real" code generated from IDL code. 
You can write code in your favorite -language that imports the generated code. To make the import work, your code's -``BUILD`` target depends on the appropriate Thrift ``BUILD`` target. - -*************** -Generating Code -*************** - -You have some Thrift; you want to use it from your "regular" programming -language. Normally, to make, e.g., Java code usable, you set up a -``java_library`` target with sources ``*.java`` and then depend -on that target; Thrift works similarly, but you use a different target -type that generates Java code from ``*.thrift``. -You can define Java, Python, or Scala library targets whose code is -Thrift-generated -by setting up *lang*\_thrift_library targets. (Scala is tricky; you -use a ``java_thrift_library`` with carefully-chosen parameters.) -Other targets can depend -on a *lang*\_thrift_library and their code can then import the generated code. - -Target Example -============== - -This example sets up a ``java_thrift_library`` target; its source -is Thrift; it generates Java. :: - - # Target defined in src/thrift/com/twitter/mybird/BUILD: - java_thrift_library(name='mybird', - # Specify dependencies for thrift IDL file includes. - dependencies=[ - pants('src/thrift/com/twitter/otherbird'), - ], - sources=globs('*.thrift') - ) - -Pants knows that before it compiles such a target, it must first generate Java -code from the Thrift IDL files. Users can -depend on this target like any other internal target. In this case, users would -add a dependency on ``pants('src/thrift/com/twitter/mybird')``. - -One *lang*\_thrift_library can depend on another; use this if one of your -Thrift files ``include``\s a Thrift file that lives in another target. - -Configuring -=========== - -Here are some popular ``*_thrift_library`` configurations: - -**Java** - -Use Apache Thrift compiler (the default):: - - java_thrift_library(...) - -...or Scrooge:: - - java_thrift_library( - compiler='scrooge') - -**Python** :: - - python_thrift_library(...) - -**Scala** :: - - java_thrift_library( # Yes, a "java" library to generate Scala - compiler='scrooge', # default compiler does not gen Scala; Scrooge does - language='scala', - # maybe set an rpc_style - ) - - -************** -Thrift Example -************** - -Let's look at some sample code that puts all of this together. - -* Thrift IDL code (``.thrift`` files) -* ``BUILD`` targets for the Thrift IDL code -* Java code that ``import``\s code generated from Thrift -* ``BUILD`` target dependencies that allow that ``import`` - - -Thrift IDL -========== - -Our example uses two Thrift files, one of which ``include``\s the other. -They look pretty ordinary. The include-d Thrift, -``src/thrift/com/twitter/common/examples/distance/distance.thrift``, -is regular Thrift (albeit with a ``#namespace`` comment used for Thrift -that will be compiled with both Apache Thrift and Scrooge): - -.. include:: ../../../../../src/thrift/com/twitter/common/examples/distance/distance.thrift - :code: - -The include-ing Thrift, -``src/thrift/com/twitter/common/examples/precipitation/precipitation.thrift``, -also looks ordinary. (The include path is a little tricky: it's based on -source roots. Thus, if your source tree has more than one root -``foo`` and ``bar`` and has Thrift in both, code in foo can ``include`` -code from ``bar`` without mentioning ``bar`` in the include path. -Since twitter/commons has just one source root, this trickiness doesn't -arise in our example.): - -.. 
include:: ../../../../../src/thrift/com/twitter/common/examples/precipitation/precipitation.thrift - :code: - -BUILD Targets -============= - -In a ``BUILD`` file, we use a ``java_thrift_library`` or -``python_thrift_library`` to generate "real" code from Thrift. -Our example just uses Java; -thus, the ``BUILD`` file for ``distance.thrift`` looks like - -.. include:: ../../../../../src/thrift/com/twitter/common/examples/distance/BUILD - :code: python - :start-after: cd ../precipitation) - -Notice the target type is :ref:`bdict_java_thrift_library`, and this target -staked its claim to our distance thrift IDL file. JVM library targets -(e.g.: :ref:`bdict_java_library`, :ref:`bdict_scala_library`) that depend on -this target will simply see generated code from the IDL. Since no additional -options are specified we use the defaults; however, if we need more -control over how code is generated we control that through arguments provided -by :ref:`bdict_java_thrift_library`. - -.. NOTE:: - While the name ``java_thrift_library`` might make you think it generates - Java, it can also generate other target languages via - the ``language`` parameter (scala for example). For Python code, however, - use :ref:`bdict_python_thrift_library`. - -.. TODO(travis): How to specify the repo thrift gen defaults? - -As with "regular" languages, for one target's code to include another's, -a target should have the other in its ``dependencies``. Thus, to allow -``precipitation.thrift`` to depend on ``distance.thrift``, we set up -``.../precipitation/BUILD`` like so: - -.. include:: ../../../../../src/thrift/com/twitter/common/examples/precipitation/BUILD - :code: python - :start-after: includes other thrift - -Using in "Regular" Code -======================= - -We want to use the Thrift-generated interface from "regular" code. In this Java -example, we want to ``import`` the generated code. In our Java, the ``import`` -statements use the names from the ``.thrift`` files' ``namespace``\s: - -.. include:: ../../../../../tests/java/com/twitter/common/examples/usethrift/UseThriftTest.java - :code: java - :start-after: from Java. - -As usual, for code in one target to use code from another, one target needs to -depend on the other. Thus, our Java code's target has the ``*_thrift_library`` -target whose code it uses in its dependencies: - -.. include:: ../../../../../tests/java/com/twitter/common/examples/usethrift/BUILD - :code: python - :start-after: using Thrift from Java, though. - -.. _thriftdeps_publish: - -********** -Publishing -********** - -Publishing a *lang*\_thrift_library is like -:doc:`publishing a "regular" library `. -The targets use ``provides`` parameters. 
It might look something like:: - - java_thrift_library(name='eureka-java', - sources=['eureka.thrift'], - dependencies=[ - pants('src/thrift/org/archimedes/volume:volume-java'), - ], - language='java', - provides=artifact( - org='org.archimedes', - name='eureka-java', - repo=pants('BUILD.archimedes:jar-public'), - )) - - java_thrift_library(name='eureka-scala', - sources=['eureka.thrift'], - dependencies=[ - pants('src/thrift/org/archimedes/volume:volume-scala'), - ], - compiler='scrooge', - language='scala', - provides=artifact( - org='org.archimedes', - name='eureka-scala', - repo=pants('BUILD.archimedes:jar-public'), - )) diff --git a/src/python/twitter/pants/docs/_templates/layout.html b/src/python/twitter/pants/docs/_templates/layout.html deleted file mode 100644 index 2063f2e95..000000000 --- a/src/python/twitter/pants/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ -{% extends "!layout.html" %} - -{% block extrahead %} -{{ super() }} - - - -{% endblock %} diff --git a/src/python/twitter/pants/docs/build_files.rst b/src/python/twitter/pants/docs/build_files.rst deleted file mode 100644 index c3f3be462..000000000 --- a/src/python/twitter/pants/docs/build_files.rst +++ /dev/null @@ -1,326 +0,0 @@ -BUILD files -=========== - -To tell Pants about your source code, you have files named ``BUILD`` in -directories of your source tree. These files define build-able targets -and specify source code layout. This page goes into some detail about -``BUILD`` files. If you instead wanted API docs for things defined in -``BUILD`` files (``java_library``, ``python_binary``, etc.), please see the -:doc:`BUILD Dictionary`. If you want less detail-y -information about ``BUILD`` files, :doc:`first_tutorial` is a good place -to start. - -``BUILD`` files are little Python scripts with -:doc:`some useful functions` -automatically imported. Thus, function invocations look -``like(this)``, lists look ``[like, this]``, and dictionaries (hashmaps) -look ``{"like": "this"}``; Python's syntax uses -`indentation `_ -to indicate scope; if you copy-paste some ``BUILD`` code from one place to -another, you might need to adjust the indentation. You can -`learn more about Python `_\, -but you should be able to "get along" in ``BUILD`` files if you know -functions, lists, dictionaries, and indentation. - -.. _debugging: - -Debugging a BUILD File -********************** - -If you're curious to know how Pants interprets your ``BUILD`` file, these -goals can be especially helpful: - -``list`` Did I define the targets I meant to? :: - - $ ./pants goal list src/java/com/twitter/common/examples/pingpong: - src/java/com/twitter/common/examples/pingpong/BUILD:pingpong-lib - src/java/com/twitter/common/examples/pingpong/BUILD:pingpong - -``list ::`` List **every** target to find out: -Did a change in one ``BUILD`` file break others? :: - - $ ./pants goal list :: - ...lots of output... - File "twitter/pants/targets/internal.py", line 174, in dependencies - File "twitter/pants/targets/internal.py", line 189, in _maybe_apply_deps - File "twitter/pants/targets/internal.py", line 195, in update_dependencies - File "twitter/pants/targets/pants_target.py", line 60, in resolve - KeyError: 'Failed to find target for: src/python/twitter/pants/docs/BUILD:obsolete' - $ # Instead of listing all targets, a stack trace. We found a problem - -``depmap`` Do I pull in the dependencies I expect?
-(JVM languages only) (This lists dependencies from your source; it doesn't catch -dependencies pulled in from 3rdparty ``.jars``):: - - $ ./pants goal depmap src/java/com/twitter/common/examples/pingpong:pingpong-lib - internal-src.java.com.twitter.common.examples.pingpong.pingpong-lib - internal-src.java.com.twitter.common.application.application - internal-src.java.com.twitter.common.application.modules.applauncher - internal-src.java.com.twitter.common.application.action - ...more output... - -``filedeps`` What source files do I depend on? :: - - $ ./pants goal filedeps src/java/com/twitter/common/examples/pingpong:pingpong-lib - ~archie/pantsbuild/src/java/com/twitter/common/util/Stat.java - ~archie/pantsbuild/src/java/com/twitter/common/net/http/handlers/pprof/ContentionProfileHandler.java - ...more output... - -.. _usage-default-target: - -Default Target -************** - -A build target with the same name as the ``BUILD`` file's containing -directory is the *default target*. To signal "*this* is the main useful -target here" and as a convenience to users, you should always have a default. - -Consider these libraries that use ``tugboat`` functionality. You can see that -this code depends on just the default ``tugboat`` target, and thus uses just core -functionality:: - - # depends on plain ol' tugboat - java_library(name='theodore', - dependencies=[ - pants('src/java/com/twitter/tugboat'), - ], - sources=globs('*.java'), - ) - -You can tell this code uses optional functionality because it depends on a specific -non-default target:: - - # depends on optional tugboat functionality - java_library(name='hank', - dependencies=[ - pants('src/java/com/twitter/tugboat:hispeed'), - ], - sources=globs('*.java'), - ) - -Default targets are more convenient to reference on the command line and less -verbose as build dependencies. For example, consider the following names for the -same target:: - - src/java/com/twitter/tugboat/BUILD:tugboat # absolute target name - src/java/com/twitter/tugboat/BUILD # references default target "tugboat" - src/java/com/twitter/tugboat # references default build file "BUILD" and default target "tugboat" - src/java/com/twitter/tugboat/ # trailing slashes are ignored - useful for command-line completion - -By providing a target with the default name, you simplify interacting with your target from the -command-line. This gives users a better experience using your library. -In BUILD files, dependencies are less verbose, which improves readability. - -.. _usage-avoid-rglobs: - -Avoid rglobs -************ - -Many pants targets have `sources`, a list of filenames owned by the target. -It's a common pattern to specify source files with `globs`; it's a common -*anti-pattern*, especially in targets hastily converted from Maven poms, -to specify source files with `rglobs`, the recursive version of `globs`. - -While valid, `rglobs` increases the chances of multiple targets -claiming the same sources. Consider the following refactor adding a -subpackage:: - - # 'maint' subpackage has been added. - src/java/com/twitter/tugboat/BUILD - src/java/com/twitter/tugboat/Tugboat.java - src/java/com/twitter/tugboat/maint/BUILD - src/java/com/twitter/tugboat/maint/MaintenanceLog.java - - # target src/java/com/twitter/tugboat - # Existing target now unintentionally claims the 'maint' package. - java_library(name='tugboat', - sources=rglobs('*.java'), - ) - - # target src/java/com/twitter/tugboat/maint - # Sources are claimed by multiple targets.
- java_library(name='maint', - sources=globs('*.java'), - ) - -Existing tugboat users now depend on tugboat's `maint` package, even though the dependency was -never intended. **Avoiding rglobs helps avoid surprises.** - -Using ``rglobs`` also makes it easy to fall into making circular dependencies. You want to avoid -circular dependencies. If you later want to factor your big target into a few -focused-purpose targets, you'll have to untangle those circular dependencies. - -When a target should claim files in subpackages, it's both easy and recommended to explicitly list -which subpackages should be claimed. :: - - # target src/java/com/twitter/tugboat - # Intentionally claims the 'maint' package. - java_library(name='tugboat', - sources=globs( - '*.java', - 'maint/*.java', - ), - ) - -Define Separate Targets for Interface and Implementation -******************************************************** - -If your code defines an API to be used by other modules, define a target -that builds just that interface. - -Many programs provide a plugin interface so users can provide their own functionality. For example, -a tool might define a `DataImporter` interface and let users provide -plugins for each data source. - -The simple approach of providing a single BUILD target for both interface and implementations has a -significant drawback: anyone wishing to implement the interface must depend on all dependencies -for all implementations co-published with the interface. The classpath bloats. -The risk of dependency conflicts increases greatly. For example:: - - # Less than ideal layout - interface and implementations together. - src/java/com/twitter/etl/from/BUILD - src/java/com/twitter/etl/from/DataImporter.java - src/java/com/twitter/etl/from/FileDataImporter.java - src/java/com/twitter/etl/from/HBaseDataImporter.java - - # DO NOT bundle interface and implementations - forces extra dependencies. - java_library(name='from', - dependencies=[ - pants('3rdparty/jvm/org/apache/hbase'), - ], - sources=globs('*.java'), - ) - -An improved code organization uses separate packages when many fellow travelers are introduced. For -example, if ``FileDataImporter.java`` only uses standard library classes it's entirely appropriate to -package it with the interface. HBase, however, is quite large itself, has many transitive -dependencies, and is only required by jobs that actually read from HBase. **Implementations with -many fellow travelers should be published as separate pants targets.** :: - - # Ideal repo layout - hbase as a subpackage and separate target. - src/java/com/twitter/etl/from/BUILD - src/java/com/twitter/etl/from/DataImporter.java - src/java/com/twitter/etl/from/FileDataImporter.java - src/java/com/twitter/etl/from/hbase/BUILD - src/java/com/twitter/etl/from/hbase/HBaseDataImporter.java - - # pants target src/java/com/twitter/etl/from - # Including FileDataImporter is appropriate because it has no additional dependencies. - java_library(name='from', - dependencies=[], # no extra dependencies - sources=globs('*.java'), - ) - - # pants target src/java/com/twitter/etl/from/hbase - java_library(name='hbase', - dependencies=[ - pants('3rdparty/jvm/org/apache/hbase'), - ], - sources=globs('*.java'), - ) - -Existing code using a package for both an interface and implementations should still expose the interface separately. :: - - # Less than ideal layout - interface and implementations together.
- src/java/com/twitter/etl/from/BUILD - src/java/com/twitter/etl/from/DataImporter.java - src/java/com/twitter/etl/from/FileDataImporter.java - src/java/com/twitter/etl/from/HBaseDataImporter.java - - # Default target contains interface and lightweight implementation. - java_library(name='from', - sources=[ - 'DataImporter.java', - 'FileDataImporter.java', - ], - ) - - # Implementation with heavyweight dependencies exposed separately. - java_library(name='hbase', - dependencies=[ - pants('3rdparty/jvm/org/apache/hbase'), - ], - sources=['HBaseDataImporter.java'], - ) - -Depend on API in Library Targets, Implementation in Binary Targets -****************************************************************** - -Some projects helpfully publish their API separately from implementation, especially if multiple -implementations are available. SLF4J is a widely-used example. - -Consider the following library target that depends on both `slf4j-api` and the specific implementation `slf4j-jdk14`. :: - - # Incorrect - forces a logging implementation on all library users. - scala_library(name='mylib', - dependencies=[ - pants('3rdparty:slf4j-api'), - pants('3rdparty:slf4j-jdk14'), - ], - ) - - jvm_binary(name='mybin', - dependencies=[pants(':mylib')], - ) - -Structure these dependencies so that library code depends only on the API, -and let binary targets specify the logging implementation of their choosing. :: - - # Better approach - only depend on API in a library target. - scala_library(name='mylib', - dependencies=[ - pants('3rdparty:slf4j-api'), - ], - ) - - # Bring your own API implementation in the binary. - jvm_binary(name='mybin', - dependencies=[ - pants('3rdparty:slf4j-jdk14'), - pants(':mylib'), - ], - ) - - -Which ``BUILD`` Files are "Executed"? (and how?) -************************************************ - -``BUILD`` files are little Python scripts. When you -notice a target in one ``BUILD`` file can depend on a target in another -``BUILD`` file, you might think those little Python scripts are linked -together into one big Python program, but that's not exactly what's going on. -If one ``BUILD`` file has a Python variable ``x = "Hello world"`` and another -``BUILD`` file says ``print(x)``, you'll get an error: ``x`` is not defined. - -Pants executes ``BUILD`` files separately. Commands in ``BUILD`` files define -targets and register those targets in a Pants data structure. - -Though your repo might contain many ``BUILD`` files, Pants might not execute all -of them. If you invoke:: - - ./pants goal test tests/java/com/twitter/common/examples/pingpong:pingpong - -Pants executes the source tree's top-level ``BUILD`` file (executed on every Pants run) and -``tests/java/com/twitter/common/examples/pingpong/BUILD``. The ``pingpong`` target -depends on targets from other ``BUILD`` files, so Pants executes those ``BUILD`` -files, too; it iterates over the dependency tree, executing ``BUILD`` files as it -goes. It does *not* execute ``BUILD`` files that don't contain targets in that -dependency tree. - -If there's some ``BUILD`` code that should be executed on every run, put it in -the source tree's top-level ``BUILD`` file; that gets executed on every Pants run. - - -BUILD.* files -************* - -We call them "``BUILD`` files" because they're usually named ``BUILD``, but -they can also be named ``BUILD.something``, where *something* is typically -a short nickname for an organization, e.g., ``BUILD.twitter``.
This can be -handy if your organization has some internal definitions that you need to -combine with code that you open-source, perhaps a ``credentials`` definition -that only makes sense behind your organization's firewall. - -A build target defined in ``BUILD.foo`` can't have the same ``name`` as -a build target defined in the same directory's ``BUILD`` file; they share -a namespace. diff --git a/src/python/twitter/pants/docs/conf.py b/src/python/twitter/pants/docs/conf.py deleted file mode 100644 index 5eb977612..000000000 --- a/src/python/twitter/pants/docs/conf.py +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# -# pants documentation build configuration file, created by -# sphinx-quickstart on Wed Aug 28 20:58:14 2013. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -try: - import sphinx_rtd_theme -except ImportError: - raise RuntimeError(''.join([ - 'Failed importing sphinx_rtd_theme. You likely need to:\n\n', - ' pip install sphinx_rtd_theme\n\n', - 'For more information, see https://github.com/snide/sphinx_rtd_theme\n\n', - ])) - - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('../../..')) - -pants_egg_dir = os.path.abspath('../../../../../.pants.d/python/eggs') -if not os.path.exists(pants_egg_dir): - raise RuntimeError(''.join([ - 'Pants egg dependencies do not exist! Please build pants before ', - 'generating the documentation site. ', - 'Missing dir: %s' % pants_egg_dir, - ])) -for egg in os.listdir(pants_egg_dir): - sys.path.insert(0, os.path.join(pants_egg_dir, egg)) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'pants' -copyright = u'2013, Twitter' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1' -# The full version, including alpha/beta/rc tags. -release = '0.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -#html_theme = 'pydoctheme' -html_theme = 'sphinx_rtd_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -html_logo = 'pants-logo.png' - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -html_favicon = 'pants-logo.ico' - - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. 
The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'pantsdoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'pants.tex', u'pants Documentation', - u'Twitter', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'pants', u'pants Documentation', - [u'Twitter'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'pants', u'pants Documentation', - u'Twitter', 'pants', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - -autoclass_content = 'both' diff --git a/src/python/twitter/pants/docs/credits.rst b/src/python/twitter/pants/docs/credits.rst deleted file mode 100644 index 870d3be63..000000000 --- a/src/python/twitter/pants/docs/credits.rst +++ /dev/null @@ -1,17 +0,0 @@ -####### -Credits -####### - -Pants was originally written by John Sirois. - -Major contributors in alphabetical order: - -- Alec Thomas -- Benjy Weinberger -- Bill Farner -- Brian Wickman -- David Buchfuhrer -- John Sirois -- Mark McBride - -If you are a contributor, please add your name to the list! diff --git a/src/python/twitter/pants/docs/dev.rst b/src/python/twitter/pants/docs/dev.rst deleted file mode 100644 index 1d75a38f4..000000000 --- a/src/python/twitter/pants/docs/dev.rst +++ /dev/null @@ -1,23 +0,0 @@ -###################### -Pants Developer Center -###################### - -Welcome to the pants developer center. This page is intended for developers who wish -to contribute changes to pants itself. 
In the following guides you'll learn the -workflow for making changes, get your editor setup (if that's your thing), -how to build & test pants, and finally how to get your change committed. - -.. toctree:: - :maxdepth: 1 - - howto_contribute - howto_develop - dev_tasks - intellij - internals - targets - tasks - base - release - docs - credits diff --git a/src/python/twitter/pants/docs/dev_tasks.rst b/src/python/twitter/pants/docs/dev_tasks.rst deleted file mode 100644 index 583979b29..000000000 --- a/src/python/twitter/pants/docs/dev_tasks.rst +++ /dev/null @@ -1,236 +0,0 @@ -###################### -Task Developer's Guide -###################### - -This page documents how to develop pants tasks, enabling you to teach pants -how to do things it does not already know how to do today. This page makes -more sense if you know the concepts from :doc:`internals`. - - -**************** -PageRank Example -**************** - -Let's dive in an look at a fully-functional task. -Generating reports its a common user request, as folks often want to learn -more about their builds. Target dependencies and dependees are a couple -examples. Let's explore how to generate a new report, such as running PageRank -over the targets graph. Perhaps you're just curious to see what the most -popular targets are, or maybe you want to use that information to focus -testing efforts. - -Let's remind ourselves of the simplified -`PageRank algorithm `_ :: - - PR(A) = (1-d) + d (PR(T1)/C(T1) + ... + PR(Tn)/C(Tn)) - -Where ``T`` are our targets, and dependencies are analogous to inbound links. -To perform such a calculation we simply need to walk the targets graph to -identify all target dependees, then perform the above calculation some number -of iterations, and finally display the results. - -Now let's look at PageRank. First, notice we subclass ``ConsoleTask`` which -provides conveniences when generating reports. Also, notice we don't define -an explicit constructor as there's no configuration to setup nor do we need -to register product requirements. We implement ``console_output`` as -required by ``ConsoleTask`` which parses the targets graph, calculates -pagerank, then returns the report lines. - -.. literalinclude:: pagerank.py - :lines: 1-19 - -When processing dependencies we populate the maps by walking a filtered -targets graph. It's quite common for tasks to only know how to handle -specific target types. - -.. literalinclude:: pagerank.py - :lines: 21-32 - -Now let's calculate pagerank. - -.. literalinclude:: pagerank.py - :lines: 34-41 - -And finally return the report lines. - -.. literalinclude:: pagerank.py - :lines: 43-46 - -Let's see the report in action! Here we'll look at the most popular -target dependencies. As expected, foundational jars and targets -are identified. Let's say we wanted to restrict this report to -internal or external-only targets. Well... that's your homework :) - -:: - - $ ./pants goal pagerank src/java/com/twitter/common/:: | head - 8.283371 - com.google.code.findbugs-jsr305-1.3.9 - 7.433371 - javax.inject-javax.inject-1 - 7.433371 - com.google.guava-guava-14.0.1 - 3.107220 - commons-lang-commons-lang-2.5 - 2.537617 - com.google.inject-guice-3.0 - 2.519704 - JavaLibrary(src/java/com/twitter/common/base/BUILD:base) - 2.205346 - javax.servlet-servlet-api-2.5 - 2.042915 - org.hamcrest-hamcrest-core-1.2 - 1.898855 - org.slf4j-slf4j-jdk14-1.6.1 - 1.898855 - org.slf4j-slf4j-api-1.6.1 - -As you can see, generating reports is quite simple. 
We have the opportunity to configure the task, and we implement a simple interface that processes the targets graph and generates a report on what it finds.


*************
Core Concepts
*************


Task Base Class
===============

Let's examine the Task class, which is the "abstract class" we'll need to subclass. The following simplified example highlights the most useful methods.

* :py:class:`twitter.pants.tasks.__init__.Task` - This is the base class used to implement all the stuff pants knows how to do. When instantiating a task it has the opportunity to perform setup actions, or fetch configuration info from the context or ``pants.ini``. If it needs products produced by some other task it must register interest in those products (e.g.: "I'm a java compiler, I need java sources.").

* :py:meth:`twitter.pants.tasks.__init__.Task.execute` - Do some work. This is where the task does its thing. In addition to anything stashed away during instantiation, it has access to the targets graph.

* :py:meth:`twitter.pants.tasks.__init__.Task.setup_parser` - Specify command-line flags. These are useful for functionality that may be modified per-invocation. Use ``pants.ini`` for configuration that should always be used in the repo.


Targets Graph Traversal
=======================

Many tasks involve traversing the targets graph looking for targets of particular types, and taking actions on those targets. For this reason it's important to understand how to navigate the targets graph.

The targets graph is provided to your :py:meth:`twitter.pants.tasks.__init__.Task.execute` method, and you have exclusive access to read and/or mutate it in place during execution. It's provided as the list of *active concrete targets*. *Active* means these targets are reachable by one or more ``target_roots`` specified on the command-line; *concrete* means all targets resolve to themselves, with any intermediate bags of ``dependencies`` removed.

Let's explore how to collect all targets of a particular type. ::

  def execute(self, targets):
    interesting_targets = set()
    for target in targets:
      target.walk(lambda t: interesting_targets.add(t),
                  lambda t: isinstance(t, FooLibrary))

First we need to iterate over ``targets``, which are the active concrete targets. Then we ``walk`` each concrete target, providing as the first parameter a callable that each walked target will be passed to. We also provide a callable as the optional second parameter, which filters the targets.

Traversing the targets graph is key to task development, as most tasks perform some operation on the targets "in play." We iterate over the active concrete targets, ``walk``\ ing each one with our visiting callable. By walking the targets graph you can identify exactly which targets are necessary to implement your task.


Task Installation
=================

Tasks must be installed before they are available for use. Fortunately this is a simple process. They are installed in ``goal.py`` as follows: ::

  from twitter.pants.tasks.pagerank import PageRank
  goal(name='pagerank', action=PageRank).install().with_description('PageRank the given targets.')


Task Configuration
==================

Tasks may be configured in two ways: through a configuration file checked into the repo, and via command-line flags.

The configuration file is always called ``pants.ini`` and is a standard ``ini`` file loaded with ``ConfigParser``.
During instantiation, tasks have access to a :py:class:`twitter.pants.base.config.Config` to read these settings. ::

  # Let's read mykey from the mytask pants.ini section.
  self.context.config.get('mytask', 'mykey')

Command-line flag values are also available during task instantiation. ::

  # Access a command-line flag the task defined.
  self.context.options.myflag


JVM Tool Bootstrapping
======================

If you want to integrate an existing JVM-based tool with a pants task, you need to be able to bootstrap it, i.e., fetch it and create a classpath with which to run it.

Your job as a task developer is to set up the arguments passed to your tool (e.g.: source file names to compile) and do something useful after the tool has run. For example, a code generation tool would identify targets that own IDL sources, pass those sources as arguments to the code generator, create targets of the correct type to own generated sources, and mutate the targets graph rewriting dependencies on targets owning IDL sources to point at targets that own the generated code.

Tools are specified by targets in a special BUILD.tools file in the root of your workspace. To bootstrap a tool you register its target under some key in the task's __init__ method. A special bootstrapping task will then use Ivy to resolve those targets. Later, in your execute() method, you can get the classpath for the tool using its registration key.

`Scalastyle `_ is a tool that enforces style policies for scala code. Let's examine a simplified task that uses this tool. This example has been condensed from the Scalastyle task provided by pants; please see its sources for a real-world example, including exemplary configuration and error handling (which your task will have too, right :) ::

  class Scalastyle(NailgunTask):
    def __init__(self, context):
      NailgunTask.__init__(self, context)
      self._scalastyle_config = self.context.config.get_required('scalastyle', 'config')
      self._scalastyle_bootstrap_key = 'scalastyle'
      self.register_jvm_tool(self._scalastyle_bootstrap_key, [':scalastyle'])

    def execute(self, targets):
      srcs = get_scala_sources(targets)
      cp = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._scalastyle_bootstrap_key)
      result = self.runjava(main='org.scalastyle.Main',
                            classpath=cp,
                            args=['-c', self._scalastyle_config] + srcs)
      if result != 0:
        raise TaskError('java %s ... exited non-zero (%i)' % ('org.scalastyle.Main', result))

Notice how we subclass ``NailgunTask``. This takes advantage of `Nailgun `_ to speed up any tool with a fixed classpath. Our constructor is straightforward, simply identifying the configuration file and registering the tool. Our ``execute`` magically finds all the scala sources to check (we're focusing on bootstrapping here), and fetches the classpath to use. Pay attention to the ``runjava`` line - that's where the tool classpath is used. We simply say what main to execute, with what classpath, and what program args to use. As Scalastyle is a barrier in our build, we fail the build if files do not conform to the configured policy.

Note that the above description was a slight simplification. The bootstrapping task doesn't actually invoke Ivy. Instead it creates a callback that invokes Ivy in just the right way. This callback is called lazily on demand, the first time you call get_jvm_tool_classpath() with a given key.
This lazy invocation improves performance - we only invoke Ivy when we know -we really do need to use the tool. diff --git a/src/python/twitter/pants/docs/docs.rst b/src/python/twitter/pants/docs/docs.rst deleted file mode 100644 index afc31a254..000000000 --- a/src/python/twitter/pants/docs/docs.rst +++ /dev/null @@ -1,50 +0,0 @@ -======================= -About the documentation -======================= - -Pants documentation is generated from `reStructuredText`_ sources by `Sphinx`_, -the tool Python itself is documented with. This site was modeled on -the `Python documentation`_. - -.. _reStructuredText: http://docutils.sf.net/rst.html -.. _Sphinx: http://sphinx.pocoo.org/ -.. _Python Documentation: http://docs.python.org - -------------------- -Generating the site -------------------- - -The following commands must be run from the pants repo root. - -:: - - # Sphinx must be installed locally to generate the site. - # This is only required once per machine. - easy_install -U Sphinx - - # Build pants, which triggers downloading egg dependencies - # which are required when Sphinx inspects pants sources. - cd /path/to/pants/repo - rm pants.pex - ./pants.bootstrap - # Build the BUILD dictionary data. - ./pants goal builddict # (or ./pants py src/python/twitter/pants goal builddict to try out local tweaks) - - # Doc generation commands must be run from the doc dir. - cd src/python/twitter/pants/docs - # Generate rst files. - ./gen.py - # Generate the site. - make clean html - -The site will be generated into ``_build/html``, which should not be checked -in. ``open _build/html/index.html`` to view your changes. - -------------------- -Publishing the site -------------------- - -Publishing the site simply involves making the contents of ``_build/html`` -available on a web server. - -.. TODO(travis): Update publishing section with how to publish. diff --git a/src/python/twitter/pants/docs/first_concepts.rst b/src/python/twitter/pants/docs/first_concepts.rst deleted file mode 100644 index dd7219206..000000000 --- a/src/python/twitter/pants/docs/first_concepts.rst +++ /dev/null @@ -1,149 +0,0 @@ -######################### -Pants Conceptual Overview -######################### - -Pants is a build system for software. -It works particularly well for a source code repository -that contains many distinct but interdependent pieces. - -Pants is similar to ``make``, ``maven``, ``ant``, ``gradle``, ``sbt``, etc.; -but pants pursues different design goals. Pants optimizes for - -* building multiple, dependent things from source -* building code in a variety of languages -* speed of build execution - -A Pants build "sees" only the target it's building and the transitive -dependencies of that target. -This approach works well for a big repository containing several things; -a tool that builds everything would bog down. - -***************** -Goals and Targets -***************** - -To use Pants, you must understand a few concepts: - -**Goals** are the "verbs" of Pants. - When you invoke Pants, you name - goals on the command line to say what Pants should do. For example, - to run tests, you would invoke Pants with the ``test`` goal. - To create a bundle--an archive containing a runnable binary and resource - files--you would invoke Pants with the ``bundle`` goal. - These goals are built into Pants. - -**Targets** are the "nouns" of Pants, things pants can act upon. - You annotate your source code with ``BUILD`` files to define these targets. 
For example, if your ``tests/com/twitter/mybird/`` directory contains JUnit tests, you have a ``tests/com/twitter/mybird/BUILD`` file with a ``junit_tests`` target definition. As you change your source code, you'll occasionally change the set of Targets by editing ``BUILD`` files. E.g., if you refactor some code, moving part of it to a new directory, you'll probably set up a new ``BUILD`` file with a target to build that new directory's code.

When you invoke Pants, you specify goals and targets: the actions to take, and the things to carry out those actions upon. Together, your chosen goals and targets determine what Pants produces. Invoking the ``bundle`` goal produces an archive; invoking the ``test`` goal displays test results on the console. Assuming you didn't duplicate code between folders, targets in ``tests/com/twitter/mybird/`` will have different code than those in ``tests/com/twitter/otherbird/``.

Goals can "depend" on other goals. For example, there are ``test`` and ``compile`` goals. If you invoke Pants with the ``test`` goal, Pants "knows" it must compile tests before it can run them, and does so. (This can be confusing: you can invoke the ``test`` goal on a target that isn't actually a test. You might think this would be a no-op. But since Pants knows it must compile things before it tests them, it will compile the target.)

Targets can "depend" on other targets. For example, if your ``foo`` code imports code from another target ``bar``, then ``foo`` depends on ``bar``. You specify this dependency in ``foo``\'s target definition in its ``BUILD`` file. If you invoke Pants to compile ``foo``, it "knows" it also needs to compile ``bar``, and does so.

************
Target Types
************

Each Pants build target has a *type*, such as ``java_library`` or ``python_binary``. Pants uses the type to determine how to apply goals to that target.

**Library Targets**
  To define an "importable" thing, you want a library target type, such as ``java_library`` or ``python_library``. Another target whose code imports a library target's code should list the library target in its ``dependencies``.

**Binary Targets**
  To define a "runnable" thing, you want a ``jvm_binary`` or ``python_binary`` target. A binary probably has a ``main`` and dependencies. (We encourage a binary's main to be separate from the libraries it uses to run, if any.)

**External Dependencies**
  Not everything's source code is in your repository. Your targets can depend on ``.jar``\s or ``.egg``\s from elsewhere.

**Test Targets**
  To define a collection of tests, you want a ``junit_tests``, ``scala_specs``, ``python_tests``, or ``python_test_suite`` target. The test target depends upon the targets whose code it tests. This isn't just logical, it's handy, too: you can compute dependencies to figure out what tests to run if you change some target's code.

For a list of all Target types (and other things that can go in ``BUILD`` files), see :doc:`build_dictionary`.

***************
What Pants Does
***************

When you invoke Pants, you specify goals (actions to take) and targets (things to act upon).

**Pants plans a list of goals.** You specify one or more goals on the command line. Pants knows that some goals depend on others.
If you -invoke Pants with, say, the ``test`` goal to test some code, -Pants knows it must first compile -code; before it can compile code, it needs to resolve artifact dependencies -and generate code from IDL files (e.g., Thrift). Pants thus generates a -topologically-sorted list of goals, a *build execution plan*. -This plan might look something like - - resolve-idl -> gen -> resolve -> compile -> resources -> test - -Pants does *not* consider targets while planning; -some of these goals might thus turn out to be no-ops. -E.g., Pants might plan a ``gen`` (generate code) goal even if you don't, in -fact, use any generated code. - -**Pants computes a target dependencies graph.** It starts with the target[s] -you specify on the command line. It notes which targets they depend on, which -targets those targets depend on, which targets *those* targets depend on, -and so on. - -**Pants then attempts to carry out its planned goals.** It proceeds goal by -goal. If it has a problem carrying out one goal, it does not continue to the -other goals. (Thus, if you attempt to test targets *A* and *B*, but there's a -compilation error in *A*, then Pants won't test *B* even if it compiled fine.) - -For each goal, Pants attempts to apply that goal to all targets in its computed -dependency tree[s]. It starts with depended-upon targets and works its way up to -depending targets. -Each Pants target has a type; Pants uses this to determine how to apply a -goal to that target. -In many cases, applying a goal to a target is a no-op. -In the more interesting cases, Pants does something. -It probably invokes other tools. -For example, depending on the code in the relevant targets, that "compile" -goal might invoke ``javac`` a few times and ``scalac``. - -********* -Next Step -********* - -If you're ready to give Pants a try, go to :doc:`first_tutorial`. diff --git a/src/python/twitter/pants/docs/first_tutorial.rst b/src/python/twitter/pants/docs/first_tutorial.rst deleted file mode 100644 index fe088960d..000000000 --- a/src/python/twitter/pants/docs/first_tutorial.rst +++ /dev/null @@ -1,248 +0,0 @@ -############## -First Tutorial -############## - -This tutorial walks you through some first steps with Pants build: invoking -commands, looking at the files that define build-able things. It assumes -you're already familiar with -:doc:`basic Pants build concepts `. -It assumes you're working in a source tree that already has ``pants`` -installed (such as -`twitter/commons `_). - -The first time you run ``pants``, try it without arguments. This makes -Pants "bootstrap" itself, downloading and compiling things it needs:: - - ./pants - -Now you're ready to invoke pants for more useful things. - -You invoke pants with *goals* (like ``test`` or ``bundle``) and the -*build targets* to use (like -``tests/java/com/twitter/common/examples/pingpong/BUILD:pingpong``). For example, :: - - ./pants goal test tests/java/com/twitter/common/examples/pingpong/BUILD:pingpong - -Goals (the "verbs" of Pants) produce new files from Targets (the "nouns"). - -As a code author, you define your code's `build targets` in BUILD files. -A build target might produce some output file[s]; -it might have sources and/or depend on other build targets. -There might be several BUILD files in the codebase; a target in -one can depend on a target in another. Typically, a directory's BUILD -file defines the target[s] whose sources are files in that directory. 
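For instance, such a directory's ``BUILD`` file might be shaped roughly like this (a hypothetical sketch; the paths and target name are invented, but the form matches the real examples later in this tutorial)::

  # tests/java/com/twitter/mybird/BUILD
  junit_tests(name='mybird',
    dependencies=[
      pants('src/java/com/twitter/mybird'),
    ],
    sources=globs('*Test.java'),
  )
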
**************
Invoking Pants
**************

Pants knows about goals ("verbs" like ``bundle`` and ``test``) and targets (build-able things in your source code). A typical pants command-line invocation looks like ::

  $ ./pants goal test tests/java/com/twitter/common/examples/pingpong/BUILD:pingpong

Looking at the pieces of this we see

``./pants``
  That ``./`` isn't a typo. A source tree that's been set up with Pants build has a ``pants`` executable in its top-level directory.

  The first time you run ``./pants``, it might take a while: it will probably auto-update by downloading the latest version.

``goal``
  Magic word that you use on most Pants command lines. We hope that someday we won't need to use this magic word anymore. As a rule of thumb, if you work with JVM code, you need ``goal``; if you work with Python code, you leave it out.

``test``
  ``test`` is a *goal*, a "verb" that Pants knows about. The ``test`` goal runs tests and reports results. (When working with Python code, you don't normally specify a goal on the command line. Instead Pants "figures out" what to do based on the targets. E.g., it runs tests on test targets.)

  Some goals are ``gen`` (generate code from Thrift, Antlr, Protocol Buffer), ``compile``, and ``eclipse`` (generate an Eclipse project). Pants knows that some of these goals depend on each other. E.g., in this example, before it runs tests, it must compile the code.

  You can specify more than one goal on a command line. E.g., to generate an Eclipse project *and* run tests, we could have said ``eclipse test``.

``tests/java/com/twitter/common/examples/pingpong/BUILD:pingpong``
  This is a *build target*, a "build-able" thing in your source code. To define these, you set up configuration files named ``BUILD`` in your source code file tree. (You'll see more about these later.)

  Targets can depend on other targets. E.g., a test suite target normally depends on another target containing the "library" code to test; to build the test code, Pants also must build the library code.

  You can specify more than one target on a command line. Pants will carry out its goals on all specified targets. E.g., you might use this to generate an Eclipse project based on Pingpong's source and tests.

Output
======

Pants produces files, both build outputs and intermediate files generated "along the way". These files live in directories under the top-level directory:

``dist/``
  By default, build outputs go in the ``dist/`` directory. So far, you've just run the ``test`` goal, which doesn't output a file. But if you'd instead invoked, for example, the ``bundle`` goal on a ``jvm_binary`` target, Pants would have populated this directory with many JVM ``.jar`` files.

``.pants.d/``
  Intermediate files go in the ``.pants.d/`` directory. You don't want to rely on files in there; if the Pants implementation changes, it's likely to change how it uses intermediate files. You don't want to edit/delete files in there; you may confuse Pants. But if you want to peek at some generated code, the code is probably in here somewhere.

Multiple Goals, Multiple Targets
================================

You can specify multiple goals and multiple targets. Pants applies all the goals to all the targets, skipping things that wouldn't make sense. E.g., you could

* Invoke ``eclipse`` and ``test`` goals to both generate an Eclipse project and run tests.
* Specify both test-suite and "library" targets so Eclipse sees all the source code.

In this example, it doesn't make sense to run library code as a test, so Pants doesn't do that. Since pants knows that the ``test`` goal depends on the ``compile`` goal, it *does* compile the library.

One tricky side effect of this: You can invoke a goal that doesn't make sense for a target. E.g., you can invoke the ``test`` goal on a target that's not a test suite. Pants won't complain. It knows that it should compile code before it tests it; it will happily compile the build targets. If you're not watching closely, you might see a lot of output scrolling past and think it was running tests.

Help
====

To get help about a Pants goal, invoke ``./pants goal help`` *goalname*. This lists the many command-line options you can pass for that goal. E.g., ::

  $ ./pants goal help test
  Usage: ./pants.pex goal test ([target]...)

  Options:
    -h, --help            show this help message and exit
    -t CONN_TIMEOUT, --timeout=CONN_TIMEOUT
                          Number of seconds to wait for http connections.
    ...
    --test-specs-color, --no-test-specs-color
                          [True] Emit test result with ANSI terminal color
                          codes.

  Test compiled code.

For a list of available goals, ``./pants goal goals``.

For help with things that aren't goals (most Python operations aren't goals), use ::

  ./pants help

If you want help diagnosing some strange Pants behavior, you might want verbose output. To get this, instead of just invoking ``./pants``, set some environment variables: ``PEX_VERBOSE=1 PANTS_VERBOSE=1 PYTHON_VERBOSE=1 ./pants``.

***********
BUILD Files
***********

We told pants which targets to build, but where are those targets defined? Scattered around the source tree are ``BUILD`` files. These ``BUILD`` files define targets. For example, this code snippet of ``java/com/twitter/common/examples/pingpong/main/BUILD`` defines the binary program we compiled and ran. This target is named ``main`` and is of type ``jvm_binary``:

.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/main/BUILD
   :start-after: under the License.

That ``dependencies`` is interesting. This build target depends on other build targets; the ``dependencies`` lists those other targets. To build a runnable Java binary, we need to first compile its dependencies.

The ``main`` binary has one dependency, ``pants('src/java/com/twitter/common/examples/pingpong/handler')``. That src/.../handler is the *address* of another target. Addresses look, roughly, like ``path/to/BUILD:targetname``. We can see this build target in the ``.../pingpong/handler/BUILD`` file:

.. literalinclude:: ../../../../java/com/twitter/common/examples/pingpong/handler/BUILD
   :start-after: java_library:

Pants uses dependency information to figure out how to build your code. You might find it useful for other purposes, too. For example, if you change a library's code, you might want to know which test targets depend on that library: you might want to run those tests to make sure they still work.

Anatomy of a ``BUILD`` Target
=============================

A target definition in a ``BUILD`` file looks something like ::

  scala_library(
    name='util',
    dependencies = [pants('3rdparty:commons-math'),
                    pants('3rdparty:thrift'),
                    pants('src/main/scala/com/foursquare/auth'),
                    pants(':base')],
    sources=globs('*.scala'),
  )

Here, ``scala_library`` is the target's *type*. Different target types support different arguments.
The following arguments are pretty common:

**name**
  We use a target's name to refer to the target. This argument isn't just "pretty common," it's required. You use names on the command line to specify which targets to operate on. You also use names in ``BUILD`` files when one target refers to another, e.g., in ``dependencies``.
**dependencies**
  List of things this target depends upon. If this target's code imports code that "lives" in other targets, list those targets here. If this target imports code that "lives" in ``.jar``\s/``.egg``\s from elsewhere, refer to them here.
**sources**
  List of source files. The `globs` function is handy here.

******************
The Usual Commands
******************

**Make sure code compiles and tests pass:**
  Use the ``test`` goal with the targets you're interested in. If they are test targets, Pants runs the tests. If they aren't test targets, Pants will still compile them since it knows it must compile before it can test.

  ``pants goal test src/java/com/myorg/myproject tests/java/com/myorg/myproject``

**Get Help**
  Get the list of goals::

    ./pants goal goals

  Get help for one goal::

    ./pants goal help onegoal

****
Next
****

To learn more about working with Python projects, see :doc:`python-readme`.

To learn more about working with Java projects, see :doc:`JVMProjects`.

diff --git a/src/python/twitter/pants/docs/from_maven.rst b/src/python/twitter/pants/docs/from_maven.rst
deleted file mode 100644
index 172715d3f..000000000
--- a/src/python/twitter/pants/docs/from_maven.rst
+++ /dev/null
@@ -1,79 +0,0 @@

#######################
Pants for Maven Experts
#######################

If you're used to Maven and learning Pants, you're part of a growing crowd. Here are some things that helped other folks come up to speed.

The good news is that Pants and Maven are pretty similar. Both tools use several configuration-snippet files in source code directories to specify how to build those directories' source code. Both tools use the configuration-snippet files to build up a model of your source code, then execute tasks in a lifecycle over that model. Pants targets tend to be finer-grained than Maven's projects; but if you use subprojects in Maven, Pants targets might feel familiar. Both expect code to be laid out in directories in a consistent way. If you're used to Maven's commands, many of Pants' goals will feel eerily familiar.

Pants uses Ivy to manage artifact fetching and publishing; Ivy's behavior here is pretty similar to Maven.

Three Pants features that especially confuse Maven experts as they move to pants are:

* Pants has a first-class mechanism for targets depending on other targets on the local file system
* Pants targets do not specify version numbers; versions are only determined during release
* BUILD files are python code that pants evaluates dynamically.

The first two points are a significant departure from Maven's handling of inter-project dependencies. The last point isn't necessary for understanding how to read and write most BUILD files, but is helpful to be aware of.

Folks switching a Maven-built codebase to Pants often encounter another source of confusion: they uncover lurking jar-dependency version conflicts. JVM projects can inadvertently end up relying on classpath order for correctness; any two build tools will order their classpaths differently.
-If your project depends on two versions of the same jar (all too easy to -do with transitive dependencies), then your Maven build chose one version, -but Pants might end up choosing another: Pants is -likely to generate a differently-ordered ``CLASSPATH`` than Maven did. -You can fix these, making your build configuration more robust along the way; -see :doc:`3rdparty_jvm` for advice. - -***************** -Pants Equivalents -***************** - -``exec:java`` run a binary - ``goal run`` - -``-Xdebug`` run a binary in the debugger - ``goal run --jvm-run-debug`` - -``-Dtest=com.foo.BarSpec -Dmaven.surefire.debug=true test`` run one test in the debugger - ``goal test --test-junit-debug --test-junit-test=com.foo.BarSpec`` or - ``goal test --test-specs-debug --test-specs-test=com.foo.BarSpec`` - -***************************** -Depending on Source, not Jars -***************************** - -Pants arose in an environment of a big multi-project repo. Several teams -contributed code to the same source tree; projects depended on each other. -Getting those dependencies to work with Maven was tricky. As the number of -engineers grew, it wasn't so easy to have one team ask another team to release -a new jar. Using snapshot dependencies mostly worked, but it wasn't always clear -what needed rebuilding when pulling fresh code from origin; if you weren't sure -and didn't want to investigate, the safe thing was to rebuild everything your -project depended upon. Alas, for a big tree of Scala code, that might take 45 -minutes. - -Pants has a first-class concept of "depend on whatever version of this project -is defined on disk," and caches targets based on their fingerprints (i.e. SHAs -of the contents of the files and command line options used to build the -target). When code changes (e.g., after a git pull), pants recompiles only -those targets whose source files have differing contents. - diff --git a/src/python/twitter/pants/docs/gen.py b/src/python/twitter/pants/docs/gen.py deleted file mode 100755 index c8e49d044..000000000 --- a/src/python/twitter/pants/docs/gen.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python2.7 - -import os -import os.path -import shutil - -from string import Template - - -TEMPLATE = Template('\n'.join([ - ':mod:`$name` Module', - '-----------------------------------------------', - '', - '.. automodule:: twitter.pants.$otype.$name', - ' :members:', - '', '', -])) - -def gen_targets_reference(targets_rst, targets_dir): - lines = [ - 'Targets Reference', - '=================', - '', - 'This page documents targets available as part of the pants build system.', - '', '', - ] - - for filename in sorted([filename for filename in os.listdir(targets_dir) if filename.endswith('.py')]): - if filename == '__init__.py': - continue # Skip because renaming targets causes duplicates. - root, _ = os.path.splitext(filename) - lines.append(TEMPLATE.substitute(otype='targets', name=root)) - - with open(targets_rst, 'w') as fh: - fh.write('\n'.join(lines)) - -def gen_base_reference(rst_filename, dirname): - lines = [ - 'Base Reference', - '==============', - '', - 'This page documents base classes of the pants build system.', - '', '', - ] - - for filename in sorted([filename for filename in os.listdir(dirname) if filename.endswith('.py')]): - if filename == '__init__.py': - continue # Skip because renaming targets causes duplicates. 
    root, _ = os.path.splitext(filename)
    lines.append(TEMPLATE.substitute(otype='base', name=root))

  with open(rst_filename, 'w') as fh:
    fh.write('\n'.join(lines))

def copy_builddict(docs_dir):
  for filename in ['build_dictionary.rst', 'goals_reference.rst']:
    filepath = os.path.abspath(os.path.join(docs_dir,
        '../../../../../dist/builddict', filename))
    try:
      shutil.copy(filepath, docs_dir)
    except IOError as e:
      raise IOError("Forgot to `./pants goal builddict` first? \n\n%s" % e)

def main():
  docs_dir = os.path.dirname(os.path.abspath(__file__))
  pants_src_dir = os.path.dirname(docs_dir)
  tasks_dir = os.path.join(pants_src_dir, 'tasks')

  copy_builddict(docs_dir)

  with open(os.path.join(docs_dir, 'tasks.rst'), 'w') as tasks_rst:
    tasks_rst.write('\n'.join([
      'Tasks Reference',
      '===============',
      '',
      'This page documents tasks available as part of the pants build system.',
      '', '',
    ]))
    for filename in sorted([filename for filename in os.listdir(tasks_dir) if filename.endswith('.py')]):
      root, _ = os.path.splitext(filename)
      tasks_rst.write(TEMPLATE.substitute(otype='tasks', name=root))

  targets_rst = os.path.join(docs_dir, 'targets.rst')
  gen_targets_reference(targets_rst, os.path.join(pants_src_dir, 'targets'))

  gen_base_reference(os.path.join(docs_dir, 'base.rst'), os.path.join(pants_src_dir, 'base'))

if __name__ == '__main__':
  main()

diff --git a/src/python/twitter/pants/docs/howto_contribute.rst b/src/python/twitter/pants/docs/howto_contribute.rst
deleted file mode 100644
index 67fbca0cb..000000000
--- a/src/python/twitter/pants/docs/howto_contribute.rst
+++ /dev/null
@@ -1,171 +0,0 @@

########################
Pants Contributors Guide
########################

This page documents how to make contributions to Pants. If you've :doc:`developed a change to Pants `, it passes all tests, and you'd like to "send it upstream", here's what to do:

.. TODO: Document the release process.
.. TODO: Coding Conventions section

************
Mailing List
************

Join the `pants-devel Google Group `_ to keep in touch with other pants developers.


****************
Life of a Change
****************

Let's walk through the process of making a change to pants. At a high level we'll do the following:

* Identify the change you'd like to make (e.g.: fix a bug, add a feature).
* Get the code.
* Make your change on a branch.
* Get a code review.
* Commit your change to master.

Overall it's quite straightforward. Please note - despite being hosted on GitHub - we do not use pull requests, because we prefer a linear commit history and doing code reviews with Review Board.


Identify the change
===================

It's a good idea to make sure the work you'll be embarking on is generally agreed to be in a useful direction for the project before getting too far along.

If there is a pre-existing github issue filed and un-assigned, feel free to grab it and ask any clarifying questions needed on `pants-devel `_. If there is an issue you'd like to work on that's assigned and stagnant, please ping the assignee and finally `pants-devel `_ before taking over ownership of the issue.

If you have an idea for new work that's not yet been discussed on `pants-devel `_, then start a conversation there to vet the proposal. Once the group agrees it's worth a spike, you can file a github issue and assign it to yourself.
Getting Pants Source Code
=========================

After deciding on the change you'd like to make, we'll need to get the code. ::

  git clone https://github.com/twitter/commons

After getting the code, you may want to familiarize yourself with the :doc:`internals` or :doc:`howto_develop`. We'll create a new branch off master and make our changes. ::

  git checkout -b $FEATURE_BRANCH

Run the CI Tests
================

Before posting a review, but certainly before the branch ships, you should run relevant tests. If you're not sure what those are, you can always run the same test set-up that's run on `Travis CI `_.

To run the full jvm and python suite, including a pants self-rebuild::

  ./build-support/bin/ci.sh

You can also skip certain steps, including pants bootstrapping. Just use the ``-h`` argument to get command line help on the options available.


Code Review
===========

Now that your change is complete, we'll post it for review. We use https://rbcommons.com to host code reviews and `rbt `_ (RBTools) to post them.

Posting the First Draft
-----------------------

**Before posting your first review,** you must create an account at https://rbcommons.com . To create one, visit https://rbcommons.com/account/login/ and click "Create one now." Once your account exists, mail the `pants-devel Google Group `_ and ask for your account to be added to the group.

To set up local tools, run ``./rbt status``. (``./rbt`` is a wrapper around the usual RBTools ``rbt`` script.) The first time this runs it will bootstrap and you will be asked to log in. Subsequent runs use your cached login credentials.

Post your change for review::

  ./rbt post -o -g

This will create a new review, but not yet publish it.

At the provided URL, there's a web form. To get your change reviewed, you must fill in the change description, reviewers, testing done, etc. To make sure it gets seen, add ``pants-reviews`` to the Groups field. When the review looks good, publish it. An email will be sent to the ``pants-devel`` mailing list and the reviewers will take a look. (For your first review, double-check that the mail got sent; rbcommons tries to "spoof" mail from you and it doesn't work for everybody's email address.)

Iterating
---------

If reviewers have feedback, there might be a few iterations before finally getting a Ship It. As reviewers enter feedback, the rbcommons page updates; it should also send you mail (but sometimes its "spoof" fails).

If those reviews inspire you to change some code, great. Change some code, commit locally. To update the code review with the new diff, where ``<RB_ID>`` is a review number like 123::

  ./rbt post -o -r <RB_ID>

Look over the fields in the web form; perhaps some could use updating. Press the web form's Publish button.

Commit Your Change
==================

At this point you've made a change, had it reviewed, and are ready to complete things by getting your change into master. (If you're not a committer, please ask one to do this section for you.) ::

  cd /path/to/pants/repo
  ./build-support/bin/ci.sh
  git checkout master
  git pull
  git merge --squash $FEATURE_BRANCH
  git commit -a

Here, fix up the commit message: replace ``git``'s default message ("Squashed commit of the following... ") with a summary. Finally, ::

  git push origin master

The very last step is closing the review. The change is now complete. Huzzah!
**If you're a committer committing someone else's review,** a handy way to patch a local branch with a diff from rbcommons, where ``<RB_ID>`` is a review number like 123::

  ./rbt patch -c <RB_ID>

diff --git a/src/python/twitter/pants/docs/howto_develop.rst b/src/python/twitter/pants/docs/howto_develop.rst
deleted file mode 100644
index d058bb72b..000000000
--- a/src/python/twitter/pants/docs/howto_develop.rst
+++ /dev/null
@@ -1,94 +0,0 @@

######################
Pants Developers Guide
######################

This page describes the developer workflow when changing Pants itself. (If you wanted instructions for using Pants to develop other programs, please see :doc:`first_tutorial`.)

.. Getting the source code section.


********************
Running from sources
********************

As pants is implemented in python, it can be run directly from sources. Use the ``pants.bootstrap`` script. ::

  $ PANTS_DEV=1 ./pants.bootstrap goal goals
  *** running pants in dev mode from ./src/python/twitter/pants/bin/pants_exe.py ***

Notice this invocation specifies the ``PANTS_DEV`` environment variable. When ``PANTS_DEV`` is defined, pants runs from sources.


********************
Building a Pants PEX
********************

While you can build a Pants PEX in the usual Python way, ``pants.bootstrap`` is a nice wrapper. If you call it without the ``PANTS_DEV=1`` environment described above, it

* Checks the source tree's top directory for a ``pants.pex`` and runs it if it exists. Otherwise ``pants.bootstrap``...
* Builds a new ``pants.pex``, moves it to the source tree's top directory, and runs that.

It looks something like::

  $ rm pants.pex
  $ ./pants.bootstrap goal my-new-feature
  Build operating on targets: OrderedSet([PythonBinary(src/python/twitter/pants/BUILD:pants)])
  Building PythonBinary PythonBinary(src/python/twitter/pants/BUILD:pants):
  Wrote /Users/travis/src/science/dist/pants.pex
  AMAZING NEW FEATURE PRINTS HERE
  $ ls pants.pex  # gets moved here, though originally "Wrote" to ./dist/
  pants.pex
  $ ./pants.bootstrap goal my-new-feature
  AMAZING NEW FEATURE PRINTS HERE

Using ``./pants.bootstrap`` to launch Pants thus gives a handy workflow: generate ``pants.pex``. Go back and forth between trying the generated ``pants.pex`` and fixing source code as inspired by its misbehaviors. When the fixed source code is in a consistent state, remove ``pants.pex`` so that it will get replaced on the next ``pants.bootstrap`` run.

(The ``./pants`` launcher, like ``./pants.bootstrap``, checks for a ``pants.pex`` in the source tree's top directory and uses that ``pants.pex`` if found.)

*******
Testing
*******

Running Tests
=============

Pants has many tests. There are BUILD targets to run those tests. We try to keep them passing. To make sure a change passes *all* of Pants' tests, use the ``tests/python/twitter/pants:all`` target. *Do not* use ``PANTS_DEV=1`` when running tests at this time, as that modifies ``sys.path`` in such a way that resources will not be discovered correctly. ::

  ./pants.bootstrap tests/python/twitter/pants:all

To try all the tests in a few configurations, you can run the same script that our Travis CI does. This can take a while, but it's a good idea to run it before you contribute a change or merge it to master::

  ./build-support/bin/ci.sh

Before :doc:`contributing a change ` to Pants, make sure it passes all tests.

For convenience, some other test targets enable more granular test running.
-Please see the BUILD files for details. - -.. Writing Tests section -.. Documenting section diff --git a/src/python/twitter/pants/docs/images/intellij-new-project-1.png b/src/python/twitter/pants/docs/images/intellij-new-project-1.png deleted file mode 100644 index af87d6e5e..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-new-project-1.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/images/intellij-new-project-2.png b/src/python/twitter/pants/docs/images/intellij-new-project-2.png deleted file mode 100644 index 61a48868a..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-new-project-2.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/images/intellij-project-structure-modules-dependencies.png b/src/python/twitter/pants/docs/images/intellij-project-structure-modules-dependencies.png deleted file mode 100644 index efda2967b..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-project-structure-modules-dependencies.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/images/intellij-project-structure-modules-sources.png b/src/python/twitter/pants/docs/images/intellij-project-structure-modules-sources.png deleted file mode 100644 index 2c3ce10d4..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-project-structure-modules-sources.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/images/intellij-project-structure-project.png b/src/python/twitter/pants/docs/images/intellij-project-structure-project.png deleted file mode 100644 index f41917a24..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-project-structure-project.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/images/intellij-run.png b/src/python/twitter/pants/docs/images/intellij-run.png deleted file mode 100644 index a690aca88..000000000 Binary files a/src/python/twitter/pants/docs/images/intellij-run.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/index.rst b/src/python/twitter/pants/docs/index.rst deleted file mode 100644 index ce474b5ee..000000000 --- a/src/python/twitter/pants/docs/index.rst +++ /dev/null @@ -1,81 +0,0 @@ -Welcome to the Pants build system. -================================== - -Pants is a build system for software projects in a variety of -languages. It works particularly well for a source code repository -that contains many distinct projects. - -Getting started using Pants ---------------------------- - -Tutorials and basic concepts. How to use Pants to build things. -How to configure build-able things in BUILD files. - -.. toctree:: - :maxdepth: 1 - - first_concepts - first_tutorial - target_addresses - JVMProjects - python-readme - build_files - invoking - tshoot - -Troubleshooting ---------------- - -* Something that usually works just failed? See :doc:`tshoot`. - -* Publishing can fail in more ways. See :doc:`publish`. - -Pants Patterns --------------- - -Common Pants build idioms. - -.. toctree:: - :maxdepth: 1 - - 3rdparty - ThriftDeps - publish - -Advanced Documentation ----------------------- - -.. toctree:: - :maxdepth: 1 - - setup_repo - install - - -Pants Reference Documentation ------------------------------ - -.. toctree:: - :maxdepth: 1 - - build_dictionary - goals_reference - - -Contributing to Pants ---------------------- - -How to develop Pants itself and contribute your changes. - -.. 
toctree::
   :maxdepth: 1

   dev


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

diff --git a/src/python/twitter/pants/docs/install.rst b/src/python/twitter/pants/docs/install.rst
deleted file mode 100644
index cd36f7130..000000000
--- a/src/python/twitter/pants/docs/install.rst
+++ /dev/null
@@ -1,52 +0,0 @@

################
Installing Pants
################

This page documents how to install pants. Three installation methods are described, each with different tradeoffs, allowing you to choose the right method for your particular needs.

************************
System-wide Installation
************************

The simplest installation method is installing for all users on your system. ::

  pip install twitter.pants

This installs pants (and its dependencies) into your Python distribution site-packages, making it available to all users on your system. This installation method requires root access and may cause dependency conflicts with other pip-installed applications.


*****************************
Virtualenv-based Installation
*****************************

`Virtualenv `_ is a tool for creating isolated Python environments. This is the recommended way of installing pants locally, as it does not modify the system Python libraries. ::

  $ virtualenv /tmp/pants
  $ source /tmp/pants/bin/activate
  $ pip install twitter.pants \
      --allow-external elementtree \
      --allow-unverified elementtree
  $ pants

To simplify a virtualenv-based installation, consider adding a wrapper script to your repo. See https://github.com/twitter/commons/blob/master/pants for an example.


**********************
PEX-based Installation
**********************

To support hermetic builds and not depend on a local pants installation (e.g.: CI machines may prohibit software installation), some sites fetch a pre-built `pants.pex` whose version is checked into `pants.ini`. If your site uses such an installation, please ask around for details.

.. TODO(travis): Should we provide an example fetcher script?

diff --git a/src/python/twitter/pants/docs/intellij.rst b/src/python/twitter/pants/docs/intellij.rst
deleted file mode 100644
index daa905f4e..000000000
--- a/src/python/twitter/pants/docs/intellij.rst
+++ /dev/null
@@ -1,98 +0,0 @@

####################################
Pants Development with IntelliJ IDEA
####################################

This page documents how to develop pants with `IntelliJ IDEA `_\.

.. TODO: Add a comment about using "pants goal idea" to generate project files
   when the goals reference is available. Users may mistakenly end up
   here when looking to auto-generate projects for JVM libraries.


**************
IntelliJ Setup
**************

As pants is a python application, the "Ultimate" (aka paid-for) edition of IntelliJ is required, as is the Python plugin. You'll need to:

* Download "IntelliJ IDEA Ultimate Edition" from http://www.jetbrains.com/.
* Within IntelliJ, install the Python plugin.


*************
Project Setup
*************

While pants can generate IntelliJ IDEA projects for Java/Scala targets, it cannot yet generate projects for Python targets. For this reason you must manually create the project. This section walks you through that process.

Open IntelliJ and "Create New Project".

.. image:: images/intellij-new-project-1.png

In the "New Project" window, select "Python Module" and specify the "Project name" and "Project location."
-name" and "Project location." These should be outside the pants source repo so
-they survive `clean-all` and `git clean` should those be necessary.
-
-.. image:: images/intellij-new-project-2.png
-
-Open the "File -> Project Structure" window. In the "Project", specify your
-python interpreter.
-
-.. image:: images/intellij-project-structure-project.png
-
-In the "Modules" section, make a new module and add two source roots, for the
-sources and tests. Think of source roots as what you would put on the
-PYTHONPATH - the parent directory of what you'll import. Mark the "twitter"
-directory as sources or tests.
-
-.. image:: images/intellij-project-structure-modules-sources.png
-
-Lastly, we need to add egg dependencies. From the pants source repo, fetch the
-eggs and move them to a location outside the source repo so they survive
-cleaning. ::
-
-  $ ./build-support/python/clean.sh
-  $ ./pants.bootstrap
-  $ mv .pants.d/python/eggs ~/Documents/IntelliJ/pants-eggs
-
-In the Project Structure -> Modules -> Dependencies dialog, add the eggs as
-follows.
-
-* Select the Dependencies tab.
-* Click on the "+" button.
-* Select "Jars or directories" and add the directory the eggs were moved to above.
-* Add as a "jar directory" (this is important).
-
-.. image:: images/intellij-project-structure-modules-dependencies.png
-
-Now your project setup is complete!
-
-
-**********************************
-Running Pants within IntelliJ IDEA
-**********************************
-
-In addition to editing pants code in IntelliJ, pants itself can be run and
-debugged from within the editor. This is particularly useful for fast iteration
-both within the pants repo, and running pants from sources against a different
-repo.
-
-Open the "Run -> Edit Configurations..." dialog box.
-
-* Add a new Python configuration.
-* Set the "Script" to
-  `/Users/travis/src/commons/src/python/twitter/pants/bin/pants_exe.py`
-* Set the "Script parameters" to your pants command-line args,
-  such as `goal goals`.
-* Set the "Working directory" to where you want to run pants from. Note this
-  could be an entirely different repo from where the pants source code lives.
-  This is very useful for making a pants change and testing it in the repo where
-  you use pants.
-
-.. image:: images/intellij-run.png
-
-After creating the run configuration, simply run or debug pants from within
-the editor using all the features that provides you.
diff --git a/src/python/twitter/pants/docs/internals.rst b/src/python/twitter/pants/docs/internals.rst
deleted file mode 100644
index 7142367e5..000000000
--- a/src/python/twitter/pants/docs/internals.rst
+++ /dev/null
@@ -1,248 +0,0 @@
-###############
-Pants Internals
-###############
-
-Pants is a build tool. This document describes some of its internals,
-concentrating on how to tailor Pants to your needs, such as integrating it with
-other tools.
-
-If you want to run Pants or to write BUILD files, you probably want
-the :doc:`first_concepts` instead. But if you want to support a new tool or a
-new language, read on.
-
-*********************************
-How Some Base Classes Interrelate
-*********************************
-
-**Target**
-  An addressable thing, such as a :mod:`python_binary` or :mod:`scala_tests`.
-  To add support for a new language, you probably want to add new target types
-  to represent things you can build with that language. Most Target instances can
-  depend on other Target instances. As a rule of thumb, if code in Target *A*
-  imports code in Target *B*, then *A* depends on *B*. If *A*
-  depends on *B*, then when carrying out some goal on *A*, you can be sure that
-  goal has been carried out on *B*.
-
-  TODO: if there are one or more exemplary Target classes, link to them.
-
-**goal** a.k.a. **Phase**
-  From the users' point of view, when they invoke
-  `pants test binary src/python/mach_turtle` the "goals" are `test` and `binary`,
-  the actions requested. Internally, these are implemented in terms of
-  Goals, Phases, and Tasks. Confusingly (and hopefully to be changed), the
-  things that the user calls "goals" are actually Phases. A Phase has one or
-  more Goals:
-
-**Goal**
-  The glue that binds Phases and Tasks together. A Phase has one or
-  more Goals. A Goal has a Task, which does the actual work of invoking tools.
-  A `compile` Phase, for example, could contain a Goal for Python
-  compilation, a Goal for Java compilation, etc.; each of those Goals has
-  one Task. If you want an existing Phase to do something new, instead of
-  cramming your code into an existing Goal, you probably want to define a new
-  Goal and `install` it in the existing Phase. A Goal can depend on Phases,
-  expressing that Pants should carry out those Phases before carrying out the
-  Goal. For example, the java-test Goal depends on the `compile` Phase because
-  testing uncompiled code is hard.
-
-**Task**
-  The thing that does the actual work for some Goal. It looks
-  at the environment and Targets, invokes some tool, generates things, and reports
-  success/failure. It can define command-line flags to check.
-  If you're giving Pants the ability to do something new, you're probably
-  adding a Task. See :doc:`dev_tasks`.
-
-**Context**
-  An API to the state of the world. A Task uses this to find out
-  things like the flags the user set on the command line, `pants.ini` config,
-  and the state of the build cache. The task uses `context.products` to
-  communicate results and requests for build results.
-
-*********************************
-Examining a Phase-Goal-Task Chain
-*********************************
-
-It's not so easy to figure out in your head which Goals+Tasks are
-invoked for a given command line. The dependency relationships
-between Phases, Goals, and Tasks can get complex. The `--explain`
-flag helps here. Instead of building something, it echoes a summary of
-the phases, goals, and tasks it would use to build something. For
-example, you can find out what happens on a `compile`::
-
-  $ ./pants goal compile --explain
-  Phase Execution Order:
-
-  resolve-idl -> thriftstore-codegen -> gen -> resolve -> compile
-
-  Phase [Goal->Task] Order:
-
-  resolve-idl [idl->IdlResolve, extract->Extract]
-  thriftstore-codegen [thriftstore-codegen->ThriftstoreCodeGen]
-  gen [thrift->ThriftGen, scrooge->ScroogeGen, protoc->ProtobufGen, antlr->AntlrGen]
-  resolve [ivy->IvyResolve]
-  compile [checkstyle->Checkstyle]
-  $
-
-This tells you that the `resolve` phase comes before the `compile` phase, the
-`gen` phase comes before that, etc. There is more than one Goal registered for
-the `gen` phase. In the `gen [thrift->ThriftGen,...` text, `thrift` is
-the name of a Goal and `ThriftGen` is the name of its Task class.
-
-********************
-Defining a Goal/Task
-********************
-
-Defining a new Goal (meant here in the glue-that-binds-Phases-and-Tasks sense)
-tells Pants of some new action it can take. This might
-be a new phase or adding new meaning for an old phase (e.g., telling
-the "gen" code-generation phase about some new way to generate code).
-
-A task can mutate the target graph. If it, say, generates some Java code
-from some other language, it can create a Java target and make things that
-depended on an ``otherlang_gen`` target instead depend on the created
-Java target.
-
-.. Where to Put it
-   ===============
-   TODO: this
-
-Basic Task
-==========
-
-A Goal's ``Task`` is the class that does the "work".
-When you define a new ``Task`` class, you'll want to have at least:
-
-* ``setup_parser(cls, option_group, args, mkflag)``
-  Defines command-line flags the user can use.
-* ``__init__(self, context)``
-  The ``context`` encapsulates the outside world.
-  It has values of command-line flags for this invocation;
-  an API to see configuration from :ref:`pants.ini `;
-  a way to get files that are products of other build steps.
-* ``execute(self, targets)``
-  Actually do something; perhaps generate some products from some sources.
-
-There are some base ``Task`` classes to help you get started. E.g., if your
-goal just outputs information to the console, subclass ``ConsoleTask``.
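-
-For a flavor of the shape such a task takes, here is a sketch only (modeled
-on the ``pagerank.py`` example elsewhere in these docs, not a real built-in
-task)::
-
-  from twitter.pants.tasks.console_task import ConsoleTask
-
-  class ListTargets(ConsoleTask):
-    """Illustration: print one line per target."""
-
-    def console_output(self, targets):
-      # ConsoleTask subclasses yield their output lines rather than printing.
-      for target in targets:
-        yield str(target)
-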
-Group
-=====
-
-A few ``Goal``\s have group parameters. Specifically, the JVM compile goals::
-
-  goal(name='scalac',
-       action=ScalaCompile,
-       group=group('jvm', is_scala),
-       dependencies=['gen', 'resolve']).install('compile').with_description(
-         'Compile both generated and checked in code.'
-       )
-  goal(name='apt',
-       action=JavaCompile,
-       group=group('jvm', is_apt),
-       dependencies=['gen', 'resolve']).install('compile')
-  goal(name='javac',
-       action=JavaCompile,
-       group=group('jvm', is_java),
-       dependencies=['gen', 'resolve']).install('compile')
-
-A goal normally operates on one target at a time.
-But some tools, e.g., ``javac``, can operate on many inputs with one
-invocation. Such tools might be more efficient used that way.
-Perhaps there's a lot of overhead starting up the tool, but it takes
-about as long to compile 10 source files as to compile one.
-A ``goal`` with a ``group`` will try to operate on more than one target at
-a time.
-
-***********
-Code Layout
-***********
-
-`./ `_
-  Top-level directory
-  **`__init__.py`** Among other things, defines the symbols
-  visible in `BUILD` files. If you add a
-  Target type, this file should import it.
-  **`BUILD`** Dogfood and/or recursion.
-  **`*.md`** Docs too important for `docs/`.
-
-`base `_
-  Defines `Target` and other fundamental pieces/base classes.
-  As a rule of thumb, code in ``base`` shouldn't ``import`` anything in
-  non-base Pants; but many things in non-base Pants ``import`` from ``base``.
-  If you're editing code in ``base`` and find yourself referring to
-  the JVM (or other target-language-specific things), you're probably editing
-  the wrong thing and want to look further up the inheritance tree.
-
-`bin `_
-  The "main" of Pants itself lives here.
-
-`commands `_
-  Before we had goals we had commands, and they lived here.
-  **goal.py** Many Goals and Phases are defined here.
-
-`docs `_
-  Documentation. The source of this very document you're reading now lives here.
-
-`goal `_
-  The source of `Context`, `Goal`, and `Phase` (some
-  important classes) lives here. If you extend pants to work with other
-  tools/languages, hopefully you won't need to edit these; but you'll
-  probably look at them to see the flow of control.
-
-`java `_
-  (TODO OMG bluffing) Utility classes useful to many things that work
-  with Java code.
-
-`python `_
-  (TODO OMG bluffing) Utility classes useful to many things that work
-  with Python code.
-
-`targets `_
-  Source of the Target classes; e.g., the code behind `jvm_binary`
-  lives here. If you define a new Target type, add its code here.
-
-`tasks `_
-  Source of the Task classes. E.g., `junit_run`, the code that
-  invokes JUnit if someone tests a `java_tests` target.
-
-`tests/.../pants `_
-  Tests for Pants. These tend to be ``python_tests`` exercising Pants functions.
-  ``twitter.pants.base_build_root_test.BaseBuildRootTest`` is a very handy
-  class; it has methods to set up and tear down little source trees with
-  ``BUILD`` files.
-
-.. *********
-   .pants.d/
-   *********
-
-   TODO: this.
-
-.. ******************
-   BUILD file parsing
-   ******************
-
-   TODO: this.
-
-.. **************
-   ivy resolution
-   **************
-
-   TODO: this.
-
-.. *******
-   hashing
-   *******
-
-   TODO: this.
-
-.. *************
-   task batching
-   *************
-
-   TODO: this.
-
-.. ***************
-   product mapping
-   ***************
-
-   TODO: this.
diff --git a/src/python/twitter/pants/docs/invoking.rst b/src/python/twitter/pants/docs/invoking.rst
deleted file mode 100644
index 4263656b6..000000000
--- a/src/python/twitter/pants/docs/invoking.rst
+++ /dev/null
@@ -1,63 +0,0 @@
-Invoking Pants Build
-====================
-
-This page discusses some advanced features of invoking the Pants build tool on the command
-line. We assume you already know the :doc:`basic command-line structure `,
-something like ::
-
-  ./pants goal test bundle path/to/target path/to/another/target
-
-For a full description of specifying target addresses, see **TODO**
-
-rc files
---------
-
-If there's a command line flag that you always (or nearly always) use,
-you might set up a configuration file to ease this. A typical Pants
-installation looks for machine-specific settings in ``/etc/pantsrc`` and
-personal settings in ``~/.pants.new.rc``, with personal settings overriding
-machine-specific settings.
-
-For example, suppose that every time you invoke Pants to compile Java code, you
-pass flags ``--compile-javac-args=-source --compile-javac-args=7
---compile-javac-args=-target --compile-javac-args=7``.
-Instead of passing them on the command line each time, you could set up a
-``~/.pants.new.rc`` file::
-
-  [javac]
-  options:
-    --compile-javac-args=-source --compile-javac-args=7
-    --compile-javac-args=-target --compile-javac-args=7
-
-With this configuration, Pants will have these flags on by
-default.
-
-``--compile-javac-*`` flags go in the ``[javac]`` section;
-generally, ``--compile``-*foo*\-* flags go in the ``[foo]`` section.
-``--test-junit-*`` flags go in the ``[junit]`` section;
-generally, ``--test``-*bar*\-* flags go in the ``[bar]`` section.
-``--idea-*`` flags go in the ``[idea]`` section.
-
-If you know the Pants internals well enough to know the name of a
-``Task`` class, you can use that class' name as a category to set
-command-line options affecting it::
-
-  [twitter.pants.tasks.nailgun_task.NailgunTask]
-  # Don't spawn compilation daemons on this shared build server
-  options: --no-ng-daemons
-
-Although ``/etc/pantsrc`` and ``~/.pants.new.rc`` are the typical places for
-this configuration, you can check :ref:`pants.ini `
-to find out what your source tree uses. ::
-
-  # excerpt from pants.ini
-  [DEFAULT]
-  # Look for these rcfiles - they need not exist on the system
-  rcfiles: ['/etc/pantsrc', '~/.pants.new.rc']
-
-In this list, later files override earlier ones.
-
-These files are formatted as
-`Python config files `_,
-parsed by `ConfigParser `_.
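-
-If you're curious how that layering behaves, here is a sketch (plain
-``ConfigParser`` usage for illustration; this is not Pants' own code)::
-
-  import os
-  from ConfigParser import SafeConfigParser  # Python 2, matching Pants of this era
-
-  parser = SafeConfigParser()
-  # read() takes a list of paths; values from files later in the list
-  # override values from earlier ones, mirroring the rcfiles ordering above.
-  parser.read(['/etc/pantsrc', os.path.expanduser('~/.pants.new.rc')])
-  print(parser.get('javac', 'options'))
-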
- diff --git a/src/python/twitter/pants/docs/pagerank.py b/src/python/twitter/pants/docs/pagerank.py deleted file mode 100644 index 7411961ae..000000000 --- a/src/python/twitter/pants/docs/pagerank.py +++ /dev/null @@ -1,46 +0,0 @@ -from __future__ import division - -from collections import defaultdict - -from twitter.pants import is_concrete # XXX This target doesn't exist -from twitter.pants.tasks.console_task import ConsoleTask - - -class PageRank(ConsoleTask): - """Measure how "depended-upon" the targets are.""" - - def console_output(self, targets): - dependencies_by_target = defaultdict(set) - dependees_by_target = defaultdict(set) - pagerank_by_target = defaultdict(lambda: 1.0) - - self._calc_deps(targets, dependencies_by_target, dependees_by_target) - self._pagerank(dependees_by_target, dependencies_by_target, pagerank_by_target) - return self._report(pagerank_by_target) - - def _calc_deps(self, targets, dependencies_by_target, dependees_by_target): - for target in filter(lambda x: hasattr(x, "dependencies"), targets): - if not dependencies_by_target.has_key(target): - for dependency in target.dependencies: - for resolved in dependency.resolve(): - if is_concrete(resolved): - dependencies_by_target[target].add(resolved) - - for dependency in target.dependencies: - for resolved in dependency.resolve(): - if is_concrete(resolved): - dependees_by_target[resolved].add(target) - - def _pagerank(self, dependees_by_target, dependencies_by_target, pagerank_by_target): - """Calculate PageRank.""" - d = 0.85 - for x in range(0, 100): - for target, dependees in dependees_by_target.iteritems(): - contributions = map( - lambda t: pagerank_by_target[t] / len(dependencies_by_target[t]), dependees) - pagerank_by_target[target] = (1-d) + d * sum(contributions) - - def _report(self, pagerank_by_target): - """Yield the report lines.""" - for target in sorted(pagerank_by_target, key=pagerank_by_target.get, reverse=True): - yield '%f - %s' % (pagerank_by_target[target], target) diff --git a/src/python/twitter/pants/docs/pants-logo.ico b/src/python/twitter/pants/docs/pants-logo.ico deleted file mode 100644 index 09060b630..000000000 Binary files a/src/python/twitter/pants/docs/pants-logo.ico and /dev/null differ diff --git a/src/python/twitter/pants/docs/pants-logo.png b/src/python/twitter/pants/docs/pants-logo.png deleted file mode 100644 index 11a43fbc1..000000000 Binary files a/src/python/twitter/pants/docs/pants-logo.png and /dev/null differ diff --git a/src/python/twitter/pants/docs/pants_essentials_techtalk.asc b/src/python/twitter/pants/docs/pants_essentials_techtalk.asc deleted file mode 100644 index 30f6cff21..000000000 --- a/src/python/twitter/pants/docs/pants_essentials_techtalk.asc +++ /dev/null @@ -1,317 +0,0 @@ -= The Pants Build System - -//// -Use https://github.com/twitter/cdk to generate this deck. -//// - -== Overview - -Today we're learning the _essentials_. Practical knowledge that will help you _get stuff done_. - -**** -* Assumption is students already knows how to write code and have used build systems - just not pants. -* Quick review of what build systems do, and what they don't do. -* High-level overview of how pants works. -* Learn how to use and configure pants. -* Demo of using pants. -**** - - -== 10,000-Foot View of Pants - -[options="incremental"] -* Build system that _scales_. -* Put all code in a single repo; pick & choose exactly what you need. -* Simplifies dependency management by _eliminating versions for internal sources_. 
-* BUILD files define +Targets+ - "nouns" describing things that can be built. -* Pants provides +Tasks+ - "verbs" defining things the system can do. -* Use the CLI to say what tasks should be applied to what targets. - - -== Targets Graph - "nouns" of the build - -["graphviz"] ----- -include::targets_graph.dot['Example Targets Graph'] ----- - -**** -* Resolvers typically deal with the dependency graph for you. -* Pants gives you that control for internal sources. -* Targets describe things that can be built. -* They're typed. -* They typically have dependencies. -* Each supported language has targets mapped to conventions of that community. -**** - -== Tasks Graph - "verbs" of the build - -["graphviz"] ----- -include::tasks_graph.dot['Example Tasks Graph'] ----- - -**** -* Tasks are grouped into goals. -* Tasks and goals have dependencies. -* Tasks are topologically sorted. -* "On the way in" tasks register stuff they're interested in. -* "On the way out" tasks produce stuff other tasks are interested in. -* Only does the work actually needed. -**** - -== Let's Walk Through a Build - -["graphviz"] ----- -include::tasks_graph_tsort.dot['Sorted Tasks Graphs'] ----- - -["graphviz"] ----- -include::targets_graph.dot['Targets Graph'] ----- - -== [notransition]#Let's Walk Through a Build# - -["graphviz"] ----- -include::tasks_graph_tsort.dot['Sorted Tasks Graphs'] ----- - -["graphviz"] ----- -include::targets_graph_gen.dot['Targets Graph'] ----- - -**** -* Note how the `scrooge` task will walk the targets graph looking for `thrift_library` targets. -* Uses scrooge to generate sources from the thrift IDL. -* Mutates the graph adding a new library that owns generated sources. -* Rewrites dependencies to the synthetic library. -**** - -== Anatomy of a Pants Command-Line - -Pants is exclusively controlled via its command-line interface. Learn to use it well. - ----- -./pants goal goalname [goalname ...] target [target ...] [args ...] ----- - ----- -./pants goal goals ----- - ----- -./pants goal help mygoal ----- - -**** -* Command lines specify one or more goals, one or more targets, and options. -**** - -== Target Addresses - -Addresses are the BUILD file + target name. You can specify them several ways. - ----- -path/to/mybird/BUILD:mybird -path/to/mybird:mybird -path/to/mybird/:mybird -path/to/mybird/ -path/to/mybird -:mybird (relative to the current build file) ----- - ----- -path/to/mybird: -path/to/mybird:: ----- - -**** -* Top addresses are explicit. -* Bottom addresses are expansive. -**** - -== Targets - -Here's a simple library target. You might find this in +src/java/com/twitter/mybird/BUILD+. - -[source,python] ----- -java_library(name='mybird', - dependencies=[ - pants('3rdparty:guava'), - ], - sources=globs('*.java'), -) ----- - -What _exactly_ can a +java_library+ look like? Check the BUILD Dictionary. - -== Common JVM Targets - -When developing on the JVM, the following target types are most frequently used: - -* +java_library+ - A collection of Java code. -* +scala_library+ - A collection of Scala code. -* +junit_tests+ - Tests a JVM library with JUnit. -* +scala_specs+ - Tests a Scala library. -* +jar+ - A jar dependency. -* +resources+ - A set of resource files to be embedded in a library or binary. -* +jvm_binary+ - A JVM binary optionally identifying a launcher main class. -* +jvm_app+ - A jvm app package consisting of a binary plus additional bundles of files. 
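-
-Putting a few of these together (a sketch, not from a real repo; the target
-names here are illustrative):
-
-[source,python]
-----
-java_library(name='mybird',
-  dependencies=[pants('3rdparty:guava')],
-  sources=globs('*.java'),
-)
-
-junit_tests(name='mybird-test',
-  dependencies=[pants(':mybird')],
-  sources=globs('*Test.java'),
-)
-----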
- - -== Tasks You'll Use Every Day - -[options="incremental"] -* +./pants goal compile+ -* +./pants goal test+ -* +./pants goal bundle+ -* +./pants goal publish+ -* +./pants goal idea+ - -== Pants Patterns - -* 3rdparty Dependencies -* Thrift -* Bundles - -**** -* Talk about common pants patterns users will find themselves doing regularly. -* Patterns (or "recipes") are the best way we know to use a number of primitives -together to best achieve a specific outcome. -**** - -== External Dependencies - -You probably use code from outside the repo. - ----- -# 3rdparty/jvm/com/twitter/mybird:mybird -dependencies(name='mybird', - dependencies=[ - jar(org='com.twitter.mybird', name='mybird', rev='1.0.0') - ] -) ----- - -**** -* Recommended target addresses 3rdparty/$LANG/$ORG:$NAME -* All internal sources use same external library version. Eliminate diamond dependency issues. -* Aids discoverability. -* Allows specifying OWNERS files. -* Per-language conventions within language subdir. Use JVM for Java/Scala. -**** - - -== External (Diamond) Dependencies - -What should happen here? Avoid this by all internal sources using the same 3rdparty library version. - -["graphviz"] ----- -digraph G { - java_library_a [label="foobird"]; - java_library_b [label="barbird"]; - java_library_c [label="bazbird"]; - java_library_a -> java_library_b -> guava_12; - java_library_a -> java_library_c -> guava_14; -} ----- - -**** -* Ask what version of Guava should be on the foobird classpath? -* Note most likely the highest version number will end up on the classpath, which could lead to runtime errors due to missing classes, methods, etc. -* Note pants will not solve this for folks, but it does help simplify dependency management. -* Each `jar` should be defined once for the whole repo, and everyone uses it. -**** - - -== Pants and Thrift - -IDL files live in your repo: - ----- -java_thrift_library(name='mybird-scala', - sources=globs('*.thrift'), - language='scala', - rpc_style='finagle', -) ----- - -IDL files from another repo: - ----- -idl_jar_thrift_library(name='mybird-scala', - thrift_jar=thrift_jar('com.twitter', 'mybird-thrift-scala-only', '1.0'), - language='scala', - rpc_style='finagle', -) ----- - - -== Pants and Thrift - -Need to codegen your IDL into a bunch of languages? -There's a helper for that. - ----- -create_thrift_libraries( - base_name='mybird', - sources=['mybird.thrift'], - provides_java_name='mybird-thrift-java', - provides_scala_name='mybird-thrift-scala', -) ----- - - -== Deploy Bundles, jar-only case - -Pants bundles have been suitable for launching on Aurora since day one. - ----- -# in mybird/BUILD -jvm_binary(name='mybird-bin', - main='com.twitter.mybird.Main', - dependencies=[pants('src/java/com/twitter/mybird')], -) ----- - ----- -./pants goal bundle mybird:mybird-bin --bundle-archive=zip ----- - -The raw bundle and zip are created in the +dist+ dir. - - -== Deploy Bundles, jar and "extra stuff" case - -Arbitrary files can be included in the bundle. - ----- -# in mybird/BUILD -jvm_app(name='mybird-app', - binary=pants(':mybird-bin'), - bundles=[bundle(relative_to='common').add(rglobs('common/*')), -]) ----- - ----- -./pants goal bundle mybird:mybird-app --bundle-archive=zip ----- - -The raw bundle and zip are created in the +dist+ dir. - -== Getting Help - -* Tasks online help: +./pants goal goals+ -* Targets online help: +./pants goal targets+ -* Dash H is your friend: +./pants goal ... -h+ -* See the BUILD Dictionary. 
-
-****
-Call special attention to the BUILD Dictionary, showing users how to find it and looking at it for a couple minutes.
-****
diff --git a/src/python/twitter/pants/docs/publish.rst b/src/python/twitter/pants/docs/publish.rst
deleted file mode 100644
index 881219b29..000000000
--- a/src/python/twitter/pants/docs/publish.rst
+++ /dev/null
@@ -1,263 +0,0 @@
-####################
-Publishing Artifacts
-####################
-
-A library owner/maintainer can *publish* versioned artifacts that
-folks elsewhere can fetch and import. In the JVM world, these are jars
-(with poms)
-on a server that Maven (or Ivy) looks for. (In the Python world, these are
-eggs; but as of late 2013, Pants doesn't help publish these.)
-
-.. WARNING::
-  This page describes ``pants goal publish``. Alas, this goal is not, in fact,
-  built into Pants *yet*. If you work in an organization with a Pants guru,
-  you might have a ``publish`` goal. Otherwise, please consider this a sneak
-  preview of an upcoming feature.
-
-This page talks about publishing artifacts. We assume you already know enough
-about Pants to *build* the library that underlies an artifact.
-To *use* an artifact that has already been published from some other
-source tree, see :doc:`3rdparty`. (To use an artifact that has been
-published from *your own* source tree... don't do that. Instead, depend on
-the ``*_library`` build target.)
-
-It's tricky to keep track of versions, label artifacts with versions, and
-upload those artifacts. Pants eases these tasks.
-
-A library's build target specifies where to publish it.
-For example, a :ref:`bdict_java_library` build target can have a ``provides``
-parameter of type :ref:`bdict_artifact`. The ``artifact`` specifies an
-"address" similar to what you might see in ``3rdparty`` ``BUILD`` files:
-an artifact's location. It does *not* specify a version; that changes
-each time you publish.
-
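-For example, a ``java_library`` might carry a ``provides`` like the following
-sketch (the ``repo`` address is a made-up placeholder; real repo targets are
-site-specific)::
-
-  java_library(name='base',
-    provides=artifact(org='com.twitter.common',
-                      name='base',
-                      repo=pants('build-support/ivy:public')),  # illustrative
-    sources=globs('*.java'),
-  )
-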
- (For JVM artifacts, this tool is Ivy.) - -* Commits pushdb. - -Things can go wrong; you can recover: - -* Uploading the artifact can fail for reasons you might expect for an upload: - authentication problems, transient connection problems, etc. - -* Uploading the artifact can fail for another reason: that artifact+version - already exists on the server. *In theory*, this shouldn't happen: Pants - bumps the version it found in the pushdb. But in practice, this can happen. :: - - Exception in thread "main" java.io.IOException: destination file exists and overwrite == false - ... - FAILURE: java -jar .../ivy/lib/ivy-2.2.0.jar ... exited non-zero (1) 'failed to push com.twitter#archimedes_common;0.0.42' - - It's usually a sign that something strange happened in a *previous* publish. - Perhaps someone published an artifact "by hand" instead of using Pants. - Perhaps someone used Pants to publish an artifact but it failed to update - the pushdb in source control. E.g., merge conflicts can happen, and folks - don't always recover from them correctly. - - In this situation, you probably want to pass ``--publish-override`` to - specify a version to use instead of the automatically-computed - already-existing version. Choose a version that's not already on the server. - Pants records this version in the pushdb, so hopefully the next - publisher won't have the same problem. - - Perhaps you are "racing" a colleague and just lost the race: - they published an artifact with that name+version. - - In this situation, you probably want to refresh your source tree - (``git pull`` or equivalent) to get the latest version of the pushdb - and try again. - -* Pushing the pushdb to origin can fail, even though artifact-uploading succeeded. Perhaps you - were publishing at about the same time someone else was; you might get a - merge conflict when trying to push. - - (There's a temptation to ignore this error: the artifact uploaded OK; nobody - expects a merge conflict when publishing. Alas, ignoring the error now means - that your *next* publish will probably fail, since Pants has lost track of - the current version number.) - - :ref:`Troubleshoot a Failed Push to Origin ` - -****** -How To -****** - -* Does your organization enforce a special branch for publishing? (E.g., perhaps - publishing is only allowed on the ``master`` branch.) If so, be on that branch - with no changes. - -* Consider trying a local publish first. This lets you test the to-be-published - artifact. E.g., to test with Maven configured to use ``~/.m2/repository`` - as a local repo, you could publish to that repo with - ``./pants goal publish --no-publish-dryrun --publish-local=~/.m2/repository`` - -* Start the publish: ``./pants goal publish --no-publish-dryrun [target]`` - Don't wander off; Pants will ask for confirmation as it goes - (making sure you aren't publishing artifact[s] you didn't mean to). - -******************************* -Restricting to "Release Branch" -******************************* - -Your organization might have a notion of a special "release branch": you want -all publishing to happen on this source control branch, which you maintain -extra-carefully. You can -:ref:`configure your repo ` -so the ``publish`` goal only allows ``publish``-ing from this special branch. - -*************** -Troubleshooting -*************** - -Sometimes publishing doesn't do what you want. The fix usually involves -publishing again, perhaps passing ``--publish-override``, -``--publish-force``, and/or ``--publish-restart-at``. 
The following
-are some usual symptoms/questions:
-
-.. _publish-version-exists:
-
-Versioned Artifact Already Exists
-=================================
-
-Pants attempted to compute the new version number to use based on the
-contents of the pushdb; but apparently, someone previously published
-that version of the artifact without updating the pushdb.
-
-Examine the publish repo to find out what version number you actually
-want to use.
-
-Try publishing again, but pass ``--publish-override`` to specify the
-version number to use instead of incrementing the version number from
-the pushdb.
-
-.. _publish-pushdb-push:
-
-Failed to Push to Origin
-========================
-
-You might successfully publish your artifact but then fail to push
-your pushdb change to origin::
-
-  To https://git.archimedes.org/owls
-   ! [rejected]        master -> master (non-fast-forward)
-  error: failed to push some refs to 'https://git.archimedes.org/owls'
-  hint: Updates were rejected because the tip of your current branch is behind
-  hint: its remote counterpart. Merge the remote changes (e.g. 'git pull')
-  hint: before pushing again.
-  hint: See the 'Note about fast-forwards' in 'git push --help' for details.
-
-For some reason, git couldn't merge your branch (with the pushdb change)
-to the branch on origin.
-This might happen, for example, if you were "racing" someone else; they
-perhaps pushed their change to master's pushdb before you could.
-But it can also happen for other reasons: any local change that can't
-be merged to the branch on origin.
-
-You are now in a bad state: you've pushed some artifacts, but the pushdb
-doesn't "remember" them.
-
-The exact steps to fix things up depend on what was happening on your
-local repo. Some good things to consider:
-
-  * Update your source tree, discarding local changes.
-    In git, this might be a ``reset origin/master``
-    (if ``master`` is your release branch) and a ``pull``.
-  * Look at the pushdb's source control history to see if someone made a conflicting
-    publish. If so, contact them.
-    (You're about to try to publish again; if they also encountered
-    problems, they are probably also about to try again. You might want to
-    coordinate and take turns.)
-  * Try publishing again.
-    Since you uploaded new artifact versions but the pushdb doesn't "remember"
-    that, you might run into errors with this publish, as ivy hits
-    Versioned Artifact Already Exists.
-    Use ``--publish-override`` to set version numbers for these.
-
-.. _publish-no-provides:
-
-Does not provide an artifact
-============================
-
-Pants gets the coordinates at which to publish a target from the target's
-``provides`` parameter. Thus, if you try to publish a target with no
-``provides``, Pants doesn't know what to do. It stops::
-
-  FAILURE: The following errors must be resolved to publish.
-    Cannot publish src/java/com/twitter/common/base/BUILD:base due to:
-      src/java/com/twitter/common/quantity/BUILD:quantity
-        Does not provide an artifact.
-
-The solution is to add a ``provides`` to the target that lacks one.
-
-Remember, to publish a target, the target's dependencies must also be published.
-If any of those dependencies have changed since their last publish, Pants
-tries to publish them before publishing the target you specify. Thus, you
-might need to add a ``provides`` to one or more of these.
-
-**********************************************
-Want to Publish Something? Publish Many Things
-**********************************************
-
-If you publish a library that depends on others, you want to
-publish them together.
-Conversely, if you publish a low-level library that other libraries depend upon,
-you want to publish those together, too.
-Thus, if you want to publish one thing, you may find you should publish
-many things.
-Pants eases *part* of this: if you publish a library, it automatically
-prompts you to also publish depended-upon libraries whose source code changed.
-However, Pants does *not*
-automatically publish dependees of a depended-upon library.
-If you know you're about to publish a low-level library
-(perhaps via a "dry run" publish),
-you can use Pants' ``goal dependees`` to find other things to publish.
-
-For example, suppose your new library ``high-level`` depends on another
-library, ``util``.
-If you tested ``high-level`` with ``util`` version 1.2, you want ``util``
-1.2 published and available to ``high-level`` consumers.
-Once you publish ``util`` version 1.2, people might use it.
-If you previously published your ``another-high-level`` library
-depending on ``util`` version 1.1, ``another-high-level`` consumers
-(who might also consume ``high-level``) might pick up version 1.2 and be sad
-to find out that ``another-high-level`` doesn't work with the new ``util``.
-
-In this example, when you publish ``high-level``, Pants knows to also publish
-``util``.
-If Pants publishes ``util``, it does *not* automatically try to publish
-``high-level`` or ``another-high-level``.
-
diff --git a/src/python/twitter/pants/docs/publish_via_git.sh b/src/python/twitter/pants/docs/publish_via_git.sh
deleted file mode 100755
index 62ab410ac..000000000
--- a/src/python/twitter/pants/docs/publish_via_git.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-# Usage:
-#   sh publish_via_git.sh git@github.com:pantsbuild/pantsbuild.github.io.git
-#
-# or, to publish to a subdir under there:
-#
-#   sh publish_via_git.sh git@github.com:pantsbuild/pantsbuild.github.io.git subdir
-#
-# Assuming you've already generated web content in _build/html/ ,
-# "publish" that content to a git repo. This is meant to work with
-# github pages: put web content into a git repo, push to origin,
-# a while later that content is served up on the web.
-#
-# We don't clear out old site contents. We just pile our stuff on top.
-# If a file "went away", this won't remove it.
-
-root=$(
-  cd $(dirname $0)
-  /bin/pwd
-)
-
-repo_url=$1
-path_within_url=$2
-out=/tmp/pantsdoc.$$
-
-# When done, clean up tmp dir:
-trap "rm -fr $out" 0 1 2
-
-mkdir -p $out
-cd $out
-git clone $repo_url
-cd `ls`
-cp -R $root/_build/html/* ./$path_within_url
-git add .
-git commit -am"publish by $USER"
-git push origin master
diff --git a/src/python/twitter/pants/docs/python-readme.rst b/src/python/twitter/pants/docs/python-readme.rst
deleted file mode 100644
index 7437d8333..000000000
--- a/src/python/twitter/pants/docs/python-readme.rst
+++ /dev/null
@@ -1,1026 +0,0 @@
-Using Pants for Python development
-==================================
-
-Why use Pants for Python development?
--------------------------------------
-
-Pants makes the manipulation and distribution of hermetically sealed Python environments
-painless.
-
-But why another system?
-
-Alternatives
-^^^^^^^^^^^^
-
-There are several solutions for package management in Python. Almost
-everyone is familiar with running `sudo easy_install PackageXYZ`. This
-leaves a lot to be desired.
Over time, your Python installation will -collect dozens of packages, become annoyingly slow or even broken, and -reinstalling it will invariably break a number of the applications -that you were using. - -A marked improvement over the `sudo easy_install` model is virtualenv_ -to isolate Python environments on a project by project basis. This is -useful for development but does not directly solve any problems -related to deployment, whether it be to a production environment or to -your peers. It is also challenging to explain to a Python non-expert. - -.. _virtualenv: http://www.virtualenv.org - -A different solution altogether, `zc.buildout`_ attempts to provide a -framework and recipes for many common development environments. It -has arguably gone the farthest for automating environment -reproducibility amongst the popular tools, but shares the same -complexity problems as all the other abovementioned solutions. - -.. _zc.buildout: http://www.buildout.org/ - -Most solutions leave deployment as an afterthought. Why not make the -development and deployment environments the same by taking the -environment along with you? - -Pants and PEX -^^^^^^^^^^^^^ - -The lingua franca of Pants is the PEX file (PEX itself does not stand for -anything in particular, though in spirit you can think of it as a "Python -EXecutable".) - -**PEX files are single-file lightweight virtual Python environments.** - -The only difference is no virtualenv setup instructions or -`pip install foo bar baz`. PEX files are self-bootstrapping Python -environments with no strings attached and no side-effects. Just a simple -mechanism that unifies both your development and your deployment. - -Getting started ---------------- - -First it is necessary to install Pants. See :doc:`install`. - -It is also helpful to read the :doc:`first_concepts`. - - -TL;DR - 'Hello world!' with Pants Python ----------------------------------------- - -.. code-block:: bash - - $ git clone git://github.com/twitter/commons - $ cd commons - $ mkdir -p src/python/twitter/my_project - $ vi src/python/twitter/my_project/BUILD - - -`src/python/twitter/my_project/BUILD`:: - - python_binary( - name = 'hello_world', - source = 'hello_world.py' - ) - -.. code-block:: bash - - $ vi src/python/twitter/my_project/hello_world.py - - -`src/python/twitter/my_project/hello_world.py` might have contents:: - - print('Hello world!') - -To run directly: - -.. code-block:: bash - - $ ./pants py src/python/twitter/my_project:hello_world - Build operating on target: PythonBinary(src/python/twitter/my_project/BUILD:hello_world) - Hello world! - - -To build: - -.. code-block:: bash - - $ ./pants src/python/twitter/my_project:hello_world - Build operating on targets: OrderedSet([PythonBinary(src/python/twitter/my_project/BUILD:hello_world)]) - Building PythonBinary PythonBinary(src/python/twitter/my_project/BUILD:hello_world): - Wrote /Users/wickman/clients/science-py-csl/dist/hello_world.pex - - -and run separately: - -.. code-block:: bash - - $ dist/hello_world.pex - Hello world! - - -NOTE: The first time you run `./pants` will likely take a ridiculous amount -of time as Pants bootstraps itself inside your directory. Note, it never -installs anything in a global site-packages. - - -Describing Python environments in Pants ---------------------------------------- - -Build dependencies in Pants are managed with `BUILD` files that are -co-located with your source. These files are used to describe the following: - -1. 
libraries: bundles of sources and resources, that may or may not also depend on other libraries -2. binaries: a single source (the executable) and libraries it depends upon -3. requirements: external dependencies as resolved by dependency managers e.g. pypi in Python or ivy on the JVM - -The main point of Pants is to take these `BUILD` files and do something useful with them. - - -BUILD file format -^^^^^^^^^^^^^^^^^ - -These descriptions are stored in files named BUILD and colocated near the -binaries/libraries they describe. Let's take for example the -src/python/twitter/tutorial subtree in commons: - -.. code-block:: bash - - $ ls -lR src/python/twitter/tutorial/ - total 16 - -rw-r--r-- 1 wickman wheel 137 Apr 9 22:59 BUILD - -rw-r--r-- 1 wickman wheel 118 Apr 9 22:59 hello_world.py - - -Let's take a look at the BUILD file in `src/python/twitter/tutorial/BUILD`:: - - python_binary( - name = "hello_world", - source = "hello_world.py", - dependencies = [ - pants("src/python/twitter/common/app"), - ] - ) - -This BUILD file names one target: `hello_world`, which is a `python_binary` target. The `hello_world` target -contains one source file, `hello_world.py` and depends upon one other -target, the format of which will be described shortly. - -It should be noted that sources are relative to the location of the BUILD -file itself, e.g. `hello_world.py` inside of `src/python/twitter/tutorial/BUILD` actually refers to -`src/python/twitter/tutorial/hello_world.py`:: - - from twitter.common import app - - def main(): - print('Hello world!') - - app.main() - - -Dependencies, on the other hand, are relative to the *source root* of the repository which is defined -by the BUILD file that sits next to the `pants` command:: - - - # Define the repository layout - - source_root('src/antlr', doc, page, java_antlr_library, python_antlr_library) - source_root('src/java', annotation_processor, doc, jvm_binary, java_library, page) - source_root('src/protobuf', doc, java_protobuf_library, page) - source_root('src/python', doc, page, python_binary, python_library) - source_root('src/scala', doc, jvm_binary, page, scala_library) - source_root('src/thrift', doc, java_thrift_library, page, python_thrift_library) - - source_root('tests/java', doc, java_library, java_tests, page) - source_root('tests/python', doc, page, python_library, python_tests, python_test_suite) - source_root('tests/scala', doc, page, scala_library, scala_tests) - - -This file can be tailored to map to any source root structure such as Maven -style, Twitter style (as described above) or something more flat such as a -`setup.py`-based project. This however is an advanced topic that is not -covered in this document. - - -Addressing targets -^^^^^^^^^^^^^^^^^^ - -Within the `src/python/twitter/tutorial/BUILD`, only one target is defined, -specifically `hello_world`. This target is addressed by -`src/python/twitter/tutorial:hello_world` which means the target -`hello_world` within `src/python/twitter/tutorial/BUILD`. In general, -targets take the form `:` with the special cases: - -1. in the case of `path/to/directory/BUILD:target`, the `BUILD` component may be elided and instead `path/to/directory:target` may be used -2. `path/to/directory` is short form for `path/to/directory:directory`, so `src/python/twitter/common/app` is short form for `src/python/twitter/common/app/BUILD:app` - -`src/python/twitter/tutorial/BUILD` referenced `pants('src/python/twitter/common/app')` in its -dependencies. 
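-
-For a taste of what that tailoring looks like, a flatter layout might declare
-something like the following in its root BUILD file (a sketch only; adapt the
-paths and target types to what your tree actually contains)::
-
-  source_root('src', python_binary, python_library)
-  source_root('tests', python_library, python_tests)
-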
The `pants()` keyword is akin to a "pointer dereference" for an address. It will point -to whatever target is described at that address, in this case a `python_library` target: - -`src/python/twitter/common/app/BUILD`:: - - python_library( - name = "app", - sources = globs('*.py'), - dependencies = [ - pants('src/python/twitter/common/dirutil'), - pants('src/python/twitter/common/lang'), - pants('src/python/twitter/common/options'), - pants('src/python/twitter/common/util'), - pants('src/python/twitter/common/app/modules'), - ] - ) - -which in turn includes even more dependencies. The job of Pants is to manage the transitive closure -of all these dependencies and manipulate collections of these targets for you. - - -Python target types -^^^^^^^^^^^^^^^^^^^ - -BUILD files themselves are just Python. The only thing magical is that the -statement `from twitter.pants import *` has been autoinjected. This -provides a number of Python-specific targets such as: - -1. `python_library` -2. `python_binary` -3. `python_requirement` -4. `python_thrift_library` - -and a whole host of other targets including Java, Scala, Python, Markdown, -the universal `pants` target and so forth. See -`src/python/twitter/pants/__init__.py` for a comprehensive list of targets. - - -`python_library` -"""""""""""""""" - -A `python_library` target has a name, zero or more source files, zero or -more resource files, and zero or more dependencies. These dependencies may -include other `python_library`-like targets (`python_library`, -`python_thrift_library`, `python_antlr_library` and so forth) or -`python_requirement` targets. - - -`python_binary` -""""""""""""""" - -A `python_binary` target is almost identical to a `python_library` target except instead of `sources`, it takes one -of two possible parameters: - -1. `source`: The source file that should be executed within the "library" otherwise defined by `python_binary` - -2. `entry_point`: The entry point that should be executed within the "library" otherwise defined by -`python_binary`. Entry points take the format of `pkg_resources.EntryPoint`, which is something -akin to `some.module.name:my.attr` which means run the function pointed by `my.attr` inside the -module `some.module` inside the environment. The `:my.attr` component can be omitted and the -module is executed directly (presuming it has a `__main__.py`.) - - -`python_requirement` -"""""""""""""""""""" - -A `python_requirement` target describes an external dependency as understood by easy_install or pip. It takes only -a single non-keyword argument of the `Requirement`-style string, e.g. :: - - - python_requirement('django-celery') - python_requirement('tornado==2.2') - python_requirement('kombu>=2.1.1,<3.0') - - -This will resolve the dependency and its transitive closure, for example `django-celery` pulls down the following -dependencies: `celery>=2.5.1`, `django-picklefield>=0.2.0`, `ordereddict`, `python-dateutil`, -`kombu>=2.1.1,<3.0`, `anyjson>=0.3.1`, `importlib`, and `amqplib>=1.0`. - -Pants takes care of handling these dependencies for you. It will never install anything globally. Instead it will -build the dependency and cache it in `.pants.d` and assemble them a la carte into an execution environment. - -The `python_requirement` for a particular dependency should appear -only once in a BUILD file. 
It creates a local target name which can
-then be included in other dependencies in the file::
-
-    python_requirement('django-celery')
-
-    python_library(
-      name = 'mylib_1',
-      sources = [
-        'mylib_1.py',
-      ],
-      dependencies = [
-        pants(':django-celery')
-      ]
-    )
-
-    python_library(
-      name = 'mylib_2',
-      sources = [
-        'mylib_2.py',
-      ],
-      dependencies = [
-        pants(':django-celery')
-      ]
-    )
-
-`python_thrift_library`
-"""""""""""""""""""""""
-
-A `python_thrift_library` target takes the same arguments as `python_library`, except that files described
-in `sources` must be thrift files. If your library or binary depends upon this target type, Python bindings
-will be autogenerated and included within your environment.
-
-
-`python_tests`
-""""""""""""""
-
-A `python_tests` target takes the same arguments as `python_library`, with the addition of the optional
-`coverage` argument that is a list of namespaces that you want to generate coverage data for.
-
-
-Building your first PEX
------------------------
-
-Now you're ready to build your first PEX file (technically you already have,
-by building Pants itself.) By default if you specify `./pants <target>`, it
-assumes you mean `./pants build <target>` and does precisely that:
-
-.. code-block:: bash
-
-    $ PANTS_VERBOSE=1 ./pants src/python/twitter/tutorial:hello_world
-    Build operating on targets: OrderedSet([PythonBinary(src/python/twitter/tutorial/BUILD:hello_world)])
-    Resolver: Calling environment super => 0.046ms
-    Building PythonBinary PythonBinary(src/python/twitter/tutorial/BUILD:hello_world):
-    Building PythonBinary PythonBinary(src/python/twitter/tutorial/BUILD:hello_world):
-      Dumping library: PythonLibrary(src/python/twitter/common/app/BUILD:app) [relative module: ]
-      Dumping library: PythonLibrary(src/python/twitter/common/dirutil/BUILD:dirutil) [relative module: ]
-      Dumping library: PythonLibrary(src/python/twitter/common/lang/BUILD:lang) [relative module: ]
-      Dumping library: PythonLibrary(src/python/twitter/common/options/BUILD:options) [relative module: ]
-      Dumping library: PythonLibrary(src/python/twitter/common/util/BUILD:util) [relative module: ]
-      Dumping library: PythonLibrary(src/python/twitter/common/app/modules/BUILD:modules) [relative module: ]
-    Resolver: Calling environment super => 0.016ms
-      Dumping binary: twitter/tutorial/hello_world.py
-    Wrote /private/tmp/wickman-commons/dist/hello_world.pex
-
-You will see that despite specifying just one dependency, the transitive
-closure of `hello_world` pulled in all of `src/python/twitter/common/app`
-and its direct descendants. That's because those library targets depended
-upon other library targets, that in turn depend on even more. At the end
-of the day, we bundle up the closed set of all dependencies and bundle them
-into `hello_world.pex`.
-
-Since it uses the `twitter.common.app` framework, we know we can fire it up
-and poke around with `--help`:
-
-.. code-block:: bash
-
-    $ dist/hello_world.pex --help
-    Options:
-      -h, --help, --short-help
-                          show this help message and exit.
-      --long-help         show options from all registered modules, not just the
-                          __main__ module.
-
-
-If we specify `--long-help`, we can see the help of transitively included
-modules, e.g. `twitter.common.app` itself:
-
-.. code-block:: bash
-
-    $ dist/hello_world.pex --long-help
-    Options:
-      -h, --help, --short-help
-                          show this help message and exit.
-      --long-help         show options from all registered modules, not just the
-                          __main__ module.
- - From module twitter.common.app: - --app_daemonize Daemonize this application. [default: False] - --app_profile_output=FILENAME - Dump the profiling output to a binary profiling - format. [default: None] - --app_daemon_stderr=TWITTER_COMMON_APP_DAEMON_STDERR - Direct this app\'s stderr to this file if daemonized. - [default: /dev/null] - --app_debug Print extra debugging information during application - initialization. [default: False] - --app_daemon_stdout=TWITTER_COMMON_APP_DAEMON_STDOUT - Direct this app's stdout to this file if daemonized . - [default: /dev/null] - --app_profiling Run profiler on the code while it runs. Note this can - cause slowdowns. [default: False] - --app_ignore_rc_file - Ignore default arguments from the rc file. [default: - False] - --app_pidfile=TWITTER_COMMON_APP_PIDFILE - The pidfile to use if --app_daemonize is specified. - [default: None] - - -Or we can simply execute it as intended: - -.. code-block:: bash - - $ dist/hello_world.pex - Hello world! - - - -Environment manipulation with `pants py` ----------------------------------------- - -We've only discussed so far the "pants build" command. There's also a -dedicated "py" command that allows you to manipulate the environments -described by `python_binary` and `python_library` targets, such as drop into -an interpreter with the environment set up for you. - -`pants py` semantics -^^^^^^^^^^^^^^^^^^^^ - -The default behavior of `pants py ` is the following: - -1. For `python_binary` targets, build the environment and execute the target -2. For one or more `python_library` targets, build the environment that is the transitive closure of all targets and drop into an interpreter. -3. For a combination of `python_binary` and `python_library` targets, build the transitive closure of all targets and execute the first binary target. - - -external dependencies -^^^^^^^^^^^^^^^^^^^^^ - -Let's take `src/python/twitter/tutorial/BUILD` and split out the dependencies from -our `hello_world` target into `hello_world_lib` and add dependencies upon -Tornado_ and psutil_. - -.. _Tornado: http://github.com/facebook/tornado -.. _psutil: http://code.google.com/p/psutil/ - -:: - - python_binary( - name = "hello_world", - source = "hello_world.py", - dependencies = [ - pants(":hello_world_lib") - ] - ) - - python_library( - name = "hello_world_lib", - dependencies = [ - pants("src/python/twitter/common/app"), - python_requirement("tornado"), - python_requirement("psutil"), - ] - ) - - -This uses the `python_requirement` target which can refer to any string in `pkg_resources.Requirement` format as -recognized by tools such as `easy_install` and `pip` as described above. - -Now that we've created a library-only target `src/python/twitter/tutorial:hello_world_lib`, let's drop -into it using `pants py` with verbosity turned on so that we can see what's -going on in the background: - -.. 
code-block:: bash - - $ PANTS_VERBOSE=1 ./pants py src/python/twitter/tutorial:hello_world_lib - Build operating on target: PythonLibrary(src/python/twitter/tutorial/BUILD:hello_world_lib) - Resolver: Calling environment super => 0.019ms - Building PythonBinary PythonLibrary(src/python/twitter/tutorial/BUILD:hello_world_lib): - Dumping library: PythonLibrary(src/python/twitter/tutorial/BUILD:hello_world_lib) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/app/BUILD:app) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/dirutil/BUILD:dirutil) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/lang/BUILD:lang) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/options/BUILD:options) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/util/BUILD:util) [relative module: ] - Dumping library: PythonLibrary(src/python/twitter/common/app/modules/BUILD:modules) [relative module: ] - Dumping requirement: tornado - Dumping requirement: psutil - Resolver: Calling environment super => 0.029ms - Resolver: Activating cache /private/tmp/wickman-commons/3rdparty/python => 356.432ms - Resolver: Resolved tornado => 357.219ms - Resolver: Activating cache /private/tmp/wickman-commons/.pants.d/.python.install.cache => 41.117ms - Resolver: Fetching psutil => 10144.264ms - Resolver: Building psutil => 1794.474ms - Resolver: Distilling psutil => 224.896ms - Resolver: Constructing distribution psutil => 2.855ms - Resolver: Resolved psutil => 12210.066ms - Dumping distribution: .../tornado-2.2-py2.6.egg - Dumping distribution: .../psutil-0.4.1-py2.6-macosx-10.4-x86_64.egg - Python 2.6.7 (r267:88850, Aug 31 2011, 15:49:05) - [GCC 4.2.1 (Apple Inc. build 5664)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - (InteractiveConsole) - >>> - - - -In the background, `pants` used cached version of `tornado` but fetched -`psutil` from pypi and any necessary transitive dependencies (none in this -case) and built a platform-specific version for us. - -You can convince yourself that the environment contains all the dependencies -by inspecting `sys.path` and importing libraries as you desire:: - - >>> import psutil - >>> help(psutil) - >>> from twitter.common import app - >>> help(app) - - -It should be stressed that *dependencies built by Pants are never installed globally*. -These dependencies only exist for the duration of the Python interpreter forked by Pants. - - -Running an application using `pants py` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Let us turn our `hello_world.py` into a basic `top` application using `tornado`:: - - - from twitter.common import app - - import psutil - import tornado.ioloop - import tornado.web - - class MainHandler(tornado.web.RequestHandler): - def get(self): - self.write('
Running pids:\n%s
' % '\n'.join(map(str, psutil.get_pid_list()))) - - def main(): - application = tornado.web.Application([ - (r"/", MainHandler) - ]) - application.listen(8888) - tornado.ioloop.IOLoop.instance().start() - - app.main() - -We have now split our application into two parts: the `hello_world` binary -target and the `hello_world_lib` library target. If we run `pants py -src/python/twitter/tutorial:hello_world_lib`, the default behavior is to -drop into an interpreter. - -If we run `pants py src/python/twitter/tutorial:hello_world`, the default behavior is to run -the binary target pointed to by `hello_world`: - -.. code-block:: bash - - $ ./pants py src/python/twitter/tutorial:hello_world - -Then point your browser to http://localhost:8888 - -pants py --pex -^^^^^^^^^^^^^^ - -There is also a `--pex` option to pants py that allows you to build a PEX -file from a union of python_library targets that does not necessarily have a -`python_binary` target defined for it. Since there is no entry point -specified, the resulting .pex file just behaves like a Python interpreter, -but with the sys.path bootstrapped for you: - -.. code-block:: bash - - $ ./pants py --pex src/python/twitter/tutorial:hello_world_lib - Build operating on target: PythonLibrary(src/python/twitter/tutorial/BUILD:hello_world_lib) - Wrote /private/tmp/wickman-commons/dist/hello_world_lib.pex - - $ ls -la dist/hello_world_lib.pex - -rwxr-xr-x 1 wickman wheel 1404174 Apr 10 13:00 dist/hello_world_lib.pex - -Now if you use dist/hello_world_lib.pex, since it has no entry point, it will drop you into an interpreter: - -.. code-block:: bash - - $ dist/hello_world_lib.pex - Python 2.6.7 (r267:88850, Aug 31 2011, 15:49:05) - [GCC 4.2.1 (Apple Inc. build 5664)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - (InteractiveConsole) - >>> import tornado - -As mentioned before, it's like a single-file lightweight alternative to a -virtualenv. We can even use it to run our `hello_world.py` application: - -.. code-block:: bash - - $ dist/hello_world_lib.pex src/python/twitter/tutorial/hello_world.py - -This can be an incredibly powerful and lightweight way to manage and deploy -virtual environments without using `virtualenv`. - -PEX file as interpreter -^^^^^^^^^^^^^^^^^^^^^^^ - -As mentioned above, PEX files without default entry points behave like Python interpreters that -carry their dependencies with them. For example, let's create a target that -provides a Fabric dependency within `src/python/twitter/tutorial/BUILD`:: - - python_library( - name = 'fabric', - dependencies = [ - python_requirement('Fabric') - ] - ) - -And let's build a fabric PEX file: - -.. code-block:: bash - - $ ./pants py --pex src/python/twitter/tutorial:fabric - Build operating on target: PythonLibrary(src/python/twitter/tutorial/BUILD:fabric) - Wrote /private/tmp/wickman-commons/dist/fabric.pex - -By default it does nothing more than drop us into an interpreter: - -.. code-block:: bash - - $ dist/fabric.pex - Python 2.6.7 (r267:88850, Aug 31 2011, 15:49:05) - [GCC 4.2.1 (Apple Inc. build 5664)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - (InteractiveConsole) - >>> - - -But suppose we have a local script that depends upon Fabric, `fabric_hello_world.py`:: - - - from fabric.api import * - - def main(): - local('echo hello world') - - if __name__ == '__main__': - main() - -We can now use `fabric.pex` as if it were a Python interpreter but with -fabric available in its environment. 
Note that fabric has never been
-installed globally in any site-packages anywhere. It is just bundled inside
-of fabric.pex:
-
-.. code-block:: bash
-
-    $ dist/fabric.pex fabric_hello_world.py
-    [localhost] local: echo hello world
-    hello world
-
-
-
-python_binary entry_point
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-As an advanced feature of `python_binary` targets, you may specify a direct
-entry point into the PEX file rather than a source file. For example,
-if we wanted to build an a la carte `fab` wrapper for fabric::
-
-    python_binary(name = "fab",
-      entry_point = "fabric.main:main",
-      dependencies = [
-        python_requirement("fabric"),
-      ]
-    )
-
-
-We build:
-
-.. code-block:: bash
-
-    $ ./pants src/python/twitter/tutorial:fab
-    Build operating on targets: OrderedSet([PythonBinary(src/python/twitter/tutorial/BUILD:fab)])
-    Building PythonBinary PythonBinary(src/python/twitter/tutorial/BUILD:fab):
-    Wrote /private/tmp/wickman-commons/dist/fab.pex
-
-And now `dist/fab.pex` behaves like a standalone `fab` binary:
-
-.. code-block:: bash
-
-    $ dist/fab.pex -h
-    Usage: fab [options] <command>[:arg1,arg2=val2,host=foo,hosts='h1;h2',...] ...
-
-    Options:
-      -h, --help            show this help message and exit
-      -d NAME, --display=NAME
-                            print detailed info about command NAME
-      -F FORMAT, --list-format=FORMAT
-                            formats --list, choices: short, normal, nested
-      -l, --list            print list of possible commands and exit
-      --set=KEY=VALUE,...   comma separated KEY=VALUE pairs to set Fab env vars
-      --shortlist           alias for -F short --list
-      -V, --version         show program's version number and exit
-      -a, --no_agent        don't use the running SSH agent
-      -A, --forward-agent   forward local agent to remote end
-      --abort-on-prompts    abort instead of prompting (for password, host, etc)
-      ...
-
-Pants also has excellent support for JVM-based builds and can do similar
-things, such as resolving external JARs and packaging them as standalone
-environments with default entry points.
-
-Python Tests
-------------
-
-By default Python tests are run via `pytest`. Any option that `py.test` accepts can be used, since
-arguments are passed through by `pants`.
-
-Defining `python_tests` Targets
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-When setting up your test targets, the BUILD file will be something like::
-
-    python_tests(
-      name = "your_tests",
-      sources = globs("*.py"),
-      coverage = ["twitter.your_namespace"],
-      dependencies = [
-        pants("3rdparty/python:mock"),
-        pants("src/python/twitter/your_namespace")
-      ]
-    )
-
-The above target is very similar to a `python_library`, with the addition of the `coverage` argument.
-`coverage` allows you to restrict the namespaces for which code coverage data is generated.
-
-Running Python Tests
-^^^^^^^^^^^^^^^^^^^^
-
-To run your Python tests, use `./pants build`, although `build` can be left off:
-
-.. code-block:: bash
-
-    $ ./pants tests/python/twitter/your_tests/BUILD:your_tests
-    Build operating on targets: OrderedSet([PythonTests(tests/python/twitter/your_tests/BUILD:your_tests)])
-    ================================================== test session starts ===================================================
-    platform darwin -- Python 2.6.7 -- pytest-2.3.5
-    collected 15 items:
-
-    tests/python/twitter/your_tests/module1_test.py ....
-    tests/python/twitter/your_tests/module2_test.py ....
-    tests/python/twitter/your_tests/module3_test.py ....
-
-    =============================================== 15 passed in 0.44 seconds ================================================
-    tests.python.twitter.your_tests.your_tests ..... 
SUCCESS - - -Sometimes you only want to run specific tests (or exclude them). The `-k` option controls the -tests to run. `-k` will do substring matches on test method names and can also use keywords like -`not` and `or` to refine results. - -.. code-block:: bash - - $ ./pants tests/python/twitter/your_tests/BUILD:your_tests -k 'module1_instantiation_test or module1_foo_test' -v - Build operating on targets: OrderedSet([PythonTests(tests/python/twitter/your_tests/BUILD:your_tests)]) - ================================================== test session starts =================================================== - platform darwin -- Python 2.6.7 -- pytest-2.3.5 - collected 15 items: - - tests/python/twitter/your_tests/module1_test.py:3: Module1Test.module1_instantiation_test PASSED - tests/python/twitter/your_tests/module1_test.py:21: Module1Test.module1_foo_test PASSED - - ======================= 13 tests deselected by '-kmodule1_instantiation_test or module1_foo_test' ======================== - ================================================ 2 passed in 0.14 seconds ================================================ - tests.python.twitter.your_tests.your_tests ..... SUCCESS - -You can also mark tests via a decorator:: - - - @pytest.mark.module1 - def module1_instantiation_test(): - # testing code here - - -Using `-m` you can specify the marks of tests that you want to execute. - -Getting Python Code Coverage -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To get code coverage data, set the `PANTS_PY_COVERAGE` environment variable: - -.. code-block:: bash - - $ PANTS_PY_COVERAGE=1 ./pants tests/python/twitter/your_tests/BUILD:your_tests - Build operating on targets: OrderedSet([PythonTests(tests/python/twitter/your_tests/BUILD:your_tests)]) - ============================================================ test session starts ============================================================ - platform darwin -- Python 2.6.7 -- pytest-2.3.5 - collected 15 items: - - tests/python/twitter/your_tests/module1_test.py .... - tests/python/twitter/your_tests/module2_test.py .... - tests/python/twitter/your_tests/module3_test.py .... - ---------------------------------------------- coverage: platform darwin, python 2.6.7-final-0 ---------------------------------------------- - Name Stmts Miss Branch BrMiss Cover - ------------------------------------------------------------------------------------------------------------------------------------------ - /private/var/folders/p0/ztm93vq94qzfc1nyfkq_4l7r0000gn/T/tmp6BcJ1r/twitter/your_namespace/__init__ 0 0 0 0 100% - /private/var/folders/p0/ztm93vq94qzfc1nyfkq_4l7r0000gn/T/tmp6BcJ1r/twitter/your_namespace/module1 62 62 8 8 0% - /private/var/folders/p0/ztm93vq94qzfc1nyfkq_4l7r0000gn/T/tmp6BcJ1r/twitter/your_namespace/module2 34 6 6 0 85% - /private/var/folders/p0/ztm93vq94qzfc1nyfkq_4l7r0000gn/T/tmp6BcJ1r/twitter/your_namespace/module3 170 170 51 51 0% - ------------------------------------------------------------------------------------------------------------------------------------------ - TOTAL 266 238 57 59 11% - Coverage HTML written to dir /Users/your_username/workspace/science/dist/coverage/tests/python/twitter/your_tests - ========================================================= 15 passed in 2.07 seconds ========================================================= - tests.python.twitter.your_tests.your_tests ..... 
SUCCESS
-
-
-Interactive Debugging on Test Failure
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Passing `--pdb` to your test build will invoke the Python debugger if one of the tests fails. This can be
-useful for inspecting the state of objects, especially if you are mocking interfaces.
-
-Using Other Testing Frameworks
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Although most tests can run under `pytest`, if you need to use a different testing framework, you
-can. Set the `entry_point` keyword argument when calling python_tests::
-
-    python_tests(
-      name = 'tests',
-      sources = [],
-      dependencies = [
-        pants('src/python/twitter/infraops/supplybird:supplybird-lib'),
-        pants('3rdparty/python:mock')
-      ],
-      entry_point="twitter.infraops.supplybird.core.run_tests"
-    )
-
-The `entry_point` should exit with a non-zero status if there are any test failures.
-
-Keep in mind, however, that much of the above documentation assumes you are using `pytest`.
-
-Manipulating PEX behavior with environment variables
-----------------------------------------------------
-
-Given a PEX file, it is possible to alter its default behavior during invocation.
-
-PEX_INTERPRETER=1
-^^^^^^^^^^^^^^^^^
-
-If you have a PEX file with a prescribed executable source or `entry_point` specified, it may still
-occasionally be useful to drop into an interpreter with the environment bootstrapped. If you
-set `PEX_INTERPRETER=1` in your environment, the PEX bootstrapper will skip any execution and instead
-launch an interactive interpreter session.
-
-
-PEX_VERBOSE=1
-^^^^^^^^^^^^^
-
-If your environment is failing to bootstrap or simply bootstrapping very slowly, it can be useful to
-set `PEX_VERBOSE=1` in your environment to get debugging output printed to the console. Debugging output
-includes:
-
-1. Fetched dependencies
-2. Built dependencies
-3. Activated dependencies
-4. Packages scrubbed out of `sys.path`
-5. The `sys.path` used to launch the interpreter
-
-PEX_MODULE=entry_point
-^^^^^^^^^^^^^^^^^^^^^^
-
-If you have a PEX file without a prescribed entry point, or want to change
-the `entry_point` for the duration of a single invocation, you can set
-`PEX_MODULE=entry_point` using the same format as described in the
-`python_binary` Pants target.
-
-This can be a useful tool for bundling up a number of packages together and
-being able to use a single file to execute scripts from each of them.
-
-Another common pattern is to link `pytest` into your PEX file, and run
-`PEX_MODULE=pytest my_pex.pex tests/*.py` to run your test suite in its
-isolated environment.
-
-PEX_COVERAGE
-^^^^^^^^^^^^
-
-There is nascent support for performing code coverage within PEX files by
-setting `PEX_COVERAGE=<suffix>`. By default the coverage files will be written
-into the current working directory with the file pattern `.coverage.<suffix>`. This
-requires that the `coverage` Python module has been linked into your PEX.
-
-You can then combine the coverage files by running `PEX_MODULE=coverage
-my_pex.pex combine .coverage.suffix*` and run a report using `PEX_MODULE=coverage
-my_pex.pex report`. Since PEX files are just zip files, `coverage` is able
-to understand and extract source and line numbers from them in order to
-produce coverage reports.
-
-
-How PEX files work
-------------------
-
-the utility of zipimport and `__main__.py`
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-As an aside, in Python, you may not know that you can import code from directories:
-
-.. 
code-block:: bash - - $ mkdir -p foo - $ touch foo/__init__.py - $ echo "print 'spam'" > foo/bar.py - $ python -c 'import foo.bar' - spam - - -All that is necessary is the presence of `__init__.py` to signal to the importer that we -are dealing with a package. Similarly, a directory can be made "executable": - -.. code-block:: bash - - $ echo "print 'i like flowers'" > foo/__main__.py - $ python foo - i like flowers - - -And because the `zipimport` module now provides a default import hook for -Pythons >= 2.4, if the Python import framework sees a zip file, with the -inclusion of a proper `__init__.py`, it can be treated similarly to a -directory. But since a directory can be executable, if we just drop a -`__main__.py` into a zip file, it suddenly becomes executable: - -.. code-block:: bash - - $ pushd foo && zip /tmp/flower.zip __main__.py && popd - /tmp/foo /tmp - adding: __main__.py (stored 0%) - /tmp - $ python flower.zip - i like flowers - -And since zip files don't actually start until the zip magic number, you can -embed arbitrary strings at the beginning of them and they're still valid -zips. Hence simple PEX files are born: - -.. code-block:: bash - - $ echo '#!/usr/bin/env python2.6' > flower.pex && cat flower.zip >> flower.pex - $ chmod +x flower.pex - $ ./flower.pex - i like flowers - - -Remember `pants.pex`? - -.. code-block:: bash - - $ unzip -l pants.pex | tail -2 - warning [pants.pex]: 25 extra bytes at beginning or within zipfile - (attempting to process anyway) - -------- ------- - 7900812 543 files - - $ head -c 25 pants.pex - #!/usr/bin/env python2.6 - -PEX `__main__.py` -^^^^^^^^^^^^^^^^^ - -The `__main__.py` in a real PEX file is somewhat special:: - - import os - import sys - - __entry_point__ = None - if '__file__' in locals() and __file__ is not None: - __entry_point__ = os.path.dirname(__file__) - elif '__loader__' in locals(): - from pkgutil import ImpLoader - if hasattr(__loader__, 'archive'): - __entry_point__ = __loader__.archive - elif isinstance(__loader__, ImpLoader): - __entry_point__ = os.path.dirname(__loader__.get_filename()) - - if __entry_point__ is None: - sys.stderr.write('Could not launch python executable!\n') - sys.exit(2) - - sys.path.insert(0, os.path.join(__entry_point__, '.bootstrap')) - - from twitter.common.python.importer import monkeypatch - monkeypatch() - del monkeypatch - - from twitter.common.python.pex import PEX - PEX(__entry_point__).execute() - -`PEX` is just a class that manages requirements (often embedded within PEX -files as egg distributions in the `.deps` directory) and autoimports them -into the `sys.path`, then executes a prescribed entry point. - -If you read the code closely, you'll notice that it relies upon monkeypatching `zipimport`. Inside -the `twitter.common.python` library we've provided a recursive zip importer derived from Google's -`pure Python zipimport `_ module -that allows for depending upon eggs within eggs or zips (and so forth) so that PEX files need not -extract egg dependencies to disk a priori. This even extends to C extensions (.so and .dylib -files) which are written to disk long enough to be dlopened before being unlinked. - -Strictly speaking this monkeypatching is not necessary and we may consider -making that optional. 
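-
-To make the trick above concrete, here is a minimal sketch, using only the
-standard library, of how such a self-executing zip can be assembled. It is
-illustrative only and not part of pants; the `make_executable_zip` helper and
-its arguments are hypothetical names::
-
-    import os
-    import zipfile
-
-    def make_executable_zip(out_path, main_py_source):
-      with open(out_path, 'wb') as out:
-        # Zip readers locate the archive via the central directory at the end
-        # of the file, so a prepended shebang line is tolerated.
-        out.write('#!/usr/bin/env python2.6\n')
-        zf = zipfile.ZipFile(out, 'w')
-        zf.writestr('__main__.py', main_py_source)
-        zf.close()
-      # Mark the file executable so the shebang can take effect.
-      os.chmod(out_path, 0755)
-
-    make_executable_zip('flower.pex', "print 'i like flowers'")
-
-Running `./flower.pex` should then print "i like flowers", just as in the
-hand-rolled `zip` and `cat` example above.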
- -Advanced Pants/PEX features ---------------------------- - -TODO: converting python_library targets to eggs - -TODO: auto dependency resolution from within PEX files - -TODO: dynamically self-updating PEX files - -TODO: tailoring your dependency resolution environment with pants.ini, including local cheeseshop mirrors diff --git a/src/python/twitter/pants/docs/release.rst b/src/python/twitter/pants/docs/release.rst deleted file mode 100644 index 7706913b6..000000000 --- a/src/python/twitter/pants/docs/release.rst +++ /dev/null @@ -1,55 +0,0 @@ -############### -Release Process -############### - -This page describes how to make a versioned release of Pants. - -.. note:: As of March 2014 this process is being formalized. If doing releases, - please check back often as the process is evolving. - -At a high level, releasing pants involves: - -* Deciding what/when to release. At present this is ad-hoc, typically when - a change has been made and the author wants to use a version incorporating - that change. -* Publish to PyPi. -* Announce the release on `pants-users`. - - -*************** -Publish to PyPi -*************** - -Pants and the common libraries are published to the -`Python Package Index `_ per the Python -community convention. - -At this time version numbers are checked-into BUILD files. Send a review -updating version numbers for the libraries you will be publishing. You can -generate a list of libraries requiring publishing with: :: - - $ ./pants.bootstrap goal dependencies \ - src/python/twitter/pants:_pants_transitional_publishable_library_ | sort -u | grep -v = - src/python/twitter/common/collections/BUILD:collections - src/python/twitter/common/config/BUILD:config - src/python/twitter/common/confluence/BUILD:confluence - - -After updating the checked-in version numbers, publish locally and verify the release. :: - - PANTS_DEV=1 ./pants.bootstrap setup_py --recursive src/python/twitter/pants:pants-packaged - VENV_DIR=$(mktemp -d -t pants.XXXXX) - virtualenv $VENV_DIR - source $VENV_DIR/bin/activate - pip install --allow-external elementtree --allow-unverified elementtree \ - --find-links=file://$(pwd)/dist twitter.pants==0.0.17 - pants goal list :: - deactivate - -Now that we've smoke-tested this release, publish to PyPi. :: - - PANTS_DEV=1 ./pants.bootstrap setup_py --recursive --run='sdist upload' \ - src/python/twitter/pants:pants-packaged - -Check PyPi to ensure everything looks good. Finally, announce the release to -`pants-users` and `pants-devel`. diff --git a/src/python/twitter/pants/docs/setup_repo.rst b/src/python/twitter/pants/docs/setup_repo.rst deleted file mode 100644 index fea1075ef..000000000 --- a/src/python/twitter/pants/docs/setup_repo.rst +++ /dev/null @@ -1,195 +0,0 @@ -################################# -Set up Your Source Tree for Pants -################################# - -If you're setting up the Pants build tool to work in your source tree, you -probably want to configure Pants' behavior. (Once it's set up, most -folks should be able to use the :doc:`first_concepts` -and not worry about these things.) - -.. _setup-pants-ini: - -****************************** -Configuring with ``pants.ini`` -****************************** - -Pants Build is very configurable. Your source tree's top-level directory should -contain a ``pants.ini`` file that sets many, many options. You can modify a broad range of -settings here, including specific binaries to use in your toolchain, -arguments to pass to tools, etc. 
- -These files are formatted as -`Python config files `_, -parsed by `ConfigParser `_. -Thus, they look something like:: - - [section] - setting1: value1 - setting2: value2 - -The ``[DEFAULT]`` section is special: its values are available in other sections. -It's thus handy for defining values that will be used in several contexts, as in these -excerpts that define/use ``thrift_workdir``:: - - [DEFAULT] - thrift_workdir: %(pants_workdir)s/thrift - - [thrift-gen] - workdir: %(thrift_workdir)s - - [java-compile] - args: [ - '-C-Tnowarnprefixes', '-C%(thrift_workdir)s', - ] - -It's also handy for defining values that are used in several contexts, since these values -will be available in all those contexts. The code that combines DEFAULT values with -others is in Pants' -`base/config.py `_. - -.. TODO update base/config.py link if/when source code moves - -**************************************** -Configure Code Layout with `source_root` -**************************************** - -Maybe someday all the world's programmers will agree on the one true directory -structure for source code. Until then, you'll want some -:ref:`bdict_source_root` rules to specify which directories hold -your code. A typical programming language has a notion of *base paths* -for imports; you configure pants to tell it those base paths. - -If your project's source tree is laid out for Maven, there's a shortcut function -`maven_layout` that configures source roots for Maven's expected -source code tree structure. - -Organized by Language -===================== - -If your top-level ``BUILD`` file is ``top/BUILD`` and your main Java code is in -``top/src/java/com/foo/`` and your Java tests are in ``top/src/javatest/com/foo/``, -then your top-level `BUILD` file might look like:: - - # top/BUILD - source_root('src/java') - source_root('src/javatest') - ... - -Pants can optionally enforce that only certain target types are allowed under each source root:: - - # top/BUILD - source_root('src/java', annotation_processor, doc, jvm_binary, java_library, page) - source_root('src/javatest', doc, java_library, java_tests, page) - ... - - -Organized by Project -==================== - -If your top-level ``BUILD`` file is ``top/BUILD`` and the Java code for your -Theodore and Hank projects live in ``top/theodore/src/java/com/foo/``, -then your top-level `BUILD` file might not contain any ``source_root`` statements. -Instead, ``theodore/BUILD`` and ``hank/BUILD`` might look like:: - - # top/(project)/BUILD - source_root('src/java') - source_root('src/javatest') - ... - -Or:: - - # top/(project)/BUILD - source_root('src/java', annotation_processor, doc, jvm_binary, java_library, page) - source_root('src/javatest', doc, java_library, java_tests, page) - ... - - -`BUILD.*` and environment-specific config ------------------------------------------ - -When we said `BUILD` files were named `BUILD`, we really meant `BUILD` -or *BUILD*\ .\ `something`. If you have some rules that make sense for folks -in one environment but not others, you might put them into a separate -BUILD file named *BUILD*\ .\ `something`. - -****************************************** -Top-level `BUILD.*` for tree-global config -****************************************** - -When you invoke ``./pants goal something src/foo:foo`` it processes -the code in `src/foo/BUILD` and the code in `./BUILD` *and* `./BUILD.*`. If you -distribute code to different organizations, and want different configuration -for them, you might put the relevant config code in `./BUILD.something`. 
-You can give that file to some people and not-give it to others. - -For example, you might work at the Foo Corporation, which maintains a fleet -of machines to run big test jobs. You might define a new `goal` type to -express sending a test job to the fleet:: - - goal(name='test_on_fleet', - action=SendTestToFleet, - dependencies=[]).install().with_description('Send test to Foo fleet') - -If the testing fleet is only available on Foo's internal network and you -open-source this code, you don't want to expose `test_on_fleet` to the world. -You'd just get complaints about `Host testfleet.intranet.foo.com not found` -errors. - -You might put this code in a `./BUILD.foo` in the top-level directory of the -internal version of the source tree; then hold back this file when mirroring for -the public version. Thus, the foo-internal-only rules will be available -inside Foo, but not to the world. - -********************************************** -BUILD.* in the source tree for special targets -********************************************** - -If you distribute code to different organizations, you might want to expose some -targets to one organization but not to another. You can do this by defining -those targets in a `BUILD.*` file. You can give that file to some people and -not-give it to others. This code will be processed by people invoking pants -on this directory only if they have the file. - -For example, you might work at the Foo Corporation, which maintains a fleet -of machines to run big test jobs. You might define a humungous test job -as a convenient way to send many many tests to the fleet :: - - # src/javatest/com/foo/BUILD.foo - - # many-many test: Run this on the fleet, not your workstation - # (unless you want to wait a few hours for results) - junit_tests(name='many-many', - dependencies = [ - 'bar/BUILD:all', - 'baz/BUILD:all', - 'garply/BUILD:all', - ],) - -If you don't want to make this test definition available to the public (lest -they complain about how long it takes), you might put this in a `BUILD.foo` -file and hold back this file when mirroring for the public repository. - -.. _setup_publish_restrict_branch: - -*************************************** -Restricting Publish to "Release Branch" -*************************************** - -Your organization might have a notion of a special "release branch": you want -:doc:`artifact publishing ` -to happen on this source control branch, which you maintain -extra-carefully. To configure this, set up a ``JarPublish`` -subclass in an always-used ``BUILD`` file (in most repos, this -means a ``BUILD`` file in the top directory). This ``JarPublish`` -subclass should use ``restrict_push_branches``. Set up your repo's -``publish`` goal to use this class:: - - # ./BUILD.myorg - class MyorgJarPublish(JarPublish): - def __init__(self, context): - super(MyorgJarPublish, self).__init__(context, restrict_push_branches=['master']) - - goal(name='publish', - action=MyorgJarPublish).install('publish').with_description('Publish one or more artifacts.') - -If a user invokes ``goal publish`` from some other branch, Pants balks. diff --git a/src/python/twitter/pants/docs/target_addresses.rst b/src/python/twitter/pants/docs/target_addresses.rst deleted file mode 100644 index 581c96ad5..000000000 --- a/src/python/twitter/pants/docs/target_addresses.rst +++ /dev/null @@ -1,70 +0,0 @@ -Target Addresses -================ - -Knowing how to specify exactly the targets you need is a critical skill when -using pants. 
This document describes target addresses and a multitude of ways -to specify them. - -Pants targets ("nouns" of the build) have an ``address``, a combination of the -``BUILD`` file path and target name. Addresses are used to reference targets -in ``BUILD`` files, and from the command-line to specify what targets to -perform the given actions on. - -You can specify targets several ways. Some are most useful when writing -``BUILD`` targets, while others are useful when invoking pants on the -command-line. Most specify a single target, but globs are available too. - -The following target addresses all specify the same single target. - -:: - - # Fully qualified target address is the BUILD file path plus target name. - $ ./pants goal list src/java/com/twitter/common/application/BUILD:application - src/java/com/twitter/common/application/BUILD:application - - # Specify the default target, which matches the parent directory name - $ ./pants goal list src/java/com/twitter/common/application/BUILD - src/java/com/twitter/common/application/BUILD:application - - # The BUILD file name is optional. - $ ./pants goal list src/java/com/twitter/common/application - src/java/com/twitter/common/application/BUILD:application - - # Trailing forward slashes are ignored to accommodate command-line completion. - ./pants goal list src/java/com/twitter/common/application/ - src/java/com/twitter/common/application/BUILD:application - - # Targets can be referenced relatively within the same BUILD file. - java_library(name='application', ...) - java_library(name='mybird', - dependencies=[pants(':application')], - ) - -Pants supports two globbing target selectors. These globbing selectors are -provided as a convenience on the command-line. For target dependencies, -explicit target names are used. - -A trailing single colon specifies a glob of targets at the specified location. - -:: - - $ ./pants goal list src/java/com/twitter/common/application: - src/java/com/twitter/common/application/BUILD:action - src/java/com/twitter/common/application/BUILD:application - -A trailing double colon specifies a recursive glob of targets at the specified -location. 
- -:: - - $ ./pants goal list src/java/com/twitter/common/application:: - src/java/com/twitter/common/application/BUILD:action - src/java/com/twitter/common/application/BUILD:application - src/java/com/twitter/common/application/http/BUILD:http - src/java/com/twitter/common/application/modules/BUILD:applauncher - src/java/com/twitter/common/application/modules/BUILD:lifecycle - src/java/com/twitter/common/application/modules/BUILD:http - src/java/com/twitter/common/application/modules/BUILD:log - src/java/com/twitter/common/application/modules/BUILD:stats - src/java/com/twitter/common/application/modules/BUILD:stats_export - src/java/com/twitter/common/application/modules/BUILD:thrift diff --git a/src/python/twitter/pants/docs/targets_graph.dot b/src/python/twitter/pants/docs/targets_graph.dot deleted file mode 100644 index 0ac8076da..000000000 --- a/src/python/twitter/pants/docs/targets_graph.dot +++ /dev/null @@ -1,19 +0,0 @@ -digraph G { - bina [label="jvm_binary"]; - testa [label="junit_tests"]; - - javaliba [label="java_library"]; - javalibb [label="java_library"]; - libc [label="scala_library"]; - libd [label="scala_library"]; - - jara [label="jar"]; - - thrifta [label="thrift_library"] - - bina -> javaliba -> javalibb -> jara; - javalibb -> thrifta; - javaliba -> libc -> jara; - testa -> libc; - testa -> libd; -} diff --git a/src/python/twitter/pants/docs/targets_graph_gen.dot b/src/python/twitter/pants/docs/targets_graph_gen.dot deleted file mode 100644 index 19ed7d523..000000000 --- a/src/python/twitter/pants/docs/targets_graph_gen.dot +++ /dev/null @@ -1,23 +0,0 @@ -// This graph has synthetic targets. -digraph G { - bina [label="jvm_binary"]; - testa [label="junit_tests"]; - - javaliba [label="java_library"]; - javalibb [label="java_library"]; - libc [label="scala_library"]; - libd [label="scala_library"]; - - jara [label="jar"]; - - thrifta [label="thrift_library"]; - thrift_gen_a [label="java_library", color=blue]; - thrifta -> thrift_gen_a [style=dashed, color=blue]; - - bina -> javaliba -> javalibb -> jara; - javalibb -> thrifta [color=white]; - javalibb -> thrift_gen_a; - javaliba -> libc -> jara; - testa -> libc; - testa -> libd; -} diff --git a/src/python/twitter/pants/docs/taskdev/falcon.gif b/src/python/twitter/pants/docs/taskdev/falcon.gif deleted file mode 100644 index 2bff5fb37..000000000 Binary files a/src/python/twitter/pants/docs/taskdev/falcon.gif and /dev/null differ diff --git a/src/python/twitter/pants/docs/taskdev/taskdev.asc b/src/python/twitter/pants/docs/taskdev/taskdev.asc deleted file mode 100644 index 66ba0fec9..000000000 --- a/src/python/twitter/pants/docs/taskdev/taskdev.asc +++ /dev/null @@ -1,278 +0,0 @@ -= Pants Task Development - -:docsdir: .. - -//// -Use https://github.com/twitter/cdk to generate this deck. - -TODO: -* Section on testing. -* Section on release process. -* Example where the targets graph is mutated. -* Example using the product map. -* Example with a new target type. -//// - - -== Agenda - -Today we're going to learn: - -* Refresher of tasks and targets. -* The Task API. -* All about Task configuration -* Development Workflow -* Write a PageRank report task - -New to pants? Consider taking the _essentials_ class first. 
- - -== Targets Graph - -["graphviz"] ----- -include::{docsdir}/targets_graph.dot[''] ----- - - -== Tasks Graph - -["graphviz"] ----- -include::{docsdir}/tasks_graph.dot[''] ----- - - -== Tasks & Targets Graphs - -["graphviz"] ----- -include::{docsdir}/tasks_graph_tsort.dot[''] ----- - -["graphviz"] ----- -include::{docsdir}/targets_graph.dot[''] ----- - -//// -* Mention each task is executed once and operates over all targets. - Other build systems (like Maven) execute the full lifecycle over each - target separately. -//// - -== Typical Task Categories - -Tasks typically fall into one of the following categories: - -* Transformer -* Verifier -* Reporter - -//// -Transformer -* give sources to a compiler which produces output files -* javac takes java source files and produces class files - -Verifier -* check assumptions about the targets graph are indeed true -* java source files pass checkstyle - -Reporter -* display information about targets -* forward and backward dependency reports -//// - - -== Task API - -Tasks are classes that extend the +Task+ base class. - ----- -Task.setup_parser(option_group, args, mkflag) -Task(context) -task.execute(targets) ----- - - -== Task.setup_parser - -+setup_parser+ is the first method called on your task. -Define command-line flags here. - ----- -@classmethod -def setup_parser(cls, option_group, args, mkflag): - option_group.add_option( - mkflag("skip"), mkflag("skip", negate=True), - dest="scalastyle_skip", - default=False, - action="callback", - callback=mkflag.set_bool, - help="[%default] Skip scalastyle.") ----- - - -== Task.\_\_init__ - -Constructor is called "on the way in." What typically happens? - -* Task configures itself. -* Registers interest in products it needs other tasks to produce. - -What's in +context+? - -* +self.context.config+ - access the configuration file -* +self.context.options+ - access command-line flags -* +self.context.log+ - access the logger -* +self.context.products+ - access the product mapping - -//// -* Be as defensive as possible during configuration. -* Raise ConfigError as early as possible, with a great error message. -* No/few configuration file options should be required; provide sensible defaults. -* Write great docstrings, which are surfaced in the Goals Reference. -//// - -== task.execute - -Execute is called "on the way out." Its where your task instance does its thing. - -* Given the list of _active concrete targets_. -* Check if anyone wants what the task produces. If not, exit early. -* Walk the targets graph looking for nodes it finds interesting. -* Do whatever needs to happen to the target. - - -== Task Configuration - -Pants is configured in two ways: - -* command-line flags - for per-invocation options -* repo configuration file - configure optional behavior for the whole repo - -Read +mykey+ from the +mytask+ configuration file section. - ----- - -self.context.config.get('mytask', 'mykey') ----- - -Access a command-line flag the task defined. - ----- -self.context.options.myflag ----- - -//// -* Command-line flags -* Repo Configuration file -* User Configuration file -* Does something really need to be configurable? -* Give it a sensible default. -* Read & validate configuration as early as possible and give good error messages. -//// - - -== Task Installation - -* Built-in tasks installed in +goal.py+. -* Repo tasks installed in any +BUILD+ file. 
- ----- -from twitter.pants.tasks.pagerank import PageRank -goal(name='pagerank', action=PageRank) - .install() - .with_description('PageRank the given targets.') ----- - -//// -* TODO: Discuss phases. -* TODO: Discuss options when installing tasks (ordering, phase dependencies). -* TODO: Discuss groups. -//// - - -== Development Workflow - -* Running from source -* Developing with IntelliJ - - -== Running from Source - -In the science repo: - -+PANTS_DEV=1 ./pants.bootstrap+ - -image::falcon.gif[falcon!] - - -== Pants + IntelliJ - -* Create project. -* Running within the editor. -* Setting +cwd+ to work with different repos (e.g.: science, birdcage) -* https://confluence.twitter.biz/display/DEVPROD/Pants+Dev+Setup - - -== Writing a task - -Now let's write a task. - -See the "Task Developer’s Guide" if you get stuck. - - -== PageRank Task - -Simplified algorithm: - ----- -PR(A) = (1-d) + d (PR(T1)/C(T1) + ... + PR(Tn)/C(Tn)) ----- - -* +d+ - dampening factor -* +T+ - targets -* +C+ - count of dependencies - -//// -* Each target starts with a default PR. -* Iterate some number of times, or until reaching a stopping condition. -//// - - -== PageRank Output - -Your task output should look something like: - ----- -$ ./pants goal pagerank src/java/com/twitter/common/:: | head -8.283371 - com.google.code.findbugs-jsr305-1.3.9 -7.433371 - javax.inject-javax.inject-1 -7.433371 - com.google.guava-guava-14.0.1 -3.107220 - commons-lang-commons-lang-2.5 -2.537617 - com.google.inject-guice-3.0 ----- - - -//// -Stuff I wrote but am not sure to discard or include. -Keeping here for now. - - -== Installation - -Pants is installed differently than most build systems. - -Most build systems are installed on a _machine_. - -Pants is installed in a _source repo_. - -**** -* pants and pants.ini are checked into a source repo. -* pants is a shell script that fetches a versioned pants.pex and launches. -* Reason is an artifact is derived from sources and specific tools. -* Version of build system to use is checked into the repo. -* Builds are reproducible because building at a particular sha will always use the same pants version. 
-**** -//// \ No newline at end of file diff --git a/src/python/twitter/pants/docs/tasks_graph.dot b/src/python/twitter/pants/docs/tasks_graph.dot deleted file mode 100644 index 2418bbd88..000000000 --- a/src/python/twitter/pants/docs/tasks_graph.dot +++ /dev/null @@ -1,33 +0,0 @@ -digraph G { - compound=true; - rankdir=RL; - node [shape=box]; - - subgraph cluster_gen { - label="gen goal"; - color=blue; - scrooge; - } - - subgraph cluster_resolve { - label="resolve goal"; - color=blue; - ivy; - } - - subgraph cluster_compile { - label="compile goal"; - color=blue; - checkstyle -> javac -> scalac; - } - - subgraph cluster_test { - label="test goal"; - color=blue; - specs -> junit; - } - - specs -> checkstyle [ltail=cluster_test, lhead=cluster_compile]; - checkstyle -> ivy [ltail=cluster_compile, lhead=cluster_resolve]; - ivy -> scrooge [ltail=cluster_resolve, lhead=cluster_gen]; -} diff --git a/src/python/twitter/pants/docs/tasks_graph_tsort.dot b/src/python/twitter/pants/docs/tasks_graph_tsort.dot deleted file mode 100644 index 35ea8daee..000000000 --- a/src/python/twitter/pants/docs/tasks_graph_tsort.dot +++ /dev/null @@ -1,5 +0,0 @@ -digraph G { - rankdir=RL; - node [shape=box]; - specs -> junit -> checkstyle -> javac -> scalac -> ivy -> scrooge; -} \ No newline at end of file diff --git a/src/python/twitter/pants/docs/tasks_graph_tsort_specs.dot b/src/python/twitter/pants/docs/tasks_graph_tsort_specs.dot deleted file mode 100644 index b9d553a19..000000000 --- a/src/python/twitter/pants/docs/tasks_graph_tsort_specs.dot +++ /dev/null @@ -1,6 +0,0 @@ -digraph G { - rankdir=RL; - node [shape=box]; - specs [color=red]; - specs -> junit -> checkstyle -> ivy -> protoc -> scrooge; -} diff --git a/src/python/twitter/pants/docs/tshoot.rst b/src/python/twitter/pants/docs/tshoot.rst deleted file mode 100644 index cb6389274..000000000 --- a/src/python/twitter/pants/docs/tshoot.rst +++ /dev/null @@ -1,65 +0,0 @@ -############### -Troubleshooting -############### - -Sometimes Pants doesn't do what you hoped. Sometimes it's a problem in your -code, but Pants' error handling is not so useful. (Please report these when -you see them. We want Pants' error messages to be useful.) Sometimes Pants -just plain doesn't work. (Please report these, too.) The following workarounds -can get you up and running again. - -.. _verbosity: - -***************** -Maximum Verbosity -***************** - -To run a Pants command so that it outputs much much more information to stdout, -you can set some environment variables and pass the ``-ldebug`` flag (along -with the parameters you meant to pass):: - - PEX_VERBOSE=1 PANTS_VERBOSE=1 PYTHON_VERBOSE=1 ./pants -ldebug ... - -This can be especially useful if you're trying to figure out what Pants -was "looking at" when it crashed. - -.. _washpants: - -********************* -Scrub the Environment -********************* - -If you suspect that Pants has cached some corrupt data somewhere, but don't -want to track down exactly what, you can reset your state: - -* **Clean many cached files:** ``./build-support/python/clean.sh`` -* **Clean more cached files:** If your source tree lives under source control, - use your source control tool to clean up more files. 
- For example with ``git``, you might do something like:: - $ git status # look for untracked files - $ git add path/to/file1 path/to/file2 # preserve untracked files you don't want deleted - $ git clean -fdx # delete all untracked files -* **Stop background processes:** - ``./pants goal ng-killall --ng-killall-everywhere`` - -***************** -Nailgun 5 seconds -***************** - -If Pants fails with a stack trace that ends with something like :: - - File "twitter/pants/tasks/nailgun_task.py", line 255, in _spawn_nailgun_server - File "twitter/pants/tasks/nailgun_task.py", line 226, in _await_nailgun_server - twitter.pants.java.nailgun_client.NailgunError: Failed to read ng output after 5 seconds - -Pants uses a program called nailgun to run some JVM jobs. Pants runs -nailgun as a server in the background and then sends requests to it. If -nailgun runs into problems, it might not respond. - -To debug this, look in ``./pants.d/ng/*/*``: these files should be named -``stdout`` and ``stderr``. - -One typical cause behind this symptom: if you removed your machine's Ivy cache, -Pants may try to use symbolic links to files that have gone away. -To recover from this, :ref:`washpants`. - diff --git a/src/python/twitter/pants/engine/__init__.py b/src/python/twitter/pants/engine/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/engine/engine.py b/src/python/twitter/pants/engine/engine.py deleted file mode 100644 index 63aba2c5c..000000000 --- a/src/python/twitter/pants/engine/engine.py +++ /dev/null @@ -1,218 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import time - -from abc import abstractmethod -from contextlib import contextmanager - -from twitter.common.collections.ordereddict import OrderedDict -from twitter.common.lang import AbstractClass - -from twitter.pants.goal import GoalError, Phase -from twitter.pants.tasks.task_error import TaskError - - -class Timer(object): - """Provides timing support for goal execution.""" - - @classmethod - @contextmanager - def begin(cls, timer=None): - """Begins a new ``Timer`` and yields it in a with context. - - The timer will be finished if not already by the block yielded to. - """ - t = Timer(timer) - try: - yield t - finally: - t.finish() - - def __init__(self, timer=None): - """Creates a timer that uses time.time for timing intervals by default. - - :param timer: A callable that returns the current time in fractional seconds. 
- """ - self._now = timer or time.time - if not(callable(self._now)): - # TODO(John Sirois): `def jake(bob): pass` is also callable - we want a no-args callable - - # create a better check. - raise ValueError('Timer must be a callable object.') - - self._timings = OrderedDict() - self._elapsed = None - self._start = self._now() - - def finish(self): - """Finishes this timer if not already finished. - - Calls to ``timed`` after this will raise a ValueError since the timing window is complete. - """ - if self._elapsed is None: - self._elapsed = self._now() - self._start - - @property - def timings(self): - """Returns the phase timings as an ordered mapping from the ``Phase`` objects executed to - ordered mappings of the ``Goal`` objects executed in the phase to the list of timings - corresponding to each execution of the goal. - - Note that the list of timings will be singleton for all goals except those participating in a - ``Group``. Grouped goals will have or more timings in the list corresponding to each chunk of - targets the goal executed against when iterating the group. - """ - return self._timings - - @property - def elapsed(self): - """Returns the total elapsed time in fractional seconds from the creation of this timer until - it was ``finished``. - """ - if self._elapsed is None: - raise ValueError('Timer has not been finished yet.') - return self._elapsed - - @contextmanager - def timed(self, goal): - """Records the time taken to execute the yielded block an records this timing against the given - goal's total runtime. - """ - if self._elapsed is not None: - raise ValueError('This timer is already finished.') - - start = self._now() - try: - yield - finally: - self._record(goal, self._now() - start) - - def _record(self, goal, elapsed): - phase = Phase.of(goal) - - phase_timings = self._timings.get(phase) - if phase_timings is None: - phase_timings = OrderedDict(()) - self._timings[phase] = phase_timings - - goal_timings = phase_timings.get(goal) - if goal_timings is None: - goal_timings = [] - phase_timings[goal] = goal_timings - - goal_timings.append(elapsed) - - def render_timing_report(self): - """Renders this timer's timings into the classic pants timing report format.""" - report = ('Timing report\n' - '=============\n') - for phase, timings in self.timings.items(): - phase_time = None - for goal, times in timings.items(): - if len(times) > 1: - report += '[%(phase)s:%(goal)s(%(numsteps)d)] %(timings)s -> %(total).3fs\n' % { - 'phase': phase.name, - 'goal': goal.name, - 'numsteps': len(times), - 'timings': ','.join('%.3fs' % t for t in times), - 'total': sum(times) - } - else: - report += '[%(phase)s:%(goal)s] %(total).3fs\n' % { - 'phase': phase.name, - 'goal': goal.name, - 'total': sum(times) - } - if not phase_time: - phase_time = 0 - phase_time += sum(times) - if len(timings) > 1: - report += '[%(phase)s] total: %(total).3fs\n' % { - 'phase': phase.name, - 'total': phase_time - } - report += 'total: %.3fs' % self.elapsed - return report - - -class Engine(AbstractClass): - """An engine for running a pants command line.""" - - @staticmethod - def execution_order(phases): - """Yields all phases needed to attempt the given phases in proper phase execution order.""" - - # Its key that we process phase dependencies depth first to maintain initial phase ordering as - # passed in when phase graphs are dependency disjoint. A breadth first sort could mix next - # order executions and violate the implied intent of the passed in phase ordering. 
- - processed = set() - - def order(_phases): - for phase in _phases: - if phase not in processed: - processed.add(phase) - for goal in phase.goals(): - for dep in order(goal.dependencies): - yield dep - yield phase - - for ordered in order(phases): - yield ordered - - def __init__(self, print_timing=False): - """Creates an engine that prints no timings by default. - - :param print_timing: ``True`` to print detailed timings at the end of the run. - """ - self._print_timing = print_timing - - def execute(self, context, phases): - """Executes the supplied phases and their dependencies against the given context. - - :param context: The pants run context. - :param list phases: A list of ``Phase`` objects representing the command line goals explicitly - requested. - :returns int: An exit code of 0 upon success and non-zero otherwise. - """ - with Timer.begin() as timer: - try: - self.attempt(timer, context, phases) - return 0 - except (TaskError, GoalError) as e: - message = '%s' % e - if message: - print('\nFAILURE: %s\n' % e) - else: - print('\nFAILURE\n') - return e.exit_code if isinstance(e, TaskError) else 1 - finally: - timer.finish() - if self._print_timing: - print(timer.render_timing_report()) - - @abstractmethod - def attempt(self, timer, context, phases): - """Given the target context and phases specified (command line goals), attempt to achieve all - goals. - - :param timer: A ``Timer`` that should be used to record goal timings. - :param context: The pants run context. - :param list phases: A list of ``Phase`` objects representing the command line goals explicitly - requested. - """ diff --git a/src/python/twitter/pants/engine/group_engine.py b/src/python/twitter/pants/engine/group_engine.py deleted file mode 100644 index 2755d3fc1..000000000 --- a/src/python/twitter/pants/engine/group_engine.py +++ /dev/null @@ -1,298 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ==================================================================================================
-
-from collections import defaultdict, namedtuple
-
-from twitter.common.collections import maybe_list, OrderedDict, OrderedSet
-
-from twitter.pants.base.workunit import WorkUnit
-from twitter.pants.goal import Goal
-from twitter.pants.targets.internal import InternalTarget
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.check_exclusives import ExclusivesMapping
-
-from .engine import Engine
-
-
-class GroupMember(namedtuple('GroupMember', ['group', 'name', 'predicate'])):
-  """Represents a member of a goal group."""
-
-  @classmethod
-  def from_goal(cls, goal):
-    """Creates a ``GroupMember`` from goal metadata."""
-    if not isinstance(goal, Goal):
-      raise ValueError('The given goal must be a ``Goal`` object, given %s' % goal)
-    if not goal.group:
-      raise ValueError('Can only form a GroupMember from goals with a group defined, goal %s '
-                       'has no group' % goal.name)
-    return cls(goal.group.name, goal.name, goal.group.predicate)
-
-
-class GroupIterator(object):
-  """Iterates the goals in a group over the chunks they own."""
-
-  def __init__(self, targets, group_members):
-    """Creates an iterator that yields tuples of ``(GroupMember, [chunk Targets])``.
-
-    Chunks will be returned least dependent to most dependent such that a group member processing a
-    chunk can be assured that any dependencies of the chunk have been processed already.
-
-    :param list targets: The universe of targets to divide up amongst group members.
-    :param list group_members: A list of group members that forms the group to iterate.
-    """
-    # TODO(John Sirois): These validations should be happening sooner in the goal registration
-    # process.
-    assert len(set(map(lambda m: m.group, group_members))) == 1, 'Expected a single group'
-    assert len(set(map(lambda m: m.name, group_members))) == len(group_members), (
-      'Expected group members with unique names')
-
-    self._targets = maybe_list(targets, expected_type=InternalTarget, raise_type=ValueError)
-    self._group_members = group_members
-
-  def __iter__(self):
-    for chunk in self._create_chunks():
-      for group_member in self._group_members:
-        member_chunk = filter(group_member.predicate, chunk)
-        if len(member_chunk) > 0:
-          yield group_member, member_chunk
-
-  def _create_chunks(self):
-    def discriminator(tgt):
-      for group_member in self._group_members:
-        if group_member.predicate(tgt):
-          return group_member.name
-      return None
-
-    # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
-    # specific, and all we care about is that the Targets have dependencies defined
-    coalesced = InternalTarget.coalesce_targets(self._targets, discriminator)
-    coalesced = list(reversed(coalesced))
-
-    chunks = []
-    flavor = None
-    chunk_start = 0
-    for chunk_num, target in enumerate(coalesced):
-      target_flavor = discriminator(target)
-      if target_flavor != flavor and chunk_num > chunk_start:
-        chunks.append(OrderedSet(coalesced[chunk_start:chunk_num]))
-        chunk_start = chunk_num
-      flavor = target_flavor
-    if chunk_start < len(coalesced):
-      chunks.append(OrderedSet(coalesced[chunk_start:]))
-    return chunks
-
-
-class ExclusivesIterator(object):
-  """Iterates over groups of compatible targets."""
-
-  @classmethod
-  def from_context(cls, context):
-    exclusives = context.products.get_data('exclusives_groups')
-    return cls(exclusives)
-
-  def __init__(self, exclusives_mapping):
-    """Creates an iterator that yields lists of compatible targets.
- - Chunks will be returned in least exclusive to most exclusive order. - - :param exclusives_mapping: An ``ExclusivesMapping`` that contains the exclusive chunked targets - to iterate. - """ - if not isinstance(exclusives_mapping, ExclusivesMapping): - raise ValueError('An ExclusivesMapping is required, given %s of type %s' - % (exclusives_mapping, type(exclusives_mapping))) - self._exclusives_mapping = exclusives_mapping - - def __iter__(self): - sorted_excl_group_keys = self._exclusives_mapping.get_ordered_group_keys() - for excl_group_key in sorted_excl_group_keys: - yield self._exclusives_mapping.get_targets_for_group_key(excl_group_key) - - -class GroupEngine(Engine): - """The classical phase engine that has direct knowledge of groups and the bang algorithm. - - For grouped goals this engine attempts to make as few passes as possible through the target groups - found. - """ - - class PhaseExecutor(object): - def __init__(self, context, phase, tasks_by_goal): - self._context = context - self._phase = phase - self._tasks_by_goal = tasks_by_goal - - @property - def phase(self): - return self._phase - - def attempt(self, timer, explain): - """Executes the named phase against the current context tracking goal executions in executed. - """ - - def execute_task(goal, task, targets): - """Execute and time a single goal that has had all of its dependencies satisfied.""" - with timer.timed(goal): - # TODO (Senthil Kumaran): - # Possible refactoring of the Task Execution Logic (AWESOME-1019) - if explain: - self._context.log.debug("Skipping execution of %s in explain mode" % goal.name) - else: - task.execute(targets) - - goals = self._phase.goals() - if not goals: - raise TaskError('No goals installed for phase %s' % self._phase) - - run_queue = [] - goals_by_group = {} - for goal in goals: - if goal.group: - group_name = goal.group.name - if group_name not in goals_by_group: - group_goals = [goal] - run_queue.append((group_name, group_goals)) - goals_by_group[group_name] = group_goals - else: - goals_by_group[group_name].append(goal) - else: - run_queue.append((None, [goal])) - - with self._context.new_workunit(name=self._phase.name, labels=[WorkUnit.PHASE]): - # OrderedSet takes care of not repeating chunked task execution mentions - execution_phases = defaultdict(OrderedSet) - - for group_name, goals in run_queue: - if not group_name: - goal = goals[0] - execution_phases[self._phase].add(goal.name) - with self._context.new_workunit(name=goal.name, labels=[WorkUnit.GOAL]): - execute_task(goal, self._tasks_by_goal[goal], self._context.targets()) - else: - with self._context.new_workunit(name=group_name, labels=[WorkUnit.GROUP]): - goals_by_group_member = OrderedDict((GroupMember.from_goal(g), g) for g in goals) - - # First, divide the set of all targets to be built into compatible chunks, based - # on their declared exclusives. Then, for each chunk of compatible exclusives, do - # further sub-chunking. At the end, we'll have a list of chunks to be built, - # which will go through the chunks of each exclusives-compatible group separately. - - # TODO(markcc); chunks with incompatible exclusives require separate ivy resolves. - # Either interleave the ivy task in this group so that it runs once for each batch of - # chunks with compatible exclusives, or make the compilation tasks do their own ivy - # resolves for each batch of targets they're asked to compile. - - goal_chunks = [] - - # We won't have exclusives calculated if stopping short for example during an explain. 
- if explain: - exclusive_chunks = [self._context.targets()] - else: - exclusive_chunks = ExclusivesIterator.from_context(self._context) - - for exclusive_chunk in exclusive_chunks: - # TODO(Travis Crawford): Targets should be filtered by is_concrete rather than - # is_internal, however, at this time python targets are not internal targets. - group_chunks = GroupIterator(filter(lambda t: t.is_internal, exclusive_chunk), - goals_by_group_member.keys()) - goal_chunks.extend(group_chunks) - - self._context.log.debug('::: created chunks(%d)' % len(goal_chunks)) - for i, (group_member, goal_chunk) in enumerate(goal_chunks): - self._context.log.debug(' chunk(%d) [flavor=%s]:\n\t%s' % ( - i, group_member.name, '\n\t'.join(sorted(map(str, goal_chunk))))) - - for group_member, goal_chunk in goal_chunks: - goal = goals_by_group_member[group_member] - execution_phases[self._phase].add((group_name, goal.name)) - with self._context.new_workunit(name=goal.name, labels=[WorkUnit.GOAL]): - execute_task(goal, self._tasks_by_goal[goal], goal_chunk) - - if explain: - tasks_by_goalname = dict((goal.name, task.__class__.__name__) - for goal, task in self._tasks_by_goal.items()) - - def expand_goal(goal): - if len(goal) == 2: # goal is (group, goal) - group_name, goal_name = goal - task_name = tasks_by_goalname[goal_name] - return "%s:%s->%s" % (group_name, goal_name, task_name) - else: - task_name = tasks_by_goalname[goal] - return "%s->%s" % (goal, task_name) - - for phase, goals in execution_phases.items(): - goal_to_task = ", ".join(expand_goal(goal) for goal in goals) - print("%s [%s]" % (phase, goal_to_task)) - - @classmethod - def _prepare(cls, context, phases): - tasks_by_goal = {} - - # We loop here because a prepared goal may introduce new BUILDs and thereby new Goals/Phases. - # We need to prepare these in a subsequent loop until the set of phases and goals quiesces. - prepared_goals = set() - round_num = 0 - while True: - phases = list(cls.execution_order(phases)) - if prepared_goals == reduce(lambda goals, p: goals | set(p.goals()), phases, set()): - break - - round_num += 1 - context.log.debug('Preparing goals in round %d' % round_num) - # Prepare tasks roots to leaves and allow for downstream tasks requiring products from - # upstream tasks they depend upon. - for phase in reversed(phases): - for goal in reversed(phase.goals()): - if goal not in prepared_goals: - context.log.debug('preparing: %s:%s' % (phase.name, goal.name)) - prepared_goals.add(goal) - task = goal.task_type(context) - tasks_by_goal[goal] = task - - return map(lambda p: cls.PhaseExecutor(context, p, tasks_by_goal), phases) - - def attempt(self, timer, context, phases): - phase_executors = self._prepare(context, phases) - - execution_phases = ' -> '.join(map(str, map(lambda e: e.phase.name, phase_executors))) - context.log.debug('Executing goals in phases %s' % execution_phases) - - explain = getattr(context.options, 'explain', None) - if explain: - print("Phase Execution Order:\n\n%s\n" % execution_phases) - print("Phase [Goal->Task] Order:\n") - - # We take a conservative locking strategy and lock in the widest needed scope. If we have a - # linearized set of phases as such (where x -> y means x depends on y and *z means z needs to be - # serialized): - # a -> b -> *c -> d -> *e - # Then we grab the lock at the beginning of e's execution and don't relinquish until the largest - # scope serialization requirement from c is past. 
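To make the comment above concrete, here is a small worked sketch (hypothetical phase names standing in for PhaseExecutors) of how the outer lock holder falls out of the a -> b -> *c -> d -> *e chain:

    # Illustrative sketch only: execution order is leaves-first, so 'e' runs first.
    executors = ['e', 'd', 'c', 'b', 'a']
    serialized = [pe for pe in executors if pe in ('e', 'c')]  # the *-marked phases
    outer_lock_holder = serialized[-1] if serialized else None
    assert outer_lock_holder == 'c'  # the lock is taken before 'e' and held until 'c' completes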
-    serialized_phase_executors = list(filter(lambda pe: pe.phase.serialize(), phase_executors))
-    outer_lock_holder = serialized_phase_executors[-1] if serialized_phase_executors else None
-
-    if outer_lock_holder:
-      context.acquire_lock()
-    try:
-      for phase_executor in phase_executors:
-        phase_executor.attempt(timer, explain)
-        if phase_executor is outer_lock_holder:
-          context.release_lock()
-    finally:
-      # we may fail before we reach the outer lock holder - so make sure to clean up no matter what.
-      if outer_lock_holder:
-        context.release_lock()
diff --git a/src/python/twitter/pants/fs/__init__.py b/src/python/twitter/pants/fs/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/python/twitter/pants/fs/archive.py b/src/python/twitter/pants/fs/archive.py
deleted file mode 100644
index 90ad6c84d..000000000
--- a/src/python/twitter/pants/fs/archive.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-"""Support for wholesale archive creation and extraction in a uniform API across archive types."""
-
-import os
-
-from abc import abstractmethod
-from zipfile import ZIP_DEFLATED
-
-from twitter.common.collections.ordereddict import OrderedDict
-from twitter.common.contextutil import open_tar, open_zip
-from twitter.common.lang import AbstractClass
-
-
-class Archiver(AbstractClass):
-  @classmethod
-  def extract(cls, path, outdir):
-    """Extracts an archive's contents to the specified outdir."""
-    raise NotImplementedError()
-
-  @abstractmethod
-  def create(self, basedir, outdir, name, prefix=None):
-    """Creates an archive of all files found under basedir to a file at outdir of the given name.
-
-    If prefix is specified, it should be prepended to all archive paths.
-    """
-
-
-class TarArchiver(Archiver):
-  """An archiver that stores files in a tar file with optional compression."""
-
-  @classmethod
-  def extract(cls, path, outdir):
-    with open_tar(path, errorlevel=1) as tar:
-      tar.extractall(outdir)
-
-  def __init__(self, mode, extension):
-    Archiver.__init__(self)
-    self.mode = mode
-    self.extension = extension
-
-  def create(self, basedir, outdir, name, prefix=None):
-    tarpath = os.path.join(outdir, '%s.%s' % (name, self.extension))
-    with open_tar(tarpath, self.mode, dereference=True, errorlevel=1) as tar:
-      tar.add(basedir, arcname=prefix or '')
-    return tarpath
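As a usage sketch, the module-level archiver singletons and the archiver() lookup defined at the bottom of this file combine as follows (the paths here are hypothetical):

    # Illustrative sketch only: write out/app.tar.gz from dist/classes, then unpack it.
    tgz = archiver('tgz')  # returns the TGZ singleton defined below
    archive_path = tgz.create('dist/classes', 'out', 'app', prefix='app-1.0')
    TGZ.extract(archive_path, 'tmp/unpacked')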
-
-
-class ZipArchiver(Archiver):
-  """An archiver that stores files in a zip file with optional compression."""
-
-  @classmethod
-  def extract(cls, path, outdir):
-    """OS X's python 2.6.1 has a bug in zipfile that makes it unzip directories as regular files.
-
-    This method should work for python 2.6-3.x.
-    """
-    with open_zip(path) as zip:
-      for path in zip.namelist():
-        # While we're at it, we also perform this safety test.
-        if path.startswith('/') or path.startswith('..'):
-          raise ValueError('Zip file contains unsafe path: %s' % path)
-        # Ignore directories. extract() will create parent dirs as needed.
-        if not path.endswith('/'):
-          zip.extract(path, outdir)
-
-  def __init__(self, compression):
-    Archiver.__init__(self)
-    self.compression = compression
-
-  def create(self, basedir, outdir, name, prefix=None):
-    zippath = os.path.join(outdir, '%s.zip' % name)
-    with open_zip(zippath, 'w', compression=ZIP_DEFLATED) as zip:
-      for root, _, files in os.walk(basedir):
-        for file in files:
-          full_path = os.path.join(root, file)
-          relpath = os.path.relpath(full_path, basedir)
-          if prefix:
-            relpath = os.path.join(prefix, relpath)
-          zip.write(full_path, relpath)
-    return zippath
-
-
-TAR = TarArchiver('w:', 'tar')
-TGZ = TarArchiver('w:gz', 'tar.gz')
-TBZ2 = TarArchiver('w:bz2', 'tar.bz2')
-ZIP = ZipArchiver(ZIP_DEFLATED)
-
-_ARCHIVER_BY_TYPE = OrderedDict(tar=TAR, tgz=TGZ, tbz2=TBZ2, zip=ZIP)
-
-TYPE_NAMES = frozenset(_ARCHIVER_BY_TYPE.keys())
-
-def archiver(typename):
-  """Returns Archivers in common configurations.
-
-  The typename must correspond to one of the following:
-  'tar'   Returns a tar archiver that applies no compression and emits .tar files.
-  'tgz'   Returns a tar archiver that applies gzip compression and emits .tar.gz files.
-  'tbz2'  Returns a tar archiver that applies bzip2 compression and emits .tar.bz2 files.
-  'zip'   Returns a zip archiver that applies standard compression and emits .zip files.
-  """
-  archiver = _ARCHIVER_BY_TYPE.get(typename)
-  if not archiver:
-    raise ValueError('No archiver registered for %r' % typename)
-  return archiver
diff --git a/src/python/twitter/pants/fs/fs.py b/src/python/twitter/pants/fs/fs.py
deleted file mode 100644
index 00478e4a6..000000000
--- a/src/python/twitter/pants/fs/fs.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import hashlib
-import os
-
-
-# This is the max filename length for HFS+, extX and NTFS - the most likely filesystems pants will
-# be run under.
-# TODO(John Sirois): consider a better isolation layer
-_MAX_FILENAME_LENGTH = 255
-
-
-def safe_filename(name, extension=None, digest=None, max_length=_MAX_FILENAME_LENGTH):
-  """Creates filename from name and extension ensuring that the final length is within the
-  max_length constraint.
-
-  By default the length is capped to work on most filesystems and the fallback to achieve
-  shortening is a sha1 hash of the proposed name.
- - Raises ValueError if the proposed name is not a simple filename but a file path. - Also raises ValueError when the name is simple but cannot be satisfactorily shortened with the - given digest. - - name: the proposed filename without extension - extension: an optional extension to append to the filename - digest: the digest to fall back on for too-long name, extension concatenations - should - support the hashlib digest api of update(string) and hexdigest - max_length: the maximum desired file name length - """ - if os.path.basename(name) != name: - raise ValueError('Name must be a filename, handed a path: %s' % name) - - ext = extension or '' - filename = name + ext - if len(filename) <= max_length: - return filename - else: - digest = digest or hashlib.sha1() - digest.update(name) - safe_name = digest.hexdigest() + ext - if len(safe_name) > max_length: - raise ValueError('Digest %s failed to produce a filename <= %d ' - 'characters for %s - got %s' % (digest, max_length, filename, safe_name)) - return safe_name diff --git a/src/python/twitter/pants/goal/__init__.py b/src/python/twitter/pants/goal/__init__.py deleted file mode 100644 index 00d35227f..000000000 --- a/src/python/twitter/pants/goal/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import inspect - -from optparse import OptionGroup - -from twitter.pants.base.build_manual import manual -from twitter.pants.tasks import Task - -from .error import GoalError -from .phase import Phase -from .context import Context -from .group import Group -from .run_tracker import RunTracker - - -class Mkflag(object): - """A factory for namespaced flags.""" - - def __init__(self, namespace): - """Creates a new Mkflag that will use the given namespace to prefix the flags it creates. - - namespace: Either a function accepting a separator string that returns a prefix string for the - flag or else a fixed prefix string for all flags. - """ - self._namespace = namespace if callable(namespace) else lambda sep: namespace - - def __call__(self, name, negate=False): - """Creates a prefixed flag with an optional negated prefix. - - name: The simple flag name to be prefixed. - negate: True to prefix the flag with '--no-'. 
-    """
-    return '--%s%s-%s' % ('no-' if negate else '', self._namespace('-'), name)
-
-  def set_bool(self, option, opt_str, _, parser):
-    """An Option callback to parse bool flags that recognizes the --no- negation prefix."""
-    setattr(parser.values, option.dest, not opt_str.startswith("--no"))
-
-
-@manual.builddict()
-class Goal(object):
-  def __init__(self, name, action, group=None, dependencies=None, serialize=True):
-    """
-    :param name: the name of the goal.
-    :param action: the goal action object to invoke this goal.
-    :param dependencies: the names of other goals which must be achieved before invoking this goal.
-    :param serialize: a flag indicating whether or not the action to achieve this goal requires
-      the global lock. If true, the action will block until it can acquire the lock.
-    """
-    self.serialize = serialize
-    self.name = name
-    self.group = group
-    self.dependencies = [Phase(d) for d in dependencies] if dependencies else []
-
-    if type(action) == type and issubclass(action, Task):
-      self._task = action
-    else:
-      args, varargs, keywords, defaults = inspect.getargspec(action)
-      if varargs or keywords or defaults:
-        raise GoalError('Invalid action supplied, cannot accept varargs, keywords or defaults')
-      if len(args) > 2:
-        raise GoalError('Invalid action supplied, must accept 0, 1, or 2 args')
-
-      class FuncTask(Task):
-        def __init__(self, context):
-          Task.__init__(self, context)
-
-          if not args:
-            self.action = lambda targets: action()
-          elif len(args) == 1:
-            self.action = lambda targets: action(self.context)
-          elif len(args) == 2:
-            self.action = lambda targets: action(self.context, targets)
-          else:
-            raise AssertionError('Unexpected fallthrough')
-
-        def execute(self, targets):
-          self.action(targets)
-
-      self._task = FuncTask
-
-  def __repr__(self):
-    return "Goal(%s-%s; %s)" % (self.name, self.group, ','.join(map(str, self.dependencies)))
-
-  @property
-  def task_type(self):
-    return self._task
-
-  def setup_parser(self, phase, parser, args):
-    """Allows a task to add its command line args to the global specification."""
-    def namespace(sep):
-      phase_leader = phase.goals() == [self] or self.name == phase.name
-      return self.name if phase_leader else '%s%s%s' % (phase.name, sep, self.name)
-    mkflag = Mkflag(namespace)
-
-    option_group = OptionGroup(parser, title=namespace(':'))
-    self.task_setup_parser(option_group, args, mkflag)
-    if option_group.option_list:
-      parser.add_option_group(option_group)
-
-  def task_setup_parser(self, group, args, mkflag):
-    """Allows a task to setup a parser.
-    Override this method if you want to initialize the task with more goal data."""
-    self._task.setup_parser(group, args, mkflag)
-
-  def prepare(self, context):
-    """Prepares a Task that can be executed to achieve this goal."""
-    return self._task(context)
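The FuncTask wrapping above means a plain function can be installed as a goal; this sketch (hypothetical functions and goal names) shows two of the three accepted arities:

    # Illustrative sketch only.
    def banner():  # 0-arg action
      print('hello')

    def list_targets(context, targets):  # 2-arg action: (context, targets)
      for target in targets:
        context.log.info(str(target))

    Goal(name='banner', action=banner).install()
    Goal(name='list-targets', action=list_targets).install('report')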
-
-  def install(self, phase=None, first=False, replace=False, before=None, after=None):
-    """Install this goal in the specified phase (or a new phase with the same name as this Goal).
-
-    The placement of the goal in the execution list of the phase defaults to the end but can be
-    influenced by specifying exactly one of the following arguments:
-
-    :param first: Places this goal 1st in the phase's execution list
-    :param replace: Replaces any existing goals in the phase with this goal
-    :param before: Places this goal before the named goal in the phase's execution list
-    :param after: Places this goal after the named goal in the phase's execution list
-    """
-    phase = Phase(phase or self.name)
-    phase.install(self, first, replace, before, after)
-    return phase
-
-
-__all__ = (
-  'Context',
-  'Goal',
-  'GoalError',
-  'Group',
-  'Phase',
-  'RunTracker',
-)
diff --git a/src/python/twitter/pants/goal/aggregated_timings.py b/src/python/twitter/pants/goal/aggregated_timings.py
deleted file mode 100644
index f660d2744..000000000
--- a/src/python/twitter/pants/goal/aggregated_timings.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import os
-
-from collections import defaultdict
-
-from twitter.common.dirutil import safe_mkdir_for
-
-
-class AggregatedTimings(object):
-  """Aggregates timings over multiple invocations of 'similar' work.
-
-  If path is not None, stores the timings in that file. Useful for finding bottlenecks."""
-  def __init__(self, path=None):
-    # Map path -> timing in seconds (a float)
-    self._timings_by_path = defaultdict(float)
-    self._tool_labels = set()
-    self._path = path
-    safe_mkdir_for(self._path)
-
-  def add_timing(self, label, secs, is_tool=False):
-    """Aggregate timings by label.
-
-    secs - a double, so fractional seconds are allowed.
-    is_tool - whether this label represents a tool invocation.
-    """
-    self._timings_by_path[label] += secs
-    if is_tool:
-      self._tool_labels.add(label)
-    # Check existence in case we're a clean-all. We don't want to write anything in that case.
-    if self._path and os.path.exists(os.path.dirname(self._path)):
-      with open(self._path, 'w') as f:
-        for x in self.get_all():
-          f.write('%(label)s: %(timing)s\n' % x)
-
-  def get_all(self):
-    """Returns all the timings, sorted in decreasing order.
-
-    Each value is a dict: {'label': <label>, 'timing': <seconds>, 'is_tool': <bool>}
-    """
-    return [{ 'label': x[0], 'timing': x[1], 'is_tool': x[0] in self._tool_labels}
-            for x in sorted(self._timings_by_path.items(), key=lambda x: x[1], reverse=True)]
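A short usage sketch of the class above (the label and path are hypothetical); note that timings for the same label accumulate:

    # Illustrative sketch only.
    timings = AggregatedTimings(path='out/timings')
    timings.add_timing('compile.javac', 2.5, is_tool=True)
    timings.add_timing('compile.javac', 1.0, is_tool=True)
    assert timings.get_all()[0] == {'label': 'compile.javac', 'timing': 3.5, 'is_tool': True}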
diff --git a/src/python/twitter/pants/goal/artifact_cache_stats.py b/src/python/twitter/pants/goal/artifact_cache_stats.py
deleted file mode 100644
index fa11e0476..000000000
--- a/src/python/twitter/pants/goal/artifact_cache_stats.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-
-from collections import defaultdict, namedtuple
-
-from twitter.common.dirutil import safe_mkdir
-
-
-# Lists of target addresses.
-CacheStat = namedtuple('CacheStat', ['hit_targets', 'miss_targets'])
-
-class ArtifactCacheStats(object):
-  """Tracks the hits and misses in the artifact cache.
-
-  If dir is specified, writes the hits and misses to files in that dir."""
-  def __init__(self, dir=None):
-    def init_stat():
-      return CacheStat([], [])
-    self.stats_per_cache = defaultdict(init_stat)
-    self._dir = dir
-    safe_mkdir(self._dir)
-
-  def add_hit(self, cache_name, tgt):
-    self._add_stat(0, cache_name, tgt)
-
-  def add_miss(self, cache_name, tgt):
-    self._add_stat(1, cache_name, tgt)
-
-  def get_all(self):
-    """Returns the cache stats as a list of dicts."""
-    ret = []
-    for cache_name, stat in self.stats_per_cache.items():
-      ret.append({
-        'cache_name': cache_name,
-        'num_hits': len(stat.hit_targets),
-        'num_misses': len(stat.miss_targets),
-        'hits': stat.hit_targets,
-        'misses': stat.miss_targets
-      })
-    return ret
-
-  # hit_or_miss is the appropriate index in CacheStat, i.e., 0 for hit, 1 for miss.
-  def _add_stat(self, hit_or_miss, cache_name, tgt):
-    self.stats_per_cache[cache_name][hit_or_miss].append(tgt.address.reference())
-    if self._dir and os.path.exists(self._dir):  # Check existence in case of a clean-all.
-      suffix = 'misses' if hit_or_miss else 'hits'
-      with open(os.path.join(self._dir, '%s.%s' % (cache_name, suffix)), 'a') as f:
-        f.write(tgt.address.reference())
-        f.write('\n')
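For orientation, a sketch of how this class is driven (the cache name is hypothetical; some_target stands in for any object with an address.reference() method, e.g. a Target):

    # Illustrative sketch only.
    stats = ArtifactCacheStats('out/cache_stats')
    stats.add_hit('local', some_target)
    stats.add_miss('local', other_target)
    print(stats.get_all())  # one entry for 'local' with num_hits == 1, num_misses == 1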
diff --git a/src/python/twitter/pants/goal/context.py b/src/python/twitter/pants/goal/context.py
deleted file mode 100644
index 88b9ff841..000000000
--- a/src/python/twitter/pants/goal/context.py
+++ /dev/null
@@ -1,293 +0,0 @@
-from __future__ import print_function
-
-import os
-import sys
-
-from collections import defaultdict
-from contextlib import contextmanager
-
-from twitter.common.collections import OrderedSet
-from twitter.common.dirutil import Lock
-from twitter.common.process import ProcessProviderFactory
-from twitter.common.process.process_provider import ProcessProvider
-
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import Target
-from twitter.pants.base.workunit import WorkUnit
-from twitter.pants.goal.products import Products
-from twitter.pants.java.distribution.distribution import Distribution
-from twitter.pants.reporting.report import Report
-from twitter.pants.targets.pants_target import Pants
-from twitter.pants.targets.sources import SourceRoot
-
-
-# Utility definition for grabbing process info for locking.
-def _process_info(pid):
-  try:
-    ps = ProcessProviderFactory.get()
-    ps.collect_set([pid])
-    handle = ps.get_handle(pid)
-    cmdline = handle.cmdline().replace('\0', ' ')
-    return '%d (%s)' % (pid, cmdline)
-  except ProcessProvider.UnknownPidError:
-    return '%d' % pid
-
-
-class Context(object):
-  """Contains the context for a single run of pants.
-
-  Goal implementations can access configuration data from pants.ini and any flags they have exposed
-  here as well as information about the targets involved in the run.
-
-  Advanced uses of the context include adding new targets to it for upstream or downstream goals to
-  operate on and mapping of products a goal creates to the targets the products are associated with.
-  """
-
-  class Log(object):
-    """A logger facade that logs into the pants reporting framework."""
-    def __init__(self, run_tracker):
-      self._run_tracker = run_tracker
-
-    def debug(self, *msg_elements):
-      self._run_tracker.log(Report.DEBUG, *msg_elements)
-
-    def info(self, *msg_elements):
-      self._run_tracker.log(Report.INFO, *msg_elements)
-
-    def warn(self, *msg_elements):
-      self._run_tracker.log(Report.WARN, *msg_elements)
-
-    def error(self, *msg_elements):
-      self._run_tracker.log(Report.ERROR, *msg_elements)
-
-    def fatal(self, *msg_elements):
-      self._run_tracker.log(Report.FATAL, *msg_elements)
-
-  def __init__(self, config, options, run_tracker, target_roots, requested_goals=None,
-               lock=None, log=None, target_base=None):
-    self._config = config
-    self._options = options
-    self.run_tracker = run_tracker
-    self._lock = lock or Lock.unlocked()
-    self._log = log or Context.Log(run_tracker)
-    self._target_base = target_base or Target
-
-    self._state = {}
-    self._products = Products()
-    self._buildroot = get_buildroot()
-    self._java_sysprops = None  # Computed lazily.
-    self.requested_goals = requested_goals or []
-
-    self.replace_targets(target_roots)
-
-  @property
-  def config(self):
-    """Returns a Config object containing the configuration data found in pants.ini."""
-    return self._config
-
-  @property
-  def options(self):
-    """Returns the command line options parsed at startup."""
-    return self._options
-
-  @property
-  def lock(self):
-    """Returns the global pants run lock so a goal can release it if needed."""
-    return self._lock
-
-  @property
-  def log(self):
-    """Returns the preferred logger for goals to use."""
-    return self._log
-
-  @property
-  def products(self):
-    """Returns the Products manager for the current run."""
-    return self._products
-
-  @property
-  def target_roots(self):
-    """Returns the targets specified on the command line.
-
-    This set is strictly a subset of all targets in play for the run as returned by self.targets().
-    Note that for a command line invocation that uses wildcard selectors : or ::, the targets
-    globbed by the wildcards are considered to be target roots.
-    """
-    return self._target_roots
-
-  @property
-  def java_sysprops(self):
-    """The system properties of the JVM we use."""
-    # TODO: In the future we can use these to hermeticize the Java environment rather than relying
-    # on whatever's on the shell's PATH. E.g., you either specify a path to the Java home via a
-    # cmd-line flag or .pantsrc, or we infer one from java.home but verify that the java.version
-    # is a supported version.
-    if self._java_sysprops is None:
-      # TODO(John Sirois): Plumb a sane default distribution through 1 point of control
-      self._java_sysprops = Distribution.cached().system_properties
-    return self._java_sysprops
-
-  @property
-  def java_home(self):
-    """Find the java home for the JVM we use."""
-    # Implementation is a kind-of-insane hack: we run the jvm to get it to emit its
-    # system properties. On some platforms there are so many hard and symbolic links into
-    # the JRE dirs that it's actually quite hard to establish what path to use as the java home,
-    # e.g., for the purpose of rebasing. In practice, this seems to work fine.
-    # Note that for our purposes we take the parent of java.home.
- return os.path.realpath(os.path.dirname(self.java_sysprops['java.home'])) - - @property - def ivy_home(self): - return os.path.realpath(self.config.get('ivy', 'cache_dir')) - - def __str__(self): - return 'Context(id:%s, state:%s, targets:%s)' % (self.id, self.state, self.targets()) - - def submit_foreground_work_and_wait(self, work, workunit_parent=None): - """Returns the pool to which tasks can submit foreground (blocking) work.""" - return self.run_tracker.foreground_worker_pool().submit_work_and_wait( - work, workunit_parent=workunit_parent) - - def submit_background_work_chain(self, work_chain, parent_workunit_name=None): - background_root_workunit = self.run_tracker.get_background_root_workunit() - if parent_workunit_name: - # We have to keep this workunit alive until all its child work is done, so - # we manipulate the context manually instead of using it as a contextmanager. - # This is slightly funky, but the with-context usage is so pervasive and - # useful elsewhere that it's worth the funkiness in this one place. - workunit_parent_ctx = self.run_tracker.new_workunit_under_parent( - name=parent_workunit_name, labels=[WorkUnit.MULTITOOL], parent=background_root_workunit) - workunit_parent = workunit_parent_ctx.__enter__() - done_hook = lambda: workunit_parent_ctx.__exit__(None, None, None) - else: - workunit_parent = background_root_workunit # Run directly under the root. - done_hook = None - self.run_tracker.background_worker_pool().submit_async_work_chain( - work_chain, workunit_parent=workunit_parent, done_hook=done_hook) - - def background_worker_pool(self): - """Returns the pool to which tasks can submit background work.""" - return self.run_tracker.background_worker_pool() - - @contextmanager - def new_workunit(self, name, labels=None, cmd=''): - """Create a new workunit under the calling thread's current workunit.""" - with self.run_tracker.new_workunit(name=name, labels=labels, cmd=cmd) as workunit: - yield workunit - - def acquire_lock(self): - """ Acquire the global lock for the root directory associated with this context. When - a goal requires serialization, it will call this to acquire the lock. - """ - def onwait(pid): - print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr) - return True - if self._lock.is_unlocked(): - runfile = os.path.join(self._buildroot, '.pants.run') - self._lock = Lock.acquire(runfile, onwait=onwait) - - def release_lock(self): - """Release the global lock if it's held. - Returns True if the lock was held before this call. - """ - if self._lock.is_unlocked(): - return False - else: - self._lock.release() - self._lock = Lock.unlocked() - return True - - def is_unlocked(self): - """Whether the global lock object is actively holding the lock.""" - return self._lock.is_unlocked() - - def replace_targets(self, target_roots): - """Replaces all targets in the context with the given roots and their transitive - dependencies. - """ - self._target_roots = list(target_roots) - - self._targets = OrderedSet() - for target in self._target_roots: - self.add_target(target) - self.id = Target.identify(self._targets) - - def add_target(self, target): - """Adds a target and its transitive dependencies to the run context. - - The target is not added to the target roots. 
- """ - def add_targets(tgt): - self._targets.update(tgt for tgt in tgt.resolve() if isinstance(tgt, self._target_base)) - target.walk(add_targets) - - def add_new_target(self, target_base, target_type, *args, **kwargs): - """Creates a new target, adds it to the context and returns it. - - This method ensures the target resolves files against the given target_base, creating the - directory if needed and registering a source root. - """ - if 'derived_from' in kwargs: - derived_from = kwargs.get('derived_from') - del kwargs['derived_from'] - else: - derived_from = None - target = self._create_new_target(target_base, target_type, *args, **kwargs) - self.add_target(target) - if derived_from: - target.derived_from = derived_from - return target - - def _create_new_target(self, target_base, target_type, *args, **kwargs): - if not os.path.exists(target_base): - os.makedirs(target_base) - SourceRoot.register(target_base, target_type) - with ParseContext.temp(target_base): - return target_type(*args, **kwargs) - - def remove_target(self, target): - """Removes the given Target object from the context completely if present.""" - if target in self.target_roots: - self.target_roots.remove(target) - self._targets.discard(target) - - def targets(self, predicate=None): - """Selects targets in-play in this run from the target roots and their transitive dependencies. - - If specified, the predicate will be used to narrow the scope of targets returned. - """ - return filter(predicate, self._targets) - - def dependents(self, on_predicate=None, from_predicate=None): - """Returns a map from targets that satisfy the from_predicate to targets they depend on that - satisfy the on_predicate. - """ - core = set(self.targets(on_predicate)) - dependees = defaultdict(set) - for target in self.targets(from_predicate): - if hasattr(target, 'dependencies'): - for dependency in target.dependencies: - if dependency in core: - dependees[target].add(dependency) - return dependees - - def resolve(self, spec): - """Returns an iterator over the target(s) the given address points to.""" - with ParseContext.temp(): - return Pants(spec).resolve() - - @contextmanager - def state(self, key, default=None): - value = self._state.get(key, default) - yield value - self._state[key] = value - - @contextmanager - def timing(self, label): - if self.timer: - with self.timer.timing(label): - yield - else: - yield diff --git a/src/python/twitter/pants/goal/error.py b/src/python/twitter/pants/goal/error.py deleted file mode 100644 index 40c7a13a5..000000000 --- a/src/python/twitter/pants/goal/error.py +++ /dev/null @@ -1,18 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ==================================================================================================
-
-class GoalError(Exception):
-  """Raised to indicate a goal has failed."""
diff --git a/src/python/twitter/pants/goal/group.py b/src/python/twitter/pants/goal/group.py
deleted file mode 100644
index 48daae84e..000000000
--- a/src/python/twitter/pants/goal/group.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-
-class Group(object):
-  """Delineates the members of a group of targets that are sources for the same product types."""
-
-  def __init__(self, name, predicate):
-    """:param string name: A logical name for this group.
-    :param predicate: A predicate that returns ``True`` if a given target is a member of this
-      group.
-    """
-    self.name = name
-    self.predicate = predicate
-    self.exclusives = None
-
-  def __repr__(self):
-    return "Group(%s,%s)" % (self.name, self.predicate.__name__)
diff --git a/src/python/twitter/pants/goal/initialize_reporting.py b/src/python/twitter/pants/goal/initialize_reporting.py
deleted file mode 100644
index 570f46a7d..000000000
--- a/src/python/twitter/pants/goal/initialize_reporting.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import os
-import sys
-
-from twitter.common.dirutil import safe_mkdir, safe_rmtree
-from twitter.common.lang import Compatibility
-
-from twitter.pants.reporting.plaintext_reporter import PlainTextReporter
-from twitter.pants.reporting.html_reporter import HtmlReporter
-from twitter.pants.reporting.quiet_reporter import QuietReporter
-from twitter.pants.reporting.report import ReportingError, Report
-from twitter.pants.reporting.reporting_server import ReportingServerManager
-
-StringIO = Compatibility.StringIO
-
-
-def initial_reporting(config, run_tracker):
-  """Sets up the initial reporting configuration.
-
-  Will be changed after we parse cmd-line flags.
-  """
-  reports_dir = config.get('reporting', 'reports_dir',
-                           default=os.path.join(config.getdefault('pants_workdir'), 'reports'))
-  link_to_latest = os.path.join(reports_dir, 'latest')
-  if os.path.exists(link_to_latest):
-    os.unlink(link_to_latest)
-
-  run_id = run_tracker.run_info.get_info('id')
-  if run_id is None:
-    raise ReportingError('No run_id set')
-  run_dir = os.path.join(reports_dir, run_id)
-  safe_rmtree(run_dir)
-
-  html_dir = os.path.join(run_dir, 'html')
-  safe_mkdir(html_dir)
-  os.symlink(run_dir, link_to_latest)
-
-  report = Report()
-
-  # Capture initial console reporting into a buffer. We'll do something with it once
-  # we know what the cmd-line flag settings are.
- outfile = StringIO() - capturing_reporter_settings = PlainTextReporter.Settings(outfile=outfile, log_level=Report.INFO, - color=False, indent=True, timing=False, - cache_stats=False) - capturing_reporter = PlainTextReporter(run_tracker, capturing_reporter_settings) - report.add_reporter('capturing', capturing_reporter) - - # Set up HTML reporting. We always want that. - template_dir = config.get('reporting', 'reports_template_dir') - html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO, - html_dir=html_dir, - template_dir=template_dir) - html_reporter = HtmlReporter(run_tracker, html_reporter_settings) - report.add_reporter('html', html_reporter) - - # Add some useful RunInfo. - run_tracker.run_info.add_info('default_report', html_reporter.report_path()) - port = ReportingServerManager.get_current_server_port() - if port: - run_tracker.run_info.add_info('report_url', 'http://localhost:%d/run/%s' % (port, run_id)) - - return report - -def update_reporting(options, is_console_task, run_tracker): - """Updates reporting config once we've parsed cmd-line flags.""" - - # Get any output silently buffered in the old console reporter, and remove it. - old_outfile = run_tracker.report.remove_reporter('capturing').settings.outfile - old_outfile.flush() - buffered_output = old_outfile.getvalue() - old_outfile.close() - - log_level = Report.log_level_from_string(options.log_level or 'info') - color = not options.no_color - timing = options.time - cache_stats = options.time # TODO: Separate flag for this? - - if options.quiet or is_console_task: - console_reporter = QuietReporter(run_tracker, - QuietReporter.Settings(log_level=log_level, color=color)) - else: - # Set up the new console reporter. - settings = PlainTextReporter.Settings(log_level=log_level, outfile=sys.stdout, color=color, - indent=True, timing=timing, cache_stats=cache_stats) - console_reporter = PlainTextReporter(run_tracker, settings) - console_reporter.emit(buffered_output) - console_reporter.flush() - run_tracker.report.add_reporter('console', console_reporter) - - if options.logdir: - # Also write plaintext logs to a file. This is completely separate from the html reports. - safe_mkdir(options.logdir) - run_id = run_tracker.run_info.get_info('id') - outfile = open(os.path.join(options.logdir, '%s.log' % run_id), 'w') - settings = PlainTextReporter.Settings(log_level=log_level, outfile=outfile, color=False, - indent=True, timing=True, cache_stats=True) - logfile_reporter = PlainTextReporter(run_tracker, settings) - logfile_reporter.emit(buffered_output) - logfile_reporter.flush() - run_tracker.report.add_reporter('logfile', logfile_reporter) diff --git a/src/python/twitter/pants/goal/phase.py b/src/python/twitter/pants/goal/phase.py deleted file mode 100644 index 250950324..000000000 --- a/src/python/twitter/pants/goal/phase.py +++ /dev/null @@ -1,175 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -from collections import defaultdict - -from twitter.pants.base.build_manual import manual - -from .error import GoalError - - -class SingletonPhases(type): - phases = dict() - renames = dict() - - def rename(cls, phase, name): - """ - Renames the given phase and ensures all future requests for the old name are mapped to the - given phase instance. - """ - cls.phases.pop(phase.name) - cls.renames[phase.name] = name - phase.name = name - cls.phases[name] = phase - - def __call__(cls, name): - name = cls.renames.get(name, name) - if name not in cls.phases: - cls.phases[name] = super(SingletonPhases, cls).__call__(name) - return cls.phases[name] - -# Python 2.x + 3.x wankery -PhaseBase = SingletonPhases(str('PhaseBase'), (object,), {}) - - -@manual.builddict() -class Phase(PhaseBase): - _goals_by_phase = defaultdict(list) - _phase_by_goal = dict() - - @classmethod - def clear(cls): - """Remove all phases and goals. - - This method is EXCLUSIVELY for use in tests. - """ - cls._goals_by_phase.clear() - cls._phase_by_goal.clear() - - @staticmethod - def of(goal): - return Phase._phase_by_goal[goal] - - @staticmethod - def goals_of_type(goal_class): - """Returns all installed goals of the specified type.""" - return [goal for goal in Phase._phase_by_goal.keys() if isinstance(goal, goal_class)] - - @staticmethod - def setup_parser(parser, args, phases): - def do_setup_parser(phase, setup): - for goal in phase.goals(): - if goal not in setup: - setup.add(goal) - for dep in goal.dependencies: - do_setup_parser(dep, setup) - goal.setup_parser(phase, parser, args) - - setup = set() - for phase in phases: - do_setup_parser(phase, setup) - - @staticmethod - def all(): - """Returns all registered goals as a sorted sequence of phase, goals tuples.""" - return sorted(Phase._goals_by_phase.items(), key=lambda pair: pair[0].name) - - def __init__(self, name): - self.name = name - self.description = None - - def with_description(self, description): - self.description = description - return self - - def install(self, goal, first=False, replace=False, before=None, after=None): - """ - Installs the given goal in this phase. 
The placement of the goal in this phase's execution
-    list defaults to the end but its position can be influenced by specifying exactly one of the
-    following arguments:
-
-    first: Places the goal 1st in the execution list
-    replace: Removes all existing goals in this phase and installs this goal
-    before: Places the goal before the named goal in the execution list
-    after: Places the goal after the named goal in the execution list
-    """
-
-    if (first or replace or before or after) and not (first ^ replace ^ bool(before) ^ bool(after)):
-      raise GoalError('Can only specify one of first, replace, before or after')
-
-    Phase._phase_by_goal[goal] = self
-
-    g = self.goals()
-    if replace:
-      del g[:]
-    g_names = map(lambda goal: goal.name, g)
-    if first:
-      g.insert(0, goal)
-    elif before in g_names:
-      g.insert(g_names.index(before), goal)
-    elif after in g_names:
-      g.insert(g_names.index(after) + 1, goal)
-    else:
-      g.append(goal)
-    return self
-
-  def rename(self, name):
-    """Renames this phase."""
-    PhaseBase.rename(self, name)
-    return self
-
-  def copy_to(self, name):
-    """Copies this phase to the new named phase carrying along goal dependencies and description."""
-    copy = Phase(name)
-    copy.goals().extend(self.goals())
-    copy.description = self.description
-    return copy
-
-  def remove(self, name):
-    """Removes the named goal from this phase's list of goals to attempt."""
-    goals = self.goals()
-    for goal in goals:
-      if goal.name == name:
-        goals.remove(goal)
-        return self
-    raise GoalError('Goal %s does not exist in this phase, members are: %s' % (name, goals))
-
-  class UnsatisfiedDependencyError(GoalError):
-    """Raised when an operation cannot be completed due to an unsatisfied goal dependency."""
-
-  def uninstall(self):
-    """
-    Removes this phase and all its attached goals. Raises Phase.UnsatisfiedDependencyError
-    if the removal cannot be completed due to a dependency.
-    """
-    for phase, goals in Phase._goals_by_phase.items():
-      for goal in goals:
-        for dependee_phase in goal.dependencies:
-          if self is dependee_phase:
-            raise Phase.UnsatisfiedDependencyError(
-              '%s is depended on by %s:%s' % (self.name, phase.name, goal.name))
-    del Phase._goals_by_phase[self]
-
-  def goals(self):
-    return Phase._goals_by_phase[self]
-
-  def serialize(self):
-    return any([x.serialize for x in self.goals()])
-
-  def __repr__(self):
-    return self.name
diff --git a/src/python/twitter/pants/goal/products.py b/src/python/twitter/pants/goal/products.py
deleted file mode 100644
index 20eedb309..000000000
--- a/src/python/twitter/pants/goal/products.py
+++ /dev/null
@@ -1,213 +0,0 @@
-import os
-
-from collections import defaultdict
-
-from twitter.common.collections import OrderedSet
-
-
-class RootedProducts(object):
-  """Products of a build that have a concept of a 'root' directory.
- - E.g., classfiles, under a root package directory.""" - def __init__(self, root): - self._root = root - self._rel_paths = OrderedSet() - - def add_abs_paths(self, abs_paths): - for abs_path in abs_paths: - if not abs_path.startswith(self._root): - raise Exception('%s is not under %s' % (abs_path, self._root)) - self._rel_paths.add(os.path.relpath(abs_path, self._root)) - - def add_rel_paths(self, rel_paths): - self._rel_paths.update(rel_paths) - - def root(self): - return self._root - - def rel_paths(self): - return self._rel_paths - - def abs_paths(self): - for relpath in self._rel_paths: - yield os.path.join(self._root, relpath) - - -class MultipleRootedProducts(object): - """A product consisting of multiple roots, with associated products.""" - def __init__(self): - self._rooted_products_by_root = {} - - def add_rel_paths(self, root, rel_paths): - self._get_products_for_root(root).add_rel_paths(rel_paths) - - def add_abs_paths(self, root, abs_paths): - self._get_products_for_root(root).add_abs_paths(abs_paths) - - def rel_paths(self): - for root, products in self._rooted_products_by_root.items(): - yield root, products.rel_paths() - - def abs_paths(self): - for root, products in self._rooted_products_by_root.items(): - yield root, products.abs_paths() - - def _get_products_for_root(self, root): - if root in self._rooted_products_by_root: - ret = self._rooted_products_by_root[root] - else: - ret = RootedProducts(root) - self._rooted_products_by_root[root] = ret - return ret - - -class Products(object): - """An out-of-band 'dropbox' where tasks can place build product information for later tasks to use. - - Historically, the only type of product was a ProductMapping. However this had some issues, as not - all products fit into the (basedir, [files-under-basedir]) paradigm. Also, ProductMapping docs - and varnames refer to targets, and implicitly expect the mappings to be keyed by a target, however - we sometimes also need to map sources to products. - - So in practice we ended up abusing this in several ways: - 1) Using fake basedirs when we didn't have a basedir concept. - 2) Using objects other than strings as 'product paths' when we had a need to. - 3) Using things other than targets as keys. - - Right now this class is in an intermediate stage, as we transition to a more robust Products concept. - The abuses have been switched to use 'data_products' (see below) which is just a dictionary - of product type (e.g., 'classes_by_target') to arbitrary payload. That payload can be anything, - but the MultipleRootedProducts class is useful for products that do happen to fit into the - (basedir, [files-under-basedir]) paradigm. - - The long-term future of Products is TBD. But we do want to make it easier to reason about - which tasks produce which products and which tasks consume them. Currently it's quite difficult - to match up 'requires' calls to the producers of those requirements, especially when the 'typename' - is in a variable, not a literal. - """ - class ProductMapping(object): - """Maps products of a given type by target. Each product is a map from basedir to a list of - files in that dir. - """ - - def __init__(self, typename): - self.typename = typename - self.by_target = defaultdict(lambda: defaultdict(list)) - - def empty(self): - return len(self.by_target) == 0 - - def add(self, target, basedir, product_paths=None): - """ - Adds a mapping of products for the given target, basedir pair. 
-
-    If product_paths are specified, these will over-write any existing mapping for this target.
-
-    If product_paths is omitted, the current mutable list of mapped products for this target
-    and basedir is returned for appending.
-    """
-    if product_paths is not None:
-      self.by_target[target][basedir].extend(product_paths)
-    else:
-      return self.by_target[target][basedir]
-
-  def has(self, target):
-    """Returns whether we have a mapping for the specified target."""
-    return target in self.by_target
-
-  def get(self, target):
-    """
-    Returns the product mapping for the given target as a tuple of (basedir, products list).
-    Can return None if there is no mapping for the given target.
-    """
-    return self.by_target.get(target)
-
-  def __getitem__(self, target):
-    """
-    Support for subscripting into this mapping. Returns the product mapping for the given target
-    as a map of <basedir> -> <list of files in that dir>.
-    If no mapping exists, returns an empty map whose values default to empty lists. So you
-    can use the result without checking for None.
-    """
-    return self.by_target[target]
-
-  def itermappings(self):
-    """
-    Returns an iterable over all pairs (target, product) in this mapping.
-    Each product is itself a map of <basedir> -> <list of files in that dir>.
-    """
-    return self.by_target.iteritems()
-
-  def keys_for(self, basedir, product):
-    """Returns the set of keys the given mapped product is registered under."""
-    keys = set()
-    for key, mappings in self.by_target.items():
-      for mapped in mappings.get(basedir, []):
-        if product == mapped:
-          keys.add(key)
-          break
-    return keys
-
-  def __repr__(self):
-    return 'ProductMapping(%s) {\n  %s\n}' % (self.typename, '\n  '.join(
-        '%s => %s\n    %s' % (str(target), basedir, outputs)
-        for target, outputs_by_basedir in self.by_target.items()
-        for basedir, outputs in outputs_by_basedir.items()))
-
-  def __init__(self):
-    self.products = {}  # type -> ProductMapping instance.
-    self.predicates_for_type = defaultdict(list)
-
-    self.data_products = {}  # type -> arbitrary object.
-    self.required_data_products = set()
-
-  def require(self, typename, predicate=None):
-    """Registers a requirement that file products of the given type be mapped.
-
-    If a target predicate is supplied, only targets matching the predicate are mapped.
-    """
-    if predicate:
-      self.predicates_for_type[typename].append(predicate)
-    return self.products.setdefault(typename, Products.ProductMapping(typename))
-
-  def isrequired(self, typename):
-    """Returns a predicate that selects targets required for the given type if mappings are required.
-
-    Otherwise returns None.
-    """
-    if typename not in self.products:
-      return None
-    def combine(first, second):
-      return lambda target: first(target) or second(target)
-    return reduce(combine, self.predicates_for_type[typename], lambda target: False)
-
-  def get(self, typename):
-    """Returns a ProductMapping for the given type name."""
-    return self.require(typename)
-
-  def require_data(self, typename):
-    """Registers a requirement that data produced by tasks is required.
-
-    typename: the name of a data product that should be generated.
-    """
-    self.required_data_products.add(typename)
-
-  def is_required_data(self, typename):
-    """Checks if a particular data product is required by any tasks."""
-    return typename in self.required_data_products
-
-  def safe_create_data(self, typename, init_func):
-    """Ensures that a data item is created if it doesn't already exist."""
-    # Basically just an alias for readability.
-    self.get_data(typename, init_func)
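A brief sketch tying the mapping and data-product APIs together (the typenames, target and use() helper are hypothetical; 'exclusives_groups' mirrors the data product seen in ExclusivesIterator above):

    # Illustrative sketch only.
    classes = context.products.require('classes')            # a consumer declares interest
    classes.add(tgt, 'out/classes', ['com/acme/Foo.class'])  # a producer records its output
    for basedir, files in context.products.get('classes')[tgt].items():
      use(basedir, files)  # 'use' is a placeholder for consumer logic

    context.products.safe_create_data('exclusives_groups', init_func=dict)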
-
-  def get_data(self, typename, init_func=None):
-    """Returns a data product.
-
-    If the product isn't found, returns None, unless init_func is set, in which case the product's
-    value is set to the return value of init_func(), and returned."""
-    if typename not in self.data_products:
-      if not init_func:
-        return None
-      self.data_products[typename] = init_func()
-    return self.data_products.get(typename)
diff --git a/src/python/twitter/pants/goal/run_tracker.py b/src/python/twitter/pants/goal/run_tracker.py
deleted file mode 100644
index 340ca7116..000000000
--- a/src/python/twitter/pants/goal/run_tracker.py
+++ /dev/null
@@ -1,299 +0,0 @@
-from contextlib import contextmanager
-import httplib
-import json
-import os
-import sys
-import threading
-import time
-import urllib
-from urlparse import urlparse
-
-from twitter.pants.base.config import Config
-from twitter.pants.base.run_info import RunInfo
-from twitter.pants.base.worker_pool import WorkerPool
-from twitter.pants.base.workunit import WorkUnit
-from twitter.pants.reporting.report import Report
-
-from .aggregated_timings import AggregatedTimings
-from .artifact_cache_stats import ArtifactCacheStats
-
-
-class RunTracker(object):
-  """Tracks and times the execution of a pants run.
-
-  Also manages background work.
-
-  Use like this:
-
-  run_tracker.start(report)
-  with run_tracker.new_workunit('compile'):
-    with run_tracker.new_workunit('java'):
-      ...
-    with run_tracker.new_workunit('scala'):
-      ...
-  run_tracker.end()
-
-  Can track execution against multiple 'roots', e.g., one for the main thread and another for
-  background threads.
-  """
-
-  # The name of the tracking root for the main thread (and the foreground worker threads).
-  DEFAULT_ROOT_NAME = 'main'
-
-  # The name of the tracking root for the background worker threads.
-  BACKGROUND_ROOT_NAME = 'background'
-
-  @classmethod
-  def from_config(cls, config):
-    if not isinstance(config, Config):
-      raise ValueError('Expected a Config object, given %s of type %s' % (config, type(config)))
-    info_dir = RunInfo.dir(config)
-    stats_upload_url = config.getdefault('stats_upload_url', default=None)
-    num_foreground_workers = config.getdefault('num_foreground_workers', default=8)
-    num_background_workers = config.getdefault('num_background_workers', default=8)
-    return cls(info_dir,
-               stats_upload_url=stats_upload_url,
-               num_foreground_workers=num_foreground_workers,
-               num_background_workers=num_background_workers)
-
-  def __init__(self,
-               info_dir,
-               stats_upload_url=None,
-               num_foreground_workers=8,
-               num_background_workers=8):
-    self.run_timestamp = time.time()  # A double, so we get subsecond precision for ids.
-    cmd_line = ' '.join(['./pants'] + sys.argv[1:])
-
-    # run_id is safe for use in paths.
-    millis = (self.run_timestamp * 1000) % 1000
-    run_id = 'pants_run_%s_%d' % \
-             (time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), millis)
-
-    self.info_dir = os.path.join(info_dir, run_id)
-    self.run_info = RunInfo(os.path.join(self.info_dir, 'info'))
-    self.run_info.add_basic_info(run_id, self.run_timestamp)
-    self.run_info.add_info('cmd_line', cmd_line)
-    self.stats_url = stats_upload_url
-
-    # Create a 'latest' symlink, after we add_infos, so we're guaranteed that the file exists.
-    link_to_latest = os.path.join(os.path.dirname(self.info_dir), 'latest')
-    if os.path.exists(link_to_latest):
-      os.unlink(link_to_latest)
-    os.symlink(self.info_dir, link_to_latest)
-
-    # Time spent in a workunit, including its children.
- self.cumulative_timings = AggregatedTimings(os.path.join(self.info_dir, 'cumulative_timings')) - - # Time spent in a workunit, not including its children. - self.self_timings = AggregatedTimings(os.path.join(self.info_dir, 'self_timings')) - - # Hit/miss stats for the artifact cache. - self.artifact_cache_stats = \ - ArtifactCacheStats(os.path.join(self.info_dir, 'artifact_cache_stats')) - - # Number of threads for foreground work. - self._num_foreground_workers = num_foreground_workers - - # Number of threads for background work. - self._num_background_workers = num_background_workers - - # We report to this Report. - self.report = None - - # self._threadlocal.current_workunit contains the current workunit for the calling thread. - # Note that multiple threads may share a name (e.g., all the threads in a pool). - self._threadlocal = threading.local() - - # For main thread work. Created on start(). - self._main_root_workunit = None - - # For concurrent foreground work. Created lazily if needed. - # Associated with the main thread's root workunit. - self._foreground_worker_pool = None - - # For background work. Created lazily if needed. - self._background_worker_pool = None - self._background_root_workunit = None - - self._aborted = False - - def register_thread(self, parent_workunit): - """Register the parent workunit for all work in the calling thread. - - Multiple threads may have the same parent (e.g., all the threads in a pool). - """ - self._threadlocal.current_workunit = parent_workunit - - def is_under_main_root(self, workunit): - """Is the workunit running under the main thread's root.""" - return workunit.root() == self._main_root_workunit - - def start(self, report): - """Start tracking this pants run. - - report: an instance of pants.reporting.Report.""" - self.report = report - self.report.open() - - self._main_root_workunit = WorkUnit(run_tracker=self, parent=None, labels=[], - name=RunTracker.DEFAULT_ROOT_NAME, cmd=None) - self.register_thread(self._main_root_workunit) - self._main_root_workunit.start() - self.report.start_workunit(self._main_root_workunit) - - @contextmanager - def new_workunit(self, name, labels=None, cmd=''): - """Creates a (hierarchical) subunit of work for the purpose of timing and reporting. - - - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'. - - labels: An optional iterable of labels. The reporters can use this to decide how to - display information about this work. - - cmd: An optional longer string representing this work. - E.g., the cmd line of a compiler invocation. - - Use like this: - - with run_tracker.new_workunit(name='compile', labels=[WorkUnit.GOAL]) as workunit: - - - - Note that the outcome will automatically be set to failure if an exception is raised - in a workunit, and to success otherwise, so usually you only need to set the - outcome explicitly if you want to set it to warning. - """ - parent = self._threadlocal.current_workunit - with self.new_workunit_under_parent(name, parent=parent, labels=labels, cmd=cmd) as workunit: - self._threadlocal.current_workunit = workunit - try: - yield workunit - finally: - self._threadlocal.current_workunit = parent - - @contextmanager - def new_workunit_under_parent(self, name, parent, labels=None, cmd=''): - """Creates a (hierarchical) subunit of work for the purpose of timing and reporting. - - - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'. - - parent: The new workunit is created under this parent. 
- - labels: An optional iterable of labels. The reporters can use this to decide how to - display information about this work. - - cmd: An optional longer string representing this work. - E.g., the cmd line of a compiler invocation. - - Task code should not typically call this directly. - """ - workunit = WorkUnit(run_tracker=self, parent=parent, name=name, labels=labels, cmd=cmd) - workunit.start() - try: - self.report.start_workunit(workunit) - yield workunit - except KeyboardInterrupt: - workunit.set_outcome(WorkUnit.ABORTED) - self._aborted = True - raise - except: - workunit.set_outcome(WorkUnit.FAILURE) - raise - else: - workunit.set_outcome(WorkUnit.SUCCESS) - finally: - self.report.end_workunit(workunit) - workunit.end() - - def log(self, level, *msg_elements): - """Log a message against the current workunit.""" - self.report.log(self._threadlocal.current_workunit, level, *msg_elements) - - def upload_stats(self): - """Send timing results to URL specified in pants.ini""" - def error(msg): - # Report aleady closed, so just print error. - print("WARNING: Failed to upload stats. %s" % msg) - - if self.stats_url: - params = { - 'run_info': json.dumps(self.run_info.get_as_dict()), - 'cumulative_timings': json.dumps(self.cumulative_timings.get_all()), - 'self_timings': json.dumps(self.self_timings.get_all()), - 'artifact_cache_stats': json.dumps(self.artifact_cache_stats.get_all()) - } - - headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"} - url = urlparse(self.stats_url) - try: - if url.scheme == 'https': - http_conn = httplib.HTTPSConnection(url.netloc) - else: - http_conn = httplib.HTTPConnection(url.netloc) - http_conn.request('POST', url.path, urllib.urlencode(params), headers) - resp = http_conn.getresponse() - if resp.status != 200: - error("HTTP error code: %d" % resp.status) - except Exception as e: - error("Error: %s" % e) - - def end(self): - """This pants run is over, so stop tracking it. - - Note: If end() has been called once, subsequent calls are no-ops. - """ - if self._background_worker_pool: - if self._aborted: - self.log(Report.INFO, "Aborting background workers.") - self._background_worker_pool.abort() - else: - self.log(Report.INFO, "Waiting for background workers to finish.") - self._background_worker_pool.shutdown() - self.report.end_workunit(self._background_root_workunit) - self._background_root_workunit.end() - - if self._foreground_worker_pool: - if self._aborted: - self.log(Report.INFO, "Aborting foreground workers.") - self._foreground_worker_pool.abort() - else: - self.log(Report.INFO, "Waiting for foreground workers to finish.") - self._foreground_worker_pool.shutdown() - - self.report.end_workunit(self._main_root_workunit) - self._main_root_workunit.end() - - outcome = self._main_root_workunit.outcome() - if self._background_root_workunit: - outcome = min(outcome, self._background_root_workunit.outcome()) - outcome_str = WorkUnit.outcome_string(outcome) - log_level = WorkUnit.choose_for_outcome(outcome, Report.ERROR, Report.ERROR, - Report.WARN, Report.INFO, Report.INFO) - self.log(log_level, outcome_str) - - if self.run_info.get_info('outcome') is None: - try: - self.run_info.add_info('outcome', outcome_str) - except IOError: - pass # If the goal is clean-all then the run info dir no longer exists... - - self.report.close() - self.upload_stats() - - def foreground_worker_pool(self): - if self._foreground_worker_pool is None: # Initialize lazily. 
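The `try/except/else/finally` ladder in `new_workunit_under_parent` above is what backs the docstring's promise that outcomes are set automatically: `KeyboardInterrupt` becomes `ABORTED`, any other exception becomes `FAILURE`, and a clean exit becomes `SUCCESS`. A stripped-down sketch of just that control flow (the outcome constants are stand-ins for the real ones on `WorkUnit`, and the original uses a bare `except:` rather than `except Exception`):

``` python
from contextlib import contextmanager

ABORTED, FAILURE, SUCCESS = 'aborted', 'failure', 'success'  # stand-ins for WorkUnit constants

@contextmanager
def outcome_tracking(workunit):
  """Sets workunit['outcome'] automatically, mirroring new_workunit_under_parent."""
  try:
    yield workunit
  except KeyboardInterrupt:
    workunit['outcome'] = ABORTED
    raise
  except Exception:
    workunit['outcome'] = FAILURE
    raise
  else:
    workunit['outcome'] = SUCCESS


wu = {}
try:
  with outcome_tracking(wu):
    raise RuntimeError('compile failed')
except RuntimeError:
  pass
assert wu['outcome'] == FAILURE
```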
- self._foreground_worker_pool = WorkerPool(parent_workunit=self._main_root_workunit, - run_tracker=self, - num_workers=self._num_foreground_workers) - return self._foreground_worker_pool - - def get_background_root_workunit(self): - if self._background_root_workunit is None: - self._background_root_workunit = WorkUnit(run_tracker=self, parent=None, labels=[], - name='background', cmd=None) - self._background_root_workunit.start() - self.report.start_workunit(self._background_root_workunit) - return self._background_root_workunit - - - def background_worker_pool(self): - if self._background_worker_pool is None: # Initialize lazily. - self._background_worker_pool = WorkerPool(parent_workunit=self.get_background_root_workunit(), - run_tracker=self, - num_workers=self._num_background_workers) - return self._background_worker_pool diff --git a/src/python/twitter/pants/ivy/__init__.py b/src/python/twitter/pants/ivy/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/ivy/bootstrapper.py b/src/python/twitter/pants/ivy/bootstrapper.py deleted file mode 100644 index 0471f8375..000000000 --- a/src/python/twitter/pants/ivy/bootstrapper.py +++ /dev/null @@ -1,200 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import hashlib -import os -import shutil - -from twitter.common import log -from twitter.common.contextutil import temporary_file -from twitter.common.dirutil import safe_delete, touch -from twitter.common.quantity import Amount, Time - -from twitter.pants.base.config import Config -from twitter.pants.ivy.ivy import Ivy -from twitter.pants.net.http.fetcher import Fetcher - - -class Bootstrapper(object): - """Bootstraps a working ivy resolver. - - By default a working resolver will be bootstrapped from maven central and it will use standard - public jar repositories and a standard ivy local cache directory to execute resolve operations. - - A combination of site configuration options and environment variables can be used to override this - default setup. - - By default ivy will be bootstrapped from a stable ivy jar version found in maven central, but - this can be over-ridden with the ``ivy / bootstrap_jar_url`` config option. Additionally the - bootstrapping will use a connect/read timeout of 1 second by default, but this can be raised by - specifying a ``ivy / bootstrap_fetch_timeout_secs`` config value. - - After bootstrapping, ivy will re-resolve itself. By default it does this via maven central, but - a custom ivy tool classpath can be specified by using the ``ivy / ivy_profile`` option to point to - a custom ivy profile ivy.xml. 
This can be useful to upgrade ivy to a version released after pants - or else mix in auxiliary jars that provide ivy plugins. - - Finally, by default the ivysettings.xml embedded in the ivy jar will be used in conjunction with - the default ivy local cache directory of ~/.ivy2/cache. To specify custom values for these you - can either provide ``ivy / ivy_settings`` and ``ivy / cache_dir`` config values or supply these - values via the ``PANTS_IVY_SETTINGS_XML`` and ``PANTS_IVY_CACHE_DIR`` environment variables - respectively. The environment variables will trump config values if present. - """ - - class Error(Exception): - """Indicates an error bootstrapping an ivy classpath.""" - - _DEFAULT_VERSION = '2.3.0' - _DEFAULT_URL = ('http://repo1.maven.org/maven2/' - 'org/apache/ivy/ivy/' - '%(version)s/ivy-%(version)s.jar' % {'version': _DEFAULT_VERSION}) - - _INSTANCE = None - - @classmethod - def instance(cls): - """Returns the default global ivy bootstrapper.""" - if cls._INSTANCE is None: - cls._INSTANCE = cls() - return cls._INSTANCE - - @classmethod - def default_ivy(cls, java_executor=None, bootstrap_workunit_factory=None): - """Returns an Ivy instance using the default global bootstrapper. - - By default runs ivy via a subprocess java executor. - - :param java_executor: the optional java executor to use - :param bootstrap_workunit_factory: the optional workunit to bootstrap under. - :returns: an Ivy instance. - :raises: Bootstrapper.Error if the default ivy instance could not be bootstrapped - """ - return cls.instance().ivy(java_executor=java_executor, - bootstrap_workunit_factory=bootstrap_workunit_factory) - - def __init__(self): - """Creates an ivy bootstrapper.""" - self._config = Config.load() - self._bootstrap_jar_url = self._config.get('ivy', 'bootstrap_jar_url', - default=self._DEFAULT_URL) - self._timeout = Amount(self._config.getint('ivy', 'bootstrap_fetch_timeout_secs', default=1), - Time.SECONDS) - self._version_or_ivyxml = self._config.get('ivy', 'ivy_profile', default=self._DEFAULT_VERSION) - self._classpath = None - - def ivy(self, java_executor=None, bootstrap_workunit_factory=None): - """Returns an ivy instance bootstrapped by this bootstrapper. - - :param java_executor: the optional java executor to use - :param bootstrap_workunit_factory: the optional workunit to bootstrap under. - :raises: Bootstrapper.Error if ivy could not be bootstrapped - """ - return Ivy(self._get_classpath(java_executor, bootstrap_workunit_factory), - java_executor=java_executor, - ivy_settings=self._ivy_settings, - ivy_cache_dir=self.ivy_cache_dir) - - def _get_classpath(self, executor, workunit_factory): - """Returns the bootstrapped ivy classpath as a list of jar paths. - - :raises: Bootstrapper.Error if the classpath could not be bootstrapped - """ - if not self._classpath: - self._classpath = self._bootstrap_ivy_classpath(executor, workunit_factory) - return self._classpath - - @property - def _ivy_settings(self): - """Returns the bootstrapped ivysettings.xml path. - - By default the ivy.ivy_settings value found in pants.ini but can be overridden by via the - PANTS_IVY_SETTINGS_XML environment variable. If neither is specified defaults to ivy's built - in default ivysettings.xml of standard public resolvers. - """ - return os.getenv('PANTS_IVY_SETTINGS_XML') or self._config.get('ivy', 'ivy_settings') - - @property - def ivy_cache_dir(self): - """Returns the bootstrapped ivy cache dir. 
- - By default the ivy.cache_dir value found in pants.ini but can be overridden via the - PANTS_IVY_CACHE_DIR environment variable. If neither is specified defaults to ivy's built - in default cache dir; ie: ~/.ivy2/cache. - """ - return (os.getenv('PANTS_IVY_CACHE_DIR') - or self._config.get('ivy', 'cache_dir', default=os.path.expanduser('~/.ivy2/cache'))) - - def _bootstrap_ivy_classpath(self, executor, workunit_factory, retry=True): - # TODO(John Sirois): Extract a ToolCache class to control the path structure: - # https://jira.twitter.biz/browse/DPB-283 - ivy_bootstrap_dir = \ - os.path.join(self._config.getdefault('pants_bootstrapdir'), 'tools', 'jvm', 'ivy') - - digest = hashlib.sha1() - if os.path.isfile(self._version_or_ivyxml): - with open(self._version_or_ivyxml) as fp: - digest.update(fp.read()) - else: - digest.update(self._version_or_ivyxml) - classpath = os.path.join(ivy_bootstrap_dir, '%s.classpath' % digest.hexdigest()) - - if not os.path.exists(classpath): - ivy = self._bootstrap_ivy(os.path.join(ivy_bootstrap_dir, 'bootstrap.jar')) - args = ['-confs', 'default', '-cachepath', classpath] - if os.path.isfile(self._version_or_ivyxml): - args.extend(['-ivy', self._version_or_ivyxml]) - else: - args.extend(['-dependency', 'org.apache.ivy', 'ivy', self._version_or_ivyxml]) - - try: - ivy.execute(args=args, executor=executor, - workunit_factory=workunit_factory, workunit_name='ivy-bootstrap') - except ivy.Error as e: - safe_delete(classpath) - raise self.Error('Failed to bootstrap an ivy classpath! %s' % e) - - with open(classpath) as fp: - cp = fp.read().strip().split(os.pathsep) - if not all(map(os.path.exists, cp)): - safe_delete(classpath) - if retry: - return self._bootstrap_ivy_classpath(executor, workunit_factory, retry=False) - raise self.Error('Ivy bootstrapping failed - invalid classpath: %s' % ':'.join(cp)) - return cp - - def _bootstrap_ivy(self, bootstrap_jar_path): - if not os.path.exists(bootstrap_jar_path): - with temporary_file() as bootstrap_jar: - fetcher = Fetcher() - checksummer = fetcher.ChecksumListener(digest=hashlib.sha1()) - try: - log.info('\nDownloading %s' % self._bootstrap_jar_url) - # TODO: Capture the stdout of the fetcher, instead of letting it output - # to the console directly. - fetcher.download(self._bootstrap_jar_url, - listener=fetcher.ProgressListener().wrap(checksummer), - path_or_fd=bootstrap_jar, - timeout=self._timeout) - log.info('sha1: %s' % checksummer.checksum) - bootstrap_jar.close() - touch(bootstrap_jar_path) - shutil.move(bootstrap_jar.name, bootstrap_jar_path) - except fetcher.Error as e: - raise self.Error('Problem fetching the ivy bootstrap jar! %s' % e) - - return Ivy(bootstrap_jar_path, - ivy_settings=self._ivy_settings, - ivy_cache_dir=self.ivy_cache_dir) diff --git a/src/python/twitter/pants/ivy/ivy.py b/src/python/twitter/pants/ivy/ivy.py deleted file mode 100644 index cfd8e39f0..000000000 --- a/src/python/twitter/pants/ivy/ivy.py +++ /dev/null @@ -1,96 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
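`_bootstrap_ivy_classpath` above keys its cached resolve on a sha1 digest: the digest of the profile file's contents when `ivy_profile` points at an ivy.xml, otherwise the digest of the version string, so editing the profile naturally invalidates the cache. That keying scheme in isolation (a sketch; the `.encode` call is a Python 3 adjustment to the Python 2 original, and `cache_dir` is an illustrative parameter):

``` python
import hashlib
import os

def classpath_cache_file(cache_dir, version_or_ivyxml):
  """Returns the cache-file path keyed on the resolve inputs, as above."""
  digest = hashlib.sha1()
  if os.path.isfile(version_or_ivyxml):
    # Hash the ivy.xml *contents*, not its path, so edits bust the cache.
    with open(version_or_ivyxml, 'rb') as fp:
      digest.update(fp.read())
  else:
    digest.update(version_or_ivyxml.encode('utf-8'))
  return os.path.join(cache_dir, '%s.classpath' % digest.hexdigest())


# e.g. classpath_cache_file('/tmp/ivy-bootstrap', '2.3.0')
#      -> '/tmp/ivy-bootstrap/<sha1-of-"2.3.0">.classpath'
```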
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.common.collections import maybe_list -from twitter.common.lang import Compatibility - -from twitter.pants.java.executor import Executor, SubprocessExecutor -from twitter.pants.java import util - - -class Ivy(object): - """Encapsulates the ivy cli taking care of the basic invocation letting you just worry about the - args to pass to the cli itself. - """ - - class Error(Exception): - """Indicates an error executing an ivy command.""" - - def __init__(self, classpath, java_executor=None, ivy_settings=None, ivy_cache_dir=None): - """Configures an ivy wrapper for the ivy distribution at the given classpath.""" - - self._classpath = maybe_list(classpath) - - self._java = java_executor or SubprocessExecutor() - if not isinstance(self._java, Executor): - raise ValueError('java_executor must be an Executor instance, given %s of type %s' - % (self._java, type(self._java))) - - self._ivy_settings = ivy_settings - if self._ivy_settings and not isinstance(self._ivy_settings, Compatibility.string): - raise ValueError('ivy_settings must be a string, given %s of type %s' - % (self._ivy_settings, type(self._ivy_settings))) - - self._ivy_cache_dir = ivy_cache_dir - if self._ivy_cache_dir and not isinstance(self._ivy_cache_dir, Compatibility.string): - raise ValueError('ivy_cache_dir must be a string, given %s of type %s' - % (self._ivy_cache_dir, type(self._ivy_cache_dir))) - - @property - def ivy_settings(self): - """Returns the ivysettings.xml path used by this `Ivy` instance.""" - return self._ivy_settings - - @property - def ivy_cache_dir(self): - """Returns the ivy cache dir used by this `Ivy` instance.""" - return self._ivy_cache_dir - - def execute(self, jvm_options=None, args=None, executor=None, - workunit_factory=None, workunit_name=None, workunit_labels=None): - """Executes the ivy commandline client with the given args. - - Raises Ivy.Error if the command fails for any reason. - """ - runner = self.runner(jvm_options=jvm_options, args=args, executor=executor) - try: - result = util.execute_runner(runner, workunit_factory, workunit_name, workunit_labels) - if result != 0: - raise self.Error('Ivy command failed with exit code %d%s' - % (result, ': ' + ' '.join(args) if args else '')) - except self._java.Error as e: - raise self.Error('Problem executing ivy: %s' % e) - - def runner(self, jvm_options=None, args=None, executor=None): - """Creates an ivy commandline client runner for the given args.""" - args = args or [] - executor = executor or self._java - if not isinstance(executor, Executor): - raise ValueError('The executor argument must be an Executor instance, given %s of type %s' - % (executor, type(executor))) - - if self._ivy_cache_dir and '-cache' not in args: - # TODO(John Sirois): Currently this is a magic property to support hand-crafted in - # ivysettings.xml. Ideally we'd support either simple -caches or these hand-crafted cases - # instead of just hand-crafted. 
Clean this up by taking over ivysettings.xml and generating - # it from BUILD constructs. - jvm_options = ['-Divy.cache.dir=%s' % self._ivy_cache_dir] + (jvm_options or []) - - if self._ivy_settings and '-settings' not in args: - args = ['-settings', self._ivy_settings] + args - - return executor.runner(classpath=self._classpath, main='org.apache.ivy.Main', - jvm_options=jvm_options, args=args) diff --git a/src/python/twitter/pants/java/.gitignore b/src/python/twitter/pants/java/.gitignore deleted file mode 100644 index f169b97bc..000000000 --- a/src/python/twitter/pants/java/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!/bin/ diff --git a/src/python/twitter/pants/java/__init__.py b/src/python/twitter/pants/java/__init__.py deleted file mode 100644 index 24684ba62..000000000 --- a/src/python/twitter/pants/java/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/src/python/twitter/pants/java/distribution/README.md b/src/python/twitter/pants/java/distribution/README.md deleted file mode 100644 index a85051a1b..000000000 --- a/src/python/twitter/pants/java/distribution/README.md +++ /dev/null @@ -1,9 +0,0 @@ -This package contains the resources file SystemProperties.class compiled from SystemProperties.java -with: - -``` console -javac -source 1.2 -target 1.1 SystemProperties.java -``` - -This is used by the python Distribution class in this package to test the java distribution version -in a controlled manner as well as provide the java system properties as a python dict. diff --git a/src/python/twitter/pants/java/distribution/SystemProperties.class b/src/python/twitter/pants/java/distribution/SystemProperties.class deleted file mode 100644 index 5da4b9d4d..000000000 Binary files a/src/python/twitter/pants/java/distribution/SystemProperties.class and /dev/null differ diff --git a/src/python/twitter/pants/java/distribution/SystemProperties.java b/src/python/twitter/pants/java/distribution/SystemProperties.java deleted file mode 100644 index 347e749b4..000000000 --- a/src/python/twitter/pants/java/distribution/SystemProperties.java +++ /dev/null @@ -1,34 +0,0 @@ -// ================================================================================================= -// Copyright 2013 Twitter, Inc. -// ------------------------------------------------------------------------------------------------- -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this work except in compliance with the License. 
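The tail of `Ivy.runner` above fixes the invocation contract: the cache dir rides along as a JVM system property (`-Divy.cache.dir=...`) so hand-crafted ivysettings.xml files can reference it, while the settings file is an ordinary `-settings <path>` CLI flag, and each is injected only when the caller has not already supplied it. A condensed sketch of that argument assembly (the direct `['java']` prefix is a simplification; the real code delegates to an `Executor`):

``` python
import os

def assemble_ivy_invocation(classpath, args, ivy_settings=None, ivy_cache_dir=None,
                            jvm_options=None):
  """Builds an argv for org.apache.ivy.Main, mirroring Ivy.runner above."""
  args = list(args or [])
  jvm_options = list(jvm_options or [])
  if ivy_cache_dir and '-cache' not in args:
    jvm_options = ['-Divy.cache.dir=%s' % ivy_cache_dir] + jvm_options
  if ivy_settings and '-settings' not in args:
    args = ['-settings', ivy_settings] + args
  return (['java'] + jvm_options
          + ['-cp', os.pathsep.join(classpath), 'org.apache.ivy.Main'] + args)


cmd = assemble_ivy_invocation(['/tmp/ivy-2.3.0.jar'], ['-confs', 'default'],
                              ivy_settings='/etc/ivysettings.xml',
                              ivy_cache_dir=os.path.expanduser('~/.ivy2/cache'))
```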
-// You may obtain a copy of the License in the LICENSE file, or at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// ================================================================================================= - -import java.util.Enumeration; -import java.util.Properties; - -/** - * Emits all the java system properties for the current jvm to standard out as [key]=[value] pairs, - * one per line. - */ -public class SystemProperties { - public static void main(String[] args) { - Properties properties = System.getProperties(); - Enumeration keys = properties.propertyNames(); - while(keys.hasMoreElements()) { - String key = (String) keys.nextElement(); - String value = properties.getProperty(key); - System.out.println(key + "=" + value); - } - } -} diff --git a/src/python/twitter/pants/java/distribution/__init__.py b/src/python/twitter/pants/java/distribution/__init__.py deleted file mode 100644 index 05494d9f3..000000000 --- a/src/python/twitter/pants/java/distribution/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from .distribution import Distribution - -__all__ = ( - 'Distribution' -) diff --git a/src/python/twitter/pants/java/distribution/distribution.py b/src/python/twitter/pants/java/distribution/distribution.py deleted file mode 100644 index 449395616..000000000 --- a/src/python/twitter/pants/java/distribution/distribution.py +++ /dev/null @@ -1,253 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from contextlib import contextmanager - -import os -import pkgutil -import subprocess - -from twitter.common import log -from twitter.common.contextutil import temporary_dir -from twitter.common.lang import Compatibility - -from twitter.pants.base.revision import Revision - - -class Distribution(object): - """Represents a java distribution - either a JRE or a JDK installed on the local system. - - In particular provides access to the distribution's binaries; ie: java while ensuring basic - constraints are met. For example a minimum version can be specified if you know need to compile - source code or run bytecode that exercise features only available in that version forward. - """ - - class Error(Exception): - """Indicates an invalid java distribution.""" - - _CACHE = {} - - @classmethod - def cached(cls, minimum_version=None, jdk=False): - def scan_constraint_match(): - for dist in cls._CACHE.values(): - if minimum_version and dist.version < minimum_version: - continue - if jdk and not dist.jdk: - continue - return dist - - key = (minimum_version, jdk) - dist = cls._CACHE.get(key) - if not dist: - dist = scan_constraint_match() - if not dist: - dist = cls.locate(minimum_version=minimum_version, jdk=jdk) - cls._CACHE[key] = dist - return dist - - @classmethod - def locate(cls, minimum_version=None, jdk=False): - """Finds a java distribution that meets any given constraints and returns it. - - First looks in JDK_HOME and JAVA_HOME if defined falling back to a search on the PATH. - Raises Distribution.Error if no suitable java distribution could be found. - """ - def home_bin_path(home_env_var): - home = os.environ.get(home_env_var) - return os.path.join(home, 'bin') if home else None - - def search_path(): - yield home_bin_path('JDK_HOME') - yield home_bin_path('JAVA_HOME') - path = os.environ.get('PATH') - if path: - for p in path.strip().split(os.pathsep): - yield p - - for path in filter(None, search_path()): - try: - dist = cls(path, minimum_version=minimum_version, jdk=jdk) - dist.validate() - log.debug('Located %s for constraints: minimum_version' - ' %s, jdk %s' % (dist, minimum_version, jdk)) - return dist - except (ValueError, cls.Error): - pass - - raise cls.Error('Failed to locate a %s distribution with minimum_version %s' - % ('JDK' if jdk else 'JRE', minimum_version)) - - @staticmethod - def _parse_java_version(version): - # Java version strings have been well defined since release 1.3.1 as defined here: - # http://www.oracle.com/technetwork/java/javase/versioning-naming-139433.html - # These version strings comply with semver except that the traditional pre-release semver - # slot (the 4th) can be delimited by an _ in the case of update releases of the jdk. - # We accomodate that difference here. - return Revision.semver(version.replace('_', '-')) - - @staticmethod - def _is_executable(path): - return os.path.isfile(path) and os.access(path, os.X_OK) - - def __init__(self, bin_path='/usr/bin', minimum_version=None, jdk=False): - """Creates a distribution wrapping the given bin_path. 
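`_parse_java_version` above rests on one observation, spelled out in its comment: JDK update releases like `1.6.0_24` are semver-compliant except that the pre-release slot is delimited with `_`, so a single `replace('_', '-')` normalizes them for `Revision.semver`. A simplified stand-in for that parse (integer tuples instead of the real `Revision` object) that is still enough to order update releases:

``` python
def parse_java_version(version):
  """Normalizes '1.6.0_24'-style strings into comparable integer tuples."""
  normalized = version.replace('_', '-').replace('-', '.')
  return tuple(int(part) for part in normalized.split('.') if part.isdigit())


assert parse_java_version('1.6.0_24') < parse_java_version('1.7.0_25')
assert parse_java_version('1.7.0_25') < parse_java_version('1.7.0_67')
```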
- - :param string bin_path: the path to the java distributions bin dir - :param minimum_version: a modified semantic version string or else a Revision object - :param bool jdk: ``True`` to require the distribution be a JDK vs a JRE - """ - - if not os.path.isdir(bin_path): - raise ValueError('The specified distribution path is invalid: %s' % bin_path) - self._bin_path = bin_path - - if isinstance(minimum_version, Compatibility.string): - minimum_version = self._parse_java_version(minimum_version) - if minimum_version and not isinstance(minimum_version, Revision): - raise ValueError('minimum_version must be a string or a Revision object,' - ' given: %s' % minimum_version) - self._minimum_version = minimum_version - - self._jdk = jdk - - self._is_jdk = False - self._system_properties = None - self._version = None - self._validated_binaries = {} - - @property - def jdk(self): - self.validate() - return self._is_jdk - - @property - def system_properties(self): - """Returns a dict containing the system properties of this java distribution.""" - return dict(self._get_system_properties(self.java)) - - @property - def version(self): - """Returns the distribution version. - - Raises Distribution.Error if this distribution is not valid according to the configured - constraints. - """ - return self._get_version(self.java) - - @property - def home(self): - """Returns the distribution JAVA_HOME.""" - return self._get_system_properties(self.java)['java.home'] - - @property - def java(self): - """Returns the path to this distribution's java command. - - If this distribution has no valid java command raises Distribution.Error. - """ - return self.binary('java') - - def binary(self, name): - """Returns the path to the command of the given name for this distribution. - - For example: :: - - >>> d = Distribution() - >>> jar = d.binary('jar') - >>> jar - '/usr/bin/jar' - >>> - - If this distribution has no valid command of the given name raises Distribution.Error. - """ - if not isinstance(name, Compatibility.string): - raise ValueError('name must be a binary name, given %s of type %s' % (name, type(name))) - self.validate() - return self._validated_executable(name) - - def validate(self): - """Validates this distribution against its configured constraints. - - Raises Distribution.Error if this distribution is not valid according to the configured - constraints. 
- """ - if self._validated_binaries: - return - - with self._valid_executable('java') as java: - if self._minimum_version: - version = self._get_version(java) - if version < self._minimum_version: - raise self.Error('The java distribution at %s is too old; expecting at least %s and' - ' got %s' % (java, self._minimum_version, version)) - - try: - self._validated_executable('javac') # Calling purely for the check and cache side effects - self._is_jdk = True - except self.Error: - if self._jdk: - raise - - def _get_version(self, java): - if not self._version: - self._version = self._parse_java_version(self._get_system_properties(java)['java.version']) - return self._version - - def _get_system_properties(self, java): - if not self._system_properties: - with temporary_dir() as classpath: - with open(os.path.join(classpath, 'SystemProperties.class'), 'w+') as fp: - fp.write(pkgutil.get_data(__name__, 'SystemProperties.class')) - cmd = [java, '-cp', classpath, 'SystemProperties'] - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = process.communicate() - if process.returncode != 0: - raise self.Error('Failed to determine java system properties for %s with %s - exit code' - ' %d: %s' % (java, ' '.join(cmd), process.returncode, stderr)) - - props = {} - for line in stdout.split(os.linesep): - key, _, val = line.partition('=') - props[key] = val - self._system_properties = props - - return self._system_properties - - def _validate_executable(self, name): - exe = os.path.join(self._bin_path, name) - if not self._is_executable(exe): - raise self.Error('Failed to locate the %s executable, %s does not appear to be a' - ' valid %s distribution' % (name, self, 'JDK' if self._jdk else 'JRE')) - return exe - - def _validated_executable(self, name): - exe = self._validated_binaries.get(name) - if not exe: - exe = self._validate_executable(name) - self._validated_binaries[name] = exe - return exe - - @contextmanager - def _valid_executable(self, name): - exe = self._validate_executable(name) - yield exe - self._validated_binaries[name] = exe - - def __repr__(self): - return 'Distribution(%r, minimum_version=%r, jdk=%r)' % (self._bin_path, self._minimum_version, - self._jdk) diff --git a/src/python/twitter/pants/java/executor.py b/src/python/twitter/pants/java/executor.py deleted file mode 100644 index d479afe76..000000000 --- a/src/python/twitter/pants/java/executor.py +++ /dev/null @@ -1,188 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from abc import abstractmethod, abstractproperty -from contextlib import contextmanager - -import os -import subprocess - -from twitter.common import log -from twitter.common.collections import maybe_list -from twitter.common.contextutil import environment_as -from twitter.common.lang import AbstractClass, Compatibility - -from twitter.pants.java.distribution import Distribution - - -class Executor(AbstractClass): - """Executes java programs.""" - - @staticmethod - def _scrub_args(classpath, main, jvm_options, args): - classpath = maybe_list(classpath) - if not isinstance(main, Compatibility.string) or not main: - raise ValueError('A non-empty main classname is required, given: %s' % main) - jvm_options = maybe_list(jvm_options or ()) - args = maybe_list(args or ()) - return classpath, main, jvm_options, args - - class Error(Exception): - """Indicates an error launching a java program.""" - - class Runner(object): - """A re-usable executor that can run a configured java command line.""" - - @abstractproperty - def executor(self): - """Returns the executor this runner uses to run itself.""" - - @abstractproperty - def cmd(self): - """Returns a string representation of the command that will be run.""" - - @abstractmethod - def run(self, stdout=None, stderr=None): - """Runs the configured java command. - - If there is a problem executing tha java program subclasses should raise Executor.Error. - Its guaranteed that all arguments are valid as documented in `execute` - - :param stdout: An optional stream to pump stdout to; defaults to `sys.stdout`. - :param stderr: An optional stream to pump stderr to; defaults to `sys.stderr`. - """ - - def __init__(self, distribution=None): - """Constructs an Executor that can be used to launch java programs. - - :param distribution: an optional validated java distribution to use when launching java - programs - """ - if distribution: - if not isinstance(distribution, Distribution): - raise ValueError('A valid distribution is required, given: %s' % distribution) - distribution.validate() - else: - distribution = Distribution.cached() - - self._distribution = distribution - - def runner(self, classpath, main, jvm_options=None, args=None): - """Returns an `Executor.Runner` for the given java command.""" - return self._runner(*self._scrub_args(classpath, main, jvm_options, args)) - - def execute(self, classpath, main, jvm_options=None, args=None, stdout=None, stderr=None): - """Launches the java program defined by the classpath and main. - - :param list classpath: the classpath for the java program - :param string main: the fully qualified class name of the java program's entry point - :param list jvm_options: an optional sequence of options for the underlying jvm - :param list args: an optional sequence of args to pass to the java program - - Returns the exit code of the java program. - Raises Executor.Error if there was a problem launching java itself. 
- """ - executor = self.runner(classpath=classpath, main=main, jvm_options=jvm_options, args=args) - return executor.run(stdout=stdout, stderr=stderr) - - @abstractmethod - def _runner(self, classpath, main, jvm_options, args): - """Subclasses should return a `Runner` that can execute the given java main.""" - - def _create_command(self, classpath, main, jvm_options, args): - cmd = [self._distribution.java] - cmd.extend(jvm_options) - cmd.extend(['-cp', os.pathsep.join(classpath), main]) - cmd.extend(args) - return cmd - - -class CommandLineGrabber(Executor): - """Doesn't actually execute anything, just captures the cmd line.""" - def __init__(self, distribution=None): - super(CommandLineGrabber, self).__init__(distribution=distribution) - self._command = None # Initialized when we run something. - - def _runner(self, classpath, main, jvm_options, args): - self._command = self._create_command(classpath, main, jvm_options, args) - class Runner(self.Runner): - @property - def executor(_): - return self - - @property - def cmd(_): - return ' '.join(self._command) - - def run(_, stdout=None, stderr=None): - return 0 - return Runner() - - @property - def cmd(self): - return self._command - - -class SubprocessExecutor(Executor): - """Executes java programs by launching a jvm in a subprocess.""" - - def __init__(self, distribution=None, scrub_classpath=True): - super(SubprocessExecutor, self).__init__(distribution=distribution) - self._scrub_classpath = scrub_classpath - - def _runner(self, classpath, main, jvm_options, args): - command = self._create_command(classpath, main, jvm_options, args) - - class Runner(self.Runner): - @property - def executor(_): - return self - - @property - def cmd(_): - return ' '.join(command) - - def run(_, stdout=None, stderr=None): - return self._spawn(command, stdout=stdout, stderr=stderr).wait() - - return Runner() - - def spawn(self, classpath, main, jvm_options=None, args=None, **subprocess_args): - """Spawns the java program passing any extra subprocess kwargs on to subprocess.Popen. - - Returns the Popen process object handle to the spawned java program subprocess. - """ - cmd = self._create_command(*self._scrub_args(classpath, main, jvm_options, args)) - return self._spawn(cmd, **subprocess_args) - - def _spawn(self, cmd, **subprocess_args): - with self._maybe_scrubbed_classpath(): - log.debug('Executing: %s' % ' '.join(cmd)) - try: - return subprocess.Popen(cmd, **subprocess_args) - except OSError as e: - raise self.Error('Problem executing %s: %s' % (self._distribution.java, e)) - - @contextmanager - def _maybe_scrubbed_classpath(self): - if self._scrub_classpath: - classpath = os.getenv('CLASSPATH') - if classpath: - log.warn('Scrubbing CLASSPATH=%s' % classpath) - with environment_as(CLASSPATH=None): - yield - else: - yield diff --git a/src/python/twitter/pants/java/jar/__init__.py b/src/python/twitter/pants/java/jar/__init__.py deleted file mode 100644 index dc7f5176c..000000000 --- a/src/python/twitter/pants/java/jar/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import stat -import zipfile - -from contextlib import contextmanager - -from twitter.common.contextutil import open_zip - -from .manifest import Manifest - - -@contextmanager -def open_jar(path, *args, **kwargs): - """Yields a jar in a with context that will be closed when the context exits. - - The yielded jar is a zipfile.ZipFile object with an additional mkdirs(arcpath) method that will - create a zip directory entry similar to unix `mkdir -p`. Additionally, the ZipFile.write and - ZipFile.writestr methods are enhanced to call mkdirs as needed to ensure all jar entries contain - a full complement of parent paths leading from each leaf to the root of the jar. - """ - - with open_zip(path, *args, **kwargs) as jar: - real_write = jar.write - real_writestr = jar.writestr - - made_dirs = set() - - def mkdirs(arcpath): - if arcpath and arcpath not in made_dirs: - made_dirs.add(arcpath) - - parent_path = os.path.dirname(arcpath) - mkdirs(parent_path) - - zipinfo = zipfile.ZipInfo(arcpath if arcpath.endswith('/') else arcpath + '/') - - # We store directories without compression since they have no contents and - # attempts to store them with compression lead to corrupted zip files as such: - # $ unzip -t junit-runner-0.0.19.jar - # Archive: junit-runner-0.0.19.jar - # testing: com/ - # error: invalid compressed data to inflate - # testing: com/twitter/ - # error: invalid compressed data to inflate - # testing: com/twitter/common/ - # error: invalid compressed data to inflate - # testing: com/twitter/common/testing/ - # error: invalid compressed data to inflate - # testing: com/twitter/common/testing/runner/ - # error: invalid compressed data to inflate - # testing: com/twitter/common/testing/runner/StreamSource.class OK - zipinfo.compress_type = zipfile.ZIP_STORED - - # PKZIP says external_attr is a 4 byte field that is host system dependant: - # http://www.pkware.com/documents/casestudies/APPNOTE.TXT - # These notes do mention the low order byte will carry DOS file attributes for DOS host - # system zips. The DOS file attribute bits are described here: - # http://www.xxcopy.com/xxcopy06.htm - # - # More details are only found reading source, for example in BSD: - # ftp://ftp-archive.freebsd.org/pub/FreeBSD-Archive/old-releases/i386/1.0-RELEASE/ports/info-zip/zipinfo/zipinfo.c - # These sources reveal the 2 high order bytes contain unix file attribute bits. - # - # In summary though the full 32 bit field layout is: - # TTTTsstrwxrwxrwx0000000000ADVSHR - # ^^^^____________________________ stat.h file type: S_IFXXX - # ^^^_________________________ setuid, setgid, sticky - # ^^^^^^^^^________________ permissions - # ^^^^^^^^________ ??? 
- # ^^^^^^^^ DOS attribute bits - - # Setup unix directory perm bits: drwxr-xr-x - zipinfo.external_attr = ( - stat.S_IFDIR # file type dir - | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR # u+rwx - | stat.S_IRGRP | stat.S_IXGRP # g+rx - | stat.S_IROTH | stat.S_IXOTH # o+rx - ) << 16 - - # Add DOS directory bit - zipinfo.external_attr |= 0x10 - - real_writestr(zipinfo, '') - - def write(path, arcname=None, **kwargs): - if os.path.isdir(path): - mkdirs(arcname or path) - else: - mkdirs(os.path.dirname(arcname or path)) - real_write(path, arcname, **kwargs) - - def writestr(zinfo_or_arcname, *args, **kwargs): - mkdirs(os.path.dirname(zinfo_or_arcname)) - real_writestr(zinfo_or_arcname, *args, **kwargs) - - jar.mkdirs = mkdirs - jar.write = write - jar.writestr = writestr - - yield jar - - -__all__ = ( - 'open_jar', - 'Manifest', -) diff --git a/src/python/twitter/pants/java/jar/manifest.py b/src/python/twitter/pants/java/jar/manifest.py deleted file mode 100644 index 6a05abbbd..000000000 --- a/src/python/twitter/pants/java/jar/manifest.py +++ /dev/null @@ -1,57 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from contextlib import closing - -from twitter.common.lang import Compatibility -StringIO = Compatibility.StringIO - - -class Manifest(object): - """ - Implements the basics of the jar manifest specification. - - See: http://docs.oracle.com/javase/1.5.0/docs/guide/jar/jar.html#Manifest Specification - """ - @staticmethod - def _wrap(text): - with closing(StringIO(text)) as fp: - yield fp.read(70) - while True: - chunk = fp.read(69) - if not chunk: - return - yield ' %s' % chunk - - PATH = 'META-INF/MANIFEST.MF' - - MANIFEST_VERSION = 'Manifest-Version' - CREATED_BY = 'Created-By' - MAIN_CLASS = 'Main-Class' - CLASS_PATH = 'Class-Path' - - def __init__(self, contents=''): - self._contents = contents.strip() - - def addentry(self, header, value): - if len(header) > 68: - raise ValueError('Header name must be 68 characters or less, given %s' % header) - if self._contents: - self._contents += '\n' - self._contents += '\n'.join(self._wrap('%s: %s' % (header, value))) - - def contents(self): - return self._contents + '\n' diff --git a/src/python/twitter/pants/java/nailgun_client.py b/src/python/twitter/pants/java/nailgun_client.py deleted file mode 100644 index da73fe649..000000000 --- a/src/python/twitter/pants/java/nailgun_client.py +++ /dev/null @@ -1,210 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. 
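`Manifest._wrap` above enforces the jar spec's 72-byte line limit conservatively: the first physical line carries 70 characters of the entry and every continuation line is a space followed by at most 69 more. The same arithmetic as a standalone function (illustrative name; the original streams through a StringIO):

``` python
def wrap_manifest_entry(text):
  """Wraps a 'Name: value' manifest entry the way Manifest._wrap does."""
  lines = [text[:70]]
  rest = text[70:]
  while rest:
    lines.append(' ' + rest[:69])  # continuation lines begin with a space
    rest = rest[69:]
  return lines


entry = 'Class-Path: ' + ' '.join('lib/dep-%d.jar' % i for i in range(10))
assert all(len(line) <= 70 for line in wrap_manifest_entry(entry))
assert wrap_manifest_entry(entry)[1].startswith(' ')
```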
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import select -import socket -import struct -import sys -import threading - -from functools import partial - - -class NailgunSession(object): - """Handles a single nailgun command session.""" - - class ProtocolError(Exception): - """Thrown if there is an error in the underlying nailgun protocol.""" - - # See: http://www.martiansoftware.com/nailgun/protocol.html - HEADER_FMT = '>Ic' - HEADER_LENGTH = 5 - - BUFF_SIZE = 8096 - - @classmethod - def _send_chunk(cls, sock, command, payload=''): - header = struct.pack(cls.HEADER_FMT, len(payload), command) - sock.sendall(header + payload) - - def __init__(self, sock, ins, out, err): - self._sock = sock - self._send_chunk = partial(self._send_chunk, sock) - self._input_reader = self._InputReader(ins, self._sock, self.BUFF_SIZE) if ins else None - self._out = out - self._err = err - - class _InputReader(threading.Thread): - def __init__(self, ins, sock, buff_size): - threading.Thread.__init__(self) - self.daemon = True - self._ins = ins - self._sock = sock - self._buff_size = buff_size - self._send_chunk = partial(NailgunSession._send_chunk, sock) - self._stopping = threading.Event() - - def run(self): - while self._should_run(): - readable, _, errored = select.select([self._ins], [], [self._ins]) - if self._ins in errored: - self.stop() - if self._should_run() and self._ins in readable: - data = os.read(self._ins.fileno(), self._buff_size) - if self._should_run(): - if data: - self._send_chunk('0', data) - else: - self._send_chunk('.') - try: - self._sock.shutdown(socket.SHUT_WR) - except socket.error: - # Can happen if response is quick - pass - self.stop() - - def stop(self): - self._stopping.set() - - def _should_run(self): - return not self._stopping.is_set() - - def execute(self, work_dir, main_class, *args, **environment): - for arg in args: - self._send_chunk('A', arg) - for k, v in environment.items(): - self._send_chunk('E', '%s=%s' % (k, v)) - self._send_chunk('D', work_dir) - self._send_chunk('C', main_class) - - if self._input_reader: - self._input_reader.start() - try: - return self._read_response() - finally: - if self._input_reader: - self._input_reader.stop() - - def _read_response(self): - buff = '' - while True: - command, payload, buff = self._read_chunk(buff) - if command == '1': - self._out.write(payload) - self._out.flush() - elif command == '2': - self._err.write(payload) - self._err.flush() - elif command == 'X': - self._out.flush() - self._err.flush() - return int(payload) - else: - raise self.ProtocolError('Received unexpected chunk %s -> %s' % (command, payload)) - - def _read_chunk(self, buff): - while len(buff) < self.HEADER_LENGTH: - buff += self._sock.recv(self.BUFF_SIZE) - - payload_length, command = struct.unpack(self.HEADER_FMT, 
buff[:self.HEADER_LENGTH]) - buff = buff[self.HEADER_LENGTH:] - while len(buff) < payload_length: - buff += self._sock.recv(self.BUFF_SIZE) - - payload = buff[:payload_length] - rest = buff[payload_length:] - return command, payload, rest - - -class NailgunClient(object): - """A client for the nailgun protocol that allows execution of java binaries within a resident vm. - """ - - class NailgunError(Exception): - """Indicates an error connecting to or interacting with a nailgun server.""" - - DEFAULT_NG_HOST = 'localhost' - DEFAULT_NG_PORT = 2113 - - # For backwards compatibility with nails expecting the ng c client special env vars. - ENV_DEFAULTS = dict( - NAILGUN_FILESEPARATOR = os.sep, - NAILGUN_PATHSEPARATOR = os.pathsep - ) - - def __init__(self, - host=DEFAULT_NG_HOST, - port=DEFAULT_NG_PORT, - ins=sys.stdin, - out=sys.stdout, - err=sys.stderr, - work_dir=None): - """Creates a nailgun client that can be used to issue zero or more nailgun commands. - - :param string host: the nailgun server to contact (defaults to localhost) - :param int port: the port the nailgun server is listening on (defaults to the default nailgun - port: 2113) - :param file ins: a file to read command standard input from (defaults to stdin) - can be None - in which case no input is read - :param file out: a stream to write command standard output to (defaults to stdout) - :param file err: a stream to write command standard error to (defaults to stderr) - :param string work_dir: the working directory for all nailgun commands (defaults to PWD) - """ - self._host = host - self._port = port - self._ins = ins - self._out = out - self._err = err - self._work_dir = work_dir or os.path.abspath(os.path.curdir) - - self.execute = self.__call__ - - def try_connect(self): - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - return sock if sock.connect_ex((self._host, self._port)) == 0 else None - - def __call__(self, main_class, *args, **environment): - """Executes the given main_class with any supplied args in the given environment. - - :param string main_class: the fully qualified class name of the main entrypoint - :param list args: any arguments to pass to the main entrypoint - :param dict environment: an environment mapping made available to native nails via the nail - context - - Returns the exit code of the main_class. 
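Everything in the session above travels as nailgun "chunks": a 5-byte big-endian header (a 4-byte payload length plus a 1-byte command code) followed by the payload, exactly the `'>Ic'` struct format used by `_send_chunk` and `_read_chunk`. A round-trip sketch of that framing (bytes literals are a Python 3 adjustment to the Python 2 original):

``` python
import struct

HEADER_FMT = '>Ic'  # 4-byte big-endian payload length + 1-byte command code
HEADER_LENGTH = 5

def pack_chunk(command, payload=b''):
  return struct.pack(HEADER_FMT, len(payload), command) + payload

def unpack_chunk(buff):
  length, command = struct.unpack(HEADER_FMT, buff[:HEADER_LENGTH])
  end = HEADER_LENGTH + length
  return command, buff[HEADER_LENGTH:end], buff[end:]


chunk = pack_chunk(b'A', b'--verbose')  # 'A' carries one command-line argument
command, payload, rest = unpack_chunk(chunk)
assert (command, payload, rest) == (b'A', b'--verbose', b'')
```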
- """ - environment = dict(self.ENV_DEFAULTS.items() + environment.items()) - - sock = self.try_connect() - if not sock: - raise self.NailgunError('Problem connecting to nailgun server' - ' %s:%d' % (self._host, self._port)) - - session = NailgunSession(sock, self._ins, self._out, self._err) - try: - return session.execute(self._work_dir, main_class, *args, **environment) - except socket.error as e: - raise self.NailgunError('Problem contacting nailgun server %s:%d:' - ' %s' % (self._host, self._port, e)) - except session.ProtocolError as e: - raise self.NailgunError('Problem executing the nailgun protocol with nailgun server %s:%s:' - ' %s' % (self._host, self._port, e)) - finally: - sock.close() - - def __repr__(self): - return 'NailgunClient(host=%r, port=%r, work_dir=%r)' % (self._host, self._port, self._work_dir) diff --git a/src/python/twitter/pants/java/nailgun_executor.py b/src/python/twitter/pants/java/nailgun_executor.py deleted file mode 100644 index db039dd1f..000000000 --- a/src/python/twitter/pants/java/nailgun_executor.py +++ /dev/null @@ -1,315 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from collections import namedtuple - -import hashlib -import os -import re -import sys -import time - -# TODO: Once we integrate standard logging into our reporting framework, we can consider making -# some of the log.debug() below into log.info(). Right now it just looks wrong on the console. -from twitter.common import log -from twitter.common.collections import maybe_list -from twitter.common.dirutil import safe_open -from twitter.common.lang import Compatibility -from twitter.pants.base.build_environment import get_buildroot - -from .executor import Executor, SubprocessExecutor -from .nailgun_client import NailgunClient - - -class NailgunExecutor(Executor): - """Executes java programs by launching them in nailgun server. - - If a nailgun is not available for a given set of jvm args and classpath, one is launched and - re-used for the given jvm args and classpath on subsequent runs. 
- """ - - class Endpoint(namedtuple('Endpoint', ['fingerprint', 'pid', 'port'])): - """The coordinates for a nailgun server controlled by NailgunExecutor.""" - - @classmethod - def parse(cls, endpoint): - """Parses an endpoint from a string of the form fingerprint:pid:port""" - components = endpoint.split(':') - if len(components) != 3: - raise ValueError('Invalid endpoint spec %s' % endpoint) - fingerprint, pid, port = components - return cls(fingerprint, int(pid), int(port)) - - # Used to identify we own a given java nailgun server - _PANTS_NG_ARG_PREFIX = '-Dpants.buildroot' - _PANTS_NG_ARG = '%s=%s' % (_PANTS_NG_ARG_PREFIX, get_buildroot()) - - _PANTS_FINGERPRINT_ARG_PREFIX = '-Dpants.nailgun.fingerprint=' - - @staticmethod - def _check_pid(pid): - try: - os.kill(pid, 0) - return True - except OSError: - return False - - @staticmethod - def create_owner_arg(workdir): - # Currently the owner is identified via the full path to the workdir. - return '-Dpants.nailgun.owner=%s' % workdir - - @classmethod - def _create_fingerprint_arg(cls, fingerprint): - return cls._PANTS_FINGERPRINT_ARG_PREFIX + fingerprint - - @classmethod - def parse_fingerprint_arg(cls, args): - for arg in args: - components = arg.split(cls._PANTS_FINGERPRINT_ARG_PREFIX) - if len(components) == 2 and components[0] == '': - return components[1] - return None - - @staticmethod - def _fingerprint(jvm_args, classpath): - digest = hashlib.sha1() - digest.update(''.join(sorted(jvm_args))) - digest.update(''.join(sorted(classpath))) # TODO(John Sirois): hash classpath contents? - return digest.hexdigest() - - @staticmethod - def _log_kill(pid, port=None, logger=None): - logger = logger or log.info - logger('killing ng server @ pid:%d%s' % (pid, ' port:%d' % port if port else '')) - - def __init__(self, workdir, nailgun_classpath, distribution=None, ins=None): - super(NailgunExecutor, self).__init__(distribution=distribution) - - self._nailgun_classpath = maybe_list(nailgun_classpath) - - if not isinstance(workdir, Compatibility.string): - raise ValueError('Workdir must be a path string, given %s' % workdir) - - self._workdir = workdir - - self._ng_out = os.path.join(workdir, 'stdout') - self._ng_err = os.path.join(workdir, 'stderr') - - self._ins = ins - - def _runner(self, classpath, main, jvm_options, args): - command = self._create_command(classpath, main, jvm_options, args) - - class Runner(self.Runner): - @property - def executor(this): - return self - - @property - def cmd(this): - return ' '.join(command) - - def run(this, stdout=sys.stdout, stderr=sys.stderr): - nailgun = self._get_nailgun_client(jvm_options, classpath, stdout, stderr) - try: - log.debug('Executing via %s: %s' % (nailgun, this.cmd)) - return nailgun(main, *args) - except nailgun.NailgunError as e: - self.kill() - raise self.Error('Problem launching via %s command %s %s: %s' - % (nailgun, main, ' '.join(args), e)) - - return Runner() - - def kill(self): - """Kills the nailgun server owned by this executor if its currently running.""" - - endpoint = self._get_nailgun_endpoint() - if endpoint: - self._log_kill(endpoint.pid, endpoint.port) - try: - os.kill(endpoint.pid, 9) - except OSError: - pass - - def _get_nailgun_endpoint(self): - if self._find: - endpoint = self._find(self._workdir) - if endpoint: - log.debug('Found ng server with fingerprint %s @ pid:%d port:%d' % endpoint) - return endpoint - else: - return None - - def _get_nailgun_client(self, jvm_args, classpath, stdout, stderr): - classpath = self._nailgun_classpath + classpath - 
new_fingerprint = self._fingerprint(jvm_args, classpath) - - endpoint = self._get_nailgun_endpoint() - running = endpoint and self._check_pid(endpoint.pid) - updated = endpoint and endpoint.fingerprint != new_fingerprint - if running and not updated: - return self._create_ngclient(endpoint.port, stdout, stderr) - else: - if running and updated: - log.debug('Killing ng server with fingerprint %s @ pid:%d port:%d' % endpoint) - self.kill() - return self._spawn_nailgun_server(new_fingerprint, jvm_args, classpath, stdout, stderr) - - # 'NGServer started on 127.0.0.1, port 53785.' - _PARSE_NG_PORT = re.compile('.*\s+port\s+(\d+)\.$') - - def _parse_nailgun_port(self, line): - match = self._PARSE_NG_PORT.match(line) - if not match: - raise NailgunClient.NailgunError('Failed to determine spawned ng port from response' - ' line: %s' % line) - return int(match.group(1)) - - def _await_nailgun_server(self, stdout, stderr): - nailgun_timeout_seconds = 5 - max_socket_connect_attempts = 10 - nailgun = None - port_parse_start = time.time() - with safe_open(self._ng_out, 'r') as ng_out: - while not nailgun: - started = ng_out.readline() - if started: - port = self._parse_nailgun_port(started) - nailgun = self._create_ngclient(port, stdout, stderr) - log.debug('Detected ng server up on port %d' % port) - elif time.time() - port_parse_start > nailgun_timeout_seconds: - raise NailgunClient.NailgunError('Failed to read ng output after' - ' %s seconds' % nailgun_timeout_seconds) - - attempt = 0 - while nailgun: - sock = nailgun.try_connect() - if sock: - sock.close() - endpoint = self._get_nailgun_endpoint() - if endpoint: - log.debug('Connected to ng server with fingerprint %s pid: %d @ port: %d' % endpoint) - else: - raise NailgunClient.NailgunError('Failed to connect to ng server.') - return nailgun - elif attempt > max_socket_connect_attempts: - raise nailgun.NailgunError('Failed to connect to ng server after %d connect attempts' - % max_socket_connect_attempts) - attempt += 1 - log.debug('Failed to connect on attempt %d' % attempt) - time.sleep(0.1) - - def _create_ngclient(self, port, stdout, stderr): - return NailgunClient(port=port, ins=self._ins, out=stdout, err=stderr, work_dir=get_buildroot()) - - def _spawn_nailgun_server(self, fingerprint, jvm_args, classpath, stdout, stderr): - log.debug('No ng server found with fingerprint %s, spawning...' % fingerprint) - - with safe_open(self._ng_out, 'w'): - pass # truncate - - pid = os.fork() - if pid != 0: - # In the parent - block on ng being up for connections - return self._await_nailgun_server(stdout, stderr) - - os.setsid() - in_fd = open('/dev/null', 'r') - out_fd = safe_open(self._ng_out, 'w') - err_fd = safe_open(self._ng_err, 'w') - - java = SubprocessExecutor(self._distribution) - - jvm_args = jvm_args + [self._PANTS_NG_ARG, - self.create_owner_arg(self._workdir), - self._create_fingerprint_arg(fingerprint)] - - process = java.spawn(classpath=classpath, - main='com.martiansoftware.nailgun.NGServer', - jvm_options=jvm_args, - args=[':0'], - stdin=in_fd, - stdout=out_fd, - stderr=err_fd, - close_fds=True, - cwd=get_buildroot()) - - log.debug('Spawned ng server with fingerprint %s @ %d' % (fingerprint, process.pid)) - # Prevents finally blocks and atexit handlers from being executed, unlike sys.exit(). We - # don't want to execute finally blocks because we might, e.g., clean up tempfiles that the - # parent still needs.
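For reference, the port-scraping regex defined above really does pull the port out of the sample line quoted in the comment; a quick self-contained check:

import re

_PARSE_NG_PORT = re.compile(r'.*\s+port\s+(\d+)\.$')

line = 'NGServer started on 127.0.0.1, port 53785.'
match = _PARSE_NG_PORT.match(line)
assert match is not None
assert int(match.group(1)) == 53785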
- os._exit(0) - - def __str__(self): - return 'NailgunExecutor(%s, server=%s)' % (self._distribution, self._get_nailgun_endpoint()) - - -# TODO(jsirois): Make psutil and other deps available in dev mode, so we don't need such tricks. -try: - import psutil - - def _find_ngs(everywhere=False): - def cmdline_matches(cmdline): - if everywhere: - return any(filter(lambda arg: arg.startswith(NailgunExecutor._PANTS_NG_ARG_PREFIX), cmdline)) - else: - return NailgunExecutor._PANTS_NG_ARG in cmdline - - for proc in psutil.process_iter(): - try: - if 'java' == proc.name and cmdline_matches(proc.cmdline): - yield proc - except (psutil.AccessDenied, psutil.NoSuchProcess): - pass - - def killall(logger=None, everywhere=False): - success = True - for proc in _find_ngs(everywhere=everywhere): - try: - NailgunExecutor._log_kill(proc.pid, logger=logger) - proc.kill() - except (psutil.AccessDenied, psutil.NoSuchProcess): - success = False - return success - - NailgunExecutor.killall = staticmethod(killall) - - def _find_ng_listen_port(proc): - for connection in proc.get_connections(kind='tcp'): - if connection.status == 'LISTEN': - host, port = connection.laddr - return port - return None - - def _find(workdir): - owner_arg = NailgunExecutor.create_owner_arg(workdir) - for proc in _find_ngs(everywhere=False): - try: - if owner_arg in proc.cmdline: - fingerprint = NailgunExecutor.parse_fingerprint_arg(proc.cmdline) - port = _find_ng_listen_port(proc) - if fingerprint and port: - return NailgunExecutor.Endpoint(fingerprint, proc.pid, port) - except (psutil.AccessDenied, psutil.NoSuchProcess): - pass - return None - - NailgunExecutor._find = staticmethod(_find) -except ImportError: - NailgunExecutor.killall = None - NailgunExecutor._find = None diff --git a/src/python/twitter/pants/java/util.py b/src/python/twitter/pants/java/util.py deleted file mode 100644 index d9fd3ff4d..000000000 --- a/src/python/twitter/pants/java/util.py +++ /dev/null @@ -1,83 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.workunit import WorkUnit - -from .executor import Executor, SubprocessExecutor -from .nailgun_executor import NailgunExecutor - - -def execute_java(classpath, main, jvm_options=None, args=None, executor=None, - workunit_factory=None, workunit_name=None, workunit_labels=None): - """Executes the java program defined by the classpath and main. - - If `workunit_factory` is supplied, does so in the context of a workunit. 
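Stepping back to the psutil block above — it is what lets pants find and kill only its own nailguns, by scanning java processes for the -Dpants.buildroot marker arg. A minimal sketch of the same discovery idea against the modern psutil API, where name and cmdline are methods rather than the attributes the deleted code used:

import psutil

PANTS_NG_ARG_PREFIX = '-Dpants.buildroot'

def find_pants_nailguns():
  # Yield every java process whose command line carries the pants marker arg.
  for proc in psutil.process_iter():
    try:
      if proc.name() == 'java' and any(
          arg.startswith(PANTS_NG_ARG_PREFIX) for arg in proc.cmdline()):
        yield proc
    except (psutil.AccessDenied, psutil.NoSuchProcess):
      pass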
- - :param list classpath: the classpath for the java program - :param string main: the fully qualified class name of the java program's entry point - :param list jvm_options: an optional sequence of options for the underlying jvm - :param list args: an optional sequence of args to pass to the java program - :param executor: an optional java executor to use to launch the program; defaults to a subprocess - spawn of the default java distribution - :param workunit_factory: an optional callable that can produce a workunit context - :param string workunit_name: an optional name for the work unit; defaults to the main - :param list workunit_labels: an optional sequence of labels for the work unit - - Returns the exit code of the java program. - Raises `twitter.pants.java.Executor.Error` if there was a problem launching java itself. - """ - executor = executor or SubprocessExecutor() - if not isinstance(executor, Executor): - raise ValueError('The executor argument must be a java Executor instance, given %s of type %s' - % (executor, type(executor))) - - runner = executor.runner(classpath, main, args=args, jvm_options=jvm_options) - workunit_name = workunit_name or main - return execute_runner(runner, - workunit_factory=workunit_factory, - workunit_name=workunit_name, - workunit_labels=workunit_labels) - - -def execute_runner(runner, workunit_factory=None, workunit_name=None, workunit_labels=None): - """Executes the given java runner. - - If `workunit_factory` is supplied, does so in the context of a workunit. - - :param runner: the java runner to run - :param workunit_factory: an optional callable that can produce a workunit context - :param string workunit_name: an optional name for the work unit; defaults to the main - :param list workunit_labels: an optional sequence of labels for the work unit - - Returns the exit code of the java runner. - Raises `twitter.pants.java.Executor.Error` if there was a problem launching java itself. - """ - if not isinstance(runner, Executor.Runner): - raise ValueError('The runner argument must be a java Executor.Runner instance, ' - 'given %s of type %s' % (runner, type(runner))) - - if workunit_factory is None: - return runner.run() - else: - workunit_labels = [ - WorkUnit.TOOL, - WorkUnit.NAILGUN if isinstance(runner.executor, NailgunExecutor) else WorkUnit.JVM - ] + (workunit_labels or []) - - with workunit_factory(name=workunit_name, labels=workunit_labels, cmd=runner.cmd) as workunit: - ret = runner.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr')) - workunit.set_outcome(WorkUnit.FAILURE if ret else WorkUnit.SUCCESS) - return ret diff --git a/src/python/twitter/pants/net/__init__.py b/src/python/twitter/pants/net/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/src/python/twitter/pants/net/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License.
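A hypothetical invocation of the execute_java helper above; the jar, main class, flags and workunit factory are all illustration values, not anything shipped in this diff:

exit_code = execute_java(
  classpath=['lib/some-tool.jar'],        # hypothetical tool jar
  main='com.example.tool.Main',           # hypothetical entry point
  jvm_options=['-Xmx256m'],
  args=['--verbose'],
  workunit_factory=context.new_workunit,  # assumes a pants task context in scope
  workunit_name='some-tool')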
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/src/python/twitter/pants/net/http/__init__.py b/src/python/twitter/pants/net/http/__init__.py deleted file mode 100644 index 10c9a1ec2..000000000 --- a/src/python/twitter/pants/net/http/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from .fetcher import Fetcher - -__all__ = ( - 'Fetcher', -) diff --git a/src/python/twitter/pants/net/http/fetcher.py b/src/python/twitter/pants/net/http/fetcher.py deleted file mode 100644 index a49267349..000000000 --- a/src/python/twitter/pants/net/http/fetcher.py +++ /dev/null @@ -1,292 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ================================================================================================== - -from contextlib import closing, contextmanager - -import hashlib -import os -import requests -import sys -import tempfile -import time - -from twitter.common.dirutil import safe_open -from twitter.common.lang import Compatibility -from twitter.common.quantity import Amount, Data, Time - - -# TODO(John Sirois): Consider lifting this to twitter.common.http and consolidating with, for -# example, twitter.common.python.http.Http -class Fetcher(object): - """A streaming URL fetcher that supports listeners.""" - - class Error(Exception): - """Indicates an error fetching a URL.""" - - class TransientError(Error): - """Indicates a fetch error for an operation that may reasonably be retried. - - For example, a connection error or fetch timeout are both considered transient. - """ - - class PermanentError(Error): - """Indicates a fetch error that is likely permanent. - - Retrying operations that raise these errors is unlikely to succeed. For example, an HTTP 404 - response code is considered a permanent error. - """ - def __init__(self, value=None, response_code=None): - super(Fetcher.PermanentError, self).__init__(value) - if response_code and not isinstance(response_code, Compatibility.integer): - raise ValueError('response_code must be an integer, got %s' % response_code) - self._response_code = response_code - - @property - def response_code(self): - """The HTTP response code of the failed request. - - May be None if the request failed before receiving a server response. - """ - return self._response_code - - _TRANSIENT_EXCEPTION_TYPES = (requests.ConnectionError, requests.Timeout) - - class Listener(object): - """A listener callback interface for HTTP GET requests made by a Fetcher.""" - - def status(self, code, content_length=None): - """Called when the response headers are received before data starts streaming. - - :param int code: the HTTP response code - :param int content_length: the response Content-Length if known, otherwise None - """ - - def recv_chunk(self, data): - """Called as each chunk of data is received from the streaming response. - - :param data: a byte string containing the next chunk of response data - """ - - def finished(self): - """Called when the response has been fully read.""" - - def wrap(self, listener=None): - """Returns a Listener that wraps both the given listener and this listener, calling each in - turn for each callback method. - """ - if not listener: - return self - - class Wrapper(Fetcher.Listener): - def status(wrapper, code, content_length=None): - listener.status(code, content_length=content_length) - self.status(code, content_length=content_length) - - def recv_chunk(wrapper, data): - listener.recv_chunk(data) - self.recv_chunk(data) - - def finished(wrapper): - listener.finished() - self.finished() - - return Wrapper() - - class DownloadListener(Listener): - """A Listener that writes all received data to a file like object.""" - - def __init__(self, fh): - """Creates a DownloadListener that writes to the given open file handle. - - The file handle is not closed.
- - :param fh: a file handle open for writing - """ - if not fh or not hasattr(fh, 'write'): - raise ValueError('fh must be an open file handle, given %s' % fh) - self._fh = fh - - def recv_chunk(self, data): - self._fh.write(data) - - class ChecksumListener(Listener): - """A Listener that checksums the data received.""" - - def __init__(self, digest=None): - """Creates a ChecksumListener with the given hashlib digest or else an MD5 digest if none is - supplied. - - :param digest: the digest to use to checksum the received data, MD5 by default - """ - self.digest = digest or hashlib.md5() - self._checksum = None - - def recv_chunk(self, data): - self.digest.update(data) - - def finished(self): - self._checksum = self.digest.hexdigest() - - @property - def checksum(self): - """Returns the hex digest of the received data. - - It's not valid to access this property before the listener is finished. - - :rtype: string - :raises: ValueError if accessed before this listener is finished - """ - if self._checksum is None: - raise ValueError('The checksum cannot be accessed before this listener is finished.') - return self._checksum - - class ProgressListener(Listener): - """A Listener that logs progress to stdout.""" - - def __init__(self, width=None, chunk_size=None): - """Creates a ProgressListener that logs progress for known size items with a progress bar of - the given width in characters and otherwise logs a progress indicator every chunk_size. - - :param int width: the width of the progress bar for known size downloads, 50 by default - :param chunk_size: a Data Amount indicating the size of data chunks to note progress for, - 10 KB by default - """ - self._width = width or 50 - if not isinstance(self._width, Compatibility.integer): - raise ValueError('The width must be an integer, given %s' % self._width) - - self._chunk_size = chunk_size or Amount(10, Data.KB) - if not isinstance(self._chunk_size, Amount) or not isinstance(self._chunk_size.unit(), Data): - raise ValueError('The chunk_size must be a Data Amount, given %s' % self._chunk_size) - - self._start = time.time() - - def _convert(self, amount, to_unit): - return Amount(int(amount.as_(to_unit)), to_unit) - - def status(self, code, content_length=None): - self.size = content_length - - if content_length: - download_kb = int(Amount(content_length, Data.BYTES).as_(Data.KB)) - self.download_size = Amount(download_kb, Data.KB) - self.chunk = content_length / self._width - else: - self.chunk = self._chunk_size.as_(Data.BYTES) - - self.chunks = 0 - self.read = 0 - - def recv_chunk(self, data): - self.read += len(data) - chunk_count = self.read // self.chunk - if chunk_count > self.chunks: - self.chunks = chunk_count - if self.size: - sys.stdout.write('\r') - sys.stdout.write('%3d%% ' % ((self.read * 1.0 / self.size) * 100)) - sys.stdout.write('.' * self.chunks) - if self.size: - size_width = len(str(self.download_size)) - downloaded = self._convert(Amount(self.read, Data.BYTES), to_unit=Data.KB) - sys.stdout.write('%s %s' % (' ' * (self._width - self.chunks), - str(downloaded).rjust(size_width))) - sys.stdout.flush() - - def finished(self): - if self.chunks > 0: - sys.stdout.write(' %.3fs\n' % (time.time() - self._start)) - sys.stdout.flush() - - def __init__(self, requests_api=None): - """Creates a Fetcher that uses the given requests api object. - - By default uses the requests module, but can be any object conforming to the requests api like - a requests Session object.
- """ - self._requests = requests_api or requests - - def fetch(self, url, listener, chunk_size=None, timeout=None): - """Fetches data from the given URL notifying listener of all lifecycle events. - - :param string url: the url to GET data from - :param listener: the listener to notify of all download lifecycle events - :param chunk_size: the chunk size to use for buffering data, 10 KB by default - :param timeout: the maximum time to wait for data to be available, 1 second by default - :raises: Fetcher.Error if there was a problem fetching all data from the given url - """ - chunk_size = chunk_size or Amount(10, Data.KB) - if not isinstance(chunk_size, Amount) or not isinstance(chunk_size.unit(), Data): - raise ValueError('chunk_size must be a Data Amount, given %s' % chunk_size) - - timeout = timeout or Amount(1, Time.SECONDS) - if not isinstance(timeout, Amount) or not isinstance(timeout.unit(), Time): - raise ValueError('chunk_size must be a Time Amount, given %s' % timeout) - - if not isinstance(listener, self.Listener): - raise ValueError('listener must be a Listener instance, given %s' % listener) - - try: - with closing(self._requests.get(url, stream=True, timeout=timeout.as_(Time.SECONDS))) as resp: - if resp.status_code != requests.codes.ok: - listener.status(resp.status_code) - raise self.PermanentError('GET request to %s failed with status code %d' - % (url, resp.status_code), - response_code=resp.status_code) - - size = resp.headers.get('content-length') - listener.status(resp.status_code, content_length=int(size) if size else None) - - read_bytes = 0 - for data in resp.iter_content(chunk_size=int(chunk_size.as_(Data.BYTES))): - listener.recv_chunk(data) - read_bytes += len(data) - if size and read_bytes != int(size): - raise self.Error('Expected %s bytes, read %d' % (size, read_bytes)) - listener.finished() - except requests.exceptions.RequestException as e: - exception_factory = (self.TransientError if isinstance(e, self._TRANSIENT_EXCEPTION_TYPES) - else self.PermanentError) - raise exception_factory('Problem GETing data from %s: %s' % (url, e)) - - def download(self, url, listener=None, path_or_fd=None, chunk_size=None, timeout=None): - """Downloads data from the given URL. - - By default data is downloaded to a temporary file. - - :param string url: the url to GET data from - :param listener: an optional listener to notify of all download lifecycle events - :param path_or_fd: an optional file path or open file descriptor to write data to - :param chunk_size: the chunk size to use for buffering data - :param timeout: the maximum time to wait for data to be available - :returns: the path to the file data was downloaded to. - :raises: Fetcher.Error if there was a problem downloading all data from the given url. 
- """ - @contextmanager - def download_fp(_path_or_fd): - if _path_or_fd and not isinstance(_path_or_fd, Compatibility.string): - yield _path_or_fd, _path_or_fd.name - else: - if not _path_or_fd: - fd, _path_or_fd = tempfile.mkstemp() - os.close(fd) - with safe_open(_path_or_fd, 'w') as fp: - yield fp, _path_or_fd - - with download_fp(path_or_fd) as (fp, path): - listener = self.DownloadListener(fp).wrap(listener) - self.fetch(url, listener, chunk_size=chunk_size, timeout=timeout) - return path diff --git a/src/python/twitter/pants/pants_doc/__init__.py b/src/python/twitter/pants/pants_doc/__init__.py deleted file mode 100644 index 78afe4920..000000000 --- a/src/python/twitter/pants/pants_doc/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'Mark McBride' - -from twitter.pants.pants_doc.doc_builder import DocBuilder - -__all__ = [ - 'DocBuilder', -] diff --git a/src/python/twitter/pants/pants_doc/doc_builder.py b/src/python/twitter/pants/pants_doc/doc_builder.py deleted file mode 100644 index 128dffeb8..000000000 --- a/src/python/twitter/pants/pants_doc/doc_builder.py +++ /dev/null @@ -1,74 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from __future__ import print_function - -__author__ = 'Mark McBride' - -import os -import pkgutil -import shutil -import sys - -try: - import markdown - HAS_MARKDOWN = True - except ImportError: - HAS_MARKDOWN = False - - -from twitter.pants import is_doc # XXX This no longer exists -from twitter.pants.base.generator import Generator - -_TEMPLATE_BASEDIR = 'templates' - -class DocBuilder(object): - def __init__(self, root_dir): - self.root_dir = root_dir - - def build(self, targets, _): - template_path = os.path.join(_TEMPLATE_BASEDIR, 'doc.mustache') - template = pkgutil.get_data(__name__, template_path) - for target in targets: - assert is_doc(target), 'DocBuilder can only build DocTargets, given %s' % str(target) - base_dir = os.path.dirname(target.address.buildfile.full_path) - target_base = target.target_base - print('building doc for %s' % str(target)) - output_dir = os.path.normpath(os.path.join(self.root_dir, target.id)) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - for filename in target.sources: - if filename.endswith('md'): - if not HAS_MARKDOWN: - print('Missing markdown, cannot process %s' % filename, file=sys.stderr) - else: - print('processing %s' % filename) - html_filename = os.path.splitext(filename)[0] + '.html' - output_filename = os.path.join(output_dir, os.path.basename(html_filename)) - print('writing file to %s' % output_filename) - with open(output_filename, 'w') as output: - with open(os.path.join(target_base, filename), 'r') as md: - contents = md.read() - md_html = markdown.markdown(contents) - generator = Generator(template, root_dir = self.root_dir, text = md_html) - generator.write(output) - for filename in target.resources: - full_filepath = os.path.join(target_base, filename) - target_file = os.path.join(output_dir, os.path.relpath(full_filepath, base_dir)) - print('copying %s to %s' % (filename, target_file)) - if not os.path.exists(os.path.dirname(target_file)): - os.makedirs(os.path.dirname(target_file)) - shutil.copy(full_filepath, target_file) - return 0 diff --git a/src/python/twitter/pants/pants_doc/templates/doc.mustache b/src/python/twitter/pants/pants_doc/templates/doc.mustache deleted file mode 100644 index 83a0d13ae..000000000 --- a/src/python/twitter/pants/pants_doc/templates/doc.mustache +++ /dev/null @@ -1,4 +0,0 @@ - - -{{text}} - diff --git a/src/python/twitter/pants/process/__init__.py b/src/python/twitter/pants/process/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/src/python/twitter/pants/process/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ================================================================================================== diff --git a/src/python/twitter/pants/process/xargs.py b/src/python/twitter/pants/process/xargs.py deleted file mode 100644 index fc2e86115..000000000 --- a/src/python/twitter/pants/process/xargs.py +++ /dev/null @@ -1,66 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import subprocess -import errno - - -class Xargs(object): - """A subprocess execution wrapper in the spirit of the xargs command line tool. - - Specifically allows encapsulated commands to be passed very large argument lists by chunking up - the argument lists into a minimal set and then invoking the encapsulated command against each - chunk in turn. - """ - - @classmethod - def subprocess(cls, cmd, **kwargs): - """Creates an xargs engine that uses subprocess.call to execute the given cmd array with extra - arg chunks. - """ - def call(args): - return subprocess.call(cmd + args, **kwargs) - return cls(call) - - def __init__(self, cmd): - """Creates an xargs engine that calls cmd with argument chunks. - - :param cmd: A function that can execute a command line in the form of a list of strings - passed as its sole argument. - """ - self._cmd = cmd - - def _split_args(self, args): - half = len(args) // 2 - return args[:half], args[half:] - - def execute(self, args): - """Executes the configured cmd passing args in one or more rounds xargs style. - - :param list args: Extra arguments to pass to cmd. - """ - all_args = list(args) - try: - return self._cmd(all_args) - except OSError as e: - if errno.E2BIG == e.errno: - args1, args2 = self._split_args(all_args) - result = self.execute(args1) - if result != 0: - return result - return self.execute(args2) - else: - raise e diff --git a/src/python/twitter/pants/python/README.md b/src/python/twitter/pants/python/README.md deleted file mode 100644 index fcf7dd3c7..000000000 --- a/src/python/twitter/pants/python/README.md +++ /dev/null @@ -1 +0,0 @@ -[Pants documentation](http://pantsbuild.github.io/python-readme.html) has moved... \ No newline at end of file diff --git a/src/python/twitter/pants/python/__init__.py b/src/python/twitter/pants/python/__init__.py deleted file mode 100644 index 24684ba62..000000000 --- a/src/python/twitter/pants/python/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. 
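A quick hypothetical use of the Xargs wrapper above: when the wrapped command fails with E2BIG, execute() halves the argument list and recurses until each chunk fits the OS limit, so arbitrarily long lists work:

xargs = Xargs.subprocess(['echo'])
# Far more argument bytes than ARG_MAX typically allows in one command line:
exit_code = xargs.execute(['file%d.txt' % i for i in range(1000000)])
assert exit_code == 0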
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/src/python/twitter/pants/python/antlr_builder.py b/src/python/twitter/pants/python/antlr_builder.py deleted file mode 100644 index bb6a0837f..000000000 --- a/src/python/twitter/pants/python/antlr_builder.py +++ /dev/null @@ -1,72 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os -import sys - -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.ivy.bootstrapper import Bootstrapper -from twitter.pants.ivy.ivy import Ivy - -from .code_generator import CodeGenerator - - -class PythonAntlrBuilder(CodeGenerator): - """ - Antlr builder. - """ - def run_antlrs(self, output_dir): - args = [ - '-dependency', 'org.antlr', 'antlr', self.target.antlr_version, - '-types', 'jar', - '-main', 'org.antlr.Tool', - '--', '-fo', output_dir - ] - for source in self.target.sources: - abs_path = os.path.abspath(os.path.join(self.root, self.target.target_base, source)) - args.append(abs_path) - - try: - ivy = Bootstrapper.default_ivy() - ivy.execute(args=args) # TODO: Needs a workunit, when we have a context here. - return True - except (Bootstrapper.Error, Ivy.Error) as e: - print('ANTLR generation failed! %s' % e, file=sys.stderr) - return False - - def generate(self): - # Create the package structure. - path = self.sdist_root - - package = '' - for module_name in self.target.module.split('.'): - path = os.path.join(path, module_name) - if package == '': - package = module_name - else: - package = package + '.' + module_name - safe_mkdir(path) - with open(os.path.join(path, '__init__.py'), 'w') as f: - if package != self.target.module: # Only write this in the non-leaf modules. 
- f.write("__import__('pkg_resources').declare_namespace(__name__)") - self.created_namespace_packages.add(package) - self.created_packages.add(package) - - # autogenerate the python files that we bundle up - self.run_antlrs(path) diff --git a/src/python/twitter/pants/python/binary_builder.py b/src/python/twitter/pants/python/binary_builder.py deleted file mode 100644 index 5ebd4323b..000000000 --- a/src/python/twitter/pants/python/binary_builder.py +++ /dev/null @@ -1,69 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os -import tempfile -import time - -from twitter.common.python.interpreter import PythonInterpreter -from twitter.common.python.pex_builder import PEXBuilder - -from twitter.pants.base.config import Config -from twitter.pants.targets.python_binary import PythonBinary - -from .python_chroot import PythonChroot - - -class PythonBinaryBuilder(object): - class NotABinaryTargetException(Exception): - pass - - def __init__(self, target, root_dir, run_tracker, interpreter=None, conn_timeout=None): - self.target = target - self.interpreter = interpreter or PythonInterpreter.get() - if not isinstance(target, PythonBinary): - raise PythonBinaryBuilder.NotABinaryTargetException( - "Target %s is not a PythonBinary!" % target) - - config = Config.load() - self.distdir = config.getdefault('pants_distdir') - distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name) - - run_info = run_tracker.run_info - build_properties = {} - build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time())) - build_properties.update(run_info.add_scm_info()) - - pexinfo = target.pexinfo.copy() - pexinfo.build_properties = build_properties - builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter) - - self.chroot = PythonChroot( - target, - root_dir, - builder=builder, - interpreter=self.interpreter, - conn_timeout=conn_timeout) - - def run(self): - print('Building PythonBinary %s:' % self.target) - env = self.chroot.dump() - filename = os.path.join(self.distdir, '%s.pex' % self.target.name) - env.build(filename) - print('Wrote %s' % filename) - return 0 diff --git a/src/python/twitter/pants/python/code_generator.py b/src/python/twitter/pants/python/code_generator.py deleted file mode 100644 index 68b4614d9..000000000 --- a/src/python/twitter/pants/python/code_generator.py +++ /dev/null @@ -1,108 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. 
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os -import shutil -import tempfile -import textwrap - -from twitter.common.dirutil.chroot import RelativeChroot -from twitter.pants.python.sdist_builder import SdistBuilder - - -class CodeGenerator(object): - class Error(Exception): pass - class CodeGenerationException(Error): pass - - def __init__(self, target, root_dir, config, target_suffix=None): - self.target = target - self.config = config - self.suffix = target_suffix or '' - self.root = root_dir - distdir = self.config.getdefault('pants_distdir') - self.chroot = RelativeChroot(root_dir, distdir, target.name) - codegen_root = tempfile.mkdtemp(dir=self.chroot.path(), prefix='codegen.') - self.codegen_root = os.path.relpath(codegen_root, self.chroot.path()) - self.created_packages = set() - self.created_namespace_packages = set() - - def __del__(self): - self.cleanup() - - def cleanup(self): - shutil.rmtree(self.chroot.path()) - - @staticmethod - def path_to_module(path): - return path.replace(os.path.sep, '.') - - def package_name(self): - return '%s%s' % (self.target.id, self.suffix) - - def requirement_string(self): - return '%s==0.0.0' % self.package_name() - - @property - def package_dir(self): - """Return the code generation root.""" - return "." 
- - @property - def install_requires(self): - return [] - - def generate(self): - """Generate code for this target, updating the sets .created_packages and - .created_namespace_packages.""" - raise NotImplementedError - - def dump_setup_py(self): - boilerplate = textwrap.dedent(""" - from setuptools import setup - - setup(name = "%(package_name)s", - version = "0.0.0", - description = "autogenerated code for %(target_name)s", - install_requires = %(install_requires)r, - package_dir = { "": %(package_dir)r }, - packages = %(packages)s, - namespace_packages = %(namespace_packages)s) - """) - boilerplate = boilerplate % { - 'package_name': self.package_name(), - 'package_dir': self.package_dir, - 'target_name': self.target.name, - 'install_requires': self.install_requires, - 'packages': repr(self.created_packages), - 'namespace_packages': repr(list(self.created_namespace_packages)) - } - self.chroot.write(boilerplate.encode('utf8'), os.path.join(self.codegen_root, 'setup.py')) - self.chroot.write("include *.py".encode('utf8'), os.path.join(self.codegen_root, 'MANIFEST.in')) - - @property - def sdist_root(self): - return os.path.join(self.chroot.path(), self.codegen_root) - - @property - def package_root(self): - return os.path.join(self.sdist_root, self.package_dir) - - def build(self, interpreter=None): - self.generate() - self.dump_setup_py() - return SdistBuilder.build(self.sdist_root, self.target, interpreter=interpreter) diff --git a/src/python/twitter/pants/python/interpreter_cache.py b/src/python/twitter/pants/python/interpreter_cache.py deleted file mode 100644 index 612300616..000000000 --- a/src/python/twitter/pants/python/interpreter_cache.py +++ /dev/null @@ -1,194 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
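To make the dump_setup_py boilerplate above concrete, this is roughly what it renders for a hypothetical generated target with id 'gen.foo.bar', module 'foo.bar' and an eight-character key hash of 'deadbeef' (all illustration values; packages keeps the Python 2 set repr the template produces):

from setuptools import setup

setup(name = "gen.foo.bar-deadbeef",
      version = "0.0.0",
      description = "autogenerated code for foo.bar",
      install_requires = [],
      package_dir = { "": "." },
      packages = set(['foo', 'foo.bar']),
      namespace_packages = ['foo'])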
-# ================================================================================================== - -from __future__ import print_function - -import os -import shutil - -from twitter.common.dirutil import safe_mkdir -from twitter.common.python.package import EggPackage, SourcePackage -from twitter.common.python.installer import EggInstaller -from twitter.common.python.interpreter import ( - PythonCapability, - PythonIdentity, - PythonInterpreter, -) -from twitter.common.python.obtainer import Obtainer - -from .python_setup import PythonSetup -from .resolver import crawler_from_config, fetchers_from_config - -from pkg_resources import Requirement - - -# TODO(wickman) Create a safer version of this and add to twitter.common.dirutil -def safe_link(src, dst): - try: - os.unlink(dst) - except OSError: - pass - os.symlink(src, dst) - - -def resolve_interpreter(config, interpreter, requirement, logger=print): - """Given a :class:`PythonInterpreter` and :class:`Config`, and a requirement, - return an interpreter with the capability of resolving that requirement or - None if it's not possible to install a suitable requirement.""" - interpreter_cache = PythonInterpreterCache.cache_dir(config) - interpreter_dir = os.path.join(interpreter_cache, str(interpreter.identity)) - if interpreter.satisfies(PythonCapability([requirement])): - return interpreter - def installer_provider(sdist): - return EggInstaller(sdist, strict=requirement.key != 'setuptools', interpreter=interpreter) - egg = resolve_and_link( - config, - requirement, - os.path.join(interpreter_dir, requirement.key), - installer_provider, - logger=logger) - if egg: - return interpreter.with_extra(egg.name, egg.raw_version, egg.url) - else: - logger('Failed to resolve requirement %s for %s' % (requirement, interpreter)) - - -def resolve_and_link(config, requirement, target_link, installer_provider, logger=print): - if os.path.exists(target_link) and os.path.exists(os.path.realpath(target_link)): - egg = EggPackage(os.path.realpath(target_link)) - if egg.satisfies(requirement): - return egg - fetchers = fetchers_from_config(config) - crawler = crawler_from_config(config) - obtainer = Obtainer(crawler, fetchers, []) - obtainer_iterator = obtainer.iter(requirement) - links = [link for link in obtainer_iterator if isinstance(link, SourcePackage)] - for link in links: - logger(' fetching %s' % link.url) - sdist = link.fetch() - logger(' installing %s' % sdist) - installer = installer_provider(sdist) - dist_location = installer.bdist() - target_location = os.path.join(os.path.dirname(target_link), os.path.basename(dist_location)) - shutil.move(dist_location, target_location) - safe_link(target_location, target_link) - logger(' installed %s' % target_location) - return EggPackage(target_location) - - -# This is a setuptools <1 and >1 compatible version of Requirement.parse. -# For setuptools <1, if you did Requirement.parse('setuptools'), it would -# return 'distribute' which of course is not desirable for us. So they -# added a replacement=False keyword arg. Sadly, they removed this keyword -# arg in setuptools >= 1 so we have to simply failover using TypeError as a -# catch for 'Invalid Keyword Argument'. 
-def failsafe_parse(requirement): - try: - return Requirement.parse(requirement, replacement=False) - except TypeError: - return Requirement.parse(requirement) - - -def resolve(config, interpreter, logger=print): - """Resolve and cache an interpreter with a setuptools and wheel capability.""" - - setuptools_requirement = failsafe_parse( - 'setuptools==%s' % config.get('python-setup', 'setuptools_version', default='2.2')) - wheel_requirement = failsafe_parse( - 'wheel==%s' % config.get('python-setup', 'wheel_version', default='0.22.0')) - - interpreter = resolve_interpreter(config, interpreter, setuptools_requirement, logger=logger) - if interpreter: - return resolve_interpreter(config, interpreter, wheel_requirement, logger=logger) - - -class PythonInterpreterCache(object): - @staticmethod - def cache_dir(config): - return PythonSetup(config).scratch_dir('interpreter_cache', default_name='interpreters') - - @classmethod - def select_interpreter(cls, compatibilities, allow_multiple=False): - if allow_multiple: - return compatibilities - return [min(compatibilities)] if compatibilities else [] - - def __init__(self, config, logger=None): - self._path = self.cache_dir(config) - self._config = config - safe_mkdir(self._path) - self._interpreters = set() - self._logger = logger or (lambda msg: True) - - @property - def interpreters(self): - return self._interpreters - - def interpreter_from_path(self, path): - interpreter_dir = os.path.basename(path) - identity = PythonIdentity.from_path(interpreter_dir) - try: - executable = os.readlink(os.path.join(path, 'python')) - except OSError: - return None - interpreter = PythonInterpreter(executable, identity) - return resolve(self._config, interpreter, logger=self._logger) - - def setup_interpreter(self, interpreter): - interpreter_dir = os.path.join(self._path, str(interpreter.identity)) - safe_mkdir(interpreter_dir) - safe_link(interpreter.binary, os.path.join(interpreter_dir, 'python')) - return resolve(self._config, interpreter, logger=self._logger) - - def setup_cached(self): - for interpreter_dir in os.listdir(self._path): - path = os.path.join(self._path, interpreter_dir) - pi = self.interpreter_from_path(path) - if pi: - self._logger('Detected interpreter %s: %s' % (pi.binary, str(pi.identity))) - self._interpreters.add(pi) - - def setup_paths(self, paths): - for interpreter in PythonInterpreter.all(paths): - identity_str = str(interpreter.identity) - path = os.path.join(self._path, identity_str) - pi = self.interpreter_from_path(path) - if pi is None: - self.setup_interpreter(interpreter) - pi = self.interpreter_from_path(path) - if pi is None: - continue - self._interpreters.add(pi) - - def matches(self, filters): - for interpreter in self._interpreters: - if any(interpreter.identity.matches(filt) for filt in filters): - yield interpreter - - def setup(self, paths=(), force=False, filters=('',)): - has_setup = False - setup_paths = paths or os.getenv('PATH').split(os.pathsep) - self.setup_cached() - if force: - has_setup = True - self.setup_paths(setup_paths) - matches = list(self.matches(filters)) - if len(matches) == 0 and not has_setup: - self.setup_paths(setup_paths) - matches = list(self.matches(filters)) - if len(matches) == 0: - self._logger('Found no valid interpreters!') - return matches diff --git a/src/python/twitter/pants/python/python_builder.py b/src/python/twitter/pants/python/python_builder.py deleted file mode 100644 index 6e7676ea2..000000000 --- a/src/python/twitter/pants/python/python_builder.py +++ /dev/null @@ -1,65 
+0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.common.python.interpreter import PythonInterpreter - -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_tests import PythonTests, PythonTestSuite - -from .binary_builder import PythonBinaryBuilder -from .test_builder import PythonTestBuilder - - -class PythonBuilder(object): - def __init__(self, run_tracker, root_dir): - self._root_dir = root_dir - self._run_tracker = run_tracker - - def build(self, targets, args, interpreter=None, conn_timeout=None): - test_targets = [] - binary_targets = [] - interpreter = interpreter or PythonInterpreter.get() - - for target in targets: - assert target.is_python, "PythonBuilder can only build PythonTargets, given %s" % str(target) - - # PythonBuilder supports PythonTests and PythonBinaries - for target in targets: - if isinstance(target, PythonTests) or isinstance(target, PythonTestSuite): - test_targets.append(target) - elif isinstance(target, PythonBinary): - binary_targets.append(target) - - rv = PythonTestBuilder( - test_targets, - args, - self._root_dir, - interpreter=interpreter, - conn_timeout=conn_timeout).run() - if rv != 0: - return rv - - for binary_target in binary_targets: - rv = PythonBinaryBuilder( - binary_target, - self._root_dir, - self._run_tracker, - interpreter=interpreter, - conn_timeout=conn_timeout).run() - if rv != 0: - return rv - - return 0 diff --git a/src/python/twitter/pants/python/python_chroot.py b/src/python/twitter/pants/python/python_chroot.py deleted file mode 100644 index 61ef88754..000000000 --- a/src/python/twitter/pants/python/python_chroot.py +++ /dev/null @@ -1,213 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
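A hypothetical driver for the PythonInterpreterCache above; the filter string is an illustration — matching is delegated to PythonIdentity.matches on each discovered interpreter, so the exact accepted syntax lives in twitter.common.python:

from __future__ import print_function

cache = PythonInterpreterCache(Config.load(), logger=print)
matches = cache.setup(filters=('CPython>=2.6,<3',))  # hypothetical filter
if matches:
  interpreter = cache.select_interpreter(matches)[0]
  print('Using %s (%s)' % (interpreter.binary, interpreter.identity))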
-# ================================================================================================== - -from __future__ import print_function - -from collections import defaultdict -import os -import random -import shutil -import sys -import tempfile - -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_mkdir, safe_rmtree -from twitter.common.python.interpreter import PythonInterpreter -from twitter.common.python.pex_builder import PEXBuilder -from twitter.common.python.platforms import Platform -from twitter.pants.base.build_invalidator import BuildInvalidator, CacheKeyGenerator -from twitter.pants.base.config import Config -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.targets.python_antlr_library import PythonAntlrLibrary -from twitter.pants.targets.python_binary import PythonBinary -from twitter.pants.targets.python_library import PythonLibrary -from twitter.pants.targets.python_requirement import PythonRequirement -from twitter.pants.targets.python_tests import PythonTests -from twitter.pants.targets.python_thrift_library import PythonThriftLibrary - -from .antlr_builder import PythonAntlrBuilder -from .python_setup import PythonSetup -from .resolver import resolve_multi -from .thrift_builder import PythonThriftBuilder - - -class PythonChroot(object): - _VALID_DEPENDENCIES = { - PythonLibrary: 'libraries', - PythonRequirement: 'reqs', - PythonBinary: 'binaries', - PythonThriftLibrary: 'thrifts', - PythonAntlrLibrary: 'antlrs', - PythonTests: 'tests' - } - - MEMOIZED_THRIFTS = {} - - class InvalidDependencyException(Exception): - def __init__(self, target): - Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target) - - def __init__(self, - target, - root_dir, - extra_targets=None, - builder=None, - platforms=None, - interpreter=None, - conn_timeout=None): - self._config = Config.load() - self._target = target - self._root = root_dir - self._platforms = platforms - self._interpreter = interpreter or PythonInterpreter.get() - self._extra_targets = list(extra_targets) if extra_targets is not None else [] - self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter) - - # Note: unrelated to the general pants artifact cache. 
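Downstream, the deleted PythonBinaryBuilder drives this class roughly as follows — a sketch with hypothetical target, path and workdir values:

chroot = PythonChroot(binary_target, root_dir,            # hypothetical target/root
                      builder=PEXBuilder('/tmp/app.build'),
                      interpreter=PythonInterpreter.get())
pex_builder = chroot.dump()   # copies sources/resources and resolves requirements
pex_builder.build('dist/app.pex')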
- self._egg_cache_root = os.path.join( - PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts'), - str(self._interpreter.identity)) - - self._key_generator = CacheKeyGenerator() - self._build_invalidator = BuildInvalidator( self._egg_cache_root) - - - def __del__(self): - if os.getenv('PANTS_LEAVE_CHROOT') is None: - safe_rmtree(self.path()) - else: - self.debug('Left chroot at %s' % self.path()) - - @property - def builder(self): - return self._builder - - def debug(self, msg, indent=0): - if os.getenv('PANTS_VERBOSE') is not None: - print('%s%s' % (' ' * indent, msg)) - - def path(self): - return self._builder.path() - - def _dump_library(self, library): - def copy_to_chroot(base, path, add_function): - src = os.path.join(self._root, base, path) - add_function(src, path) - - self.debug(' Dumping library: %s' % library) - for filename in library.sources: - copy_to_chroot(library.target_base, filename, self._builder.add_source) - for filename in library.resources: - copy_to_chroot(library.target_base, filename, self._builder.add_resource) - - def _dump_requirement(self, req, dynamic, repo): - self.debug(' Dumping requirement: %s%s%s' % (str(req), - ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else '')) - self._builder.add_requirement(req, dynamic, repo) - - def _dump_distribution(self, dist): - self.debug(' Dumping distribution: .../%s' % os.path.basename(dist.location)) - self._builder.add_distribution(dist) - - def _generate_requirement(self, library, builder_cls): - library_key = self._key_generator.key_for_target(library) - builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8]) - - cache_dir = os.path.join(self._egg_cache_root, library_key.id) - if self._build_invalidator.needs_update(library_key): - sdist = builder.build(interpreter=self._interpreter) - safe_mkdir(cache_dir) - shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist))) - self._build_invalidator.update(library_key) - - with ParseContext.temp(): - return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True) - - def _generate_thrift_requirement(self, library): - return self._generate_requirement(library, PythonThriftBuilder) - - def _generate_antlr_requirement(self, library): - return self._generate_requirement(library, PythonAntlrBuilder) - - def resolve(self, targets): - children = defaultdict(OrderedSet) - def add_dep(trg): - for target_type, target_key in self._VALID_DEPENDENCIES.items(): - if isinstance(trg, target_type): - children[target_key].add(trg) - return - raise self.InvalidDependencyException(trg) - for target in targets: - target.walk(add_dep) - return children - - def dump(self): - self.debug('Building PythonBinary %s:' % self._target) - - targets = self.resolve([self._target] + self._extra_targets) - - for lib in targets['libraries'] | targets['binaries']: - self._dump_library(lib) - - generated_reqs = OrderedSet() - if targets['thrifts']: - for thr in set(targets['thrifts']): - if thr not in self.MEMOIZED_THRIFTS: - self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr) - generated_reqs.add(self.MEMOIZED_THRIFTS[thr]) - with ParseContext.temp(): - # trick pants into letting us add this python requirement, otherwise we get - # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to - # PythonRequirement(thrift) - # - # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we - # should first detect if any explicit thrift requirements have 
diff --git a/src/python/twitter/pants/python/python_setup.py b/src/python/twitter/pants/python/python_setup.py
deleted file mode 100644
index 312ac2792..000000000
--- a/src/python/twitter/pants/python/python_setup.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# ==================================================================================================
-# Copyright 2014 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-
-class PythonSetup(object):
-  """A clearing house for configuration data needed by components setting up python environments."""
-
-  def __init__(self, config, section='python-setup'):
-    self._config = config
-    self._section = section
-
-  @property
-  def scratch_root(self):
-    """Returns the root scratch space for assembling python environments.
-
-    Components should probably carve out their own directory rooted here. See `scratch_dir`.
-    """
-    return self._config.get(
-      self._section,
-      'cache_root',
-      default=os.path.join(self._config.getdefault('pants_workdir'), 'python'))
-
-  def scratch_dir(self, key, default_name=None):
-    """Returns a named scratch dir.
-
-    By default this will be a child of the `scratch_root` with the same name as the key.
-
-    :param string key: The pants.ini config key this scratch dir can be overridden with.
-    :param default_name: A name to use instead of the key name for the scratch dir.
-
-    Users can override the location using the key in pants.ini.
-    """
-    return self._config.get(
-      self._section,
-      key,
-      default=os.path.join(self.scratch_root, default_name or key))
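PythonSetup centralizes where python build state lives on disk. The resolver and crawler code in the next file use it like this (`config` is an assumed, already-loaded pants Config object):

    from twitter.pants.python.python_setup import PythonSetup

    setup = PythonSetup(config)
    download_cache = setup.scratch_dir('download_cache', default_name='downloads')
    install_cache = setup.scratch_dir('install_cache', default_name='eggs')
    # Absent overrides these resolve to <pants_workdir>/python/downloads and
    # <pants_workdir>/python/eggs; a download_cache/install_cache (or cache_root)
    # key in the [python-setup] section of pants.ini relocates them.
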
- """ - return self._config.get( - self._section, - key, - default=os.path.join(self.scratch_root, default_name or key)) diff --git a/src/python/twitter/pants/python/resolver.py b/src/python/twitter/pants/python/resolver.py deleted file mode 100644 index 54404eefc..000000000 --- a/src/python/twitter/pants/python/resolver.py +++ /dev/null @@ -1,111 +0,0 @@ -from __future__ import print_function - -import os -import time - -from twitter.common.dirutil import touch -from twitter.common.python.base import requirement_is_exact -from twitter.common.python.fetcher import Fetcher, PyPIFetcher -from twitter.common.python.http import Crawler -from twitter.common.python.obtainer import Obtainer, CachingObtainer -from twitter.common.python.interpreter import PythonInterpreter -from twitter.common.python.package import distribution_compatible -from twitter.common.python.platforms import Platform -from twitter.common.python.resolver import resolve -from twitter.common.python.translator import ( - ChainedTranslator, - EggTranslator, - SourceTranslator, - Translator -) - -from .python_setup import PythonSetup - -from pkg_resources import ( - Environment, - WorkingSet, -) - - -def get_platforms(platform_list): - def translate(platform): - return Platform.current() if platform == 'current' else platform - return tuple(set(map(translate, platform_list))) - - -def fetchers_from_config(config): - fetchers = [] - fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', [])) - fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', [])) - return fetchers - - -def crawler_from_config(config, conn_timeout=None): - download_cache = PythonSetup(config).scratch_dir('download_cache', default_name='downloads') - return Crawler(cache=download_cache, conn_timeout=conn_timeout) - - -class PantsObtainer(CachingObtainer): - def iter(self, requirement): - if hasattr(requirement, 'repository') and requirement.repository: - obtainer = Obtainer( - crawler=self._crawler, - fetchers=[Fetcher([requirement.repository])], - translators=self._translator) - for package in obtainer.iter(requirement): - yield package - else: - for package in super(PantsObtainer, self).iter(requirement): - yield package - - -def resolve_multi(config, - requirements, - interpreter=None, - platforms=None, - conn_timeout=None, - ttl=3600): - """Multi-platform dependency resolution for PEX files. - - Given a pants configuration and a set of requirements, return a list of distributions - that must be included in order to satisfy them. That may involve distributions for - multiple platforms. - - :param config: Pants :class:`Config` object. - :param requirements: A list of :class:`PythonRequirement` objects to resolve. - :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved. - If None specified, defaults to current interpreter. - :param platforms: Optional list of platforms against requirements will be resolved. If - None specified, the defaults from `config` will be used. - :param conn_timeout: Optional connection timeout for any remote fetching. - :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g. - "flask>=0.2" if a matching distribution is available on disk. Defaults - to 3600. 
- """ - distributions = dict() - interpreter = interpreter or PythonInterpreter.get() - if not isinstance(interpreter, PythonInterpreter): - raise TypeError('Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter)) - - install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs') - platforms = get_platforms(platforms or config.getlist('python-setup', 'platforms', ['current'])) - - for platform in platforms: - translator = Translator.default( - install_cache=install_cache, - interpreter=interpreter, - platform=platform, - conn_timeout=conn_timeout) - - obtainer = PantsObtainer( - install_cache=install_cache, - crawler=crawler_from_config(config, conn_timeout=conn_timeout), - fetchers=fetchers_from_config(config) or [PyPIFetcher()], - translators=translator) - - distributions[platform] = resolve(requirements=requirements, - obtainer=obtainer, - interpreter=interpreter, - platform=platform) - - return distributions diff --git a/src/python/twitter/pants/python/sdist_builder.py b/src/python/twitter/pants/python/sdist_builder.py deleted file mode 100644 index f35f693b2..000000000 --- a/src/python/twitter/pants/python/sdist_builder.py +++ /dev/null @@ -1,41 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import glob -import os -import subprocess -import sys - -from twitter.common.contextutil import pushd -from twitter.common.python.installer import Packager - - -class SdistBuilder(object): - """A helper class to run setup.py projects.""" - - class Error(Exception): pass - class SetupError(Error): pass - - @classmethod - def build(cls, setup_root, target, interpreter=None): - packager = Packager(setup_root, interpreter=interpreter, - install_dir=os.path.join(setup_root, 'dist')) - try: - return packager.sdist() - except Packager.InstallFailure as e: - raise cls.SetupError(str(e)) diff --git a/src/python/twitter/pants/python/test_builder.py b/src/python/twitter/pants/python/test_builder.py deleted file mode 100644 index 0b7304e8e..000000000 --- a/src/python/twitter/pants/python/test_builder.py +++ /dev/null @@ -1,274 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
diff --git a/src/python/twitter/pants/python/sdist_builder.py b/src/python/twitter/pants/python/sdist_builder.py
deleted file mode 100644
index f35f693b2..000000000
--- a/src/python/twitter/pants/python/sdist_builder.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-import glob
-import os
-import subprocess
-import sys
-
-from twitter.common.contextutil import pushd
-from twitter.common.python.installer import Packager
-
-
-class SdistBuilder(object):
-  """A helper class to run setup.py projects."""
-
-  class Error(Exception): pass
-  class SetupError(Error): pass
-
-  @classmethod
-  def build(cls, setup_root, target, interpreter=None):
-    packager = Packager(setup_root, interpreter=interpreter,
-      install_dir=os.path.join(setup_root, 'dist'))
-    try:
-      return packager.sdist()
-    except Packager.InstallFailure as e:
-      raise cls.SetupError(str(e))
diff --git a/src/python/twitter/pants/python/test_builder.py b/src/python/twitter/pants/python/test_builder.py
deleted file mode 100644
index 0b7304e8e..000000000
--- a/src/python/twitter/pants/python/test_builder.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-__author__ = 'Brian Wickman'
-
-try:
-  import configparser
-except ImportError:
-  import ConfigParser as configparser
-import errno
-import os
-import time
-import signal
-import sys
-
-from twitter.common.collections import OrderedSet
-from twitter.common.contextutil import temporary_file
-from twitter.common.dirutil import safe_mkdir
-from twitter.common.lang import Compatibility
-from twitter.common.quantity import Amount, Time
-from twitter.common.python.interpreter import PythonInterpreter
-from twitter.common.python.pex import PEX
-from twitter.common.python.pex_builder import PEXBuilder
-
-from twitter.pants.base.config import Config
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.python.python_chroot import PythonChroot
-from twitter.pants.targets.python_requirement import PythonRequirement
-from twitter.pants.targets.python_target import PythonTarget
-from twitter.pants.targets.python_tests import PythonTests, PythonTestSuite
-
-
-class PythonTestResult(object):
-  @staticmethod
-  def timeout():
-    return PythonTestResult('TIMEOUT')
-
-  @staticmethod
-  def exception():
-    return PythonTestResult('EXCEPTION')
-
-  @staticmethod
-  def rc(value):
-    return PythonTestResult('SUCCESS' if value == 0 else 'FAILURE',
-                            rc=value)
-
-  def __init__(self, msg, rc=None):
-    self._rc = rc
-    self._msg = msg
-
-  def __str__(self):
-    return self._msg
-
-  @property
-  def success(self):
-    return self._rc == 0
-
-
-DEFAULT_COVERAGE_CONFIG = """
-[run]
-branch = True
-timid = True
-
-[report]
-exclude_lines =
-    def __repr__
-    raise NotImplementedError
-
-ignore_errors = True
-"""
-
-def generate_coverage_config(target):
-  cp = configparser.ConfigParser()
-  cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
-  cp.add_section('html')
-  target_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage',
-    os.path.dirname(target.address.buildfile.relpath), target.name)
-  safe_mkdir(target_dir)
-  cp.set('html', 'directory', target_dir)
-  return cp
-
-
-class PythonTestBuilder(object):
-  class InvalidDependencyException(Exception): pass
-  class ChrootBuildingException(Exception): pass
-
-  TESTING_TARGETS = None
-
-  # TODO(wickman) Expose these as configurable parameters
-  TEST_TIMEOUT = Amount(2, Time.MINUTES)
-  TEST_POLL_PERIOD = Amount(100, Time.MILLISECONDS)
-
-  def __init__(self, targets, args, root_dir, interpreter=None, conn_timeout=None):
-    self.targets = targets
-    self.args = args
-    self.root_dir = root_dir
-    self.interpreter = interpreter or PythonInterpreter.get()
-    self.successes = {}
-    self._conn_timeout = conn_timeout
-
-  def run(self):
-    self.successes = {}
-    rv = self._run_tests(self.targets)
-    for target in sorted(self.successes):
-      print('%-80s.....%10s' % (target, self.successes[target]))
-    return 0 if rv.success else 1
-
-  @classmethod
-  def generate_test_targets(cls):
-    if cls.TESTING_TARGETS is None:
-      with ParseContext.temp():
-        cls.TESTING_TARGETS = [
-          PythonRequirement('pytest'),
-          PythonRequirement('pytest-cov'),
-          PythonRequirement('unittest2', version_filter=lambda py, pl: py.startswith('2')),
-          PythonRequirement('unittest2py3k', version_filter=lambda py, pl: py.startswith('3'))
-        ]
-    return cls.TESTING_TARGETS
-
-  @staticmethod
-  def generate_junit_args(target):
-    args = []
-    xml_base = os.getenv('JUNIT_XML_BASE')
-    if xml_base:
-      xml_base = os.path.abspath(os.path.normpath(xml_base))
-      xml_path = os.path.join(
-        xml_base, os.path.dirname(target.address.buildfile.relpath), target.name + '.xml')
-      try:
-        os.makedirs(os.path.dirname(xml_path))
-      except OSError as e:
-        if e.errno != errno.EEXIST:
-          raise PythonTestBuilder.ChrootBuildingException(
-            "Unable to establish JUnit target: %s! %s" % (target, e))
-      args.append('--junitxml=%s' % xml_path)
-    return args
-
-  @staticmethod
-  def cov_setup(target, chroot):
-    cp = generate_coverage_config(target)
-    with temporary_file(cleanup=False) as fp:
-      cp.write(fp)
-      filename = fp.name
-    if target.coverage:
-      source = target.coverage
-    else:
-      # This technically makes the assumption that tests/python/ will be testing
-      # src/python/. To get honest measurements, use target.walk() here instead;
-      # however, that results in very noisy and not very useful coverage reports.
-      source = set(os.path.dirname(source).replace(os.sep, '.') for source in target.sources)
-    args = ['-p', 'pytest_cov',
-            '--cov-config', filename,
-            '--cov-report', 'html',
-            '--cov-report', 'term']
-    for module in source:
-      args.extend(['--cov', module])
-    return filename, args
-
-  @staticmethod
-  def wait_on(popen, timeout=TEST_TIMEOUT):
-    total_wait = Amount(0, Time.SECONDS)
-    while total_wait < timeout:
-      rc = popen.poll()
-      if rc is not None:
-        return PythonTestResult.rc(rc)
-      total_wait += PythonTestBuilder.TEST_POLL_PERIOD
-      time.sleep(PythonTestBuilder.TEST_POLL_PERIOD.as_(Time.SECONDS))
-    popen.kill()
-    return PythonTestResult.timeout()
-
-  def _run_python_test(self, target):
-    po = None
-    rv = PythonTestResult.exception()
-    coverage_rc = None
-    coverage_enabled = 'PANTS_PY_COVERAGE' in os.environ
-
-    try:
-      builder = PEXBuilder(interpreter=self.interpreter)
-      builder.info.entry_point = target.entry_point
-      builder.info.ignore_errors = target._soft_dependencies
-      chroot = PythonChroot(
-        target,
-        self.root_dir,
-        extra_targets=self.generate_test_targets(),
-        builder=builder,
-        platforms=('current',),
-        interpreter=self.interpreter,
-        conn_timeout=self._conn_timeout)
-      builder = chroot.dump()
-      builder.freeze()
-      test_args = PythonTestBuilder.generate_junit_args(target)
-      test_args.extend(self.args)
-      if coverage_enabled:
-        coverage_rc, args = self.cov_setup(target, builder.chroot())
-        test_args.extend(args)
-      sources = [os.path.join(target.target_base, source) for source in target.sources]
-      po = PEX(builder.path(), interpreter=self.interpreter).run(
-        args=test_args + sources, blocking=False, setsid=True)
-      # TODO(wickman) If coverage is enabled, write an intermediate .html that points to
-      # each of the coverage reports generated, and webbrowser.open that page.
-      rv = PythonTestBuilder.wait_on(po, timeout=target.timeout)
-    except Exception as e:
-      import traceback
-      print('Failed to run test!', file=sys.stderr)
-      traceback.print_exc()
-      rv = PythonTestResult.exception()
-    finally:
-      if coverage_rc:
-        os.unlink(coverage_rc)
-      if po and po.returncode != 0:
-        try:
-          os.killpg(po.pid, signal.SIGTERM)
-        except OSError as e:
-          if e.errno == errno.EPERM:
-            print("Unable to kill process group: %d" % po.pid)
-          elif e.errno != errno.ESRCH:
-            rv = PythonTestResult.exception()
-    self.successes[target._create_id()] = rv
-    return rv
-
-  def _run_python_test_suite(self, target, fail_hard=True):
-    tests = OrderedSet([])
-    def _gather_deps(trg):
-      if isinstance(trg, PythonTests):
-        tests.add(trg)
-      elif isinstance(trg, PythonTestSuite):
-        for dependency in trg.dependencies:
-          for dep in dependency.resolve():
-            _gather_deps(dep)
-    _gather_deps(target)
-
-    failed = False
-    for test in tests:
-      rv = self._run_python_test(test)
-      if not rv.success:
-        failed = True
-        if fail_hard:
-          return rv
-    return PythonTestResult.rc(1 if failed else 0)
-
-  def _run_tests(self, targets):
-    fail_hard = 'PANTS_PYTHON_TEST_FAILSOFT' not in os.environ
-    if 'PANTS_PY_COVERAGE' in os.environ:
-      # Coverage often throws errors despite tests succeeding, so make PANTS_PY_COVERAGE
-      # force FAILSOFT.
-      fail_hard = False
-    failed = False
-    for target in targets:
-      if isinstance(target, PythonTests):
-        rv = self._run_python_test(target)
-      elif isinstance(target, PythonTestSuite):
-        rv = self._run_python_test_suite(target, fail_hard)
-      else:
-        raise PythonTestBuilder.InvalidDependencyException(
-          "Invalid dependency in python test target: %s" % target)
-      if not rv.success:
-        failed = True
-        if fail_hard:
-          return rv
-    return PythonTestResult.rc(1 if failed else 0)
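PythonTestBuilder is self-contained: a caller constructs it with the resolved test targets and calls run(), which prints a per-target summary and returns a shell-style exit code. A minimal sketch, with `targets` and `root_dir` assumed inputs:

    import sys
    from twitter.pants.python.test_builder import PythonTestBuilder

    builder = PythonTestBuilder(targets, args=[], root_dir=root_dir)
    sys.exit(builder.run())
    # Behavior is tuned via the environment, per the code above: JUNIT_XML_BASE
    # writes junit xml, PANTS_PY_COVERAGE enables coverage (and forces failsoft),
    # and PANTS_PYTHON_TEST_FAILSOFT keeps going past failures.
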
diff --git a/src/python/twitter/pants/python/thrift_builder.py b/src/python/twitter/pants/python/thrift_builder.py
deleted file mode 100644
index 066ea390e..000000000
--- a/src/python/twitter/pants/python/thrift_builder.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from __future__ import print_function
-
-import functools
-import keyword
-import os
-import re
-import shutil
-import subprocess
-import sys
-
-from twitter.common.dirutil import safe_mkdir
-
-from twitter.pants.python.code_generator import CodeGenerator
-from twitter.pants.targets.python_thrift_library import PythonThriftLibrary
-from twitter.pants.thrift_util import select_thrift_binary
-
-
-class PythonThriftBuilder(CodeGenerator):
-  """Generates Python code from thrift IDL files."""
-  class UnknownPlatformException(CodeGenerator.Error):
-    def __init__(self, platform):
-      super(PythonThriftBuilder.UnknownPlatformException, self).__init__(
-        "Unknown platform: %s!" % str(platform))
-
-  def __init__(self, target, root_dir, config, target_suffix=None):
-    super(PythonThriftBuilder, self).__init__(target, root_dir, config, target_suffix=target_suffix)
-    self._workdir = os.path.join(config.getdefault(option='thrift_workdir'), 'py-thrift')
-
-  @property
-  def install_requires(self):
-    return ['thrift']
-
-  def run_thrifts(self):
-    """Generate Python thrift code using the thrift compiler specified in the pants config.
-
-    Thrift fields conflicting with Python keywords are suffixed with a trailing
-    underscore (e.g.: from_).
-    """
-
-    def is_py_thrift(target):
-      return isinstance(target, PythonThriftLibrary)
-
-    all_thrifts = set()
-
-    def collect_sources(target):
-      for source in target.sources:
-        all_thrifts.add((target.target_base, source))
-
-    self.target.walk(collect_sources, predicate=is_py_thrift)
-
-    copied_sources = set()
-    for base, relative_source in all_thrifts:
-      abs_source = os.path.join(base, relative_source)
-      copied_source = os.path.join(self._workdir, relative_source)
-
-      safe_mkdir(os.path.dirname(copied_source))
-      shutil.copyfile(abs_source, copied_source)
-      copied_sources.add(self._modify_thrift(copied_source))
-
-    for src in copied_sources:
-      if not self._run_thrift(src):
-        raise PythonThriftBuilder.CodeGenerationException("Could not generate .py from %s!" % src)
-
-  def _run_thrift(self, source):
-    args = [
-      select_thrift_binary(self.config),
-      '--gen',
-      'py:new_style',
-      '-o', self.codegen_root,
-      '-I', self._workdir,
-      os.path.abspath(source)]
-
-    po = subprocess.Popen(args, cwd=self.chroot.path())
-    rv = po.wait()
-    if rv != 0:
-      comm = po.communicate()
-      print('thrift generation failed!', file=sys.stderr)
-      print('STDOUT', file=sys.stderr)
-      print(comm[0], file=sys.stderr)
-      print('STDERR', file=sys.stderr)
-      print(comm[1], file=sys.stderr)
-    return rv == 0
-
-  def _modify_thrift(self, source):
-    """Replaces Python keywords in the thrift file.
-
-    Finds all Python keywords in the thrift file and appends a trailing underscore.
-    For example, 'from' is converted to 'from_'.
-    """
-    rewrites = []
-    renames = dict((kw, '%s_' % kw) for kw in keyword.kwlist)
-    token_regex = re.compile(r'(\W)(%s)(\W)' % '|'.join(renames.keys()), re.MULTILINE)
-
-    def token_replace(match):
-      return '%s%s%s' % (match.group(1), renames[match.group(2)], match.group(3))
-
-    def replace_tokens(contents):
-      return token_regex.sub(token_replace, contents)
-
-    rewrites.append(replace_tokens)
-    with open(source) as contents:
-      modified = functools.reduce(lambda txt, rewrite: rewrite(txt), rewrites, contents.read())
-    with open(source, 'w') as thrift:
-      thrift.write(modified)
-    return source
-
-  @property
-  def package_dir(self):
-    return "gen-py"
-
-  def generate(self):
-    # auto-generate the python files that we bundle up
-    self.run_thrifts()
-
-    # Thrift generates code with all parent namespaces with empty __init__.py's. Generally
-    # speaking we want to drop anything w/o an __init__.py, and for anything with an __init__.py,
-    # we want to explicitly make it a namespace package, hence the hoops here.
-    for root, _, files in os.walk(os.path.normpath(self.package_root)):
-      reldir = os.path.relpath(root, self.package_root)
-      if reldir == '.':  # skip root
-        continue
-      if '__init__.py' not in files:  # skip non-packages
-        continue
-      init_py_abspath = os.path.join(root, '__init__.py')
-      module_path = self.path_to_module(reldir)
-      self.created_packages.add(module_path)
-      if os.path.getsize(init_py_abspath) == 0:  # empty __init__, translate to namespace package
-        with open(init_py_abspath, 'wb') as f:
-          f.write(b"__import__('pkg_resources').declare_namespace(__name__)")
-        self.created_namespace_packages.add(module_path)
-      else:
-        # non-empty __init__, this is a leaf package, usually with ttypes and constants; leave as-is
-        pass
-
-    if not self.created_packages:
-      raise self.CodeGenerationException('No Thrift structures declared in %s!' % self.target)
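The keyword munging in _modify_thrift is easiest to see on a concrete line of IDL. Rebuilding the same regex standalone (the `Job`/`from` struct is a made-up example):

    import keyword
    import re

    renames = dict((kw, '%s_' % kw) for kw in keyword.kwlist)
    token_regex = re.compile(r'(\W)(%s)(\W)' % '|'.join(renames.keys()), re.MULTILINE)

    def token_replace(match):
      return '%s%s%s' % (match.group(1), renames[match.group(2)], match.group(3))

    print(token_regex.sub(token_replace, 'struct Job { 1: string from }'))
    # -> struct Job { 1: string from_ }
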
- """ - rewrites = [] - renames = dict((kw, '%s_' % kw) for kw in keyword.kwlist) - token_regex = re.compile(r'(\W)(%s)(\W)' % '|'.join(renames.keys()), re.MULTILINE) - - def token_replace(match): - return '%s%s%s' % (match.group(1), renames[match.group(2)], match.group(3)) - - def replace_tokens(contents): - return token_regex.sub(token_replace, contents) - - rewrites.append(replace_tokens) - with open(source) as contents: - modified = functools.reduce(lambda txt, rewrite: rewrite(txt), rewrites, contents.read()) - contents.close() - with open(source, 'w') as thrift: - thrift.write(modified) - return source - - @property - def package_dir(self): - return "gen-py" - - def generate(self): - # auto-generate the python files that we bundle up - self.run_thrifts() - - # Thrift generates code with all parent namespaces with empty __init__.py's. Generally - # speaking we want to drop anything w/o an __init__.py, and for anything with an __init__.py, - # we want to explicitly make it a namespace package, hence the hoops here. - for root, _, files in os.walk(os.path.normpath(self.package_root)): - reldir = os.path.relpath(root, self.package_root) - if reldir == '.': # skip root - continue - if '__init__.py' not in files: # skip non-packages - continue - init_py_abspath = os.path.join(root, '__init__.py') - module_path = self.path_to_module(reldir) - self.created_packages.add(module_path) - if os.path.getsize(init_py_abspath) == 0: # empty __init__, translate to namespace package - with open(init_py_abspath, 'wb') as f: - f.write(b"__import__('pkg_resources').declare_namespace(__name__)") - self.created_namespace_packages.add(module_path) - else: - # non-empty __init__, this is a leaf package, usually with ttypes and constants, leave as-is - pass - - if not self.created_packages: - raise self.CodeGenerationException('No Thrift structures declared in %s!' % self.target) diff --git a/src/python/twitter/pants/reporting/__init__.py b/src/python/twitter/pants/reporting/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/reporting/assets/css/font-awesome.css b/src/python/twitter/pants/reporting/assets/css/font-awesome.css deleted file mode 100755 index 887509896..000000000 --- a/src/python/twitter/pants/reporting/assets/css/font-awesome.css +++ /dev/null @@ -1,540 +0,0 @@ -/*! 
- * Font Awesome 3.0.2 - * the iconic font designed for use with Twitter Bootstrap - * ------------------------------------------------------- - * The full suite of pictographic icons, examples, and documentation - * can be found at: http://fortawesome.github.com/Font-Awesome/ - * - * License - * ------------------------------------------------------- - * - The Font Awesome font is licensed under the SIL Open Font License - http://scripts.sil.org/OFL - * - Font Awesome CSS, LESS, and SASS files are licensed under the MIT License - - * http://opensource.org/licenses/mit-license.html - * - The Font Awesome pictograms are licensed under the CC BY 3.0 License - http://creativecommons.org/licenses/by/3.0/ - * - Attribution is no longer required in Font Awesome 3.0, but much appreciated: - * "Font Awesome by Dave Gandy - http://fortawesome.github.com/Font-Awesome" - - * Contact - * ------------------------------------------------------- - * Email: dave@davegandy.com - * Twitter: http://twitter.com/fortaweso_me - * Work: Lead Product Designer @ http://kyruus.com - */ -@font-face { - font-family: 'FontAwesome'; - src: url('../font/fontawesome-webfont.eot?v=3.0.1'); - src: url('../font/fontawesome-webfont.eot?#iefix&v=3.0.1') format('embedded-opentype'), - url('../font/fontawesome-webfont.woff?v=3.0.1') format('woff'), - url('../font/fontawesome-webfont.ttf?v=3.0.1') format('truetype'); - font-weight: normal; - font-style: normal; -} -/* Font Awesome styles - ------------------------------------------------------- */ -[class^="icon-"], -[class*=" icon-"] { - font-family: FontAwesome; - font-weight: normal; - font-style: normal; - text-decoration: inherit; - -webkit-font-smoothing: antialiased; - - /* sprites.less reset */ - display: inline; - width: auto; - height: auto; - line-height: normal; - vertical-align: baseline; - background-image: none; - background-position: 0% 0%; - background-repeat: repeat; - margin-top: 0; -} -/* more sprites.less reset */ -.icon-white, -.nav-pills > .active > a > [class^="icon-"], -.nav-pills > .active > a > [class*=" icon-"], -.nav-list > .active > a > [class^="icon-"], -.nav-list > .active > a > [class*=" icon-"], -.navbar-inverse .nav > .active > a > [class^="icon-"], -.navbar-inverse .nav > .active > a > [class*=" icon-"], -.dropdown-menu > li > a:hover > [class^="icon-"], -.dropdown-menu > li > a:hover > [class*=" icon-"], -.dropdown-menu > .active > a > [class^="icon-"], -.dropdown-menu > .active > a > [class*=" icon-"], -.dropdown-submenu:hover > a > [class^="icon-"], -.dropdown-submenu:hover > a > [class*=" icon-"] { - background-image: none; -} -[class^="icon-"]:before, -[class*=" icon-"]:before { - text-decoration: inherit; - display: inline-block; - speak: none; -} -/* makes sure icons active on rollover in links */ -a [class^="icon-"], -a [class*=" icon-"] { - display: inline-block; -} -/* makes the font 33% larger relative to the icon container */ -.icon-large:before { - vertical-align: -10%; - font-size: 1.3333333333333333em; -} -.btn [class^="icon-"], -.nav [class^="icon-"], -.btn [class*=" icon-"], -.nav [class*=" icon-"] { - display: inline; - /* keeps button heights with and without icons the same */ - -} -.btn [class^="icon-"].icon-large, -.nav [class^="icon-"].icon-large, -.btn [class*=" icon-"].icon-large, -.nav [class*=" icon-"].icon-large { - line-height: .9em; -} -.btn [class^="icon-"].icon-spin, -.nav [class^="icon-"].icon-spin, -.btn [class*=" icon-"].icon-spin, -.nav [class*=" icon-"].icon-spin { - display: inline-block; -} -.nav-tabs 
[class^="icon-"], -.nav-pills [class^="icon-"], -.nav-tabs [class*=" icon-"], -.nav-pills [class*=" icon-"] { - /* keeps button heights with and without icons the same */ - -} -.nav-tabs [class^="icon-"], -.nav-pills [class^="icon-"], -.nav-tabs [class*=" icon-"], -.nav-pills [class*=" icon-"], -.nav-tabs [class^="icon-"].icon-large, -.nav-pills [class^="icon-"].icon-large, -.nav-tabs [class*=" icon-"].icon-large, -.nav-pills [class*=" icon-"].icon-large { - line-height: .9em; -} -li [class^="icon-"], -.nav li [class^="icon-"], -li [class*=" icon-"], -.nav li [class*=" icon-"] { - display: inline-block; - width: 1.25em; - text-align: center; -} -li [class^="icon-"].icon-large, -.nav li [class^="icon-"].icon-large, -li [class*=" icon-"].icon-large, -.nav li [class*=" icon-"].icon-large { - /* increased font size for icon-large */ - - width: 1.5625em; -} -ul.icons { - list-style-type: none; - text-indent: -0.75em; -} -ul.icons li [class^="icon-"], -ul.icons li [class*=" icon-"] { - width: .75em; -} -.icon-muted { - color: #eeeeee; -} -.icon-border { - border: solid 1px #eeeeee; - padding: .2em .25em .15em; - -webkit-border-radius: 3px; - -moz-border-radius: 3px; - border-radius: 3px; -} -.icon-2x { - font-size: 2em; -} -.icon-2x.icon-border { - border-width: 2px; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; -} -.icon-3x { - font-size: 3em; -} -.icon-3x.icon-border { - border-width: 3px; - -webkit-border-radius: 5px; - -moz-border-radius: 5px; - border-radius: 5px; -} -.icon-4x { - font-size: 4em; -} -.icon-4x.icon-border { - border-width: 4px; - -webkit-border-radius: 6px; - -moz-border-radius: 6px; - border-radius: 6px; -} -.pull-right { - float: right; -} -.pull-left { - float: left; -} -[class^="icon-"].pull-left, -[class*=" icon-"].pull-left { - margin-right: .3em; -} -[class^="icon-"].pull-right, -[class*=" icon-"].pull-right { - margin-left: .3em; -} -.btn [class^="icon-"].pull-left.icon-2x, -.btn [class*=" icon-"].pull-left.icon-2x, -.btn [class^="icon-"].pull-right.icon-2x, -.btn [class*=" icon-"].pull-right.icon-2x { - margin-top: .18em; -} -.btn [class^="icon-"].icon-spin.icon-large, -.btn [class*=" icon-"].icon-spin.icon-large { - line-height: .8em; -} -.btn.btn-small [class^="icon-"].pull-left.icon-2x, -.btn.btn-small [class*=" icon-"].pull-left.icon-2x, -.btn.btn-small [class^="icon-"].pull-right.icon-2x, -.btn.btn-small [class*=" icon-"].pull-right.icon-2x { - margin-top: .25em; -} -.btn.btn-large [class^="icon-"], -.btn.btn-large [class*=" icon-"] { - margin-top: 0; -} -.btn.btn-large [class^="icon-"].pull-left.icon-2x, -.btn.btn-large [class*=" icon-"].pull-left.icon-2x, -.btn.btn-large [class^="icon-"].pull-right.icon-2x, -.btn.btn-large [class*=" icon-"].pull-right.icon-2x { - margin-top: .05em; -} -.btn.btn-large [class^="icon-"].pull-left.icon-2x, -.btn.btn-large [class*=" icon-"].pull-left.icon-2x { - margin-right: .2em; -} -.btn.btn-large [class^="icon-"].pull-right.icon-2x, -.btn.btn-large [class*=" icon-"].pull-right.icon-2x { - margin-left: .2em; -} -.icon-spin { - display: inline-block; - -moz-animation: spin 2s infinite linear; - -o-animation: spin 2s infinite linear; - -webkit-animation: spin 2s infinite linear; - animation: spin 2s infinite linear; -} -@-moz-keyframes spin { - 0% { -moz-transform: rotate(0deg); } - 100% { -moz-transform: rotate(359deg); } -} -@-webkit-keyframes spin { - 0% { -webkit-transform: rotate(0deg); } - 100% { -webkit-transform: rotate(359deg); } -} -@-o-keyframes spin { - 0% { -o-transform: rotate(0deg); 
} - 100% { -o-transform: rotate(359deg); } -} -@-ms-keyframes spin { - 0% { -ms-transform: rotate(0deg); } - 100% { -ms-transform: rotate(359deg); } -} -@keyframes spin { - 0% { transform: rotate(0deg); } - 100% { transform: rotate(359deg); } -} -@-moz-document url-prefix() { - .icon-spin { - height: .9em; - } - .btn .icon-spin { - height: auto; - } - .icon-spin.icon-large { - height: 1.25em; - } - .btn .icon-spin.icon-large { - height: .75em; - } -} -/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen - readers do not read off random characters that represent icons */ -.icon-glass:before { content: "\f000"; } -.icon-music:before { content: "\f001"; } -.icon-search:before { content: "\f002"; } -.icon-envelope:before { content: "\f003"; } -.icon-heart:before { content: "\f004"; } -.icon-star:before { content: "\f005"; } -.icon-star-empty:before { content: "\f006"; } -.icon-user:before { content: "\f007"; } -.icon-film:before { content: "\f008"; } -.icon-th-large:before { content: "\f009"; } -.icon-th:before { content: "\f00a"; } -.icon-th-list:before { content: "\f00b"; } -.icon-ok:before { content: "\f00c"; } -.icon-remove:before { content: "\f00d"; } -.icon-zoom-in:before { content: "\f00e"; } - -.icon-zoom-out:before { content: "\f010"; } -.icon-off:before { content: "\f011"; } -.icon-signal:before { content: "\f012"; } -.icon-cog:before { content: "\f013"; } -.icon-trash:before { content: "\f014"; } -.icon-home:before { content: "\f015"; } -.icon-file:before { content: "\f016"; } -.icon-time:before { content: "\f017"; } -.icon-road:before { content: "\f018"; } -.icon-download-alt:before { content: "\f019"; } -.icon-download:before { content: "\f01a"; } -.icon-upload:before { content: "\f01b"; } -.icon-inbox:before { content: "\f01c"; } -.icon-play-circle:before { content: "\f01d"; } -.icon-repeat:before { content: "\f01e"; } - -/* \f020 doesn't work in Safari. 
all shifted one down */ -.icon-refresh:before { content: "\f021"; } -.icon-list-alt:before { content: "\f022"; } -.icon-lock:before { content: "\f023"; } -.icon-flag:before { content: "\f024"; } -.icon-headphones:before { content: "\f025"; } -.icon-volume-off:before { content: "\f026"; } -.icon-volume-down:before { content: "\f027"; } -.icon-volume-up:before { content: "\f028"; } -.icon-qrcode:before { content: "\f029"; } -.icon-barcode:before { content: "\f02a"; } -.icon-tag:before { content: "\f02b"; } -.icon-tags:before { content: "\f02c"; } -.icon-book:before { content: "\f02d"; } -.icon-bookmark:before { content: "\f02e"; } -.icon-print:before { content: "\f02f"; } - -.icon-camera:before { content: "\f030"; } -.icon-font:before { content: "\f031"; } -.icon-bold:before { content: "\f032"; } -.icon-italic:before { content: "\f033"; } -.icon-text-height:before { content: "\f034"; } -.icon-text-width:before { content: "\f035"; } -.icon-align-left:before { content: "\f036"; } -.icon-align-center:before { content: "\f037"; } -.icon-align-right:before { content: "\f038"; } -.icon-align-justify:before { content: "\f039"; } -.icon-list:before { content: "\f03a"; } -.icon-indent-left:before { content: "\f03b"; } -.icon-indent-right:before { content: "\f03c"; } -.icon-facetime-video:before { content: "\f03d"; } -.icon-picture:before { content: "\f03e"; } - -.icon-pencil:before { content: "\f040"; } -.icon-map-marker:before { content: "\f041"; } -.icon-adjust:before { content: "\f042"; } -.icon-tint:before { content: "\f043"; } -.icon-edit:before { content: "\f044"; } -.icon-share:before { content: "\f045"; } -.icon-check:before { content: "\f046"; } -.icon-move:before { content: "\f047"; } -.icon-step-backward:before { content: "\f048"; } -.icon-fast-backward:before { content: "\f049"; } -.icon-backward:before { content: "\f04a"; } -.icon-play:before { content: "\f04b"; } -.icon-pause:before { content: "\f04c"; } -.icon-stop:before { content: "\f04d"; } -.icon-forward:before { content: "\f04e"; } - -.icon-fast-forward:before { content: "\f050"; } -.icon-step-forward:before { content: "\f051"; } -.icon-eject:before { content: "\f052"; } -.icon-chevron-left:before { content: "\f053"; } -.icon-chevron-right:before { content: "\f054"; } -.icon-plus-sign:before { content: "\f055"; } -.icon-minus-sign:before { content: "\f056"; } -.icon-remove-sign:before { content: "\f057"; } -.icon-ok-sign:before { content: "\f058"; } -.icon-question-sign:before { content: "\f059"; } -.icon-info-sign:before { content: "\f05a"; } -.icon-screenshot:before { content: "\f05b"; } -.icon-remove-circle:before { content: "\f05c"; } -.icon-ok-circle:before { content: "\f05d"; } -.icon-ban-circle:before { content: "\f05e"; } - -.icon-arrow-left:before { content: "\f060"; } -.icon-arrow-right:before { content: "\f061"; } -.icon-arrow-up:before { content: "\f062"; } -.icon-arrow-down:before { content: "\f063"; } -.icon-share-alt:before { content: "\f064"; } -.icon-resize-full:before { content: "\f065"; } -.icon-resize-small:before { content: "\f066"; } -.icon-plus:before { content: "\f067"; } -.icon-minus:before { content: "\f068"; } -.icon-asterisk:before { content: "\f069"; } -.icon-exclamation-sign:before { content: "\f06a"; } -.icon-gift:before { content: "\f06b"; } -.icon-leaf:before { content: "\f06c"; } -.icon-fire:before { content: "\f06d"; } -.icon-eye-open:before { content: "\f06e"; } - -.icon-eye-close:before { content: "\f070"; } -.icon-warning-sign:before { content: "\f071"; } -.icon-plane:before { content: 
"\f072"; } -.icon-calendar:before { content: "\f073"; } -.icon-random:before { content: "\f074"; } -.icon-comment:before { content: "\f075"; } -.icon-magnet:before { content: "\f076"; } -.icon-chevron-up:before { content: "\f077"; } -.icon-chevron-down:before { content: "\f078"; } -.icon-retweet:before { content: "\f079"; } -.icon-shopping-cart:before { content: "\f07a"; } -.icon-folder-close:before { content: "\f07b"; } -.icon-folder-open:before { content: "\f07c"; } -.icon-resize-vertical:before { content: "\f07d"; } -.icon-resize-horizontal:before { content: "\f07e"; } - -.icon-bar-chart:before { content: "\f080"; } -.icon-twitter-sign:before { content: "\f081"; } -.icon-facebook-sign:before { content: "\f082"; } -.icon-camera-retro:before { content: "\f083"; } -.icon-key:before { content: "\f084"; } -.icon-cogs:before { content: "\f085"; } -.icon-comments:before { content: "\f086"; } -.icon-thumbs-up:before { content: "\f087"; } -.icon-thumbs-down:before { content: "\f088"; } -.icon-star-half:before { content: "\f089"; } -.icon-heart-empty:before { content: "\f08a"; } -.icon-signout:before { content: "\f08b"; } -.icon-linkedin-sign:before { content: "\f08c"; } -.icon-pushpin:before { content: "\f08d"; } -.icon-external-link:before { content: "\f08e"; } - -.icon-signin:before { content: "\f090"; } -.icon-trophy:before { content: "\f091"; } -.icon-github-sign:before { content: "\f092"; } -.icon-upload-alt:before { content: "\f093"; } -.icon-lemon:before { content: "\f094"; } -.icon-phone:before { content: "\f095"; } -.icon-check-empty:before { content: "\f096"; } -.icon-bookmark-empty:before { content: "\f097"; } -.icon-phone-sign:before { content: "\f098"; } -.icon-twitter:before { content: "\f099"; } -.icon-facebook:before { content: "\f09a"; } -.icon-github:before { content: "\f09b"; } -.icon-unlock:before { content: "\f09c"; } -.icon-credit-card:before { content: "\f09d"; } -.icon-rss:before { content: "\f09e"; } - -.icon-hdd:before { content: "\f0a0"; } -.icon-bullhorn:before { content: "\f0a1"; } -.icon-bell:before { content: "\f0a2"; } -.icon-certificate:before { content: "\f0a3"; } -.icon-hand-right:before { content: "\f0a4"; } -.icon-hand-left:before { content: "\f0a5"; } -.icon-hand-up:before { content: "\f0a6"; } -.icon-hand-down:before { content: "\f0a7"; } -.icon-circle-arrow-left:before { content: "\f0a8"; } -.icon-circle-arrow-right:before { content: "\f0a9"; } -.icon-circle-arrow-up:before { content: "\f0aa"; } -.icon-circle-arrow-down:before { content: "\f0ab"; } -.icon-globe:before { content: "\f0ac"; } -.icon-wrench:before { content: "\f0ad"; } -.icon-tasks:before { content: "\f0ae"; } - -.icon-filter:before { content: "\f0b0"; } -.icon-briefcase:before { content: "\f0b1"; } -.icon-fullscreen:before { content: "\f0b2"; } - -.icon-group:before { content: "\f0c0"; } -.icon-link:before { content: "\f0c1"; } -.icon-cloud:before { content: "\f0c2"; } -.icon-beaker:before { content: "\f0c3"; } -.icon-cut:before { content: "\f0c4"; } -.icon-copy:before { content: "\f0c5"; } -.icon-paper-clip:before { content: "\f0c6"; } -.icon-save:before { content: "\f0c7"; } -.icon-sign-blank:before { content: "\f0c8"; } -.icon-reorder:before { content: "\f0c9"; } -.icon-list-ul:before { content: "\f0ca"; } -.icon-list-ol:before { content: "\f0cb"; } -.icon-strikethrough:before { content: "\f0cc"; } -.icon-underline:before { content: "\f0cd"; } -.icon-table:before { content: "\f0ce"; } - -.icon-magic:before { content: "\f0d0"; } -.icon-truck:before { content: "\f0d1"; } 
-.icon-pinterest:before { content: "\f0d2"; } -.icon-pinterest-sign:before { content: "\f0d3"; } -.icon-google-plus-sign:before { content: "\f0d4"; } -.icon-google-plus:before { content: "\f0d5"; } -.icon-money:before { content: "\f0d6"; } -.icon-caret-down:before { content: "\f0d7"; } -.icon-caret-up:before { content: "\f0d8"; } -.icon-caret-left:before { content: "\f0d9"; } -.icon-caret-right:before { content: "\f0da"; } -.icon-columns:before { content: "\f0db"; } -.icon-sort:before { content: "\f0dc"; } -.icon-sort-down:before { content: "\f0dd"; } -.icon-sort-up:before { content: "\f0de"; } - -.icon-envelope-alt:before { content: "\f0e0"; } -.icon-linkedin:before { content: "\f0e1"; } -.icon-undo:before { content: "\f0e2"; } -.icon-legal:before { content: "\f0e3"; } -.icon-dashboard:before { content: "\f0e4"; } -.icon-comment-alt:before { content: "\f0e5"; } -.icon-comments-alt:before { content: "\f0e6"; } -.icon-bolt:before { content: "\f0e7"; } -.icon-sitemap:before { content: "\f0e8"; } -.icon-umbrella:before { content: "\f0e9"; } -.icon-paste:before { content: "\f0ea"; } -.icon-lightbulb:before { content: "\f0eb"; } -.icon-exchange:before { content: "\f0ec"; } -.icon-cloud-download:before { content: "\f0ed"; } -.icon-cloud-upload:before { content: "\f0ee"; } - -.icon-user-md:before { content: "\f0f0"; } -.icon-stethoscope:before { content: "\f0f1"; } -.icon-suitcase:before { content: "\f0f2"; } -.icon-bell-alt:before { content: "\f0f3"; } -.icon-coffee:before { content: "\f0f4"; } -.icon-food:before { content: "\f0f5"; } -.icon-file-alt:before { content: "\f0f6"; } -.icon-building:before { content: "\f0f7"; } -.icon-hospital:before { content: "\f0f8"; } -.icon-ambulance:before { content: "\f0f9"; } -.icon-medkit:before { content: "\f0fa"; } -.icon-fighter-jet:before { content: "\f0fb"; } -.icon-beer:before { content: "\f0fc"; } -.icon-h-sign:before { content: "\f0fd"; } -.icon-plus-sign-alt:before { content: "\f0fe"; } - -.icon-double-angle-left:before { content: "\f100"; } -.icon-double-angle-right:before { content: "\f101"; } -.icon-double-angle-up:before { content: "\f102"; } -.icon-double-angle-down:before { content: "\f103"; } -.icon-angle-left:before { content: "\f104"; } -.icon-angle-right:before { content: "\f105"; } -.icon-angle-up:before { content: "\f106"; } -.icon-angle-down:before { content: "\f107"; } -.icon-desktop:before { content: "\f108"; } -.icon-laptop:before { content: "\f109"; } -.icon-tablet:before { content: "\f10a"; } -.icon-mobile-phone:before { content: "\f10b"; } -.icon-circle-blank:before { content: "\f10c"; } -.icon-quote-left:before { content: "\f10d"; } -.icon-quote-right:before { content: "\f10e"; } - -.icon-spinner:before { content: "\f110"; } -.icon-circle:before { content: "\f111"; } -.icon-reply:before { content: "\f112"; } -.icon-github-alt:before { content: "\f113"; } -.icon-folder-close-alt:before { content: "\f114"; } -.icon-folder-open-alt:before { content: "\f115"; } diff --git a/src/python/twitter/pants/reporting/assets/css/pants.css b/src/python/twitter/pants/reporting/assets/css/pants.css deleted file mode 100644 index d75470982..000000000 --- a/src/python/twitter/pants/reporting/assets/css/pants.css +++ /dev/null @@ -1,362 +0,0 @@ -html { - height: 100%; -} - -body { - margin: 0; - height: 100%; - font-family: Arial, Helvetica, sans-serif; - font-size: 13px; -} - -a:link { - color:inherit; -} - -a:visited { - color: inherit; -} - -.monospace { - font-family: "Courier New", Courier, monospace; -} - -#container { - background-color: 
#fff; - color: #333; - min-height: 100%; - margin-bottom: -2em; /* Height of sticky-footer. */ - position: relative; -} - -#top { - height: 4em; - line-height: 4em; - background-color: #ddd; - border-bottom: 1px solid gray; - padding-left: 2em; -} - -#top h1 { - padding: 0; - margin: 0; -} - -#menu { - font-family: Arial, Helvetica, sans-serif; - font-size: 15px; - float: left; - width: 160px; - height: 200px; - margin: 0; - border-right: 1px solid gray; - border-bottom: 1px solid gray; -} - -#nav { - list-style-type: none; - margin: 0; - padding: 0; -} - -#nav li a { - display: block; - width: 100%; - color: black; -} - -#nav li { - margin: 0.5em; - padding: 0.5em; -} - -#nav li:hover { - background-color: #ddd; -} - -#main-content { - margin-left: 300px; - padding: 1em; -} - -#bottom { - clear: both; - height: 2em; /* Height of sticky-footer. */ -} - -#sticky-footer { - margin: 0; - width: 100%; - height: 2em; - position: relative; - color: #333; - background-color: #ddd; -} - -#sticky-footer-content { - border-top: 1px solid gray; -} - -.bigtext { - font-size: 16pt; - font-weight: bold; -} - -.timeprefix { - position: absolute; - left: 200px; -} - -.timestamp { - color: grey; -} - -.timedelta { - margin-left: 6px; - color: black; -} - -/* Outcome colors. */ - -.aborted { - color: purple; -} - -.failure { - color: red; -} - -.warning { - color: orange; -} - -.success { - color: green; -} - -.unknown { - color: black; -} - - -/* Log level colors. */ - -.fatal { - color: red; -} - -.error { - color: red; -} - -.warn { - color: orange; -} - -.info { - color: green; -} - -.debug { - color: cyan; -} - - -/* Header toggling. */ - -.toggle-header { - font-weight: bold; - overflow: hidden; -} -.toggle-header-icon { - float: left; - width: 16px; -} -.toggle-header-text { - float: left; -} - -.toggle-content { - margin-left: 20px; -} - -.timer { - margin-left: 4px; - color: brown; -} - -.unaccounted-time { - margin-left: 4px; - color: red; -} - -.cmd-content { - color: purple; -} - -.greyed-header-text { - color: grey; -} - -.codebase-browse .breadcrumbs { - font-size: 16px; - font-weight: bold; - color: black; - padding-bottom: 1em; -} - -.codebase-browse .breadcrumbs .crumb { - margin: 0; - padding: 0; -} - -.dir-listing .dir-entries { - -} - -.file-content-frame { - margin: 0; - padding: 0; - overflow: auto; - width: 100%; - height: 80%; -} - -.run-list .header { - font-size: 16px; - font-weight: bold; - margin-bottom: 1em; -} - -.run-list .latest { - font-weight: bold; - margin-bottom: 1em; -} - -.run-list .date-text { - font-weight: bold; -} - -.run-list .time-of-day-text { - font-size: 14px; -} - -.run .no-runs { - font-size: 14px; - font-weight: bold; -} - -.run .no-such-run { - font-size: 14px; - font-weight: bold; - color: red; -} - -.run .run-info { - margin-bottom: 1em; -} - -.run .run-info .timestamp-text { - font-size: 14px; - font-weight: bold; - margin-bottom: 0.5em; -} - -.run .run-info .cmd-line-label { - font-size: 14px; - font-weight: bold; -} - -.aggregated-timings table tr td { - font-size: 14px; - font-weight: bold; - margin: 0; - padding: 0; -} - -.aggregated-timings table tr .timing-string { - text-align: right; - color: brown; - padding-right: 1px; -} - -.aggregated-timings table tr td i { - padding-left: 2px; -} - -.artifact-cache-stats table { - font-size: 14px; - font-weight: bold; -} - -.nodisplay { - display: none; -} - -.hidden { - visibility: hidden; -} - -.ansi-30, .ansi-30 a { - color: black; -} - -.ansi-31, .ansi-31 a { - color: red; -} - -.ansi-32, .ansi-32 a { 
- color: green; -} - -.ansi-33, .ansi-33 a { - color: orange; -} - -.ansi-34, .ansi-34 a { - color: blue; -} - -.ansi-35, .ansi-35 a { - color: magenta; -} - -.ansi-36, .ansi-36 a { - color: cyan; -} - -.ansi-37, .ansi-37 a { - color: white; -} - -.ansi-40 { - background-color: black; -} - -.ansi-41 { - background-color: red; -} - -.ansi-42 { - background-color: green; -} - -.ansi-43 { - background-color: orange; -} - -.ansi-44 { - background-color: blue; -} - -.ansi-45 { - background-color: magenta; -} - -.ansi-46 { - background-color: cyan; -} - -.ansi-47 { - background-color: white; -} diff --git a/src/python/twitter/pants/reporting/assets/css/prettify.css b/src/python/twitter/pants/reporting/assets/css/prettify.css deleted file mode 100644 index 36cd51c46..000000000 --- a/src/python/twitter/pants/reporting/assets/css/prettify.css +++ /dev/null @@ -1,56 +0,0 @@ -/* Pretty printing styles. Used with prettify.js. */ - -/* SPAN elements with the classes below are added by prettyprint. */ -.pln { color: #000 } /* plain text */ - -@media screen { - .str { color: #080 } /* string content */ - .kwd { color: #008 } /* a keyword */ - .com { color: #800 } /* a comment */ - .typ { color: #606 } /* a type name */ - .lit { color: #066 } /* a literal value */ - /* punctuation, lisp open bracket, lisp close bracket */ - .pun, .opn, .clo { color: #660 } - .tag { color: #008 } /* a markup tag name */ - .atn { color: #606 } /* a markup attribute name */ - .atv { color: #080 } /* a markup attribute value */ - .dec, .var { color: #606 } /* a declaration; a variable name */ - .fun { color: red } /* a function name */ -} - -/* Use higher contrast and text-weight for printable form. */ -@media print, projection { - .str { color: #060 } - .kwd { color: #006; font-weight: bold } - .com { color: #600; font-style: italic } - .typ { color: #404; font-weight: bold } - .lit { color: #044 } - .pun, .opn, .clo { color: #440 } - .tag { color: #006; font-weight: bold } - .atn { color: #404 } - .atv { color: #060 } -} - -/* Put a border around prettyprinted code snippets. */ -pre.prettyprint { padding: 2px; border: 1px solid #888 } - -/* Specify class=linenums on a pre to get line numbering */ -ol.linenums { margin-top: 0; margin-bottom: 0 } /* IE indents via margin-left */ - -/* Show only every fifth line number. 
*/
-/*li.L0,
-li.L1,
-li.L2,
-li.L3,
-li.L5,
-li.L6,
-li.L7,
-li.L8 { list-style-type: none }*/
-
-/* Alternate shading for lines */
-li.L1,
-li.L3,
-li.L5,
-li.L7,
-li.L9 { background: #eee }
-
diff --git a/src/python/twitter/pants/reporting/assets/font/FontAwesome.otf b/src/python/twitter/pants/reporting/assets/font/FontAwesome.otf
deleted file mode 100755
index 64049bf2e..000000000
Binary files a/src/python/twitter/pants/reporting/assets/font/FontAwesome.otf and /dev/null differ
diff --git a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.eot b/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.eot
deleted file mode 100755
index 7d81019e4..000000000
Binary files a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.eot and /dev/null differ
diff --git a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.svg b/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.svg
deleted file mode 100755
index ba0afe5ef..000000000
--- a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.svg
+++ /dev/null
@@ -1,284 +0,0 @@
[284 deleted lines of SVG font glyph markup; the XML content did not survive text extraction]
\ No newline at end of file
diff --git a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.ttf b/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.ttf
deleted file mode 100755
index d46172476..000000000
Binary files a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.ttf and /dev/null differ
diff --git a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.woff b/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.woff
deleted file mode 100755
index 3c89ae09b..000000000
Binary files a/src/python/twitter/pants/reporting/assets/font/fontawesome-webfont.woff and /dev/null differ
diff --git a/src/python/twitter/pants/reporting/assets/js/jquery-1.8.3.min.js b/src/python/twitter/pants/reporting/assets/js/jquery-1.8.3.min.js
deleted file mode 100644
index 83589daa7..000000000
--- a/src/python/twitter/pants/reporting/assets/js/jquery-1.8.3.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-/*!
jQuery v1.8.3 jquery.com | jquery.org/license */
-[... minified jQuery 1.8.3 source elided ...]
\ No newline at end of file
diff --git a/src/python/twitter/pants/reporting/assets/js/pants.js b/src/python/twitter/pants/reporting/assets/js/pants.js
deleted file mode 100644
index f5acc8dec..000000000
--- a/src/python/twitter/pants/reporting/assets/js/pants.js
+++ /dev/null
@@ -1,210 +0,0 @@
-
-pants = {
-  // Functions to manipulate a 'collapsible' - a div that can be expanded or collapsed.
-  collapsible: {
-    toggle: function(id) {
-      $("#" + id + "-content").toggle();
-      $("#" + id + "-icon").toggleClass("icon-caret-right icon-caret-down")
-    },
-
-    expand: function(id) {
-      $("#" + id + "-content").show();
-      $("#" + id + "-icon").removeClass("icon-caret-right").addClass("icon-caret-down")
-    },
-
-    collapse: function(id) {
-      $("#" + id + "-content").hide();
-      $("#" + id + "-icon").removeClass("icon-caret-down").addClass("icon-caret-right")
-    },
-
-    hasContent: function(id) {
-      $('#' + id + '-header').children().removeClass('greyed-header-text');
-      $('#' + id + '-icon').removeClass('hidden');
-    }
-  },
-  // Append the content selected by fromSelector to the element(s) selected by toSelector.
-  // Used to add reporting content to workunits on the fly, as they progress.
-  append: function(fromSelector, toSelector) {
-    $(fromSelector).appendTo($(toSelector)).show();
-  },
-
-  // Append a string to the element(s) selected by toSelector.
-  appendString: function(str, toSelector) {
-    $(toSelector).append(str);
-  },
-
-  // Creates an object that knows how to manage multiple timers, and periodically emit timings.
-  // This allows us to show a rolling client-side timer for a workunit while it's executing.
-  createTimerManager: function() {
-    // The start time (in ms since the epoch) of each timer.
-    // We emit the timing of each timer to the element(s) selected by the appropriate selector.
-    // id -> {startTime: ..., selector: ...}
-    var timers = {};
-
-    // A handle to the polling event, so we can cancel it if needed.
-    var timingEvent = undefined;
-
-    function updateTimers() {
-      var now = $.now();
-      $.each(timers, function(id, timer) {
-        $(timer.selector).html('' + Math.round((now - timer.startTime) / 1000 - 0.5) + 's');
-      });
-    }
-
-    return {
-      startTimer: function(id, selector, init) {
-        timers[id] = { 'startTime': init ? init : $.now(), 'selector': selector };
-        if (!timingEvent) {
-          timingEvent = window.setInterval(updateTimers, 1000);
-        }
-      },
-
-      stopTimer: function(id) {
-        delete timers[id];
-        var numTimers = 0;
-        $.each(timers, function(k,v) { numTimers++ });
-        if (numTimers == 0) {
-          window.clearInterval(timingEvent);
-          timingEvent = undefined;
-        }
-      }
-    }
-  },
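createTimerManager is what drives the rolling per-workunit timers: each registered timer rewrites its target element once a second with an elapsed-seconds string, and the shared setInterval handle is torn down when the last timer is stopped. A sketch of the contract, using the pants.timerManager singleton declared at the bottom of this file; the id and selector are illustrative:

// Start a rolling '<n>s' display inside the element matched by the selector.
// The optional third argument backdates the start time (ms since the epoch).
pants.timerManager.startTimer('workunit-42', '#workunit-42-timer');

// When the workunit completes. Stopping the last live timer also clears
// the shared window.setInterval handle.
pants.timerManager.stopTimer('workunit-42');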
-  // Creates an object that knows how to poll multiple files by periodically hitting the server.
-  // Each polled file is associated with an id, so we can multiplex multiple pollings
-  // on a single server request.
-  createPoller: function() {
-
-    // State of each file we're polling.
-    // id -> state object. See doStartPolling() below for the fields in a state object.
-    var polledFileStates = {};
-
-    // A handle to the polling event, so we can cancel it if needed.
-    var pollingEvent = undefined;
-
-    // Only allow one request in-flight at a time.
-    var inFlight = false;
-
-    function pollOnce() {
-      function forgetId(id) {
-        delete polledFileStates[id];
-        var n = 0;
-        $.each(polledFileStates, function(k, v) { n += 1; });
-        if (!n) {
-          window.clearInterval(pollingEvent);
-          pollingEvent = undefined;
-        }
-      }
-
-      function createRequestEntry(state, id) {
-        return { id: id, path: state.path, pos: state.pos };
-      }
-
-      if (!inFlight) {
-        inFlight = true;
-        $.ajax({
-          url: '/poll',
-          type: 'GET',
-          data: { q: JSON.stringify($.map(polledFileStates, createRequestEntry))},
-          dataType: 'json',
-          success: function(data, textStatus, jqXHR) {
-            function appendNewData() {
-              $.each(data, function(id, val) {
-                if (id in polledFileStates) {
-                  var state = polledFileStates[id];
-                  // Execute the initFunc exactly once.
-                  if (!state.hasBeenPolledAtLeastOnce) {
-                    if (state.initFunc) { state.initFunc(); }
-                    state.hasBeenPolledAtLeastOnce = true;
-                  }
-                  if (state.predicate ? state.predicate(val) : true) {
-                    if (state.replace) {
-                      // Replacing can reset view state, so only do it if we have to.
-                      if (val != state.currentVal) {
-                        $(state.selector).html(val);
-                      }
-                    } else {
-                      $(state.selector).append(val);
-                      state.pos += val.length;
-                    }
-                    state.currentVal = val;
-                  }
-                }
-              });
-            }
-
-            function checkForStopped() {
-              var toDelete = [];
-              $.each(polledFileStates, function(id, state) {
-                if (state.toBeStopped && state.hasBeenPolledAtLeastOnce) {
-                  toDelete.push(id);
-                }
-              });
-              $.each(toDelete, function(idx, id) { forgetId(id); });
-            }
-            appendNewData();
-            checkForStopped();
-          },
-          error: function(jqXHR, textStatus, errorThrown) {
-            // Not necessary to do anything special on error. A future request will catch us up.
-          },
-          complete: function(jqXHR, textStatus) {
-            inFlight = false;
-          }
-        });
-      }
-    }
-
-    function doStartPolling(id, path, targetSelector, initFunc, predicate, replace) {
-      polledFileStates[id] = {
-        path: path,  // Path of file on server to poll, relative to build root.
-        pos: 0,  // Position to poll from.
-        replace: replace,  // Whether to append or replace the polled content.
-        currentVal: '',
-        selector: targetSelector,  // append or replace the polled content to this element.
-        initFunc: initFunc,  // Execute this exactly once, on first successful polling.
-        predicate: predicate,  // append or replace val only if predicate(val) is true.
-        hasBeenPolledAtLeastOnce: false,
-        toBeStopped: false
-      };
-      if (!pollingEvent) {
-        pollingEvent = window.setInterval(pollOnce, 200);
-      }
-    }
-
-    // Stop the specified polling.
-    function doStopPolling(id) {
-      if (id in polledFileStates) {
-        polledFileStates[id].toBeStopped = true;
-      }
-    }
-
-    return {
-      // Call this to start polling the specified file, assigning its content to the element(s)
-      // selected by the selector. You must assign some unique id to the request.
-      // If initFunc is provided, it is called the first time any content is assigned.
-      startPolling: function(id, path, targetSelector, initFunc, predicate) {
-        doStartPolling(id, path, targetSelector, initFunc, predicate, true);
-      },
-
-      // Call this to start tailing the specified file, appending its content to the element(s)
-      // selected by the selector. You must assign some unique id to the request.
-      // If initFunc is provided, it is called the first time any content is appended.
-      startTailing: function(id, path, targetSelector, initFunc, predicate) {
-        doStartPolling(id, path, targetSelector, initFunc, predicate, false);
-      },
-
-      // Stop the specified polling.
-      stopPolling: doStopPolling,
-
-      // Stop the specified tailing.
-      stopTailing: doStopPolling
-    }
-  }
-};
-
-// We really only need one global one of each of these. So here they are.
-pants.timerManager = pants.createTimerManager();
-pants.poller = pants.createPoller();
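Tying the pieces together: the reporting page can tail a file into a collapsible by combining the poller with the collapsible helpers. In the sketch below only the /poll endpoint, the startTailing/stopTailing signatures, and the optional initFunc hook come from the code above; the id, file path, and selectors are invented for illustration:

// Tail a server-side file (path relative to the build root) into the content
// div, revealing the collapsible the first time any output arrives.
pants.poller.startTailing(
    'workunit-42-stdout',                 // unique id for this multiplexed request
    'reports/latest/workunit-42.stdout',  // illustrative path, relative to the build root
    '#workunit-42-content',
    function() { pants.collapsible.hasContent('workunit-42'); });

// Once the workunit finishes, stop including this file in /poll requests.
pants.poller.stopTailing('workunit-42-stdout');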
diff --git a/src/python/twitter/pants/reporting/assets/js/prettify.js b/src/python/twitter/pants/reporting/assets/js/prettify.js
deleted file mode 100644
index eef5ad7e6..000000000
--- a/src/python/twitter/pants/reporting/assets/js/prettify.js
+++ /dev/null
@@ -1,28 +0,0 @@
-var q=null;window.PR_SHOULD_USE_CONTINUATION=!0;
-[... remaining minified prettify.js source elided ...]
diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-go.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-go.js
deleted file mode 100644
index fc18dc079..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-go.js +++ /dev/null @@ -1 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["pln",/^(?:"(?:[^"\\]|\\[\S\s])*(?:"|$)|'(?:[^'\\]|\\[\S\s])+(?:'|$)|`[^`]*(?:`|$))/,null,"\"'"]],[["com",/^(?:\/\/[^\n\r]*|\/\*[\S\s]*?\*\/)/],["pln",/^(?:[^"'/`]|\/(?![*/]))+/]]),["go"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-hs.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-hs.js deleted file mode 100644 index 9d77b0838..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-hs.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t-\r ]+/,null,"\t\n \r "],["str",/^"(?:[^\n\f\r"\\]|\\[\S\s])*(?:"|$)/,null,'"'],["str",/^'(?:[^\n\f\r'\\]|\\[^&])'?/,null,"'"],["lit",/^(?:0o[0-7]+|0x[\da-f]+|\d+(?:\.\d+)?(?:e[+-]?\d+)?)/i,null,"0123456789"]],[["com",/^(?:--+[^\n\f\r]*|{-(?:[^-]|-+[^}-])*-})/],["kwd",/^(?:case|class|data|default|deriving|do|else|if|import|in|infix|infixl|infixr|instance|let|module|newtype|of|then|type|where|_)(?=[^\d'A-Za-z]|$)/, -null],["pln",/^(?:[A-Z][\w']*\.)*[A-Za-z][\w']*/],["pun",/^[^\d\t-\r "'A-Za-z]+/]]),["hs"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lisp.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lisp.js deleted file mode 100644 index 02a30e8d1..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lisp.js +++ /dev/null @@ -1,3 +0,0 @@ -var a=null; -PR.registerLangHandler(PR.createSimpleLexer([["opn",/^\(+/,a,"("],["clo",/^\)+/,a,")"],["com",/^;[^\n\r]*/,a,";"],["pln",/^[\t\n\r \xa0]+/,a,"\t\n\r \xa0"],["str",/^"(?:[^"\\]|\\[\S\s])*(?:"|$)/,a,'"']],[["kwd",/^(?:block|c[ad]+r|catch|con[ds]|def(?:ine|un)|do|eq|eql|equal|equalp|eval-when|flet|format|go|if|labels|lambda|let|load-time-value|locally|macrolet|multiple-value-call|nil|progn|progv|quote|require|return-from|setq|symbol-macrolet|t|tagbody|the|throw|unwind)\b/,a], -["lit",/^[+-]?(?:[#0]x[\da-f]+|\d+\/\d+|(?:\.\d+|\d+(?:\.\d*)?)(?:[de][+-]?\d+)?)/i],["lit",/^'(?:-*(?:\w|\\[!-~])(?:[\w-]*|\\[!-~])[!=?]?)?/],["pln",/^-*(?:[_a-z]|\\[!-~])(?:[\w-]*|\\[!-~])[!=?]?/i],["pun",/^[^\w\t\n\r "'-);\\\xa0]+/]]),["cl","el","lisp","scm"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lua.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lua.js deleted file mode 100644 index e83a3c469..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-lua.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["str",/^(?:"(?:[^"\\]|\\[\S\s])*(?:"|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$))/,null,"\"'"]],[["com",/^--(?:\[(=*)\[[\S\s]*?(?:]\1]|$)|[^\n\r]*)/],["str",/^\[(=*)\[[\S\s]*?(?:]\1]|$)/],["kwd",/^(?:and|break|do|else|elseif|end|false|for|function|if|in|local|nil|not|or|repeat|return|then|true|until|while)\b/,null],["lit",/^[+-]?(?:0x[\da-f]+|(?:\.\d+|\d+(?:\.\d*)?)(?:e[+-]?\d+)?)/i], -["pln",/^[_a-z]\w*/i],["pun",/^[^\w\t\n\r \xa0][^\w\t\n\r "'+=\xa0-]*/]]),["lua"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-ml.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-ml.js deleted file mode 100644 index 6df02d728..000000000 --- 
a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-ml.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["com",/^#(?:if[\t\n\r \xa0]+(?:[$_a-z][\w']*|``[^\t\n\r`]*(?:``|$))|else|endif|light)/i,null,"#"],["str",/^(?:"(?:[^"\\]|\\[\S\s])*(?:"|$)|'(?:[^'\\]|\\[\S\s])(?:'|$))/,null,"\"'"]],[["com",/^(?:\/\/[^\n\r]*|\(\*[\S\s]*?\*\))/],["kwd",/^(?:abstract|and|as|assert|begin|class|default|delegate|do|done|downcast|downto|elif|else|end|exception|extern|false|finally|for|fun|function|if|in|inherit|inline|interface|internal|lazy|let|match|member|module|mutable|namespace|new|null|of|open|or|override|private|public|rec|return|static|struct|then|to|true|try|type|upcast|use|val|void|when|while|with|yield|asr|land|lor|lsl|lsr|lxor|mod|sig|atomic|break|checked|component|const|constraint|constructor|continue|eager|event|external|fixed|functor|global|include|method|mixin|object|parallel|process|protected|pure|sealed|trait|virtual|volatile)\b/], -["lit",/^[+-]?(?:0x[\da-f]+|(?:\.\d+|\d+(?:\.\d*)?)(?:e[+-]?\d+)?)/i],["pln",/^(?:[_a-z][\w']*[!#?]?|``[^\t\n\r`]*(?:``|$))/i],["pun",/^[^\w\t\n\r "'\xa0]+/]]),["fs","ml"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-n.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-n.js deleted file mode 100644 index 6c2e85b98..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-n.js +++ /dev/null @@ -1,4 +0,0 @@ -var a=null; -PR.registerLangHandler(PR.createSimpleLexer([["str",/^(?:'(?:[^\n\r'\\]|\\.)*'|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,a,'"'],["com",/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\n\r]*)/,a,"#"],["pln",/^\s+/,a," \r\n\t\xa0"]],[["str",/^@"(?:[^"]|"")*(?:"|$)/,a],["str",/^<#[^#>]*(?:#>|$)/,a],["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,a],["com",/^\/\/[^\n\r]*/,a],["com",/^\/\*[\S\s]*?(?:\*\/|$)/, -a],["kwd",/^(?:abstract|and|as|base|catch|class|def|delegate|enum|event|extern|false|finally|fun|implements|interface|internal|is|macro|match|matches|module|mutable|namespace|new|null|out|override|params|partial|private|protected|public|ref|sealed|static|struct|syntax|this|throw|true|try|type|typeof|using|variant|virtual|volatile|when|where|with|assert|assert2|async|break|checked|continue|do|else|ensures|for|foreach|if|late|lock|new|nolate|otherwise|regexp|repeat|requires|return|surroundwith|unchecked|unless|using|while|yield)\b/, -a],["typ",/^(?:array|bool|byte|char|decimal|double|float|int|list|long|object|sbyte|short|string|ulong|uint|ufloat|ulong|ushort|void)\b/,a],["lit",/^@[$_a-z][\w$@]*/i,a],["typ",/^@[A-Z]+[a-z][\w$@]*/,a],["pln",/^'?[$_a-z][\w$@]*/i,a],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,a,"0123456789"],["pun",/^.[^\s\w"-$'./@`]*/,a]]),["n","nemerle"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-proto.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-proto.js deleted file mode 100644 index f006ad8cf..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-proto.js +++ /dev/null @@ -1 +0,0 @@ 
-PR.registerLangHandler(PR.sourceDecorator({keywords:"bytes,default,double,enum,extend,extensions,false,group,import,max,message,option,optional,package,repeated,required,returns,rpc,service,syntax,to,true",types:/^(bool|(double|s?fixed|[su]?int)(32|64)|float|string)\b/,cStyleComments:!0}),["proto"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-scala.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-scala.js deleted file mode 100644 index 60d034de4..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-scala.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["str",/^"(?:""(?:""?(?!")|[^"\\]|\\.)*"{0,3}|(?:[^\n\r"\\]|\\.)*"?)/,null,'"'],["lit",/^`(?:[^\n\r\\`]|\\.)*`?/,null,"`"],["pun",/^[!#%&(--:-@[-^{-~]+/,null,"!#%&()*+,-:;<=>?@[\\]^{|}~"]],[["str",/^'(?:[^\n\r'\\]|\\(?:'|[^\n\r']+))'/],["lit",/^'[$A-Z_a-z][\w$]*(?![\w$'])/],["kwd",/^(?:abstract|case|catch|class|def|do|else|extends|final|finally|for|forSome|if|implicit|import|lazy|match|new|object|override|package|private|protected|requires|return|sealed|super|throw|trait|try|type|val|var|while|with|yield)\b/], -["lit",/^(?:true|false|null|this)\b/],["lit",/^(?:0(?:[0-7]+|x[\da-f]+)l?|(?:0|[1-9]\d*)(?:(?:\.\d+)?(?:e[+-]?\d+)?f?|l?)|\\.\d+(?:e[+-]?\d+)?f?)/i],["typ",/^[$_]*[A-Z][\d$A-Z_]*[a-z][\w$]*/],["pln",/^[$A-Z_a-z][\w$]*/],["com",/^\/(?:\/.*|\*(?:\/|\**[^*/])*(?:\*+\/?)?)/],["pun",/^(?:\.+|\/)/]]),["scala"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-sql.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-sql.js deleted file mode 100644 index da705b0b6..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-sql.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["str",/^(?:"(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*')/,null,"\"'"]],[["com",/^(?:--[^\n\r]*|\/\*[\S\s]*?(?:\*\/|$))/],["kwd",/^(?:add|all|alter|and|any|as|asc|authorization|backup|begin|between|break|browse|bulk|by|cascade|case|check|checkpoint|close|clustered|coalesce|collate|column|commit|compute|constraint|contains|containstable|continue|convert|create|cross|current|current_date|current_time|current_timestamp|current_user|cursor|database|dbcc|deallocate|declare|default|delete|deny|desc|disk|distinct|distributed|double|drop|dummy|dump|else|end|errlvl|escape|except|exec|execute|exists|exit|fetch|file|fillfactor|for|foreign|freetext|freetexttable|from|full|function|goto|grant|group|having|holdlock|identity|identitycol|identity_insert|if|in|index|inner|insert|intersect|into|is|join|key|kill|left|like|lineno|load|match|merge|national|nocheck|nonclustered|not|null|nullif|of|off|offsets|on|open|opendatasource|openquery|openrowset|openxml|option|or|order|outer|over|percent|plan|precision|primary|print|proc|procedure|public|raiserror|read|readtext|reconfigure|references|replication|restore|restrict|return|revoke|right|rollback|rowcount|rowguidcol|rule|save|schema|select|session_user|set|setuser|shutdown|some|statistics|system_user|table|textsize|then|to|top|tran|transaction|trigger|truncate|tsequal|union|unique|update|updatetext|use|user|using|values|varying|view|waitfor|when|where|while|with|writetext)(?=[^\w-]|$)/i, -null],["lit",/^[+-]?(?:0x[\da-f]+|(?:\.\d+|\d+(?:\.\d*)?)(?:e[+-]?\d+)?)/i],["pln",/^[_a-z][\w-]*/i],["pun",/^[^\w\t\n\r 
"'\xa0][^\w\t\n\r "'+\xa0-]*/]]),["sql"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-tex.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-tex.js deleted file mode 100644 index ce96fbbd1..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-tex.js +++ /dev/null @@ -1 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"],["com",/^%[^\n\r]*/,null,"%"]],[["kwd",/^\\[@-Za-z]+/],["kwd",/^\\./],["typ",/^[$&]/],["lit",/[+-]?(?:\.\d+|\d+(?:\.\d*)?)(cm|em|ex|in|pc|pt|bp|mm)/i],["pun",/^[()=[\]{}]+/]]),["latex","tex"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vb.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vb.js deleted file mode 100644 index 07506b03c..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vb.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0\u2028\u2029]+/,null,"\t\n\r Â\xa0

"],["str",/^(?:["\u201c\u201d](?:[^"\u201c\u201d]|["\u201c\u201d]{2})(?:["\u201c\u201d]c|$)|["\u201c\u201d](?:[^"\u201c\u201d]|["\u201c\u201d]{2})*(?:["\u201c\u201d]|$))/i,null,'"“â€'],["com",/^['\u2018\u2019].*/,null,"'‘’"]],[["kwd",/^(?:addhandler|addressof|alias|and|andalso|ansi|as|assembly|auto|boolean|byref|byte|byval|call|case|catch|cbool|cbyte|cchar|cdate|cdbl|cdec|char|cint|class|clng|cobj|const|cshort|csng|cstr|ctype|date|decimal|declare|default|delegate|dim|directcast|do|double|each|else|elseif|end|endif|enum|erase|error|event|exit|finally|for|friend|function|get|gettype|gosub|goto|handles|if|implements|imports|in|inherits|integer|interface|is|let|lib|like|long|loop|me|mod|module|mustinherit|mustoverride|mybase|myclass|namespace|new|next|not|notinheritable|notoverridable|object|on|option|optional|or|orelse|overloads|overridable|overrides|paramarray|preserve|private|property|protected|public|raiseevent|readonly|redim|removehandler|resume|return|select|set|shadows|shared|short|single|static|step|stop|string|structure|sub|synclock|then|throw|to|try|typeof|unicode|until|variant|wend|when|while|with|withevents|writeonly|xor|endif|gosub|let|variant|wend)\b/i, -null],["com",/^rem.*/i],["lit",/^(?:true\b|false\b|nothing\b|\d+(?:e[+-]?\d+[dfr]?|[dfilrs])?|(?:&h[\da-f]+|&o[0-7]+)[ils]?|\d*\.\d+(?:e[+-]?\d+)?[dfr]?|#\s+(?:\d+[/-]\d+[/-]\d+(?:\s+\d+:\d+(?::\d+)?(\s*(?:am|pm))?)?|\d+:\d+(?::\d+)?(\s*(?:am|pm))?)\s+#)/i],["pln",/^(?:(?:[a-z]|_\w)\w*|\[(?:[a-z]|_\w)\w*])/i],["pun",/^[^\w\t\n\r "'[\]\xa0\u2018\u2019\u201c\u201d\u2028\u2029]+/],["pun",/^(?:\[|])/]]),["vb","vbs"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vhdl.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vhdl.js deleted file mode 100644 index 128b5b6cf..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-vhdl.js +++ /dev/null @@ -1,3 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\r \xa0]+/,null,"\t\n\r Â\xa0"]],[["str",/^(?:[box]?"(?:[^"]|"")*"|'.')/i],["com",/^--[^\n\r]*/],["kwd",/^(?:abs|access|after|alias|all|and|architecture|array|assert|attribute|begin|block|body|buffer|bus|case|component|configuration|constant|disconnect|downto|else|elsif|end|entity|exit|file|for|function|generate|generic|group|guarded|if|impure|in|inertial|inout|is|label|library|linkage|literal|loop|map|mod|nand|new|next|nor|not|null|of|on|open|or|others|out|package|port|postponed|procedure|process|pure|range|record|register|reject|rem|report|return|rol|ror|select|severity|shared|signal|sla|sll|sra|srl|subtype|then|to|transport|type|unaffected|units|until|use|variable|wait|when|while|with|xnor|xor)(?=[^\w-]|$)/i, -null],["typ",/^(?:bit|bit_vector|character|boolean|integer|real|time|string|severity_level|positive|natural|signed|unsigned|line|text|std_u?logic(?:_vector)?)(?=[^\w-]|$)/i,null],["typ",/^'(?:active|ascending|base|delayed|driving|driving_value|event|high|image|instance_name|last_active|last_event|last_value|left|leftof|length|low|path_name|pos|pred|quiet|range|reverse_range|right|rightof|simple_name|stable|succ|transaction|val|value)(?=[^\w-]|$)/i,null],["lit",/^\d+(?:_\d+)*(?:#[\w.\\]+#(?:[+-]?\d+(?:_\d+)*)?|(?:\.\d+(?:_\d+)*)?(?:e[+-]?\d+(?:_\d+)*)?)/i], -["pln",/^(?:[a-z]\w*|\\[^\\]*\\)/i],["pun",/^[^\w\t\n\r "'\xa0][^\w\t\n\r "'\xa0-]*/]]),["vhdl","vhd"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-wiki.js 
b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-wiki.js deleted file mode 100644 index 9b0b44873..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-wiki.js +++ /dev/null @@ -1,2 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\d\t a-gi-z\xa0]+/,null,"\t Â\xa0abcdefgijklmnopqrstuvwxyz0123456789"],["pun",/^[*=[\]^~]+/,null,"=*~^[]"]],[["lang-wiki.meta",/(?:^^|\r\n?|\n)(#[a-z]+)\b/],["lit",/^[A-Z][a-z][\da-z]+[A-Z][a-z][^\W_]+\b/],["lang-",/^{{{([\S\s]+?)}}}/],["lang-",/^`([^\n\r`]+)`/],["str",/^https?:\/\/[^\s#/?]*(?:\/[^\s#?]*)?(?:\?[^\s#]*)?(?:#\S*)?/i],["pln",/^(?:\r\n|[\S\s])[^\n\r#*=A-[^`h{~]*/]]),["wiki"]); -PR.registerLangHandler(PR.createSimpleLexer([["kwd",/^#[a-z]+/i,null,"#"]],[]),["wiki.meta"]); diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-xq.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-xq.js deleted file mode 100644 index e323ae323..000000000 --- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-xq.js +++ /dev/null @@ -1,3 +0,0 @@ -PR.registerLangHandler(PR.createSimpleLexer([["var pln",/^\$[\w-]+/,null,"$"]],[["pln",/^[\s=][<>][\s=]/],["lit",/^@[\w-]+/],["tag",/^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["com",/^\(:[\S\s]*?:\)/],["pln",/^[(),/;[\]{}]$/],["str",/^(?:"(?:[^"\\{]|\\[\S\s])*(?:"|$)|'(?:[^'\\{]|\\[\S\s])*(?:'|$))/,null,"\"'"],["kwd",/^(?:xquery|where|version|variable|union|typeswitch|treat|to|then|text|stable|sortby|some|self|schema|satisfies|returns|return|ref|processing-instruction|preceding-sibling|preceding|precedes|parent|only|of|node|namespace|module|let|item|intersect|instance|in|import|if|function|for|follows|following-sibling|following|external|except|every|else|element|descending|descendant-or-self|descendant|define|default|declare|comment|child|cast|case|before|attribute|assert|ascending|as|ancestor-or-self|ancestor|after|eq|order|by|or|and|schema-element|document-node|node|at)\b/], -["typ",/^(?:xs:yearMonthDuration|xs:unsignedLong|xs:time|xs:string|xs:short|xs:QName|xs:Name|xs:long|xs:integer|xs:int|xs:gYearMonth|xs:gYear|xs:gMonthDay|xs:gDay|xs:float|xs:duration|xs:double|xs:decimal|xs:dayTimeDuration|xs:dateTime|xs:date|xs:byte|xs:boolean|xs:anyURI|xf:yearMonthDuration)\b/,null],["fun 
pln",/^(?:xp:dereference|xinc:node-expand|xinc:link-references|xinc:link-expand|xhtml:restructure|xhtml:clean|xhtml:add-lists|xdmp:zip-manifest|xdmp:zip-get|xdmp:zip-create|xdmp:xquery-version|xdmp:word-convert|xdmp:with-namespaces|xdmp:version|xdmp:value|xdmp:user-roles|xdmp:user-last-login|xdmp:user|xdmp:url-encode|xdmp:url-decode|xdmp:uri-is-file|xdmp:uri-format|xdmp:uri-content-type|xdmp:unquote|xdmp:unpath|xdmp:triggers-database|xdmp:trace|xdmp:to-json|xdmp:tidy|xdmp:subbinary|xdmp:strftime|xdmp:spawn-in|xdmp:spawn|xdmp:sleep|xdmp:shutdown|xdmp:set-session-field|xdmp:set-response-encoding|xdmp:set-response-content-type|xdmp:set-response-code|xdmp:set-request-time-limit|xdmp:set|xdmp:servers|xdmp:server-status|xdmp:server-name|xdmp:server|xdmp:security-database|xdmp:security-assert|xdmp:schema-database|xdmp:save|xdmp:role-roles|xdmp:role|xdmp:rethrow|xdmp:restart|xdmp:request-timestamp|xdmp:request-status|xdmp:request-cancel|xdmp:request|xdmp:redirect-response|xdmp:random|xdmp:quote|xdmp:query-trace|xdmp:query-meters|xdmp:product-edition|xdmp:privilege-roles|xdmp:privilege|xdmp:pretty-print|xdmp:powerpoint-convert|xdmp:platform|xdmp:permission|xdmp:pdf-convert|xdmp:path|xdmp:octal-to-integer|xdmp:node-uri|xdmp:node-replace|xdmp:node-kind|xdmp:node-insert-child|xdmp:node-insert-before|xdmp:node-insert-after|xdmp:node-delete|xdmp:node-database|xdmp:mul64|xdmp:modules-root|xdmp:modules-database|xdmp:merging|xdmp:merge-cancel|xdmp:merge|xdmp:md5|xdmp:logout|xdmp:login|xdmp:log-level|xdmp:log|xdmp:lock-release|xdmp:lock-acquire|xdmp:load|xdmp:invoke-in|xdmp:invoke|xdmp:integer-to-octal|xdmp:integer-to-hex|xdmp:http-put|xdmp:http-post|xdmp:http-options|xdmp:http-head|xdmp:http-get|xdmp:http-delete|xdmp:hosts|xdmp:host-status|xdmp:host-name|xdmp:host|xdmp:hex-to-integer|xdmp:hash64|xdmp:hash32|xdmp:has-privilege|xdmp:groups|xdmp:group-serves|xdmp:group-servers|xdmp:group-name|xdmp:group-hosts|xdmp:group|xdmp:get-session-field-names|xdmp:get-session-field|xdmp:get-response-encoding|xdmp:get-response-code|xdmp:get-request-username|xdmp:get-request-user|xdmp:get-request-url|xdmp:get-request-protocol|xdmp:get-request-path|xdmp:get-request-method|xdmp:get-request-header-names|xdmp:get-request-header|xdmp:get-request-field-names|xdmp:get-request-field-filename|xdmp:get-request-field-content-type|xdmp:get-request-field|xdmp:get-request-client-certificate|xdmp:get-request-client-address|xdmp:get-request-body|xdmp:get-current-user|xdmp:get-current-roles|xdmp:get|xdmp:function-name|xdmp:function-module|xdmp:function|xdmp:from-json|xdmp:forests|xdmp:forest-status|xdmp:forest-restore|xdmp:forest-restart|xdmp:forest-name|xdmp:forest-delete|xdmp:forest-databases|xdmp:forest-counts|xdmp:forest-clear|xdmp:forest-backup|xdmp:forest|xdmp:filesystem-file|xdmp:filesystem-directory|xdmp:exists|xdmp:excel-convert|xdmp:eval-in|xdmp:eval|xdmp:estimate|xdmp:email|xdmp:element-content-type|xdmp:elapsed-time|xdmp:document-set-quality|xdmp:document-set-property|xdmp:document-set-properties|xdmp:document-set-permissions|xdmp:document-set-collections|xdmp:document-remove-properties|xdmp:document-remove-permissions|xdmp:document-remove-collections|xdmp:document-properties|xdmp:document-locks|xdmp:document-load|xdmp:document-insert|xdmp:document-get-quality|xdmp:document-get-properties|xdmp:document-get-permissions|xdmp:document-get-collections|xdmp:document-get|xdmp:document-forest|xdmp:document-delete|xdmp:document-add-properties|xdmp:document-add-permissions|xdmp:document-add-collections|xdmp:directory-properties|xdmp:dir
ectory-locks|xdmp:directory-delete|xdmp:directory-create|xdmp:directory|xdmp:diacritic-less|xdmp:describe|xdmp:default-permissions|xdmp:default-collections|xdmp:databases|xdmp:database-restore-validate|xdmp:database-restore-status|xdmp:database-restore-cancel|xdmp:database-restore|xdmp:database-name|xdmp:database-forests|xdmp:database-backup-validate|xdmp:database-backup-status|xdmp:database-backup-purge|xdmp:database-backup-cancel|xdmp:database-backup|xdmp:database|xdmp:collection-properties|xdmp:collection-locks|xdmp:collection-delete|xdmp:collation-canonical-uri|xdmp:castable-as|xdmp:can-grant-roles|xdmp:base64-encode|xdmp:base64-decode|xdmp:architecture|xdmp:apply|xdmp:amp-roles|xdmp:amp|xdmp:add64|xdmp:add-response-header|xdmp:access|trgr:trigger-set-recursive|trgr:trigger-set-permissions|trgr:trigger-set-name|trgr:trigger-set-module|trgr:trigger-set-event|trgr:trigger-set-description|trgr:trigger-remove-permissions|trgr:trigger-module|trgr:trigger-get-permissions|trgr:trigger-enable|trgr:trigger-disable|trgr:trigger-database-online-event|trgr:trigger-data-event|trgr:trigger-add-permissions|trgr:remove-trigger|trgr:property-content|trgr:pre-commit|trgr:post-commit|trgr:get-trigger-by-id|trgr:get-trigger|trgr:document-scope|trgr:document-content|trgr:directory-scope|trgr:create-trigger|trgr:collection-scope|trgr:any-property-content|thsr:set-entry|thsr:remove-term|thsr:remove-synonym|thsr:remove-entry|thsr:query-lookup|thsr:lookup|thsr:load|thsr:insert|thsr:expand|thsr:add-synonym|spell:suggest-detailed|spell:suggest|spell:remove-word|spell:make-dictionary|spell:load|spell:levenshtein-distance|spell:is-correct|spell:insert|spell:double-metaphone|spell:add-word|sec:users-collection|sec:user-set-roles|sec:user-set-password|sec:user-set-name|sec:user-set-description|sec:user-set-default-permissions|sec:user-set-default-collections|sec:user-remove-roles|sec:user-privileges|sec:user-get-roles|sec:user-get-description|sec:user-get-default-permissions|sec:user-get-default-collections|sec:user-doc-permissions|sec:user-doc-collections|sec:user-add-roles|sec:unprotect-collection|sec:uid-for-name|sec:set-realm|sec:security-version|sec:security-namespace|sec:security-installed|sec:security-collection|sec:roles-collection|sec:role-set-roles|sec:role-set-name|sec:role-set-description|sec:role-set-default-permissions|sec:role-set-default-collections|sec:role-remove-roles|sec:role-privileges|sec:role-get-roles|sec:role-get-description|sec:role-get-default-permissions|sec:role-get-default-collections|sec:role-doc-permissions|sec:role-doc-collections|sec:role-add-roles|sec:remove-user|sec:remove-role-from-users|sec:remove-role-from-role|sec:remove-role-from-privileges|sec:remove-role-from-amps|sec:remove-role|sec:remove-privilege|sec:remove-amp|sec:protect-collection|sec:privileges-collection|sec:privilege-set-roles|sec:privilege-set-name|sec:privilege-remove-roles|sec:privilege-get-roles|sec:privilege-add-roles|sec:priv-doc-permissions|sec:priv-doc-collections|sec:get-user-names|sec:get-unique-elem-id|sec:get-role-names|sec:get-role-ids|sec:get-privilege|sec:get-distinct-permissions|sec:get-collection|sec:get-amp|sec:create-user-with-role|sec:create-user|sec:create-role|sec:create-privilege|sec:create-amp|sec:collections-collection|sec:collection-set-permissions|sec:collection-remove-permissions|sec:collection-get-permissions|sec:collection-add-permissions|sec:check-admin|sec:amps-collection|sec:amp-set-roles|sec:amp-remove-roles|sec:amp-get-roles|sec:amp-doc-permissions|sec:amp-doc-collections|sec:amp-
add-roles|search:unparse|search:suggest|search:snippet|search:search|search:resolve-nodes|search:resolve|search:remove-constraint|search:parse|search:get-default-options|search:estimate|search:check-options|prof:value|prof:reset|prof:report|prof:invoke|prof:eval|prof:enable|prof:disable|prof:allowed|ppt:clean|pki:template-set-request|pki:template-set-name|pki:template-set-key-type|pki:template-set-key-options|pki:template-set-description|pki:template-in-use|pki:template-get-version|pki:template-get-request|pki:template-get-name|pki:template-get-key-type|pki:template-get-key-options|pki:template-get-id|pki:template-get-description|pki:need-certificate|pki:is-temporary|pki:insert-trusted-certificates|pki:insert-template|pki:insert-signed-certificates|pki:insert-certificate-revocation-list|pki:get-trusted-certificate-ids|pki:get-template-ids|pki:get-template-certificate-authority|pki:get-template-by-name|pki:get-template|pki:get-pending-certificate-requests-xml|pki:get-pending-certificate-requests-pem|pki:get-pending-certificate-request|pki:get-certificates-for-template-xml|pki:get-certificates-for-template|pki:get-certificates|pki:get-certificate-xml|pki:get-certificate-pem|pki:get-certificate|pki:generate-temporary-certificate-if-necessary|pki:generate-temporary-certificate|pki:generate-template-certificate-authority|pki:generate-certificate-request|pki:delete-template|pki:delete-certificate|pki:create-template|pdf:make-toc|pdf:insert-toc-headers|pdf:get-toc|pdf:clean|p:status-transition|p:state-transition|p:remove|p:pipelines|p:insert|p:get-by-id|p:get|p:execute|p:create|p:condition|p:collection|p:action|ooxml:runs-merge|ooxml:package-uris|ooxml:package-parts-insert|ooxml:package-parts|msword:clean|mcgm:polygon|mcgm:point|mcgm:geospatial-query-from-elements|mcgm:geospatial-query|mcgm:circle|math:tanh|math:tan|math:sqrt|math:sinh|math:sin|math:pow|math:modf|math:log10|math:log|math:ldexp|math:frexp|math:fmod|math:floor|math:fabs|math:exp|math:cosh|math:cos|math:ceil|math:atan2|math:atan|math:asin|math:acos|map:put|map:map|map:keys|map:get|map:delete|map:count|map:clear|lnk:to|lnk:remove|lnk:insert|lnk:get|lnk:from|lnk:create|kml:polygon|kml:point|kml:interior-polygon|kml:geospatial-query-from-elements|kml:geospatial-query|kml:circle|kml:box|gml:polygon|gml:point|gml:interior-polygon|gml:geospatial-query-from-elements|gml:geospatial-query|gml:circle|gml:box|georss:point|georss:geospatial-query|georss:circle|geo:polygon|geo:point|geo:interior-polygon|geo:geospatial-query-from-elements|geo:geospatial-query|geo:circle|geo:box|fn:zero-or-one|fn:years-from-duration|fn:year-from-dateTime|fn:year-from-date|fn:upper-case|fn:unordered|fn:true|fn:translate|fn:trace|fn:tokenize|fn:timezone-from-time|fn:timezone-from-dateTime|fn:timezone-from-date|fn:sum|fn:subtract-dateTimes-yielding-yearMonthDuration|fn:subtract-dateTimes-yielding-dayTimeDuration|fn:substring-before|fn:substring-after|fn:substring|fn:subsequence|fn:string-to-codepoints|fn:string-pad|fn:string-length|fn:string-join|fn:string|fn:static-base-uri|fn:starts-with|fn:seconds-from-time|fn:seconds-from-duration|fn:seconds-from-dateTime|fn:round-half-to-even|fn:round|fn:root|fn:reverse|fn:resolve-uri|fn:resolve-QName|fn:replace|fn:remove|fn:QName|fn:prefix-from-QName|fn:position|fn:one-or-more|fn:number|fn:not|fn:normalize-unicode|fn:normalize-space|fn:node-name|fn:node-kind|fn:nilled|fn:namespace-uri-from-QName|fn:namespace-uri-for-prefix|fn:namespace-uri|fn:name|fn:months-from-duration|fn:month-from-dateTime|fn:month-from-date|fn:minutes-from-
time|fn:minutes-from-duration|fn:minutes-from-dateTime|fn:min|fn:max|fn:matches|fn:lower-case|fn:local-name-from-QName|fn:local-name|fn:last|fn:lang|fn:iri-to-uri|fn:insert-before|fn:index-of|fn:in-scope-prefixes|fn:implicit-timezone|fn:idref|fn:id|fn:hours-from-time|fn:hours-from-duration|fn:hours-from-dateTime|fn:floor|fn:false|fn:expanded-QName|fn:exists|fn:exactly-one|fn:escape-uri|fn:escape-html-uri|fn:error|fn:ends-with|fn:encode-for-uri|fn:empty|fn:document-uri|fn:doc-available|fn:doc|fn:distinct-values|fn:distinct-nodes|fn:default-collation|fn:deep-equal|fn:days-from-duration|fn:day-from-dateTime|fn:day-from-date|fn:data|fn:current-time|fn:current-dateTime|fn:current-date|fn:count|fn:contains|fn:concat|fn:compare|fn:collection|fn:codepoints-to-string|fn:codepoint-equal|fn:ceiling|fn:boolean|fn:base-uri|fn:avg|fn:adjust-time-to-timezone|fn:adjust-dateTime-to-timezone|fn:adjust-date-to-timezone|fn:abs|feed:unsubscribe|feed:subscription|feed:subscribe|feed:request|feed:item|feed:description|excel:clean|entity:enrich|dom:set-pipelines|dom:set-permissions|dom:set-name|dom:set-evaluation-context|dom:set-domain-scope|dom:set-description|dom:remove-pipeline|dom:remove-permissions|dom:remove|dom:get|dom:evaluation-context|dom:domains|dom:domain-scope|dom:create|dom:configuration-set-restart-user|dom:configuration-set-permissions|dom:configuration-set-evaluation-context|dom:configuration-set-default-domain|dom:configuration-get|dom:configuration-create|dom:collection|dom:add-pipeline|dom:add-permissions|dls:retention-rules|dls:retention-rule-remove|dls:retention-rule-insert|dls:retention-rule|dls:purge|dls:node-expand|dls:link-references|dls:link-expand|dls:documents-query|dls:document-versions-query|dls:document-version-uri|dls:document-version-query|dls:document-version-delete|dls:document-version-as-of|dls:document-version|dls:document-update|dls:document-unmanage|dls:document-set-quality|dls:document-set-property|dls:document-set-properties|dls:document-set-permissions|dls:document-set-collections|dls:document-retention-rules|dls:document-remove-properties|dls:document-remove-permissions|dls:document-remove-collections|dls:document-purge|dls:document-manage|dls:document-is-managed|dls:document-insert-and-manage|dls:document-include-query|dls:document-history|dls:document-get-permissions|dls:document-extract-part|dls:document-delete|dls:document-checkout-status|dls:document-checkout|dls:document-checkin|dls:document-add-properties|dls:document-add-permissions|dls:document-add-collections|dls:break-checkout|dls:author-query|dls:as-of-query|dbk:convert|dbg:wait|dbg:value|dbg:stopped|dbg:stop|dbg:step|dbg:status|dbg:stack|dbg:out|dbg:next|dbg:line|dbg:invoke|dbg:function|dbg:finish|dbg:expr|dbg:eval|dbg:disconnect|dbg:detach|dbg:continue|dbg:connect|dbg:clear|dbg:breakpoints|dbg:break|dbg:attached|dbg:attach|cvt:save-converted-documents|cvt:part-uri|cvt:destination-uri|cvt:basepath|cvt:basename|cts:words|cts:word-query-weight|cts:word-query-text|cts:word-query-options|cts:word-query|cts:word-match|cts:walk|cts:uris|cts:uri-match|cts:train|cts:tokenize|cts:thresholds|cts:stem|cts:similar-query-weight|cts:similar-query-nodes|cts:similar-query|cts:shortest-distance|cts:search|cts:score|cts:reverse-query-weight|cts:reverse-query-nodes|cts:reverse-query|cts:remainder|cts:registered-query-weight|cts:registered-query-options|cts:registered-query-ids|cts:registered-query|cts:register|cts:query|cts:quality|cts:properties-query-query|cts:properties-query|cts:polygon-vertices|cts:polygon|cts:point-longi
tude|cts:point-latitude|cts:point|cts:or-query-queries|cts:or-query|cts:not-query-weight|cts:not-query-query|cts:not-query|cts:near-query-weight|cts:near-query-queries|cts:near-query-options|cts:near-query-distance|cts:near-query|cts:highlight|cts:geospatial-co-occurrences|cts:frequency|cts:fitness|cts:field-words|cts:field-word-query-weight|cts:field-word-query-text|cts:field-word-query-options|cts:field-word-query-field-name|cts:field-word-query|cts:field-word-match|cts:entity-highlight|cts:element-words|cts:element-word-query-weight|cts:element-word-query-text|cts:element-word-query-options|cts:element-word-query-element-name|cts:element-word-query|cts:element-word-match|cts:element-values|cts:element-value-ranges|cts:element-value-query-weight|cts:element-value-query-text|cts:element-value-query-options|cts:element-value-query-element-name|cts:element-value-query|cts:element-value-match|cts:element-value-geospatial-co-occurrences|cts:element-value-co-occurrences|cts:element-range-query-weight|cts:element-range-query-value|cts:element-range-query-options|cts:element-range-query-operator|cts:element-range-query-element-name|cts:element-range-query|cts:element-query-query|cts:element-query-element-name|cts:element-query|cts:element-pair-geospatial-values|cts:element-pair-geospatial-value-match|cts:element-pair-geospatial-query-weight|cts:element-pair-geospatial-query-region|cts:element-pair-geospatial-query-options|cts:element-pair-geospatial-query-longitude-name|cts:element-pair-geospatial-query-latitude-name|cts:element-pair-geospatial-query-element-name|cts:element-pair-geospatial-query|cts:element-pair-geospatial-boxes|cts:element-geospatial-values|cts:element-geospatial-value-match|cts:element-geospatial-query-weight|cts:element-geospatial-query-region|cts:element-geospatial-query-options|cts:element-geospatial-query-element-name|cts:element-geospatial-query|cts:element-geospatial-boxes|cts:element-child-geospatial-values|cts:element-child-geospatial-value-match|cts:element-child-geospatial-query-weight|cts:element-child-geospatial-query-region|cts:element-child-geospatial-query-options|cts:element-child-geospatial-query-element-name|cts:element-child-geospatial-query-child-name|cts:element-child-geospatial-query|cts:element-child-geospatial-boxes|cts:element-attribute-words|cts:element-attribute-word-query-weight|cts:element-attribute-word-query-text|cts:element-attribute-word-query-options|cts:element-attribute-word-query-element-name|cts:element-attribute-word-query-attribute-name|cts:element-attribute-word-query|cts:element-attribute-word-match|cts:element-attribute-values|cts:element-attribute-value-ranges|cts:element-attribute-value-query-weight|cts:element-attribute-value-query-text|cts:element-attribute-value-query-options|cts:element-attribute-value-query-element-name|cts:element-attribute-value-query-attribute-name|cts:element-attribute-value-query|cts:element-attribute-value-match|cts:element-attribute-value-geospatial-co-occurrences|cts:element-attribute-value-co-occurrences|cts:element-attribute-range-query-weight|cts:element-attribute-range-query-value|cts:element-attribute-range-query-options|cts:element-attribute-range-query-operator|cts:element-attribute-range-query-element-name|cts:element-attribute-range-query-attribute-name|cts:element-attribute-range-query|cts:element-attribute-pair-geospatial-values|cts:element-attribute-pair-geospatial-value-match|cts:element-attribute-pair-geospatial-query-weight|cts:element-attribute-pair-geospatial-query-region|cts:element-at
tribute-pair-geospatial-query-options|cts:element-attribute-pair-geospatial-query-longitude-name|cts:element-attribute-pair-geospatial-query-latitude-name|cts:element-attribute-pair-geospatial-query-element-name|cts:element-attribute-pair-geospatial-query|cts:element-attribute-pair-geospatial-boxes|cts:document-query-uris|cts:document-query|cts:distance|cts:directory-query-uris|cts:directory-query-depth|cts:directory-query|cts:destination|cts:deregister|cts:contains|cts:confidence|cts:collections|cts:collection-query-uris|cts:collection-query|cts:collection-match|cts:classify|cts:circle-radius|cts:circle-center|cts:circle|cts:box-west|cts:box-south|cts:box-north|cts:box-east|cts:box|cts:bearing|cts:arc-intersection|cts:and-query-queries|cts:and-query-options|cts:and-query|cts:and-not-query-positive-query|cts:and-not-query-negative-query|cts:and-not-query|css:get|css:convert|cpf:success|cpf:failure|cpf:document-set-state|cpf:document-set-processing-status|cpf:document-set-last-updated|cpf:document-set-error|cpf:document-get-state|cpf:document-get-processing-status|cpf:document-get-last-updated|cpf:document-get-error|cpf:check-transition|alert:spawn-matching-actions|alert:rule-user-id-query|alert:rule-set-user-id|alert:rule-set-query|alert:rule-set-options|alert:rule-set-name|alert:rule-set-description|alert:rule-set-action|alert:rule-remove|alert:rule-name-query|alert:rule-insert|alert:rule-id-query|alert:rule-get-user-id|alert:rule-get-query|alert:rule-get-options|alert:rule-get-name|alert:rule-get-id|alert:rule-get-description|alert:rule-get-action|alert:rule-action-query|alert:remove-triggers|alert:make-rule|alert:make-log-action|alert:make-config|alert:make-action|alert:invoke-matching-actions|alert:get-my-rules|alert:get-all-rules|alert:get-actions|alert:find-matching-rules|alert:create-triggers|alert:config-set-uri|alert:config-set-trigger-ids|alert:config-set-options|alert:config-set-name|alert:config-set-description|alert:config-set-cpf-domain-names|alert:config-set-cpf-domain-ids|alert:config-insert|alert:config-get-uri|alert:config-get-trigger-ids|alert:config-get-options|alert:config-get-name|alert:config-get-id|alert:config-get-description|alert:config-get-cpf-domain-names|alert:config-get-cpf-domain-ids|alert:config-get|alert:config-delete|alert:action-set-options|alert:action-set-name|alert:action-set-module-root|alert:action-set-module-db|alert:action-set-module|alert:action-set-description|alert:action-remove|alert:action-insert|alert:action-get-options|alert:action-get-name|alert:action-get-module-root|alert:action-get-module-db|alert:action-get-module|alert:action-get-description|zero-or-one|years-from-duration|year-from-dateTime|year-from-date|upper-case|unordered|true|translate|trace|tokenize|timezone-from-time|timezone-from-dateTime|timezone-from-date|sum|subtract-dateTimes-yielding-yearMonthDuration|subtract-dateTimes-yielding-dayTimeDuration|substring-before|substring-after|substring|subsequence|string-to-codepoints|string-pad|string-length|string-join|string|static-base-uri|starts-with|seconds-from-time|seconds-from-duration|seconds-from-dateTime|round-half-to-even|round|root|reverse|resolve-uri|resolve-QName|replace|remove|QName|prefix-from-QName|position|one-or-more|number|not|normalize-unicode|normalize-space|node-name|node-kind|nilled|namespace-uri-from-QName|namespace-uri-for-prefix|namespace-uri|name|months-from-duration|month-from-dateTime|month-from-date|minutes-from-time|minutes-from-duration|minutes-from-dateTime|min|max|matches|lower-case|local-name-from-QN
ame|local-name|last|lang|iri-to-uri|insert-before|index-of|in-scope-prefixes|implicit-timezone|idref|id|hours-from-time|hours-from-duration|hours-from-dateTime|floor|false|expanded-QName|exists|exactly-one|escape-uri|escape-html-uri|error|ends-with|encode-for-uri|empty|document-uri|doc-available|doc|distinct-values|distinct-nodes|default-collation|deep-equal|days-from-duration|day-from-dateTime|day-from-date|data|current-time|current-dateTime|current-date|count|contains|concat|compare|collection|codepoints-to-string|codepoint-equal|ceiling|boolean|base-uri|avg|adjust-time-to-timezone|adjust-dateTime-to-timezone|adjust-date-to-timezone|abs)\b/],
-["pln",/^[\w:-]+/],["pln",/^[\t\n\r \xa0]+/]]),["xq","xquery"]);
diff --git a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-yaml.js b/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-yaml.js
deleted file mode 100644
index c38729b6c..000000000
--- a/src/python/twitter/pants/reporting/assets/js/prettify_extra_langs/lang-yaml.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var a=null;
-PR.registerLangHandler(PR.createSimpleLexer([["pun",/^[:>?|]+/,a,":|>?"],["dec",/^%(?:YAML|TAG)[^\n\r#]+/,a,"%"],["typ",/^&\S+/,a,"&"],["typ",/^!\S*/,a,"!"],["str",/^"(?:[^"\\]|\\.)*(?:"|$)/,a,'"'],["str",/^'(?:[^']|'')*(?:'|$)/,a,"'"],["com",/^#[^\n\r]*/,a,"#"],["pln",/^\s+/,a," \t\r\n"]],[["dec",/^(?:---|\.\.\.)(?:[\n\r]|$)/],["pun",/^-/],["kwd",/^\w+:[\n\r ]/],["pln",/^\w+/]]),["yaml","yml"]);
diff --git a/src/python/twitter/pants/reporting/html_reporter.py b/src/python/twitter/pants/reporting/html_reporter.py
deleted file mode 100644
index 7d1041b60..000000000
--- a/src/python/twitter/pants/reporting/html_reporter.py
+++ /dev/null
@@ -1,270 +0,0 @@
-import cgi
-import os
-import re
-import uuid
-
-from collections import namedtuple, defaultdict
-from pystache.renderer import Renderer
-
-from twitter.common.dirutil import safe_mkdir
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.mustache import MustacheRenderer
-from twitter.pants.base.workunit import WorkUnit
-from twitter.pants.reporting.linkify import linkify
-from twitter.pants.reporting.report import Report
-from twitter.pants.reporting.reporter import Reporter
-from twitter.pants.reporting.reporting_utils import items_to_report_element
-
-
-class HtmlReporter(Reporter):
-  """HTML reporting to files.
-
-  The files are intended to be served by the ReportingServer,
-  not accessed directly from the filesystem.
-  """
-
-  # HTML reporting settings.
-  #   html_dir: Where the report files go.
-  #   template_dir: Where to find mustache templates.
-  Settings = namedtuple('Settings', Reporter.Settings._fields + ('html_dir', 'template_dir'))
-
-  def __init__(self, run_tracker, settings):
-    Reporter.__init__(self, run_tracker, settings)
-    # The main report, and associated tool outputs, go under this dir.
-    self._html_dir = settings.html_dir
-
-    # We render HTML from mustache templates.
-    self._renderer = MustacheRenderer(settings.template_dir, __name__)
-
-    # We serve files relative to the build root.
-    self._buildroot = get_buildroot()
-    self._html_path_base = os.path.relpath(self._html_dir, self._buildroot)
-
-    # We write the main report body to this file object.
-    self._report_file = None
-
-    # We redirect stdout, stderr etc. of tool invocations to these files.
-    self._output_files = defaultdict(dict)  # workunit_id -> {path -> fileobj}.
-
-  def report_path(self):
-    """The path to the main report file."""
-    return os.path.join(self._html_dir, 'build.html')
-
-  def open(self):
-    """Implementation of Reporter callback."""
-    safe_mkdir(os.path.dirname(self._html_dir))
-    self._report_file = open(self.report_path(), 'w')
-
-  def close(self):
-    """Implementation of Reporter callback."""
-    self._report_file.close()
-    # Make sure everything's closed.
-    for files in self._output_files.values():
-      for f in files.values():
-        f.close()
-
-  def start_workunit(self, workunit):
-    """Implementation of Reporter callback."""
-    # We use these properties of the workunit to decide how to render information about it.
-    is_bootstrap = workunit.has_label(WorkUnit.BOOTSTRAP)
-    is_tool = workunit.has_label(WorkUnit.TOOL)
-    is_multitool = workunit.has_label(WorkUnit.MULTITOOL)
-    is_test = workunit.has_label(WorkUnit.TEST)
-
-    # Get useful properties from the workunit.
-    workunit_dict = workunit.to_dict()
-    if workunit_dict['cmd']:
-      workunit_dict['cmd'] = linkify(self._buildroot, workunit_dict['cmd'].replace('$', '\\\\$'))
-
-    # Create the template arguments.
-    args = { 'indent': len(workunit.ancestors()) * 10,
-             'html_path_base': self._html_path_base,
-             'workunit': workunit_dict,
-             'header_text': workunit.name,
-             'initially_open': is_test or not (is_bootstrap or is_tool or is_multitool),
-             'is_tool': is_tool,
-             'is_multitool': is_multitool }
-    args.update({ 'collapsible': lambda x: self._renderer.render_callable('collapsible', x, args) })
-
-    # Render the workunit's div.
-    s = self._renderer.render_name('workunit_start', args)
-
-    if is_tool:
-      # This workunit is a tool invocation, so render the appropriate content.
-      # We use the same args, slightly modified.
-      del args['initially_open']
-      if is_test:
-        # Have test framework stdout open by default, but not that of other tools.
-        # This is an arbitrary choice, but one that turns out to be useful to users in practice.
-        args['stdout_initially_open'] = True
-      s += self._renderer.render_name('tool_invocation_start', args)
-
-    # ... and we're done.
-    self._emit(s)
-
-  # CSS classes from pants.css that we use to style the header text to reflect the outcome.
-  _outcome_css_classes = ['aborted', 'failure', 'warning', 'success', 'unknown']
-
-  def end_workunit(self, workunit):
-    """Implementation of Reporter callback."""
-    # Create the template arguments.
-    duration = workunit.duration()
-    timing = '%.3f' % duration
-    unaccounted_time = None
-    # Background work may be idle a lot, no point in reporting that as unaccounted.
-    if self.is_under_main_root(workunit):
-      unaccounted_time_secs = workunit.unaccounted_time()
-      if unaccounted_time_secs >= 1 and unaccounted_time_secs > 0.05 * duration:
-        unaccounted_time = '%.3f' % unaccounted_time_secs
-    args = { 'workunit': workunit.to_dict(),
-             'status': workunit.choose(*HtmlReporter._outcome_css_classes),
-             'timing': timing,
-             'unaccounted_time': unaccounted_time,
-             'aborted': workunit.outcome() == WorkUnit.ABORTED }
-
-    s = ''
-    if workunit.has_label(WorkUnit.TOOL):
-      s += self._renderer.render_name('tool_invocation_end', args)
-    s += self._renderer.render_name('workunit_end', args)
-    self._emit(s)
-
-    # Update the timings.
-    def render_timings(timings):
-      timings_dict = timings.get_all()
-      for item in timings_dict:
-        item['timing_string'] = '%.3f' % item['timing']
-      args = {
-        'timings': timings_dict
-      }
-      return self._renderer.render_name('aggregated_timings', args)
-
-    self._overwrite('cumulative_timings', render_timings(self.run_tracker.cumulative_timings))
-    self._overwrite('self_timings', render_timings(self.run_tracker.self_timings))
-
-    # Update the artifact cache stats.
-    def render_cache_stats(artifact_cache_stats):
-      def fix_detail_id(e, _id):
-        return e if isinstance(e, basestring) else e + (_id, )
-
-      msg_elements = []
-      for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
-        msg_elements.extend([
-          cache_name + ' artifact cache: ',
-          # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
-          fix_detail_id(items_to_report_element(stat.hit_targets, 'hit'), 'cache-hit-details'),
-          ', ',
-          fix_detail_id(items_to_report_element(stat.miss_targets, 'miss'), 'cache-miss-details'),
-          '.'
-        ])
-      if not msg_elements:
-        msg_elements = ['No artifact cache use.']
-      return self._render_message(*msg_elements)
-
-    self._overwrite('artifact_cache_stats',
-                    render_cache_stats(self.run_tracker.artifact_cache_stats))
-
-    for f in self._output_files[workunit.id].values():
-      f.close()
-
-  def handle_output(self, workunit, label, s):
-    """Implementation of Reporter callback."""
-    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
-      path = os.path.join(self._html_dir, '%s.%s' % (workunit.id, label))
-      output_files = self._output_files[workunit.id]
-      if path not in output_files:
-        f = open(path, 'w')
-        output_files[path] = f
-      else:
-        f = output_files[path]
-      f.write(self._htmlify_text(s))
-      # We must flush in the same thread as the write.
-      f.flush()
-
-  _log_level_css_map = {
-    Report.FATAL: 'fatal',
-    Report.ERROR: 'error',
-    Report.WARN: 'warn',
-    Report.INFO: 'info',
-    Report.DEBUG: 'debug'
-  }
-  def do_handle_log(self, workunit, level, *msg_elements):
-    """Implementation of Reporter callback."""
-    content = '<span class="%s">%s</span>' % \
-      (HtmlReporter._log_level_css_map[level], self._render_message(*msg_elements))
-
-    # Generate some javascript that appends the content to the workunit's div.
-    args = {
-      'content_id': uuid.uuid4(),  # Identifies this content.
-      'workunit_id': workunit.id,  # The workunit this reporting content belongs to.
-      'content': content,          # The content to append.
-    }
-    s = self._renderer.render_name('append_to_workunit', args)
-
-    # Emit that javascript to the main report body.
-    self._emit(s)
-
-  def _render_message(self, *msg_elements):
-    elements = []
-    detail_ids = []
-    for element in msg_elements:
-      # Each element can be a message or a (message, detail) pair, as received by handle_log().
-      #
-      # However, as an internal implementation detail, we also allow an element to be a tuple
-      # (message, detail, detail_id[, detail_initially_visible]).
-      #
-      # - If the detail exists, clicking on the text will toggle display of the detail and close
-      #   all other details in this message.
-      # - If detail_initially_visible is True, the detail will be displayed by default.
-      #
-      # Toggling is managed via detail_ids: when clicking on a detail, it closes all details
-      # in this message with detail_ids different than that of the one being clicked on.
-      # We allow detail_id to be explicitly specified, so that the open/closed state can be
-      # preserved through refreshes. For example, when looking at the artifact cache stats,
-      # if "hits" are open and "misses" are closed, we want to remember that even after
-      # the cache stats are updated and the message re-rendered.
-      if isinstance(element, basestring):
-        element = [element]
-      defaults = ('', None, None, False)
-      # Map assumes None for missing values, so this will pick the default for those.
-      (text, detail, detail_id, detail_initially_visible) = \
-        map(lambda x, y: x or y, element, defaults)
-      element_args = {'text': self._htmlify_text(text) }
-      if detail is not None:
-        detail_id = detail_id or uuid.uuid4()
-        detail_ids.append(detail_id)
-        element_args.update({
-          'detail': self._htmlify_text(detail),
-          'detail_initially_visible': detail_initially_visible,
-          'detail-id': detail_id
-        })
-      elements.append(element_args)
-    args = { 'elements': elements,
-             'all-detail-ids': detail_ids }
-    return self._renderer.render_name('message', args)
-
-  def _emit(self, s):
-    """Append content to the main report file."""
-    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
-      self._report_file.write(s)
-      self._report_file.flush()  # We must flush in the same thread as the write.
-
-  def _overwrite(self, filename, s):
-    """Overwrite a file with the specified contents."""
-    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
-      with open(os.path.join(self._html_dir, filename), 'w') as f:
-        f.write(s)
-
-  def _htmlify_text(self, s):
-    """Make text HTML-friendly."""
-    colored = self._handle_ansi_color_codes(cgi.escape(str(s)))
-    return linkify(self._buildroot, colored).replace('\n', '<br>')
-
-  _ANSI_COLOR_CODE_RE = re.compile(r'\033\[((\d|;)*)m')
-  def _handle_ansi_color_codes(self, s):
-    """Replace ansi color sequences with spans of appropriately named css classes."""
-    def ansi_code_to_css(code):
-      return ' '.join(['ansi-%s' % c for c in code.split(';')])
-    return '<span>' +\
-      HtmlReporter._ANSI_COLOR_CODE_RE.sub(
-        lambda m: '</span><span class="%s">' % ansi_code_to_css(m.group(1)), s) +\
-      '</span>'
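The ANSI handling above turns each escape sequence into a close-and-reopen of a styled span. A minimal, self-contained sketch of the same translation (the names ANSI_RE and ansi_to_spans are illustrative, not part of the deleted file):

import re

# Same pattern as HtmlReporter._ANSI_COLOR_CODE_RE: captures the
# semicolon-separated numeric codes of an ANSI escape, e.g. '1;32'.
ANSI_RE = re.compile(r'\033\[((\d|;)*)m')

def ansi_to_spans(s):
  def ansi_code_to_css(code):
    return ' '.join('ansi-%s' % c for c in code.split(';'))
  # Each escape closes the current span and opens one whose css classes
  # name the codes it carried.
  return ('<span>' +
          ANSI_RE.sub(lambda m: '</span><span class="%s">' % ansi_code_to_css(m.group(1)), s) +
          '</span>')

print(ansi_to_spans('\033[1;32mSUCCESS\033[0m'))
# -> <span></span><span class="ansi-1 ansi-32">SUCCESS</span><span class="ansi-0"></span>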
diff --git a/src/python/twitter/pants/reporting/linkify.py b/src/python/twitter/pants/reporting/linkify.py deleted file mode 100644 index bb089354b..000000000 --- a/src/python/twitter/pants/reporting/linkify.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -import re - -from twitter.pants.base.build_file import BuildFile - - -# A regex to recognize substrings that are probably URLs or file paths. Broken down for readability. -_PREFIX = r'(https?://)?/?' # http://, https:// or / or nothing. -_OPTIONAL_PORT = r'(:\d+)?' -_REL_PATH_COMPONENT = r'(\w|[-.])+' # One or more alphanumeric, underscore, dash or dot. -_ABS_PATH_COMPONENT = r'/' + _REL_PATH_COMPONENT -_ABS_PATH_COMPONENTS = r'(%s)+' % _ABS_PATH_COMPONENT -_OPTIONAL_TARGET_SUFFIX = r'(:%s)?' % _REL_PATH_COMPONENT # For /foo/bar:target. - -# Note that we require at least two path components. -# We require the last character to be alphanumeric or underscore, because some tools print an -# ellipsis after file names (I'm looking at you, zinc). None of our files end in a dot in practice, -# so this is fine. -_PATH = _PREFIX + _REL_PATH_COMPONENT + _OPTIONAL_PORT + _ABS_PATH_COMPONENTS + \ - _OPTIONAL_TARGET_SUFFIX + '\w' -_PATH_RE = re.compile(_PATH) - -def linkify(buildroot, s): - """Augment text by heuristically finding URL and file references and turning them into links.""" - def to_url(m): - if m.group(1): - return m.group(0) # It's an http(s) url. - path = m.group(0) - if path.startswith('/'): - path = os.path.relpath(path, buildroot) - else: - # See if it's a reference to a target in a BUILD file. - # TODO: Deal with sibling BUILD files? - parts = path.split(':') - if len(parts) == 2: - putative_dir = parts[0] - else: - putative_dir = path - if os.path.isdir(os.path.join(buildroot, putative_dir)): - path = os.path.join(putative_dir, BuildFile._CANONICAL_NAME) - if os.path.exists(os.path.join(buildroot, path)): - # The reporting server serves file content at /browse/. - return '/browse/%s' % path - else: - return None - - def maybe_add_link(url, text): - return '<a target="_blank" href="%s">%s</a>' % (url, text) if url else text - return _PATH_RE.sub(lambda m: maybe_add_link(to_url(m), m.group(0)), s) diff --git a/src/python/twitter/pants/reporting/plaintext_reporter.py b/src/python/twitter/pants/reporting/plaintext_reporter.py deleted file mode 100644 index 9cf79def6..000000000 --- a/src/python/twitter/pants/reporting/plaintext_reporter.py +++ /dev/null @@ -1,161 +0,0 @@ -from collections import namedtuple - -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.reporting.report import Report -from twitter.pants.reporting.reporter import Reporter - - -try: - from colors import cyan, green, red, yellow - _colorfunc_map = { - Report.FATAL: red, - Report.ERROR: red, - Report.WARN: yellow, - Report.INFO: green, - Report.DEBUG: cyan - } -except ImportError: - _colorfunc_map = {} - - -class PlainTextReporter(Reporter): - """Plain-text reporting to stdout. - - We only report progress for things under the default work root. It gets too - confusing to try and show progress for background work too. - """ - - # Console reporting settings. - # outfile: Write to this file-like object.
- # color: use ANSI colors in output. - # indent: Whether to indent the reporting to reflect the nesting of workunits. - # timing: Show timing report at the end of the run. - # cache_stats: Show artifact cache report at the end of the run. - Settings = namedtuple('Settings', - Reporter.Settings._fields + ('outfile', 'color', 'indent', 'timing', 'cache_stats')) - - def __init__(self, run_tracker, settings): - Reporter.__init__(self, run_tracker, settings) - - def open(self): - """Implementation of Reporter callback.""" - pass - - def close(self): - """Implementation of Reporter callback.""" - if self.settings.timing: - self.emit('\n') - self.emit('\nCumulative Timings') - self.emit('\n==================') - self.emit('\n') - self.emit(self._format_aggregated_timings(self.run_tracker.cumulative_timings)) - self.emit('\n') - self.emit('\nSelf Timings') - self.emit('\n============') - self.emit('\n') - self.emit(self._format_aggregated_timings(self.run_tracker.self_timings)) - if self.settings.cache_stats: - self.emit('\n') - self.emit('\nArtifact Cache Stats') - self.emit('\n====================') - self.emit('\n') - self.emit(self._format_artifact_cache_stats(self.run_tracker.artifact_cache_stats)) - self.emit('\n') - - def start_workunit(self, workunit): - """Implementation of Reporter callback.""" - if not self.is_under_main_root(workunit): - return - - if workunit.parent and workunit.parent.has_label(WorkUnit.MULTITOOL): - # For brevity, we represent each consecutive invocation of a multitool with a dot. - self.emit('.') - elif not workunit.parent or \ - all([not x.has_label(WorkUnit.MULTITOOL) and not x.has_label(WorkUnit.BOOTSTRAP) - for x in workunit.parent.ancestors()]): - # Bootstrapping can be chatty, so don't show anything for its sub-workunits. - self.emit('\n%s %s %s[%s]' % - (workunit.start_time_string(), - workunit.start_delta_string(), - self._indent(workunit), - workunit.name if self.settings.indent else workunit.path())) - if self._show_output(workunit): - # So that emitted output starts on a new line (see below). - self.emit(self._prefix(workunit, '\n')) - self.flush() - - def end_workunit(self, workunit): - """Implementation of Reporter callback.""" - if not self.is_under_main_root(workunit): - return - - if workunit.outcome() != WorkUnit.SUCCESS and not self._show_output(workunit): - # Emit the suppressed workunit output, if any, to aid in debugging the problem. - for name, outbuf in workunit.outputs().items(): - self.emit(self._prefix(workunit, '\n==== %s ====\n' % name)) - self.emit(self._prefix(workunit, outbuf.read_from(0))) - self.flush() - - def do_handle_log(self, workunit, level, *msg_elements): - """Implementation of Reporter callback.""" - if not self.is_under_main_root(workunit): - return - - # If the element is a (msg, detail) pair, we ignore the detail. There's no - # useful way to display it on the console. 
- elements = [e if isinstance(e, basestring) else e[0] for e in msg_elements] - msg = '\n' + ''.join(elements) - if self.settings.color: - msg = _colorfunc_map.get(level, lambda x: x)(msg) - self.emit(self._prefix(workunit, msg)) - self.flush() - - def handle_output(self, workunit, label, s): - """Implementation of Reporter callback.""" - if not self.is_under_main_root(workunit): - return - - if self._show_output_indented(workunit): - self.emit(self._prefix(workunit, s)) - elif self._show_output_unindented(workunit): - self.emit(s) - self.flush() - - def emit(self, s): - self.settings.outfile.write(s) - - def flush(self): - self.settings.outfile.flush() - - # Emit output from some tools and not others. - # This is an arbitrary choice, but one that turns out to be useful to users in practice. - - def _show_output(self, workunit): - return self._show_output_indented(workunit) or self._show_output_unindented(workunit) - - def _show_output_indented(self, workunit): - return workunit.has_label(WorkUnit.COMPILER) or workunit.has_label(WorkUnit.TEST) - - def _show_output_unindented(self, workunit): - # Indenting looks weird in these cases. - return workunit.has_label(WorkUnit.REPL) or workunit.has_label(WorkUnit.RUN) - - def _format_aggregated_timings(self, aggregated_timings): - return '\n'.join(['%(timing).3f %(label)s' % x for x in aggregated_timings.get_all()]) - - def _format_artifact_cache_stats(self, artifact_cache_stats): - stats = artifact_cache_stats.get_all() - return 'No artifact cache reads.' if not stats else \ - '\n'.join(['%(cache_name)s - Hits: %(num_hits)d Misses: %(num_misses)d' % x - for x in stats]) - - def _indent(self, workunit): - return ' ' * (len(workunit.ancestors()) - 1) - - _time_string_filler = ' ' * len('HH:MM:SS mm:ss ') - def _prefix(self, workunit, s): - if self.settings.indent: - return s.replace('\n', '\n' + PlainTextReporter._time_string_filler + self._indent(workunit)) - else: - return PlainTextReporter._time_string_filler + s -
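A minimal sketch of wiring this reporter up, assuming an existing RunTracker and Report; the field names follow the Settings namedtuple defined above:

    import sys

    from twitter.pants.reporting.plaintext_reporter import PlainTextReporter
    from twitter.pants.reporting.report import Report

    settings = PlainTextReporter.Settings(log_level=Report.INFO, outfile=sys.stdout,
                                          color=True, indent=True, timing=True,
                                          cache_stats=False)
    # run_tracker and report are assumed to exist, e.g. as set up by pants_exe.
    report.add_reporter('console', PlainTextReporter(run_tracker, settings))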
diff --git a/src/python/twitter/pants/reporting/quiet_reporter.py b/src/python/twitter/pants/reporting/quiet_reporter.py deleted file mode 100644 index e4c3ce819..000000000 --- a/src/python/twitter/pants/reporting/quiet_reporter.py +++ /dev/null @@ -1,54 +0,0 @@ -import sys - -from collections import namedtuple - -from twitter.pants.reporting.report import Report -from twitter.pants.reporting.reporter import Reporter - - -try: - from colors import red - _maybe_color = red -except ImportError: - _maybe_color = lambda x: x - - -class QuietReporter(Reporter): - """Squelched plaintext reporting, only prints errors.""" - Settings = namedtuple('Settings', Reporter.Settings._fields + ('color', )) - - def __init__(self, run_tracker, settings): - Reporter.__init__(self, run_tracker, settings._replace(log_level=Report.ERROR)) - - def open(self): - """Implementation of Reporter callback.""" - pass - - def close(self): - """Implementation of Reporter callback.""" - pass - - def start_workunit(self, workunit): - """Implementation of Reporter callback.""" - pass - - def end_workunit(self, workunit): - """Implementation of Reporter callback.""" - pass - - def do_handle_log(self, workunit, level, *msg_elements): - """Implementation of Reporter callback.""" - # If the element is a (msg, detail) pair, we ignore the detail. There's no - # useful way to display it on the console. - elements = [e if isinstance(e, basestring) else e[0] for e in msg_elements] - msg = '\n' + ''.join(elements) - if self.settings.color: - msg = _maybe_color(msg) - self._emit(msg) - - def handle_output(self, workunit, label, s): - """Implementation of Reporter callback.""" - pass - - def _emit(self, s): - sys.stdout.write(s) diff --git a/src/python/twitter/pants/reporting/report.py b/src/python/twitter/pants/reporting/report.py deleted file mode 100644 index 98a7690ec..000000000 --- a/src/python/twitter/pants/reporting/report.py +++ /dev/null @@ -1,106 +0,0 @@ -import threading - -from twitter.common.threading import PeriodicThread - - -class ReportingError(Exception): - pass - -class Report(object): - """A report of a pants run.""" - - # Log levels. - FATAL = 0 - ERROR = 1 - WARN = 2 - INFO = 3 - DEBUG = 4 - - _log_level_name_map = { - 'FATAL': FATAL, 'ERROR': ERROR, 'WARN': WARN, 'WARNING': WARN, 'INFO': INFO, 'DEBUG': DEBUG - } - - @staticmethod - def log_level_from_string(s): - s = s.upper() - return Report._log_level_name_map.get(s, Report.INFO) - - def __init__(self): - # We periodically emit newly gathered output from tool invocations. - self._emitter_thread = \ - PeriodicThread(target=self.flush, name='output-emitter', period_secs=0.5) - self._emitter_thread.daemon = True - - # Map from workunit id to workunit. - self._workunits = {} - - # We report to these reporters. - self._reporters = {} # name -> Reporter instance. - - # We synchronize on this, to support parallel execution. - self._lock = threading.Lock() - - def open(self): - with self._lock: - for reporter in self._reporters.values(): - reporter.open() - self._emitter_thread.start() - - # Note that if you add/remove reporters after open() has been called you have - # to ensure that their state is set up correctly. Best only to do this with - # stateless reporters, such as ConsoleReporter. - - def add_reporter(self, name, reporter): - with self._lock: - self._reporters[name] = reporter - - def remove_reporter(self, name): - with self._lock: - ret = self._reporters[name] - del self._reporters[name] - return ret - - def start_workunit(self, workunit): - with self._lock: - self._workunits[workunit.id] = workunit - for reporter in self._reporters.values(): - reporter.start_workunit(workunit) - - def log(self, workunit, level, *msg_elements): - """Log a message. - - Each element of msg_elements is either a message string or a (message, detail) pair. - """ - with self._lock: - for reporter in self._reporters.values(): - reporter.handle_log(workunit, level, *msg_elements) - - def end_workunit(self, workunit): - with self._lock: - self._notify() # Make sure we flush everything reported until now. - for reporter in self._reporters.values(): - reporter.end_workunit(workunit) - if workunit.id in self._workunits: - del self._workunits[workunit.id] - - def flush(self): - with self._lock: - self._notify() - - def close(self): - self._emitter_thread.stop() - with self._lock: - self._notify() # One final time. - for reporter in self._reporters.values(): - reporter.close() - - def _notify(self): - # Notify for output in all workunits. Note that output may be coming in from workunits other - # than the current one, if work is happening in parallel. - # Assumes self._lock is held by the caller.
- for workunit in self._workunits.values(): - for label, output in workunit.outputs().items(): - s = output.read() - if len(s) > 0: - for reporter in self._reporters.values(): - reporter.handle_output(workunit, label, s) diff --git a/src/python/twitter/pants/reporting/reporter.py b/src/python/twitter/pants/reporting/reporter.py deleted file mode 100644 index bddfed29d..000000000 --- a/src/python/twitter/pants/reporting/reporter.py +++ /dev/null @@ -1,67 +0,0 @@ -from collections import namedtuple -from twitter.pants.goal.run_tracker import RunTracker - - -class Reporter(object): - """Formats and emits reports. - - Subclasses implement the callback methods, to provide specific reporting - functionality, e.g., to console or to browser. - """ - - # Generic reporting settings. - # log_level: Display log messages up to this level. - # subsettings: subclass-specific settings. - Settings = namedtuple('Settings', ['log_level']) - - def __init__(self, run_tracker, settings): - self.run_tracker = run_tracker - self.settings = settings - - def open(self): - """Begin the report.""" - pass - - def close(self): - """End the report.""" - pass - - def start_workunit(self, workunit): - """A new workunit has started.""" - pass - - def end_workunit(self, workunit): - """A workunit has finished.""" - pass - - def handle_log(self, workunit, level, *msg_elements): - """Handle a message logged by pants code. - - level: One of the log level constants defined on Report. - - Each element in msg_elements is either a message or a (message, detail) pair. - A subclass must show the message, but may choose to show the detail in some - sensible way (e.g., when the message text is clicked on in a browser). - - This convenience implementation filters by log level and then delegates to do_handle_log. - """ - if level <= self.settings.log_level: - self.do_handle_log(workunit, level, *msg_elements) - - def do_handle_log(self, workunit, level, *msg_elements): - """Handle a message logged by pants code, after it's passed the log level check.""" - pass - - def handle_output(self, workunit, label, s): - """Handle output captured from an invoked tool (e.g., javac). - - workunit: The innermost WorkUnit in which the tool was invoked. - label: Classifies the output e.g., 'stdout' for output captured from a tool's stdout or - 'debug' for debug output captured from a tool's logfiles. - s: The content captured. - """ - pass - - def is_under_main_root(self, workunit): - """Is the workunit running under the main thread's root?""" - return self.run_tracker.is_under_main_root(workunit)
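The callbacks above are the entire plugin surface, so a custom sink is small; a sketch, with everything except the Reporter base class hypothetical:

    from twitter.pants.reporting.reporter import Reporter

    class LineCountReporter(Reporter):
      # Toy reporter: tallies captured tool output lines per label (stdout, stderr, ...).
      Settings = Reporter.Settings  # Just the generic log_level field.

      def __init__(self, run_tracker, settings):
        Reporter.__init__(self, run_tracker, settings)
        self.counts = {}

      def handle_output(self, workunit, label, s):
        self.counts[label] = self.counts.get(label, 0) + s.count('\n')

      # do_handle_log would only see messages at or under settings.log_level,
      # since the base class handle_log has already filtered by level.

    # report.add_reporter('linecount', LineCountReporter(run_tracker, settings))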
diff --git a/src/python/twitter/pants/reporting/reporting_server.py b/src/python/twitter/pants/reporting/reporting_server.py deleted file mode 100644 index 6cff686a7..000000000 --- a/src/python/twitter/pants/reporting/reporting_server.py +++ /dev/null @@ -1,387 +0,0 @@ -import itertools -import json -import mimetypes -import os -import pkgutil -import pystache -import re -import urllib -import urlparse - -import BaseHTTPServer - -from collections import namedtuple -from datetime import date, datetime - -from pystache import Renderer -from twitter.common.dirutil import safe_mkdir -from twitter.pants.base.build_environment import get_buildroot - -from twitter.pants.base.mustache import MustacheRenderer -from twitter.pants.goal.run_tracker import RunInfo - - -# Google Prettyprint plugin files. -PPP_RE = re.compile("""^lang-.*\.js$""") - - -class PantsHandler(BaseHTTPServer.BaseHTTPRequestHandler): - """A handler that demultiplexes various pants reporting URLs.""" - - def __init__(self, settings, renderer, request, client_address, server): - self._settings = settings # An instance of ReportingServer.Settings. - self._root = self._settings.root - self._renderer = renderer - self._client_address = client_address - # The underlying handlers for specific URL prefixes. - self._GET_handlers = [ - ('/runs/', self._handle_runs), # Show list of known pants runs. - ('/run/', self._handle_run), # Show a report for a single pants run. - ('/browse/', self._handle_browse), # Browse filesystem under build root. - ('/content/', self._handle_content), # Show content of file. - ('/assets/', self._handle_assets), # Statically serve assets (css, js etc.) - ('/poll', self._handle_poll), # Handle poll requests for raw file content. - ('/latestrunid', self._handle_latest_runid) # Return id of latest pants run. - ] - BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, client_address, server) - - def do_GET(self): - """GET method implementation for BaseHTTPRequestHandler.""" - if not self._client_allowed(): - return - - try: - (_, _, path, query, _) = urlparse.urlsplit(self.path) - params = urlparse.parse_qs(query) - # Give each handler a chance to respond. - for prefix, handler in self._GET_handlers: - if self._maybe_handle(prefix, handler, path, params): - return - # If no path specified, default to showing the list of all runs. - if path == '/': - self._handle_runs('', {}) - return - - self._send_content('Invalid GET request %s' % self.path, 'text/html') - except (IOError, ValueError): - pass # Printing these errors gets annoying, and there's nothing to do about them anyway.
- #sys.stderr.write('Invalid GET request %s' % self.path) - - def _handle_runs(self, relpath, params): - """Show a listing of all pants runs since the last clean-all.""" - runs_by_day = self._partition_runs_by_day() - args = self._default_template_args('run_list') - args['runs_by_day'] = runs_by_day - self._send_content(self._renderer.render_name('base', args), 'text/html') - - def _handle_run(self, relpath, params): - """Show the report for a single pants run.""" - args = self._default_template_args('run') - run_id = relpath - run_info = self._get_run_info_dict(run_id) - if run_info is None: - args['no_such_run'] = relpath - if run_id == 'latest': - args['is_latest'] = 'none' - else: - report_abspath = run_info['default_report'] - report_relpath = os.path.relpath(report_abspath, self._root) - report_dir = os.path.dirname(report_relpath) - self_timings_path = os.path.join(report_dir, 'self_timings') - cumulative_timings_path = os.path.join(report_dir, 'cumulative_timings') - artifact_cache_stats_path = os.path.join(report_dir, 'artifact_cache_stats') - run_info['timestamp_text'] = \ - datetime.fromtimestamp(float(run_info['timestamp'])).strftime('%H:%M:%S on %A, %B %d %Y') - args.update({'run_info': run_info, - 'report_path': report_relpath, - 'self_timings_path': self_timings_path, - 'cumulative_timings_path': cumulative_timings_path, - 'artifact_cache_stats_path': artifact_cache_stats_path}) - if run_id == 'latest': - args['is_latest'] = run_info['id'] - args.update({ - 'collapsible': lambda x: self._renderer.render_callable('collapsible', x, args) - }) - self._send_content(self._renderer.render_name('base', args), 'text/html') - - def _handle_browse(self, relpath, params): - """Handle requests to browse the filesystem under the build root.""" - abspath = os.path.normpath(os.path.join(self._root, relpath)) - if not abspath.startswith(self._root): - raise ValueError # Prevent using .. to get files from anywhere other than root. - if os.path.isdir(abspath): - self._serve_dir(abspath, params) - elif os.path.isfile(abspath): - self._serve_file(abspath, params) - - def _handle_content(self, relpath, params): - """Render file content for pretty display.""" - abspath = os.path.normpath(os.path.join(self._root, relpath)) - if os.path.isfile(abspath): - with open(abspath, 'r') as infile: - content = infile.read() - else: - content = 'No file found at %s' % abspath - content_type = mimetypes.guess_type(abspath)[0] or 'text/plain' - if not content_type.startswith('text/') and not content_type == 'application/xml': - # Binary file. Display it as hex, split into lines. - n = 120 # Display lines of this max size. - content = repr(content)[1:-1] # Will escape non-printables etc, dropping surrounding quotes. - content = '\n'.join([content[i:i+n] for i in xrange(0, len(content), n)]) - prettify = False - prettify_extra_langs = [] - else: - prettify = True - if self._settings.assets_dir: - prettify_extra_dir = os.path.join(self._settings.assets_dir, 'js', 'prettify_extra_langs') - prettify_extra_langs = [ {'name': x} for x in os.listdir(prettify_extra_dir) ] - else: - # TODO: Find these from our package, somehow. 
- prettify_extra_langs = [] - linenums = True - args = { 'prettify_extra_langs': prettify_extra_langs, 'content': content, - 'prettify': prettify, 'linenums': linenums } - self._send_content(self._renderer.render_name('file_content', args), 'text/html') - - def _handle_assets(self, relpath, params): - """Statically serve assets: js, css etc.""" - if self._settings.assets_dir: - abspath = os.path.normpath(os.path.join(self._settings.assets_dir, relpath)) - with open(abspath, 'r') as infile: - content = infile.read() - else: - content = pkgutil.get_data(__name__, os.path.join('assets', relpath)) - content_type = mimetypes.guess_type(relpath)[0] or 'text/plain' - self._send_content(content, content_type) - - def _handle_poll(self, relpath, params): - """Handle poll requests for raw file contents.""" - request = json.loads(params.get('q')[0]) - ret = {} - # request is a polling request for multiple files. For each file: - # - id is some identifier assigned by the client, used to differentiate the results. - # - path is the file to poll. - # - pos is the last byte position in that file seen by the client. - for poll in request: - _id = poll.get('id', None) - path = poll.get('path', None) - pos = poll.get('pos', 0) - if path: - abspath = os.path.normpath(os.path.join(self._root, path)) - if os.path.isfile(abspath): - with open(abspath, 'r') as infile: - if pos: - infile.seek(pos) - content = infile.read() - ret[_id] = content - self._send_content(json.dumps(ret), 'application/json') - - def _handle_latest_runid(self, relpath, params): - """Handle request for the latest run id. - - Used by client-side javascript to detect when there's a new run to display. - """ - latest_runinfo = self._get_run_info_dict('latest') - if latest_runinfo is None: - self._send_content('none', 'text/plain') - else: - self._send_content(latest_runinfo['id'], 'text/plain') - - def _partition_runs_by_day(self): - """Split the runs by day, so we can display them grouped that way.""" - run_infos = self._get_all_run_infos() - for x in run_infos: - ts = float(x['timestamp']) - x['time_of_day_text'] = datetime.fromtimestamp(ts).strftime('%H:%M:%S') - - def date_text(dt): - delta_days = (date.today() - dt).days - if delta_days == 0: - return 'Today' - elif delta_days == 1: - return 'Yesterday' - elif delta_days < 7: - return dt.strftime('%A') # Weekday name. - else: - d = dt.day - # The 11th-13th take 'th'; otherwise the suffix follows the last digit. - suffix = 'th' if 11 <= d <= 13 else \ - 'st' if d % 10 == 1 else 'nd' if d % 10 == 2 else 'rd' if d % 10 == 3 else 'th' - return dt.strftime('%B %d') + suffix # E.g., October 30th. - - keyfunc = lambda x: datetime.fromtimestamp(float(x['timestamp'])) - sorted_run_infos = sorted(run_infos, key=keyfunc, reverse=True) - return [ { 'date_text': date_text(dt), 'run_infos': [x for x in infos] } - for dt, infos in itertools.groupby(sorted_run_infos, lambda x: keyfunc(x).date()) ] - - def _get_run_info_dict(self, run_id): - """Get the RunInfo for a run, as a dict.""" - run_info_path = os.path.join(self._settings.info_dir, run_id, 'info') - if os.path.exists(run_info_path): - # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template. - return RunInfo(run_info_path).get_as_dict() - else: - return None - - def _get_all_run_infos(self): - """Find the RunInfos for all runs since the last clean-all.""" - info_dir = self._settings.info_dir - if not os.path.isdir(info_dir): - return [] - paths = [os.path.join(info_dir, x) for x in os.listdir(info_dir)] - - # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template.
- # We filter only those that have a timestamp, to avoid a race condition with writing - # that field. - return filter(lambda d: 'timestamp' in d, [RunInfo(os.path.join(p, 'info')).get_as_dict() - for p in paths if os.path.isdir(p) and not os.path.islink(p)]) - - def _serve_dir(self, abspath, params): - """Show a directory listing.""" - relpath = os.path.relpath(abspath, self._root) - breadcrumbs = self._create_breadcrumbs(relpath) - entries = [ {'link_path': os.path.join(relpath, e), 'name': e} for e in os.listdir(abspath)] - args = self._default_template_args('dir') - args.update({ 'root_parent': os.path.dirname(self._root), - 'breadcrumbs': breadcrumbs, - 'entries': entries, - 'params': params }) - self._send_content(self._renderer.render_name('base', args), 'text/html') - - def _serve_file(self, abspath, params): - """Show a file. - - The actual content of the file is rendered by _handle_content.""" - relpath = os.path.relpath(abspath, self._root) - breadcrumbs = self._create_breadcrumbs(relpath) - link_path = urlparse.urlunparse([None, None, relpath, None, urllib.urlencode(params), None]) - args = self._default_template_args('file') - args.update({ 'root_parent': os.path.dirname(self._root), - 'breadcrumbs': breadcrumbs, - 'link_path': link_path }) - self._send_content(self._renderer.render_name('base', args), 'text/html') - - def _send_content(self, content, content_type, code=200): - """Send content to client.""" - self.send_response(code) - self.send_header('Content-Type', content_type) - self.send_header('Content-Length', str(len(content))) - self.end_headers() - self.wfile.write(content) - - def _client_allowed(self): - """Check if client is allowed to connect to this server.""" - client_ip = self._client_address[0] - if not client_ip in self._settings.allowed_clients and \ - not 'ALL' in self._settings.allowed_clients: - self._send_content('Access from host %s forbidden.' % client_ip, 'text/html') - return False - return True - - def _maybe_handle(self, prefix, handler, path, params, data=None): - """Apply the handler if the prefix matches.""" - if path.startswith(prefix): - relpath = path[len(prefix):] - if data: - handler(relpath, params, data) - else: - handler(relpath, params) - return True - else: - return False - - def _create_breadcrumbs(self, relpath): - """Create filesystem browsing breadcrumb navigation. - - That is, make each path segment into a clickable element that takes you to that dir. - """ - if relpath == '.': - breadcrumbs = [] - else: - path_parts = [os.path.basename(self._root)] + relpath.split(os.path.sep) - path_links = ['/'.join(path_parts[1:i+1]) for i, name in enumerate(path_parts)] - breadcrumbs = [{'link_path': link_path, 'name': name } - for link_path, name in zip(path_links, path_parts)] - return breadcrumbs - - def _default_template_args(self, content_template): - """Initialize template args.""" - def include(text, args): - template_name = pystache.render(text, args) - return self._renderer.render_name(template_name, args) - # Our base template calls include on the content_template. - ret = { 'content_template': content_template } - ret['include'] = lambda text: include(text, ret) - return ret - - def log_message(self, fmt, *args): - """Silence BaseHTTPRequestHandler's logging.""" - pass - - -class ReportingServer(object): - # Reporting server settings. - # info_dir: path to dir containing RunInfo files. - # template_dir: location of mustache template files. If None, the templates - # embedded in our package are used. 
- # assets_dir: location of assets (js, css etc.) If None, the assets - # embedded in our package are used. - # root: build root. - # allowed_clients: list of ips or ['ALL']. - Settings = namedtuple('Settings', - ['info_dir', 'template_dir', 'assets_dir', 'root', 'allowed_clients']) - - def __init__(self, port, settings): - renderer = MustacheRenderer(settings.template_dir, __name__) - - class MyHandler(PantsHandler): - def __init__(self, request, client_address, server): - PantsHandler.__init__(self, settings, renderer, request, client_address, server) - - self._httpd = BaseHTTPServer.HTTPServer(('', port), MyHandler) - self._httpd.timeout = 0.1 # Not the network timeout, but how often handle_request yields. - - def server_port(self): - return self._httpd.server_port - - def start(self): - self._httpd.serve_forever() - - -class ReportingServerManager(object): - @staticmethod - def _get_pidfile_dir(): - return os.path.join(get_buildroot(), '.pids', 'daemon') - - @staticmethod - def save_current_server_port(port): - """Save the port of the currently-running server, so we can find it across pants runs.""" - # We don't put the pidfile in .pants.d, because we want to find it even after a clean. - # NOTE: If changing this dir/file name, also change get_current_server_pidfiles_and_ports - # appropriately. - # TODO: Generalize the pidfile idiom into some central library. - pidfile_dir = ReportingServerManager._get_pidfile_dir() - safe_mkdir(pidfile_dir) - pidfile = os.path.join(pidfile_dir, 'port_%d.pid' % port) - with open(pidfile, 'w') as outfile: - outfile.write(str(os.getpid())) - - @staticmethod - def get_current_server_port(): - """Returns the port of the currently-running server, or None if no server is detected.""" - pidfiles_and_ports = ReportingServerManager.get_current_server_pidfiles_and_ports() - # There should only be one pidfile, but in case there are many due to error, - # pick the first one. - return pidfiles_and_ports[0][1] if pidfiles_and_ports else None - - @staticmethod - def get_current_server_pidfiles_and_ports(): - """Returns a list of pairs (pidfile, port) of all found pidfiles.""" - pidfile_dir = ReportingServerManager._get_pidfile_dir() - # There should only be one pidfile, but there may be errors/race conditions where - # there are multiple of them. - pidfile_names = os.listdir(pidfile_dir) if os.path.exists(pidfile_dir) else [] - ret = [] - for pidfile_name in pidfile_names: - m = re.match(r'port_(\d+)\.pid', pidfile_name) - if m is not None: - ret.append((os.path.join(pidfile_dir, pidfile_name), int(m.group(1)))) - return ret diff --git a/src/python/twitter/pants/reporting/reporting_utils.py b/src/python/twitter/pants/reporting/reporting_utils.py deleted file mode 100644 index 0011d4c00..000000000 --- a/src/python/twitter/pants/reporting/reporting_utils.py +++ /dev/null @@ -1,30 +0,0 @@ - - -def items_to_report_element(items, item_type): - """Converts an iterable of items to a (message, detail) pair. - - - items: a list of items (e.g., Target instances) that can be str()-ed. - - item_type: a string describing the type of item (e.g., 'target'). - - Returns (message, detail) where message is the count of items (e.g., '26 targets') - and detail is the text representation of the list of items, one per line. - - The return value can be used as an argument to Report.log(). - - This is useful when we want to say "N targets" or "K sources" - and allow the user to see which ones by clicking on that text. 
- """ - def pluralize(x): - if x.endswith('s'): - return x + 'es' - else: - return x + 's' - - items = [str(x) for x in items] - n = len(items) - text = '%d %s' % (n, item_type if n == 1 else pluralize(item_type)) - if n == 0: - return text - else: - detail = '\n'.join(items) - return text, detail diff --git a/src/python/twitter/pants/reporting/templates/aggregated_timings.mustache b/src/python/twitter/pants/reporting/templates/aggregated_timings.mustache deleted file mode 100644 index 0af62e9f8..000000000 --- a/src/python/twitter/pants/reporting/templates/aggregated_timings.mustache +++ /dev/null @@ -1,6 +0,0 @@ -
-{{#timings}} - - -{{/timings}} -
{{timing_string}}{{label}}{{#is_tool}}{{/is_tool}}
diff --git a/src/python/twitter/pants/reporting/templates/append_to_workunit.mustache b/src/python/twitter/pants/reporting/templates/append_to_workunit.mustache deleted file mode 100644 index 07292a05c..000000000 --- a/src/python/twitter/pants/reporting/templates/append_to_workunit.mustache +++ /dev/null @@ -1,7 +0,0 @@ -{{! Append some reporting content to a workunit. }} -
{{{content}}}
- diff --git a/src/python/twitter/pants/reporting/templates/base.mustache b/src/python/twitter/pants/reporting/templates/base.mustache deleted file mode 100644 index eb20cde3a..000000000 --- a/src/python/twitter/pants/reporting/templates/base.mustache +++ /dev/null @@ -1,41 +0,0 @@ -{{! Chrome for reporting console. }} - - - Pants - - - - - - - -
-
-

Pants

-
- -
- - -
-{{#include}}{{content_template}}{{/include}} -
-
- -
-
- -
- - - - diff --git a/src/python/twitter/pants/reporting/templates/collapsible.mustache b/src/python/twitter/pants/reporting/templates/collapsible.mustache deleted file mode 100644 index 047759947..000000000 --- a/src/python/twitter/pants/reporting/templates/collapsible.mustache +++ /dev/null @@ -1,19 +0,0 @@ -
-
-
- -
-
-
- {{workunit.start_time_string}}{{workunit.start_delta_string}} -
- [{{title}}] - - {{#icon}}{{/icon}} - ctrl-c - -
- {{#spinner}}
{{/spinner}} -
-
-
diff --git a/src/python/twitter/pants/reporting/templates/dir.mustache b/src/python/twitter/pants/reporting/templates/dir.mustache deleted file mode 100644 index 5d83bb842..000000000 --- a/src/python/twitter/pants/reporting/templates/dir.mustache +++ /dev/null @@ -1,10 +0,0 @@ -
- -
-{{#entries}} -{{name}}
-{{/entries}} -
-
diff --git a/src/python/twitter/pants/reporting/templates/file.mustache b/src/python/twitter/pants/reporting/templates/file.mustache deleted file mode 100644 index c1bd81253..000000000 --- a/src/python/twitter/pants/reporting/templates/file.mustache +++ /dev/null @@ -1,7 +0,0 @@ -{{! Display the contents of a file. }} -
- - -
diff --git a/src/python/twitter/pants/reporting/templates/file_content.mustache b/src/python/twitter/pants/reporting/templates/file_content.mustache deleted file mode 100644 index 47b80e3fb..000000000 --- a/src/python/twitter/pants/reporting/templates/file_content.mustache +++ /dev/null @@ -1,14 +0,0 @@ - - - - - {{#prettify_extra_langs}} - - {{/prettify_extra_langs}} - - -
-{{content}}
-
- - diff --git a/src/python/twitter/pants/reporting/templates/message.mustache b/src/python/twitter/pants/reporting/templates/message.mustache deleted file mode 100644 index 73edf6b54..000000000 --- a/src/python/twitter/pants/reporting/templates/message.mustache +++ /dev/null @@ -1,9 +0,0 @@ -{{! Render a message consisting of elements each of which is either text or text+detail. - In the latter case we render such that clicking on the text toggles the detail, and closes - all other known details in the message (as specified in all-detail-ids). }} -
-{{#elements}}{{#detail?}}{{{text}}}{{/detail?}}{{^detail?}}{{{text}}}{{/detail?}}{{/elements}} -{{#elements}}{{#detail?}} -
{{{detail}}}
-{{/detail?}}{{/elements}} -
diff --git a/src/python/twitter/pants/reporting/templates/run.mustache b/src/python/twitter/pants/reporting/templates/run.mustache deleted file mode 100644 index 7e19f4363..000000000 --- a/src/python/twitter/pants/reporting/templates/run.mustache +++ /dev/null @@ -1,63 +0,0 @@ -{{! A report for a single pants run. }} -
-{{#no_such_run}} -

-{{#is_latest}}

No runs found since last clean-all.

Latest run will display here automatically.

{{/is_latest}} -{{^is_latest}}
No run found with id {{.}}
{{/is_latest}} -

-{{/no_such_run}} -{{^no_such_run}} -
-{{#run_info}} -
Pants run at {{timestamp_text}}
-

-

Command line:
-
{{cmd_line}}
-

-
-
{{#collapsible}}id=cumulative-timings-collapsible&title=Cumulative%20timings&class_prefix=aggregated-timings{{/collapsible}}
-
{{#collapsible}}id=self-timings-collapsible&title=Self%20timings&class_prefix=aggregated-timings{{/collapsible}}
-
{{#collapsible}}id=artifact-cache-stats-collapsible&title=Artifact%20cache%20stats&class_prefix=artifact-cache-stats{{/collapsible}}
-
-
-

-

-

- -{{/run_info}} -{{/no_such_run}} -{{#is_latest}} - -{{/is_latest}} -
diff --git a/src/python/twitter/pants/reporting/templates/run_list.mustache b/src/python/twitter/pants/reporting/templates/run_list.mustache deleted file mode 100644 index e90c11e8b..000000000 --- a/src/python/twitter/pants/reporting/templates/run_list.mustache +++ /dev/null @@ -1,17 +0,0 @@ -{{! The list of all known pants runs. }} -
-
Pants runs since last clean-all
-
-Latest -
-
-{{#runs_by_day}} -
{{date_text}}
- -{{/runs_by_day}} -
-
diff --git a/src/python/twitter/pants/reporting/templates/tool_invocation_end.mustache b/src/python/twitter/pants/reporting/templates/tool_invocation_end.mustache deleted file mode 100644 index 469420729..000000000 --- a/src/python/twitter/pants/reporting/templates/tool_invocation_end.mustache +++ /dev/null @@ -1,7 +0,0 @@ -{{! Finalize reporting for a finished tool invocation. }} - - diff --git a/src/python/twitter/pants/reporting/templates/tool_invocation_start.mustache b/src/python/twitter/pants/reporting/templates/tool_invocation_start.mustache deleted file mode 100644 index 118792cd5..000000000 --- a/src/python/twitter/pants/reporting/templates/tool_invocation_start.mustache +++ /dev/null @@ -1,25 +0,0 @@ -{{! Create a collapsible div to represent invocation of an external tool. }} -
-{{#collapsible}}id={{workunit.id}}-cmd&title=cmdline&class_prefix=cmd{{/collapsible}} -{{#collapsible}}id={{workunit.id}}-stdout&title=stdout&class_prefix=greyed{{#stdout_initially_open}}&initially_open=1{{/stdout_initially_open}}{{/collapsible}} -{{#collapsible}}id={{workunit.id}}-stderr&title=stderr&class_prefix=greyed{{/collapsible}} -
- diff --git a/src/python/twitter/pants/reporting/templates/workunit_end.mustache b/src/python/twitter/pants/reporting/templates/workunit_end.mustache deleted file mode 100644 index a21aa69a6..000000000 --- a/src/python/twitter/pants/reporting/templates/workunit_end.mustache +++ /dev/null @@ -1,13 +0,0 @@ -{{! Finalize reporting for a finished workunit. }} - diff --git a/src/python/twitter/pants/reporting/templates/workunit_start.mustache b/src/python/twitter/pants/reporting/templates/workunit_start.mustache deleted file mode 100644 index f999cc5ac..000000000 --- a/src/python/twitter/pants/reporting/templates/workunit_start.mustache +++ /dev/null @@ -1,13 +0,0 @@ -{{! Create a collapsible div in which to nest the reporting for a workunit. - To add content to this div, append it to ${'#WORKUNITID-content'}. -}} -
{{#collapsible}}id={{workunit.id}}&title={{header_text}}{{#workunit.target_addresses}}&targets={{.}}{{/workunit.target_addresses}}{{#initially_open}}&initially_open=1{{/initially_open}}&spinner=1{{#is_tool}}&icon=icon-cog{{/is_tool}}{{#is_multitool}}&icon=icon-cogs{{/is_multitool}}{{/collapsible}}
- diff --git a/src/python/twitter/pants/scm/__init__.py b/src/python/twitter/pants/scm/__init__.py deleted file mode 100644 index 3ef35bbbb..000000000 --- a/src/python/twitter/pants/scm/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from abc import abstractmethod, abstractproperty - -from twitter.common.lang import AbstractClass - - -class Scm(AbstractClass): - """Abstracts high-level scm operations needed by pants core and pants tasks.""" - - class ScmException(Exception): - """Indicates a problem interacting with the scm.""" - - class RemoteException(ScmException): - """Indicates a problem performing a remote scm operation.""" - - class LocalException(ScmException): - """Indicates a problem performing a local scm operation.""" - - @abstractproperty - def commit_id(self): - """Returns the id of the current commit.""" - - @abstractproperty - def tag_name(self): - """Returns the name of the current tag if any.""" - - @abstractproperty - def branch_name(self): - """Returns the name of the current branch if any.""" - - @abstractmethod - def changed_files(self, from_commit=None, include_untracked=False): - """Returns a list of files with uncommitted changes or else files changed since from_commit. - - If include_untracked=True then any workspace files that are un-tracked by the scm and not - ignored will be included as well. - """ - - @abstractmethod - def changelog(self, from_commit=None, files=None): - """Produces a changelog from the given commit or the 1st commit if none is specified until the - present workspace commit for the changes affecting the given files. - - If no files are given then the full change log should be produced. - """ - - @abstractmethod - def refresh(self): - """Refreshes the local workspace with any changes on the server. - - Subclasses should raise some form of ScmException to indicate a refresh error whether it be - a conflict or a communication channel error. - """ - - @abstractmethod - def tag(self, name, message=None): - """Tags the state in the local workspace and ensures this tag is on the server. - - Subclasses should raise RemoteException if there is a problem getting the tag to the server. - """ - - @abstractmethod - def commit(self, message): - """Commits the changes in the local workspace and ensures this commit is on the server. - - Subclasses should raise RemoteException if there is a problem getting the commit to the server. - """
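To make the contract concrete before the git implementation that follows, a sketch of driving it from release-automation code; the flow and tag naming are hypothetical:

    from twitter.pants.scm import Scm

    def tag_release(scm, version):
      # Fail fast on a dirty workspace before talking to the server.
      dirty = scm.changed_files(include_untracked=True)
      if dirty:
        raise Scm.LocalException('Uncommitted changes: %s' % ', '.join(dirty))
      scm.refresh()  # May raise Scm.RemoteException, e.g. on conflicts.
      scm.tag('release-%s' % version, message='Release %s' % version)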
- """ diff --git a/src/python/twitter/pants/scm/git.py b/src/python/twitter/pants/scm/git.py deleted file mode 100644 index 30d924016..000000000 --- a/src/python/twitter/pants/scm/git.py +++ /dev/null @@ -1,157 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import subprocess - -from . import Scm - - -class Git(Scm): - """An Scm implementation backed by git.""" - - def __init__(self, binary='git', gitdir=None, worktree=None, remote=None, branch=None, log=None): - """Creates a git scm proxy that assumes the git repository is in the cwd by default. - - binary: The path to the git binary to use, 'git' by default. - gitdir: The path to the repository's git metadata directory (typically '.git'). - workspace: The path to the git repository working tree directory (typically '.'). - remote: The default remote to use. - branch: The default remote branch to use. - log: A log object that supports debug, info, and warn methods. - """ - Scm.__init__(self) - - self._gitcmd = binary - self._worktree = os.path.realpath(worktree or os.getcwd()) - self._gitdir = os.path.realpath(gitdir) if gitdir else os.path.join(self._worktree, '.git') - self._remote = remote - self._branch = branch - - if log: - self._log = log - else: - from twitter.common import log as c_log - self._log = c_log - - @property - def commit_id(self): - sha = self._check_output(['rev-parse', 'HEAD'], raise_type=Scm.LocalException) - return self._cleanse(sha) - - @property - def tag_name(self): - tag = self._check_output(['describe', '--tags', '--always'], raise_type=Scm.LocalException) - return None if b'cannot' in tag else self._cleanse(tag) - - @property - def branch_name(self): - branch = self._check_output(['rev-parse', '--abbrev-ref', 'HEAD'], - raise_type=Scm.LocalException) - branch = self._cleanse(branch) - return None if branch == 'HEAD' else branch - - def changed_files(self, from_commit=None, include_untracked=False): - uncommitted_changes = self._check_output(['diff', '--name-only', 'HEAD'], - raise_type=Scm.LocalException) - - files = set(uncommitted_changes.split()) - if from_commit: - # Grab the diff from the merge-base to HEAD using ... syntax. This ensures we have just - # the changes that have occurred on the current branch. 
- committed_changes = self._check_output(['diff', '--name-only', '%s...HEAD' % from_commit], - raise_type=Scm.LocalException) - files.update(committed_changes.split()) - if include_untracked: - untracked = self._check_output(['ls-files', '--other', '--exclude-standard'], - raise_type=Scm.LocalException) - files.update(untracked.split()) - return files - - def changelog(self, from_commit=None, files=None): - args = ['whatchanged', '--stat', '--find-renames', '--find-copies'] - if from_commit: - args.append('%s..HEAD' % from_commit) - if files: - args.append('--') - args.extend(files) - return self._check_output(args, raise_type=Scm.LocalException) - - def refresh(self): - remote, merge = self._get_upstream() - self._check_call(['pull', '--ff-only', '--tags', remote, merge], raise_type=Scm.RemoteException) - - def tag(self, name, message=None): - # We use -a here instead of --annotate to maintain maximum git compatibility. - # --annotate was only introduced in 1.7.8 via: - # https://github.com/git/git/commit/c97eff5a95d57a9561b7c7429e7fcc5d0e3a7f5d - self._check_call(['tag', '-a', '--message=%s' % (message or ''), name], - raise_type=Scm.LocalException) - self._push('refs/tags/%s' % name) - - def commit(self, message): - self._check_call(['commit', '--all', '--message=%s' % message], raise_type=Scm.LocalException) - self._push() - - def _push(self, *refs): - remote, merge = self._get_upstream() - self._check_call(['push', remote, merge] + list(refs), raise_type=Scm.RemoteException) - - def _get_upstream(self): - if not self._remote or not self._branch: - branch = self.branch_name - if not branch: - raise Scm.LocalException('Failed to determine local branch') - - def get_local_config(key): - value = self._check_output(['config', '--local', '--get', key], - raise_type=Scm.LocalException) - return value.strip() - - self._remote = self._remote or get_local_config('branch.%s.remote' % branch) - self._branch = self._branch or get_local_config('branch.%s.merge' % branch) - return self._remote, self._branch - - def _check_call(self, args, failure_msg=None, raise_type=None): - cmd = self._create_git_cmdline(args) - self._log_call(cmd) - result = subprocess.call(cmd) - self._check_result(cmd, result, failure_msg, raise_type) - - def _check_output(self, args, failure_msg=None, raise_type=None): - cmd = self._create_git_cmdline(args) - self._log_call(cmd) - - # We let stderr flow to wherever it's currently mapped for this process - generally to the - # terminal where the user can see the error.
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE) - out, _ = process.communicate() - - self._check_result(cmd, process.returncode, failure_msg, raise_type) - return out - - def _create_git_cmdline(self, args): - return [self._gitcmd, '--git-dir=%s' % self._gitdir, '--work-tree=%s' % self._worktree] + args - - def _log_call(self, cmd): - self._log.debug('Executing: %s' % ' '.join(cmd)) - - def _check_result(self, cmd, result, failure_msg=None, raise_type=Scm.ScmException): - if result != 0: - raise raise_type(failure_msg or '%s failed with exit code %d' % (' '.join(cmd), result)) - - def _cleanse(self, output): - return output.strip().decode('utf-8') diff --git a/src/python/twitter/pants/targets/__init__.py b/src/python/twitter/pants/targets/__init__.py deleted file mode 100644 index 24684ba62..000000000 --- a/src/python/twitter/pants/targets/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/src/python/twitter/pants/targets/annotation_processor.py b/src/python/twitter/pants/targets/annotation_processor.py deleted file mode 100644 index ec9cec35b..000000000 --- a/src/python/twitter/pants/targets/annotation_processor.py +++ /dev/null @@ -1,68 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from twitter.pants.base.build_manual import manual - -from .exportable_jvm_library import ExportableJvmLibrary -from .resources import WithResources - - -@manual.builddict(tags=['java']) -class AnnotationProcessor(ExportableJvmLibrary, WithResources): - """Produces a Java library containing one or more annotation processors.""" - - def __init__(self, - name, - sources, - provides=None, - dependencies=None, - excludes=None, - resources=None, - processors=None, - exclusives=None): - - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances - to filter this target's transitive dependencies against. - :param resources: An optional list of file paths (DEPRECATED) or - ``resources`` targets (which in turn point to file paths). The paths - indicate text file resources to place in this module's jar. - :param processors: A list of the fully qualified class names of the - annotation processors this library exports. - :param exclusives: An optional map of exclusives tags. See CheckExclusives for details. - """ - super(AnnotationProcessor, self).__init__( - name, - sources, - provides, - dependencies, - excludes, - exclusives=exclusives) - - self.resources = resources - self.processors = processors - self.add_labels('java', 'apt') diff --git a/src/python/twitter/pants/targets/anonymous.py b/src/python/twitter/pants/targets/anonymous.py deleted file mode 100644 index f06e77aee..000000000 --- a/src/python/twitter/pants/targets/anonymous.py +++ /dev/null @@ -1,32 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from twitter.common.collections import OrderedSet - - -# TODO(John Sirois): this is a fragile duck-type, rationalize a dependency bucket interface -class AnonymousDeps(object): - def __init__(self): - self._dependencies = OrderedSet() - - @property - def dependencies(self): - return self._dependencies - - def resolve(self): - for dependency in self.dependencies: - for dep in dependency.resolve(): - yield dep diff --git a/src/python/twitter/pants/targets/artifact.py b/src/python/twitter/pants/targets/artifact.py deleted file mode 100644 index 11c2383d5..000000000 --- a/src/python/twitter/pants/targets/artifact.py +++ /dev/null @@ -1,80 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== -from twitter.common.collections import maybe_list - -from twitter.common.lang import Compatibility - -from twitter.pants.base.build_manual import manual - -from .pants_target import Pants -from .repository import Repository -from .util import resolve - - -@manual.builddict(tags=["jvm"]) -class Artifact(object): - """Represents a jvm artifact ala maven or ivy. - - Used in the ``provides`` parameter to *jvm*\_library targets. - """ - - def __init__(self, org, name, repo, description=None): - """ - :param string org: Organization of this artifact, or groupId in maven parlance. - :param string name: Name of the artifact, or artifactId in maven parlance. - :param repo: :class:`twitter.pants.targets.repository.Repository` - this artifact is published to. - :param string description: Description of this artifact. 
- """ - if not isinstance(org, Compatibility.string): - raise ValueError("org must be %s but was %s" % (Compatibility.string, org)) - if not isinstance(name, Compatibility.string): - raise ValueError("name must be %s but was %s" % (Compatibility.string, name)) - - if repo is None: - raise ValueError("repo must be supplied") - repos = [] - for tgt in maybe_list(resolve(repo), expected_type=(Pants, Repository)): - repos.extend(tgt.resolve()) - if len(repos) != 1: - raise ValueError("An artifact must have exactly 1 repo, given: %s" % repos) - repo = repos[0] - - if description is not None and not isinstance(description, Compatibility.string): - raise ValueError("description must be None or %s but was %s" - % (Compatibility.string, description)) - - self.org = org - self.name = name - self.rev = None - self.repo = repo - self.description = description - - def __eq__(self, other): - result = other and ( - type(other) == Artifact) and ( - self.org == other.org) and ( - self.name == other.name) - return result - - def __hash__(self): - return hash((self.org, self.name)) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return "%s-%s -> %s" % (self.org, self.name, self.repo) diff --git a/src/python/twitter/pants/targets/benchmark.py b/src/python/twitter/pants/targets/benchmark.py deleted file mode 100644 index 90a85c692..000000000 --- a/src/python/twitter/pants/targets/benchmark.py +++ /dev/null @@ -1,60 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.build_manual import manual - -from .jvm_target import JvmTarget -from .resources import WithResources - - -@manual.builddict(tags=["jvm"]) -class Benchmark(JvmTarget, WithResources): - """A caliper benchmark. - - Run it with the ``bench`` goal. - """ - - def __init__(self, - name, - sources=None, - java_sources=None, - dependencies=None, - excludes=None, - resources=None, - exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param java_sources: - :class:`twitter.pants.targets.java_library.JavaLibrary` or list of - JavaLibrary targets this library has a circular dependency on. - Prefer using dependencies to express non-circular dependencies. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. 
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param resources: An optional list of ``resources`` targets containing text
-      file resources to place in this module's jar.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-    super(Benchmark, self).__init__(name, sources, dependencies, excludes, exclusives=exclusives)
-
-    self.java_sources = java_sources
-    self.resources = resources
diff --git a/src/python/twitter/pants/targets/credentials.py b/src/python/twitter/pants/targets/credentials.py
deleted file mode 100644
index 0e9420ea7..000000000
--- a/src/python/twitter/pants/targets/credentials.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.target import Target
-
-
-class Credentials(Target):
-  """Supplies credentials for a maven repository on demand.
-
-  The ``jar-publish`` section of your ``pants.ini`` file can refer to one
-  or more of these.
-  """
-
-  def __init__(self, name, username=None, password=None,
-               exclusives=None):
-    """
-    :param string name: The name of these credentials.
-    :param username: Either a constant username value or else a callable that can fetch one.
-    :type username: string or callable
-    :param password: Either a constant password value or else a callable that can fetch one.
-    :type password: string or callable
-    """
-    Target.__init__(self, name, exclusives=exclusives)
-    self._username = username if callable(username) else lambda _: username
-    self._password = password if callable(password) else lambda _: password
-
-  def username(self, repository):
-    """Returns the username in java system property argument form."""
-    return self._username(repository)
-
-  def password(self, repository):
-    """Returns the password in java system property argument form."""
-    return self._password(repository)
diff --git a/src/python/twitter/pants/targets/doc.py b/src/python/twitter/pants/targets/doc.py
deleted file mode 100644
index 48962f9a5..000000000
--- a/src/python/twitter/pants/targets/doc.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
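Since Credentials above accepts either constant values or a callable (which is invoked with the repository), a publish setup could look like this hypothetical sketch; the target name and the read_password_for helper are illustrative, not part of the deleted API:

  credentials(name='internal-maven',
    username='publisher',
    password=lambda repository: read_password_for(repository),  # hypothetical helper
  )

The jar-publish section of pants.ini would then refer to this target by name.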
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - - -from twitter.pants.base.build_manual import manual -from twitter.pants.base.target import Target - -from .internal import InternalTarget -from .pants_target import Pants -from .with_sources import TargetWithSources - - -class Wiki(Target): - """Target that identifies a wiki where pages can be published.""" - - def __init__(self, name, url_builder, exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param url_builder: Function that accepts a page target and an optional wiki config dict. - :returns: A tuple of (alias, fully qualified url). - """ - Target.__init__(self, name, exclusives=exclusives) - self.url_builder = url_builder - - -class Page(InternalTarget, TargetWithSources): - """Describes a single documentation page.""" - - def __init__(self, name, source, dependencies=None, resources=None, exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param source: Source of the page in markdown format. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param resources: An optional list of Resources objects. - """ - InternalTarget.__init__(self, name, dependencies, exclusives=exclusives) - TargetWithSources.__init__(self, name, sources=[source], exclusives=exclusives) - - self.resources = self._resolve_paths(resources) if resources else [] - self._wikis = {} - - @property - def source(self): - return self.sources[0] - - @manual.builddict() - def register_wiki(self, wiki, **kwargs): - """Adds this page to the given wiki for publishing. Wiki-specific configuration is passed as - kwargs. - """ - if isinstance(wiki, Pants): - wiki = wiki.get() - if not isinstance(wiki, Wiki): - raise ValueError('The 1st argument must be a wiki target, given: %s' % wiki) - self._wikis[wiki] = kwargs - return self - - def wiki_config(self, wiki): - """Gets the wiki specific config for the given wiki if present or else returns None.""" - return self._wikis.get(wiki) - - def wikis(self): - """Returns all the wikis registered with this page.""" - return self._wikis.keys() diff --git a/src/python/twitter/pants/targets/exclude.py b/src/python/twitter/pants/targets/exclude.py deleted file mode 100644 index 10daeec4f..000000000 --- a/src/python/twitter/pants/targets/exclude.py +++ /dev/null @@ -1,47 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
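Wiki and Page above pair through register_wiki, with wiki-specific settings passed as kwargs. A hypothetical BUILD sketch; the wiki target spec and the space/title kwargs are illustrative:

  page(name='onboarding',
    source='onboarding.md',
  ).register_wiki(pants('src/wiki:confluence'),  # resolves to a Wiki target
                  space='ENG', title='Onboarding')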
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-
-@manual.builddict(tags=["jvm"])
-class Exclude(object):
-  """Represents a dependency exclude pattern to filter transitive dependencies against."""
-
-  def __init__(self, org, name=None):
-    """
-    :param string org: Organization of the artifact to filter,
-      known as groupId in Maven parlance.
-    :param string name: Name of the artifact to filter in the org, or filter
-      everything if unspecified.
-    """
-    self.org = org
-    self.name = name
-
-  def __eq__(self, other):
-    return all([other,
-                type(other) == Exclude,
-                self.org == other.org,
-                self.name == other.name])
-
-  def __hash__(self):
-    return hash((self.org, self.name))
-
-  def __ne__(self, other):
-    return not self.__eq__(other)
-
-  def __repr__(self):
-    return "Exclude(org='%s', name=%s)" % (self.org, ('%s' % self.name) if self.name else None)
diff --git a/src/python/twitter/pants/targets/exportable_jvm_library.py b/src/python/twitter/pants/targets/exportable_jvm_library.py
deleted file mode 100644
index e2d3c9806..000000000
--- a/src/python/twitter/pants/targets/exportable_jvm_library.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from .jvm_target import JvmTarget
-
-
-class ExportableJvmLibrary(JvmTarget):
-  """A baseclass for java targets that support being exported to an artifact repository."""
-
-  def __init__(self,
-               name,
-               sources,
-               provides=None,
-               dependencies=None,
-               excludes=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param provides:
-      An optional Dependency object indicating the Ivy artifact to export.
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-
-    # It's critical that provides is set 1st since _provides() is called elsewhere in the
-    # constructor flow.
-    self._provides = provides
-
-    super(ExportableJvmLibrary, self).__init__(
-        name,
-        sources,
-        dependencies,
-        excludes,
-        exclusives=exclusives)
-
-    self.add_labels('exportable')
-
-  @property
-  def provides(self):
-    return self._provides
diff --git a/src/python/twitter/pants/targets/external_dependency.py b/src/python/twitter/pants/targets/external_dependency.py
deleted file mode 100644
index 8df9de8b6..000000000
--- a/src/python/twitter/pants/targets/external_dependency.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from abc import abstractmethod
-from twitter.common.lang import AbstractClass
-
-
-class ExternalDependency(AbstractClass):
-  @abstractmethod
-  def cache_key(self):
-    """
-    Returns the key that can uniquely identify this target in the build cache.
-    """
diff --git a/src/python/twitter/pants/targets/internal.py b/src/python/twitter/pants/targets/internal.py
deleted file mode 100644
index 3a6c6a05f..000000000
--- a/src/python/twitter/pants/targets/internal.py
+++ /dev/null
@@ -1,258 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
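Exclude instances were typically passed in a jvm target's excludes= list. A hypothetical sketch, assuming an exclude BUILD alias for the Exclude class above; specs are illustrative:

  java_library(name='indexer',
    sources=globs('*.java'),
    dependencies=[pants('3rdparty:hadoop-core')],  # illustrative
    excludes=[
      exclude(org='org.slf4j'),            # filter everything in the org
      exclude(org='log4j', name='log4j'),  # filter a single artifact
    ],
  )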
-# ================================================================================================== - -import collections -import copy - -from functools import partial - -from twitter.common.collections import maybe_list, OrderedSet - -from twitter.pants.base.target import Target, TargetDefinitionException - -from .anonymous import AnonymousDeps -from .external_dependency import ExternalDependency -from .jar_dependency import JarDependency -from .util import resolve - - -class InternalTarget(Target): - """A baseclass for targets that support an optional dependency set.""" - - class CycleException(Exception): - """Thrown when a circular dependency is detected.""" - def __init__(self, cycle): - Exception.__init__(self, 'Cycle detected:\n\t%s' % ( - ' ->\n\t'.join(str(target.address) for target in cycle) - )) - - @classmethod - def sort_targets(cls, internal_targets): - """Returns the targets that internal_targets depend on sorted from most dependent to least.""" - roots = OrderedSet() - inverted_deps = collections.defaultdict(OrderedSet) # target -> dependent targets - visited = set() - path = OrderedSet() - - def invert(target): - if target in path: - path_list = list(path) - cycle_head = path_list.index(target) - cycle = path_list[cycle_head:] + [target] - raise cls.CycleException(cycle) - path.add(target) - if target not in visited: - visited.add(target) - if getattr(target, 'internal_dependencies', None): - for internal_dependency in target.internal_dependencies: - if hasattr(internal_dependency, 'internal_dependencies'): - inverted_deps[internal_dependency].add(target) - invert(internal_dependency) - else: - roots.add(target) - path.remove(target) - - for internal_target in internal_targets: - invert(internal_target) - - ordered = [] - visited.clear() - - def topological_sort(target): - if target not in visited: - visited.add(target) - if target in inverted_deps: - for dep in inverted_deps[target]: - topological_sort(dep) - ordered.append(target) - - for root in roots: - topological_sort(root) - - return ordered - - @classmethod - def coalesce_targets(cls, internal_targets, discriminator): - """Returns a list of targets internal_targets depend on sorted from most dependent to least and - grouped where possible by target type as categorized by the given discriminator. - """ - - sorted_targets = filter(discriminator, cls.sort_targets(internal_targets)) - - # can do no better for any of these: - # [] - # [a] - # [a,b] - if len(sorted_targets) <= 2: - return sorted_targets - - # For these, we'd like to coalesce if possible, like: - # [a,b,a,c,a,c] -> [a,a,a,b,c,c] - # adopt a quadratic worst case solution, when we find a type change edge, scan forward for - # the opposite edge and then try to swap dependency pairs to move the type back left to its - # grouping. If the leftwards migration fails due to a dependency constraint, we just stop - # and move on leaving "type islands". 
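Before the coalescing scan below resumes, the sorting contract is easier to see on plain data. A minimal self-contained sketch of the same invert-then-topologically-sort idea used by sort_targets above; dicts stand in for Target objects, and the cycle detection of the real code is omitted:

  def sort_most_dependent_first(deps):
    """deps maps each node to the list of nodes it depends on."""
    inverted = {}  # node -> nodes that depend on it
    roots = []     # nodes with no dependencies of their own
    for node, children in deps.items():
      inverted.setdefault(node, [])
      for child in children:
        inverted.setdefault(child, []).append(node)
      if not children:
        roots.append(node)
    ordered, visited = [], set()
    def visit(node):
      if node not in visited:
        visited.add(node)
        for dependent in inverted[node]:  # emit dependents before the node itself
          visit(dependent)
        ordered.append(node)
    for root in roots:
      visit(root)
    return ordered

  # sort_most_dependent_first({'a': ['b'], 'b': ['c'], 'c': []}) == ['a', 'b', 'c']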
- current_type = None - - # main scan left to right no backtracking - for i in range(len(sorted_targets) - 1): - current_target = sorted_targets[i] - if current_type != discriminator(current_target): - scanned_back = False - - # scan ahead for next type match - for j in range(i + 1, len(sorted_targets)): - look_ahead_target = sorted_targets[j] - if current_type == discriminator(look_ahead_target): - scanned_back = True - - # swap this guy as far back as we can - for k in range(j, i, -1): - previous_target = sorted_targets[k - 1] - mismatching_types = current_type != discriminator(previous_target) - not_a_dependency = look_ahead_target not in previous_target.internal_dependencies - if mismatching_types and not_a_dependency: - sorted_targets[k] = sorted_targets[k - 1] - sorted_targets[k - 1] = look_ahead_target - else: - break # out of k - - break # out of j - - if not scanned_back: # done with coalescing the current type, move on to next - current_type = discriminator(current_target) - - return sorted_targets - - def sort(self): - """Returns a list of targets this target depends on sorted from most dependent to least.""" - return self.sort_targets([self]) - - def coalesce(self, discriminator): - """Returns a list of targets this target depends on sorted from most dependent to least and - grouped where possible by target type as categorized by the given discriminator. - """ - return self.coalesce_targets([self], discriminator) - - def __init__(self, name, dependencies, exclusives=None): - """ - :param string name: The name of this module target, addressable via pants via the - portion of the spec following the colon. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - """ - Target.__init__(self, name, exclusives=exclusives) - self._injected_deps = [] - self._processed_dependencies = resolve(dependencies) - - self.add_labels('internal') - self.dependency_addresses = OrderedSet() - - self._dependencies = OrderedSet() - self._internal_dependencies = OrderedSet() - self._jar_dependencies = OrderedSet() - - if dependencies: - maybe_list(self._processed_dependencies, - expected_type=(ExternalDependency, AnonymousDeps, Target), - raise_type=partial(TargetDefinitionException, self)) - - def add_injected_dependency(self, spec): - self._injected_deps.append(spec) - - def inject_dependencies(self): - self.update_dependencies(resolve(self._injected_deps)) - - @property - def dependencies(self): - self._maybe_apply_deps() - return self._dependencies - - @property - def internal_dependencies(self): - self._maybe_apply_deps() - return self._internal_dependencies - - @property - def jar_dependencies(self): - self._maybe_apply_deps() - return self._jar_dependencies - - def _maybe_apply_deps(self): - if self._processed_dependencies is not None: - self.update_dependencies(self._processed_dependencies) - self._processed_dependencies = None - if self._injected_deps: - self.update_dependencies(resolve(self._injected_deps)) - self._injected_deps = [] - - def update_dependencies(self, dependencies): - if dependencies: - for dependency in dependencies: - if hasattr(dependency, 'address'): - self.dependency_addresses.add(dependency.address) - if not hasattr(dependency, "resolve"): - raise TargetDefinitionException(self, 'Cannot add %s as a dependency of %s' - % (dependency, self)) - for resolved_dependency in dependency.resolve(): - if resolved_dependency.is_concrete and not self.valid_dependency(resolved_dependency): - 
raise TargetDefinitionException(self, 'Cannot add %s as a dependency of %s' - % (resolved_dependency, self)) - self._dependencies.add(resolved_dependency) - if isinstance(resolved_dependency, InternalTarget): - self._internal_dependencies.add(resolved_dependency) - self._jar_dependencies = OrderedSet(filter(lambda tgt: isinstance(tgt, JarDependency), - self._dependencies - self._internal_dependencies)) - - def valid_dependency(self, dep): - """Subclasses can over-ride to reject invalid dependencies.""" - return True - - def replace_dependency(self, dependency, replacement): - self._dependencies.discard(dependency) - self._internal_dependencies.discard(dependency) - self._jar_dependencies.discard(dependency) - self.update_dependencies([replacement]) - - def _walk(self, walked, work, predicate=None): - Target._walk(self, walked, work, predicate) - for dep in self.dependencies: - if isinstance(dep, Target) and not dep in walked: - walked.add(dep) - if not predicate or predicate(dep): - additional_targets = work(dep) - dep._walk(walked, work, predicate) - if additional_targets: - for additional_target in additional_targets: - additional_target._walk(walked, work, predicate) - - def _propagate_exclusives(self): - # Note: this overrides Target._propagate_exclusives without - # calling the supermethod. Targets in pants do not necessarily - # have a dependencies field, or ever have their dependencies - # available at all pre-resolve. Subtypes of InternalTarget, however, - # do have well-defined dependency lists in their dependencies field, - # so we can do a better job propagating their exclusives quickly. - if self.exclusives is not None: - return - self.exclusives = copy.deepcopy(self.declared_exclusives) - for t in self.dependencies: - if isinstance(t, Target): - t._propagate_exclusives() - self.add_to_exclusives(t.exclusives) - elif hasattr(t, "declared_exclusives"): - self.add_to_exclusives(t.declared_exclusives) diff --git a/src/python/twitter/pants/targets/jar_dependency.py b/src/python/twitter/pants/targets/jar_dependency.py deleted file mode 100644 index 1e2dd5793..000000000 --- a/src/python/twitter/pants/targets/jar_dependency.py +++ /dev/null @@ -1,238 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from collections import defaultdict - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.build_manual import manual -from twitter.pants.base.target import AbstractTarget - -from .exclude import Exclude -from .external_dependency import ExternalDependency - - -class Artifact(object): - """ - Specification for an Ivy Artifact for this jar dependency. 
- - See: http://ant.apache.org/ivy/history/latest-milestone/ivyfile/artifact.html - """ - - _HASH_KEYS = ( - 'name', - 'type_', - 'ext', - 'conf', - 'url', - 'classifier', - ) - - def __init__(self, name, type_=None, ext=None, conf=None, url=None, classifier=None): - """ - :param name: The name of the published artifact. This name must not include revision. - :param type_: The type of the published artifact. It's usually its extension, but not - necessarily. For instance, ivy files are of type 'ivy' but have 'xml' extension. - :param ext: The extension of the published artifact. - :param conf: The public configuration in which this artifact is published. The '*' wildcard can - be used to designate all public configurations. - :param url: The url at which this artifact can be found if it isn't located at the standard - location in the repository - :param classifier: The maven classifier of this artifact. - """ - self.name = name - self.type_ = type_ or 'jar' - self.ext = ext - self.conf = conf - self.url = url - self.classifier = classifier - - def cache_key(self): - return ''.join(str(getattr(self, key)) for key in self._HASH_KEYS) - - def __repr__(self): - return ('Artifact(%r, type_=%r, ext=%r, conf=%r, url=%r, classifier=%r)' - % (self.name, self.type_, self.ext, self.conf, self.url, self.classifier)) - - - -@manual.builddict(tags=["jvm"]) -class JarDependency(ExternalDependency, AbstractTarget): - """A pre-built Maven repository dependency.""" - - _JAR_HASH_KEYS = ( - 'org', - 'name', - 'rev', - 'force', - 'excludes', - 'transitive', - 'mutable', - ) - - def __init__(self, org, name, rev=None, force=False, ext=None, url=None, apidocs=None, - type_=None, classifier=None, mutable=None, exclusives=None): - """ - :param string org: The Maven ``groupId`` of this dependency. - :param string name: The Maven ``artifactId`` of this dependency. - :param string rev: The Maven ``version`` of this dependency. - If unspecified the latest available version is used. - :param boolean force: Force this specific artifact revision even if other transitive - dependencies specify a different revision. This requires specifying the ``rev`` parameter. - :param string ext: Extension of the artifact if different from the artifact type. - This is sometimes needed for artifacts packaged with Maven bundle type but stored as jars. - :param string url: URL of this artifact, if different from the Maven repo standard location - (specifying this parameter is unusual). - :param string apidocs: URL of existing javadocs, which if specified, pants-generated javadocs - will properly hyperlink {\ @link}s. - :param string type_: Artifact packaging type. - :param string classifier: Classifier specifying the artifact variant to use. - Use ``with_artifact`` to include multiple artifacts with different classifiers. - :param boolean mutable: Inhibit caching of this mutable artifact. A common use is for - Maven -SNAPSHOT style artifacts in an active development/integration cycle. 
- """ - self.org = org - self.name = name - self.rev = rev - self.force = force - self.excludes = [] - self.transitive = True - self.apidocs = apidocs - self.mutable = mutable - self._classifier = classifier - - self.artifacts = [] - if ext or url or type_ or classifier: - self.with_artifact(name=name, type_=type_, ext=ext, url=url, classifier=classifier) - - self.id = "%s-%s-%s" % (self.org, self.name, self.rev) - self._configurations = ['default'] - self.declared_exclusives = defaultdict(set) - if exclusives is not None: - for k in exclusives: - self.declared_exclusives[k] |= exclusives[k] - - # Support legacy method names - # TODO(John Sirois): introduce a deprecation cycle for these and then kill - self.withSources = self.with_sources - self.withDocs = self.with_docs - - self.declared_exclusives = defaultdict(set) - if exclusives is not None: - for k in exclusives: - self.declared_exclusives[k] |= exclusives[k] - - @property - def is_jar(self): - return True - - @property - def configurations(self): - confs = OrderedSet(self._configurations) - confs.update(artifact.conf for artifact in self.artifacts if artifact.conf) - return list(confs) - - @property - def classifier(self): - """Returns the maven classifier for this jar dependency. - - If the classifier is ambiguous; ie: there was no classifier set in the constructor and the jar - dependency has multiple attached artifacts, a :class:`ValueError` is raised. - """ - if self._classifier or len(self.artifacts) == 0: - return self._classifier - elif len(self.artifacts) == 1: - return self.artifacts[0].classifier - else: - raise ValueError('Cannot determine classifier. No explicit classifier is set and this jar ' - 'has more than 1 artifact: %s\n\t%s' - % (self, '\n\t'.join(map(str, self.artifacts)))) - - @manual.builddict() - def exclude(self, org, name=None): - """Adds a transitive dependency of this jar to the exclude list.""" - - self.excludes.append(Exclude(org, name)) - return self - - @manual.builddict() - def intransitive(self): - """Declares this Dependency intransitive, indicating only the jar for the dependency itself - should be downloaded and placed on the classpath""" - - self.transitive = False - return self - - @manual.builddict() - def with_sources(self): - """This requests the artifact have its source jar fetched. - (This implies there *is* a source jar to fetch.) Used in contexts - that can use source jars (as of 2013, just eclipse and idea goals).""" - self._configurations.append('sources') - return self - - def with_docs(self): - """This requests the artifact have its javadoc jar fetched. - (This implies there *is* a javadoc jar to fetch.) Used in contexts - that can use source jars (as of 2014, just eclipse and idea goals).""" - self._configurations.append('javadoc') - return self - - # TODO: This is necessary duck-typing because in some places JarDependency is treated like - # a Target, even though it doesn't extend Target. Probably best to fix that. - def has_label(self, label): - return False - - def with_artifact(self, name=None, type_=None, ext=None, url=None, configuration=None, - classifier=None): - """Sets an alternative artifact to fetch or adds additional artifacts if called multiple times. 
- """ - artifact = Artifact(name or self.name, type_=type_, ext=ext, url=url, conf=configuration, - classifier=classifier) - self.artifacts.append(artifact) - return self - - def __eq__(self, other): - result = (isinstance(other, JarDependency) - and self.org == other.org - and self.name == other.name - and self.rev == other.rev) - return result - - def __hash__(self): - return hash((self.org, self.name, self.rev)) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return self.id - - def cache_key(self): - key = ''.join(str(getattr(self, key)) for key in self._JAR_HASH_KEYS) - key += ''.join(sorted(self._configurations)) - key += ''.join(a.cache_key() for a in sorted(self.artifacts, key=lambda a: a.name + a.type_)) - return key - - def resolve(self): - yield self - - def walk(self, work, predicate=None): - if not predicate or predicate(self): - work(self) - - def _as_jar_dependencies(self): - yield self diff --git a/src/python/twitter/pants/targets/jar_library.py b/src/python/twitter/pants/targets/jar_library.py deleted file mode 100644 index 6852965c4..000000000 --- a/src/python/twitter/pants/targets/jar_library.py +++ /dev/null @@ -1,146 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from functools import partial - -from twitter.common.collections import maybe_list, OrderedSet - -from twitter.pants.base.build_manual import manual -from twitter.pants.base.target import Target, TargetDefinitionException - -from . import util -from .anonymous import AnonymousDeps -from .exclude import Exclude -from .external_dependency import ExternalDependency -from .exportable_jvm_library import ExportableJvmLibrary -from .pants_target import Pants -from .jar_dependency import JarDependency - - -@manual.builddict(tags=["anylang"]) -class JarLibrary(Target): - """A set of dependencies that may be depended upon, - as if depending upon the set of dependencies directly. - """ - - def __init__(self, name, dependencies, overrides=None, exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :param overrides: List of strings, each of which will be recursively resolved to - any targets that provide artifacts. Those artifacts will override corresponding - direct/transitive dependencies in the dependencies list. - :param exclusives: An optional map of exclusives tags. See CheckExclusives for details. 
- """ - super(JarLibrary, self).__init__(name, exclusives=exclusives) - - self._pre_override_dependencies = OrderedSet( - maybe_list(util.resolve(dependencies), - expected_type=(ExternalDependency, AnonymousDeps, Target), - raise_type=partial(TargetDefinitionException, self))) - self._dependencies = None - self._dependency_addresses = None - self.override_targets = set(map(Pants, overrides or [])) - self.add_labels('jars') - - @property - def dependencies(self): - if self._dependencies is None: - # compute overridden dependencies - self._dependencies = self._resolve_overrides() - return self._dependencies - - @property - def dependency_addresses(self): - if self._dependency_addresses is None: - self._dependency_addresses = set() - for dependency in self.dependencies: - if hasattr(dependency, 'address'): - self._dependency_addresses.add(dependency.address) - # If the dependency is one that supports exclusives, the JarLibrary's - # exclusives should be added to it. - if hasattr(dependency, 'declared_exclusives'): - for k in self.declared_exclusives: - dependency.declared_exclusives[k] |= self.declared_exclusives[k] - return self._dependency_addresses - - def resolve(self): - yield self - for dependency in self.dependencies: - for resolved_dependency in dependency.resolve(): - yield resolved_dependency - - def _resolve_overrides(self): - """ - Resolves override targets, and then excludes and re-includes each of them - to create and return a new dependency set. - """ - if not self.override_targets: - return self._pre_override_dependencies - - result = OrderedSet() - - # resolve overrides and fetch all of their "artifact-providing" dependencies - excludes = set() - for override_target in self.override_targets: - # add pre_override deps of the target as exclusions - for resolved in override_target.resolve(): - excludes.update(self._excludes(resolved)) - # prepend the target as a new target - result.add(override_target) - - # add excludes for each artifact - for direct_dep in self._pre_override_dependencies: - # add relevant excludes to jar dependencies - for jar_dep in self._jar_dependencies(direct_dep): - for exclude in excludes: - jar_dep.exclude(exclude.org, exclude.name) - result.add(direct_dep) - - return result - - def _excludes(self, dep): - """ - A generator for Exclude objects that will recursively exclude all artifacts - provided by the given dep. - """ - if isinstance(dep, JarDependency): - yield Exclude(dep.org, dep.name) - elif isinstance(dep, ExportableJvmLibrary): - if not dep.provides: - raise TargetDefinitionException(self, - 'Targets passed to `overrides` must represent published artifacts. %s does not.' % dep) - yield Exclude(dep.provides.org, dep.provides.name) - elif isinstance(dep, JarLibrary): - for d in dep._pre_override_dependencies: - for exclude in self._excludes(d): - yield exclude - - def _jar_dependencies(self, dep): - """ - A generator for JarDependencies transitively included by the given dep. 
- """ - if isinstance(dep, JarDependency): - yield dep - elif isinstance(dep, JarLibrary): - for direct_dep in dep._pre_override_dependencies: - for dep in self._jar_dependencies(direct_dep): - yield dep - elif isinstance(dep, Pants): - for d in self._jar_dependencies(dep.get()): - yield d diff --git a/src/python/twitter/pants/targets/jarable.py b/src/python/twitter/pants/targets/jarable.py deleted file mode 100644 index 9a3e12fb3..000000000 --- a/src/python/twitter/pants/targets/jarable.py +++ /dev/null @@ -1,54 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from abc import abstractproperty - -from twitter.common.lang import AbstractClass - -from .jar_dependency import JarDependency - - -class Jarable(AbstractClass): - """A mixin that identifies a target as one that can provide a jar.""" - - @abstractproperty - def identifier(self): - """Subclasses should return a stable unique identifier for the jarable target.""" - - @property - def provides(self): - """Returns an optional :class:`twitter.pants.targets.Artifact` if this target is exportable. - - Subclasses should override to provide an artifact descriptor when one applies, by default None - is supplied. - """ - return None - - def get_artifact_info(self): - """Returns a triple composed of a :class:`twitter.pants.targets.jar_dependency.JarDependency` - describing the jar for this target, this target's artifact identifier and a bool indicating if - this target is exportable. - """ - exported = bool(self.provides) - - org = self.provides.org if exported else 'internal' - module = self.provides.name if exported else self.identifier - - id_ = "%s-%s" % (self.provides.org, self.provides.name) if exported else self.identifier - - # TODO(John Sirois): This should return something less than a JarDependency encapsulating just - # the org and name. Perhaps a JarFamily? - return JarDependency(org=org, name=module, rev=None), id_, exported diff --git a/src/python/twitter/pants/targets/java_agent.py b/src/python/twitter/pants/targets/java_agent.py deleted file mode 100644 index b825e6d0c..000000000 --- a/src/python/twitter/pants/targets/java_agent.py +++ /dev/null @@ -1,127 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =====================================
-
-from twitter.common.lang import Compatibility
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.target import TargetDefinitionException
-
-from .java_library import JavaLibrary
-
-
-@manual.builddict(tags=['jvm'])
-class JavaAgent(JavaLibrary):
-  """Defines a java agent entrypoint."""
-
-  def __init__(self,
-               name,
-               sources=None,
-               dependencies=None,
-               excludes=None,
-               resources=None,
-               exclusives=None,
-               premain=None,
-               agent_class=None,
-               can_redefine=False,
-               can_retransform=False,
-               can_set_native_method_prefix=False):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param resources: An optional list of file paths (DEPRECATED) or
-      ``resources`` targets (which in turn point to file paths). The paths
-      indicate text file resources to place in this module's jar.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    :param string premain: When an agent is specified at JVM launch time this attribute specifies
-      the agent class. Exactly one of ``premain`` or ``agent_class`` must be specified.
-    :param string agent_class: If an implementation supports a mechanism to start agents sometime
-      after the VM has started then this attribute specifies the agent class. Exactly one of
-      ``premain`` or ``agent_class`` must be specified.
-    :param bool can_redefine: `True` if the ability to redefine classes is needed by this agent;
-      `False` by default.
-    :param bool can_retransform: `True` if the ability to retransform classes is needed by this
-      agent; `False` by default.
-    :param bool can_set_native_method_prefix: `True` if the ability to set the native method prefix
-      is needed by this agent; `False` by default.
-    """
-
-    super(JavaAgent, self).__init__(
-        name,
-        sources,
-        provides=None,
-        dependencies=dependencies,
-        excludes=excludes,
-        resources=resources,
-        exclusives=exclusives)
-
-    if not (premain or agent_class):
-      raise TargetDefinitionException(self, "Must have at least one of 'premain' or 'agent_class' "
-                                            "defined.")
-    if premain and not isinstance(premain, Compatibility.string):
-      raise TargetDefinitionException(self, 'The premain must be a fully qualified class name, '
                                            'given %s of type %s' % (premain, type(premain)))
-
-    if agent_class and not isinstance(agent_class, Compatibility.string):
-      raise TargetDefinitionException(self,
-                                      'The agent_class must be a fully qualified class name, given '
-                                      '%s of type %s' % (agent_class, type(agent_class)))
-
-    self._premain = premain
-    self._agent_class = agent_class
-    self._can_redefine = can_redefine
-    self._can_retransform = can_retransform
-    self._can_set_native_method_prefix = can_set_native_method_prefix
-
-    self.add_labels('java_agent')
-
-  @property
-  def premain(self):
-    """The launch time agent fully qualified class name.
-
-    Either ``agent_class`` or ``premain`` will be defined and the other will be `None`.
-    """
-    return self._premain
-
-  @property
-  def agent_class(self):
-    """The post-launch-time agent fully qualified class name.
-
-    Either ``agent_class`` or ``premain`` will be defined and the other will be `None`.
-    """
-    return self._agent_class
-
-  @property
-  def can_redefine(self):
-    """Returns `True` if the ability to redefine classes is needed by this agent."""
-    return self._can_redefine
-
-  @property
-  def can_retransform(self):
-    """Returns `True` if the ability to retransform classes is needed by this agent."""
-    return self._can_retransform
-
-  @property
-  def can_set_native_method_prefix(self):
-    """Returns `True` if the ability to set the native method prefix is needed by this agent."""
-    return self._can_set_native_method_prefix
diff --git a/src/python/twitter/pants/targets/java_antlr_library.py b/src/python/twitter/pants/targets/java_antlr_library.py
deleted file mode 100644
index be9fa79dd..000000000
--- a/src/python/twitter/pants/targets/java_antlr_library.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# ================================================================================================== - -__author__ = 'Brian Larson' - -from twitter.pants.base.build_manual import manual - -from .exportable_jvm_library import ExportableJvmLibrary - - -@manual.builddict(tags=["jvm"]) -class JavaAntlrLibrary(ExportableJvmLibrary): - """Generates a stub Java library from Antlr grammar files.""" - - def __init__(self, - name, - sources, - provides=None, - dependencies=None, - excludes=None, - compiler='antlr3'): - - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances - to filter this target's transitive dependencies against. - :param compiler: The name of the compiler used to compile the ANTLR files. - Currently only supports 'antlr3' and 'antlr4' - """ - - ExportableJvmLibrary.__init__(self, - name, - sources, - provides, - dependencies, - excludes) - self.add_labels('codegen') - - if compiler not in ('antlr3', 'antlr4'): - raise ValueError("Illegal value for 'compiler': {}".format(compiler)) - self.compiler = compiler diff --git a/src/python/twitter/pants/targets/java_library.py b/src/python/twitter/pants/targets/java_library.py deleted file mode 100644 index 9358f819a..000000000 --- a/src/python/twitter/pants/targets/java_library.py +++ /dev/null @@ -1,74 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ===================================== - -from twitter.pants.base.build_manual import manual -from twitter.pants.base.target import TargetDefinitionException - -from .exportable_jvm_library import ExportableJvmLibrary -from .resources import WithResources - - -@manual.builddict(tags=['java']) -class JavaLibrary(ExportableJvmLibrary, WithResources): - """A collection of Java code. - - Normally has conceptually-related sources; invoking the ``compile`` goal - on this target compiles Java and generates classes. Invoking the ``jar`` - goal on this target creates a ``.jar``; but that's an unusual thing to do. - Instead, a ``jvm_binary`` might depend on this library; that binary is a - more sensible thing to bundle. 
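A hypothetical BUILD sketch for the JavaAntlrLibrary deleted above; the grammar file name is illustrative:

  java_antlr_library(name='query-grammar',
    sources=['Query.g'],
    compiler='antlr3',  # only 'antlr3' and 'antlr4' pass the constructor check
  )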
- """ - - def __init__(self, - name, - sources=None, - provides=None, - dependencies=None, - excludes=None, - resources=None, - exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances - to filter this target's transitive dependencies against. - :param resources: An optional list of file paths (DEPRECATED) or - ``resources`` targets (which in turn point to file paths). The paths - indicate text file resources to place in this module's jar. - :param exclusives: An optional map of exclusives tags. See CheckExclusives for details. - """ - super(JavaLibrary, self).__init__( - name, - sources, - provides, - dependencies, - excludes, - exclusives=exclusives) - - if (sources is None) and (resources is None): - raise TargetDefinitionException(self, 'Must specify sources and/or resources.') - - self.resources = resources - self.add_labels('java') diff --git a/src/python/twitter/pants/targets/java_protobuf_library.py b/src/python/twitter/pants/targets/java_protobuf_library.py deleted file mode 100644 index 8cc3afe75..000000000 --- a/src/python/twitter/pants/targets/java_protobuf_library.py +++ /dev/null @@ -1,61 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.build_manual import manual - -from .exportable_jvm_library import ExportableJvmLibrary - - -@manual.builddict(tags=["java"]) -class JavaProtobufLibrary(ExportableJvmLibrary): - """Generates a stub Java library from protobuf IDL files.""" - - def __init__(self, - name, - sources, - provides=None, - dependencies=None, - excludes=None, - buildflags=None, - exclusives=None): - - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. 
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param buildflags: Unused, and will be removed in a future release.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-
-    ExportableJvmLibrary.__init__(self,
-                                  name,
-                                  sources,
-                                  provides,
-                                  dependencies,
-                                  excludes,
-                                  exclusives=exclusives)
-
-    self.add_labels('codegen')
diff --git a/src/python/twitter/pants/targets/java_tests.py b/src/python/twitter/pants/targets/java_tests.py
deleted file mode 100644
index a85a85339..000000000
--- a/src/python/twitter/pants/targets/java_tests.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-from .jvm_target import JvmTarget
-from .resources import WithResources
-
-
-@manual.builddict(tags=['jvm'])
-class JavaTests(JvmTarget, WithResources):
-  """Tests JVM sources with JUnit."""
-
-  def __init__(self,
-               name,
-               sources=None,
-               dependencies=None,
-               excludes=None,
-               resources=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param resources: An optional list of ``resources`` targets containing text
-      file resources to place in this module's jar.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-    super(JavaTests, self).__init__(name, sources, dependencies, excludes, exclusives=exclusives)
-
-    self.resources = resources
-
-    # TODO(John Sirois): These could be scala, clojure, etc. 'jvm' and 'tests' are the only truly
-    # applicable labels - fixup the 'java' misnomer.
diff --git a/src/python/twitter/pants/targets/java_tests.py b/src/python/twitter/pants/targets/java_tests.py
deleted file mode 100644
index a85a85339..000000000
--- a/src/python/twitter/pants/targets/java_tests.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-from .jvm_target import JvmTarget
-from .resources import WithResources
-
-
-@manual.builddict(tags=['jvm'])
-class JavaTests(JvmTarget, WithResources):
-  """Tests JVM sources with JUnit."""
-
-  def __init__(self,
-               name,
-               sources=None,
-               dependencies=None,
-               excludes=None,
-               resources=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param resources: An optional list of ``resources`` targets containing text
-      file resources to place in this module's jar.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-    super(JavaTests, self).__init__(name, sources, dependencies, excludes, exclusives=exclusives)
-
-    self.resources = resources
-
-    # TODO(John Sirois): These could be scala, clojure, etc. 'jvm' and 'tests' are the only truly
-    # applicable labels - fixup the 'java' misnomer.
-    self.add_labels('java', 'tests')
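A sketch of a matching java_tests declaration (hypothetical paths; note that resources takes resources targets, per the docstring above):

    java_tests(name='tests',
      sources=globs('*Test.java'),
      resources=[pants('tests/resources/com/example:fixtures')],
      dependencies=[
        pants('3rdparty:junit'),
        pants('src/java/com/example:example'),
      ],
    )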
diff --git a/src/python/twitter/pants/targets/java_thrift_library.py b/src/python/twitter/pants/targets/java_thrift_library.py
deleted file mode 100644
index e324d55a6..000000000
--- a/src/python/twitter/pants/targets/java_thrift_library.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from collections import Iterable
-from functools import partial
-
-from twitter.common.collections import maybe_list
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.target import TargetDefinitionException
-
-from .jar_dependency import JarDependency
-from .jvm_target import JvmTarget
-from .pants_target import Pants
-
-
-@manual.builddict(tags=['java'])
-class JavaThriftLibrary(JvmTarget):
-  """Generates a stub Java or Scala library from thrift IDL files."""
-
-  _COMPILERS = frozenset(['thrift', 'scrooge', 'scrooge-legacy'])
-  _COMPILER_DEFAULT = 'thrift'
-
-  _LANGUAGES = frozenset(['java', 'scala'])
-  _LANGUAGE_DEFAULT = 'java'
-
-  _RPC_STYLES = frozenset(['sync', 'finagle', 'ostrich'])
-  _RPC_STYLE_DEFAULT = 'sync'
-
-  def __init__(self,
-               name,
-               sources,
-               provides=None,
-               dependencies=None,
-               excludes=None,
-               compiler=_COMPILER_DEFAULT,
-               language=_LANGUAGE_DEFAULT,
-               rpc_style=_RPC_STYLE_DEFAULT,
-               namespace_map=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param Artifact provides:
-      The :class:`twitter.pants.targets.artifact.Artifact`
-      to publish that represents this target outside the repo.
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param compiler: An optional compiler used to compile the thrift files.
-      One of 'thrift', 'scrooge' or 'scrooge-legacy' with a default of 'thrift'.
-    :param language: The language used to generate the output files.
-      One of 'java' or 'scala' with a default of 'java'.
-    :param rpc_style: An optional rpc style to generate service stubs with.
-      One of 'sync', 'finagle' or 'ostrich' with a default of 'sync'.
-    :param namespace_map: A dictionary of namespaces to remap (old: new).
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-
-    # It's critical that provides is set 1st since _provides() is called elsewhere in the
-    # constructor flow.
-    self._provides = provides
-
-    super(JavaThriftLibrary, self).__init__(
-        name,
-        sources,
-        dependencies,
-        excludes,
-        exclusives=exclusives)
-
-    self.add_labels('codegen')
-
-    if dependencies:
-      if not isinstance(dependencies, Iterable):
-        raise TargetDefinitionException(self,
-                                        'dependencies must be Iterable but was: %s' % dependencies)
-      maybe_list(dependencies, expected_type=(JarDependency, JavaThriftLibrary, Pants),
-                 raise_type=partial(TargetDefinitionException, self))
-
-    def check_value_for_arg(arg, value, values):
-      if value not in values:
-        raise TargetDefinitionException(self, "%s may only be set to %s ('%s' not valid)" %
-                                        (arg, ', or '.join(map(repr, values)), value))
-      return value
-
-    # TODO(John Sirois): The defaults should be grabbed from the workspace config.
-
-    # some gen BUILD files explicitly set this to None
-    compiler = compiler or self._COMPILER_DEFAULT
-    self.compiler = check_value_for_arg('compiler', compiler, self._COMPILERS)
-
-    language = language or self._LANGUAGE_DEFAULT
-    self.language = check_value_for_arg('language', language, self._LANGUAGES)
-
-    rpc_style = rpc_style or self._RPC_STYLE_DEFAULT
-    self.rpc_style = check_value_for_arg('rpc_style', rpc_style, self._RPC_STYLES)
-
-    self.namespace_map = namespace_map
-
-  @property
-  def is_thrift(self):
-    return True
-
-  @property
-  def provides(self):
-    return self._provides
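The compiler/language/rpc_style validation above maps directly onto BUILD usage; a Scala finagle service stub, for example, would have been requested like this (target name, source file, and namespaces are hypothetical):

    java_thrift_library(name='thrift-scala',
      sources=['service.thrift'],
      compiler='scrooge',     # one of: thrift, scrooge, scrooge-legacy
      language='scala',       # one of: java, scala
      rpc_style='finagle',    # one of: sync, finagle, ostrich
      namespace_map={'com.example.thriftjava': 'com.example.thriftscala'},
    )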
diff --git a/src/python/twitter/pants/targets/jvm_binary.py b/src/python/twitter/pants/targets/jvm_binary.py
deleted file mode 100644
index bf5c47ab9..000000000
--- a/src/python/twitter/pants/targets/jvm_binary.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-from functools import partial
-
-from twitter.common.collections import maybe_list
-from twitter.common.dirutil import Fileset
-from twitter.common.lang import Compatibility
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import TargetDefinitionException
-
-from . import util
-from .internal import InternalTarget
-from .jar_library import JarLibrary
-from .jvm_target import JvmTarget
-from .pants_target import Pants
-from .resources import WithResources
-
-
-@manual.builddict(tags=["jvm"])
-class JvmBinary(JvmTarget, WithResources):
-  """Produces a JVM binary optionally identifying a launcher main class.
-
-  Below is a summary of how key goals affect targets of this type:
-
-  * ``bundle`` - Creates a self-contained directory with the binary and all
-    its dependencies, optionally archived, suitable for deployment.
-  * ``binary`` - Creates an executable jar of the binary. On the JVM
-    this means the jar has a manifest specifying the main class.
-  * ``run`` - Executes the main class of this binary locally.
-  """
-  def __init__(self, name,
-               main=None,
-               basename=None,
-               source=None,
-               resources=None,
-               dependencies=None,
-               excludes=None,
-               deploy_excludes=None,
-               configurations=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param string main: The name of the ``main`` class, e.g.,
-      ``'com.twitter.common.examples.pingpong.Main'``. This class may be
-      present as the source of this target or depended-upon library.
-    :param string basename: Base name for the generated ``.jar`` file, e.g.,
-      ``'pingpong'``. (By default, uses ``name`` param)
-    :param string source: Name of one ``.java`` or ``.scala`` file (a good
-      place for a ``main``).
-    :param resources: List of ``resource``\s to include in bundle.
-    :param dependencies: List of targets (probably ``java_library`` and
-      ``scala_library`` targets) to "link" in.
-    :param excludes: List of ``exclude``\s to filter this target's transitive
-      dependencies against.
-    :param deploy_excludes: List of ``excludes`` to apply at deploy time.
-      If you, for example, deploy a java servlet that has one version of
-      ``servlet.jar`` onto a Tomcat environment that provides another version,
-      they might conflict. ``deploy_excludes`` gives you a way to build your
-      code but exclude the conflicting ``jar`` when deploying.
-    :param configurations: Ivy configurations to resolve for this target.
-      This parameter is not intended for general use.
-    :type configurations: tuple of strings
-    """
-    super(JvmBinary, self).__init__(name=name,
-                                    sources=[source] if source else None,
-                                    dependencies=dependencies,
-                                    excludes=excludes,
-                                    configurations=configurations,
-                                    exclusives=exclusives)
-
-    if main and not isinstance(main, Compatibility.string):
-      raise TargetDefinitionException(self, 'main must be a fully qualified classname')
-
-    if source and not isinstance(source, Compatibility.string):
-      raise TargetDefinitionException(self, 'source must be a single relative file path')
-
-    self.main = main
-    self.basename = basename or name
-    self.resources = resources
-    self.deploy_excludes = deploy_excludes or []
-
-
-class RelativeToMapper(object):
-  """A mapper that maps files specified relative to a base directory."""
-
-  def __init__(self, base):
-    """The base directory files should be mapped from."""
-
-    self.base = base
-
-  def __call__(self, file):
-    return os.path.relpath(file, self.base)
-
-  def __repr__(self):
-    return 'RelativeToMapper(%s)' % self.base
-
-
-@manual.builddict(tags=["jvm"])
-class Bundle(object):
-  """A set of files to include in an application bundle.
-
-  Looking for Java-style resources accessible via the ``Class.getResource`` API?
-  Those are :ref:`bdict_resources`\ .
-
-  Files added to the bundle will be included when bundling an application target.
-  By default relative paths are preserved. For example, to include ``config``
-  and ``scripts`` directories: ::
-
-    bundles=[
-      bundle().add(rglobs('config/*', 'scripts/*')),
-    ]
-
-  To include files relative to some path component use the ``relative_to`` parameter.
-  The following places the contents of ``common/config`` in a ``config`` directory
-  in the bundle. ::
-
-    bundles=[
-      bundle(relative_to='common').add(globs('common/config/*'))
-    ]
-  """
-
-  def __init__(self, base=None, mapper=None, relative_to=None):
-    """
-    :param base: Base path of the "source" file paths. By default, path of the
-      BUILD file. Useful for assets that don't live in the source code repo.
-    :param mapper: Function that takes a path string and returns a path string. Takes a path in
-      the source tree, returns a path to use in the resulting bundle. By default, an identity
-      mapper.
-    :param string relative_to: Set up a simple mapping from source path to bundle path.
-      E.g., ``relative_to='common'`` removes that prefix from all files in the application bundle.
-    """
-    if mapper and relative_to:
-      raise ValueError("Must specify at most one of 'mapper' or 'relative_to'")
-
-    self._base = base or ParseContext.path()
-
-    if relative_to:
-      base = os.path.join(self._base, relative_to)
-      if not os.path.isdir(base):
-        raise ValueError('Could not find a directory to bundle relative to at %s' % base)
-      self.mapper = RelativeToMapper(base)
-    else:
-      self.mapper = mapper or RelativeToMapper(self._base)
-
-    self.filemap = {}
-
-  @manual.builddict()
-  def add(self, *filesets):
-    """Add files to the bundle, where ``filesets`` is a filename, ``globs``, or ``rglobs``.
-    Note this is a variable length param and may be specified any number of times.
-    """
-    for fileset in filesets:
-      paths = fileset() if isinstance(fileset, Fileset) \
-        else fileset if hasattr(fileset, '__iter__') \
-        else [fileset]
-      for path in paths:
-        abspath = path
-        if not os.path.isabs(abspath):
-          abspath = os.path.join(self._base, path)
-        if not os.path.exists(abspath):
-          raise ValueError('Given path: %s with absolute path: %s which does not exist'
-                           % (path, abspath))
-        self.filemap[abspath] = self.mapper(abspath)
-    return self
-
-  def resolve(self):
-    yield self
-
-  def __repr__(self):
-    return 'Bundle(%s, %s)' % (self.mapper, self.filemap)
- """ - super(JvmApp, self).__init__(name, dependencies=[]) - - self._binaries = maybe_list( - util.resolve(binary), - expected_type=(Pants, JarLibrary, JvmBinary), - raise_type=partial(TargetDefinitionException, self)) - - self._bundles = maybe_list(bundles, expected_type=Bundle, - raise_type=partial(TargetDefinitionException, self)) - - if name == basename: - raise TargetDefinitionException(self, 'basename must not equal name.') - self.basename = basename or name - - self._resolved_binary = None - self._resolved_bundles = [] - - def is_jvm_app(self): - return True - - @property - def binary(self): - self._maybe_resolve_binary() - return self._resolved_binary - - def _maybe_resolve_binary(self): - if self._binaries is not None: - binaries_list = [] - for binary in self._binaries: - binaries_list.extend(filter(lambda t: t.is_concrete, binary.resolve())) - - if len(binaries_list) != 1 or not isinstance(binaries_list[0], JvmBinary): - raise TargetDefinitionException(self, - 'must supply exactly 1 JvmBinary, got %s' % binaries_list) - self._resolved_binary = binaries_list[0] - self.update_dependencies([self._resolved_binary]) - self._binaries = None - - @property - def bundles(self): - self._maybe_resolve_bundles() - return self._resolved_bundles - - def _maybe_resolve_bundles(self): - if self._bundles is not None: - def is_resolvable(item): - return hasattr(item, 'resolve') - - def is_bundle(bundle): - return isinstance(bundle, Bundle) - - def resolve(item): - return list(item.resolve()) if is_resolvable(item) else [None] - - if is_resolvable(self._bundles): - self._bundles = resolve(self._bundles) - - try: - for item in iter(self._bundles): - for bundle in resolve(item): - if not is_bundle(bundle): - raise TypeError() - self._resolved_bundles.append(bundle) - except TypeError: - raise TargetDefinitionException(self, 'bundles must be one or more Bundle objects, ' - 'got %s' % self._bundles) - self._bundles = None - - @property - def dependencies(self): - self._maybe_resolve_binary() - return super(JvmApp, self).dependencies - - def resolve(self): - # TODO(John Sirois): Clean this up when BUILD parse refactoring is tackled. - unused_resolved_binary = self.binary - unused_resolved_bundles = self.bundles - - for resolved in super(JvmApp, self).resolve(): - yield resolved diff --git a/src/python/twitter/pants/targets/jvm_target.py b/src/python/twitter/pants/targets/jvm_target.py deleted file mode 100644 index b09a53949..000000000 --- a/src/python/twitter/pants/targets/jvm_target.py +++ /dev/null @@ -1,63 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/src/python/twitter/pants/targets/jvm_target.py b/src/python/twitter/pants/targets/jvm_target.py
deleted file mode 100644
index b09a53949..000000000
--- a/src/python/twitter/pants/targets/jvm_target.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-from twitter.common.collections import maybe_list
-
-from .exclude import Exclude
-from .internal import InternalTarget
-from .jarable import Jarable
-from .with_sources import TargetWithSources
-
-
-class JvmTarget(InternalTarget, TargetWithSources, Jarable):
-  """A base class for all java module targets that provides path and dependency translation."""
-
-  def __init__(self,
-               name,
-               sources,
-               dependencies,
-               excludes=None,
-               configurations=None,
-               exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: One or more :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param configurations: One or more ivy configurations to resolve for this target.
-      This parameter is not intended for general use.
-    :type configurations: tuple of strings
-    """
-    InternalTarget.__init__(self, name, dependencies, exclusives=exclusives)
-    TargetWithSources.__init__(self, name, sources)
-
-    self.add_labels('jvm')
-    for source in self.sources:
-      rel_path = os.path.join(self.target_base, source)
-      TargetWithSources.register_source(rel_path, self)
-    self.excludes = maybe_list(excludes or [], Exclude)
-    self.configurations = maybe_list(configurations or [])
-
-  def _provides(self):
-    return None
" - "Please identify this reference and correct the issue: ") - - def __init__(self, spec, exclusives=None): - """ - :param string spec: target address. E.g., `src/java/com/twitter/common/util/BUILD\:util` - """ - # it's critical the spec is parsed 1st, the results are needed elsewhere in constructor flow - parse_context = ParseContext.locate() - - def parse_address(): - if spec.startswith(':'): - # the :[target] could be in a sibling BUILD - so parse using the canonical address - pathish = "%s:%s" % (parse_context.buildfile.canonical_relpath, spec[1:]) - return Address.parse(parse_context.buildfile.root_dir, pathish, False) - else: - return Address.parse(parse_context.buildfile.root_dir, spec, False) - - try: - self.address = parse_address() - except IOError as e: - self.address = parse_context.buildfile.relpath - raise TargetDefinitionException(self, '%s%s' % (self._DEFINITION_ERROR_MSG, e)) - - # We must disable the re-init check, because our funky __getattr__ breaks it. - # We're not involved in any multiple inheritance, so it's OK to disable it here. - super(Pants, self).__init__(self.address.target_name, reinit_check=False, exclusives=exclusives) - - def _register(self): - # A pants target is a pointer, do not register it as an actual target (see resolve). - pass - - def _locate(self): - return self.address - - def resolve(self): - # De-reference this pants pointer to an actual parsed target. - resolved = Target.get(self.address) - if not resolved: - raise TargetDefinitionException(self, '%s%s' % (self._DEFINITION_ERROR_MSG, self.address)) - for dep in resolved.resolve(): - yield dep - - def get(self): - """De-reference this pants pointer to a single target. - - If the pointer aliases more than one target a LookupError is raised. - """ - resolved = [t for t in self.resolve() if t.is_concrete] - if len(resolved) > 1: - raise LookupError('%s points to more than one target: %s' % (self, resolved)) - return resolved.pop() - - def __getattr__(self, name): - try: - return Target.__getattribute__(self, name) - except AttributeError as e: - try: - return getattr(self.get(), name) - except (AttributeError, LookupError): - raise e diff --git a/src/python/twitter/pants/targets/python_antlr_library.py b/src/python/twitter/pants/targets/python_antlr_library.py deleted file mode 100644 index 35f640681..000000000 --- a/src/python/twitter/pants/targets/python_antlr_library.py +++ /dev/null @@ -1,62 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from twitter.common.collections import OrderedSet -from twitter.pants.base.build_manual import manual -from twitter.pants.targets.python_target import PythonTarget -from twitter.pants.targets.pants_target import Pants - - -@manual.builddict(tags=["python"]) -class PythonAntlrLibrary(PythonTarget): - """Generates a stub Python library from Antlr grammar files.""" - - def __init__(self, - name, - module, - antlr_version='3.1.3', - sources=None, - resources=None, - dependencies=None, - exclusives=None): - """ - :param name: Name of library - :param module: everything beneath module is relative to this module name, None if root namespace - :param antlr_version: - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param resources: non-Python resources, e.g. templates, keys, other data (it is - recommended that your application uses the pkgutil package to access these - resources in a .zip-module friendly way.) - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param dict exclusives: An optional dict of exclusives tags. See CheckExclusives for details. - """ - - def get_all_deps(): - all_deps = OrderedSet() - all_deps.update(Pants('3rdparty/python:antlr-%s' % antlr_version).resolve()) - if dependencies: - all_deps.update(dependencies) - return all_deps - - super(PythonAntlrLibrary, self).__init__(name, sources, resources, get_all_deps(), - exclusives=exclusives) - - self.module = module - self.antlr_version = antlr_version diff --git a/src/python/twitter/pants/targets/python_artifact.py b/src/python/twitter/pants/targets/python_artifact.py deleted file mode 100644 index 9ca5d04c8..000000000 --- a/src/python/twitter/pants/targets/python_artifact.py +++ /dev/null @@ -1,97 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.build_manual import manual - - -@manual.builddict(tags=["python"]) -class PythonArtifact(object): - """Represents a Python setup.py-based project.""" - class MissingArgument(Exception): pass - class UnsupportedArgument(Exception): pass - - UNSUPPORTED_ARGS = frozenset([ - 'data_files', - 'package_dir', - 'package_data', - 'packages', - ]) - - def __init__(self, **kwargs): - """Passes params to `setuptools.setup `_.""" - self._kw = kwargs - self._binaries = {} - - def has(name): - value = self._kw.get(name) - if value is None: - raise self.MissingArgument('PythonArtifact requires %s to be specified!' 
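A hypothetical python_antlr_library declaration; note the implicit dependency on the matching '3rdparty/python:antlr-<version>' target that get_all_deps resolves above:

    python_antlr_library(name='grammar',
      module='example.parsing',   # hypothetical output module
      antlr_version='3.1.3',
      sources=['Expr.g'],
    )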
diff --git a/src/python/twitter/pants/targets/python_artifact.py b/src/python/twitter/pants/targets/python_artifact.py
deleted file mode 100644
index 9ca5d04c8..000000000
--- a/src/python/twitter/pants/targets/python_artifact.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-
-@manual.builddict(tags=["python"])
-class PythonArtifact(object):
-  """Represents a Python setup.py-based project."""
-
-  class MissingArgument(Exception): pass
-  class UnsupportedArgument(Exception): pass
-
-  UNSUPPORTED_ARGS = frozenset([
-    'data_files',
-    'package_dir',
-    'package_data',
-    'packages',
-  ])
-
-  def __init__(self, **kwargs):
-    """Passes params to ``setuptools.setup``."""
-    self._kw = kwargs
-    self._binaries = {}
-
-    def has(name):
-      value = self._kw.get(name)
-      if value is None:
-        raise self.MissingArgument('PythonArtifact requires %s to be specified!' % name)
-      return value
-
-    def misses(name):
-      if name in self._kw:
-        raise self.UnsupportedArgument('PythonArtifact prohibits %s from being specified' % name)
-
-    self._version = has('version')
-    self._name = has('name')
-    for arg in self.UNSUPPORTED_ARGS:
-      misses(arg)
-
-  @property
-  def name(self):
-    return self._name
-
-  @property
-  def version(self):
-    return self._version
-
-  @property
-  def key(self):
-    return '%s==%s' % (self._name, self._version)
-
-  @property
-  def setup_py_keywords(self):
-    return self._kw
-
-  @property
-  def binaries(self):
-    return self._binaries
-
-  @manual.builddict()
-  def with_binaries(self, *args, **kw):
-    """Add binaries tagged to this artifact.
-
-    For example: ::
-
-      provides = setup_py(
-        name = 'my_library',
-        zip_safe = True
-      ).with_binaries(
-        my_command = pants(':my_library_bin')
-      )
-
-    This adds a console_script entry_point for the python_binary target
-    pointed at by :my_library_bin. Currently only supports
-    python_binaries that specify entry_point explicitly instead of source.
-
-    Also can take a dictionary, e.g.
-    with_binaries({'my-command': pants(...)})
-    """
-    for arg in args:
-      if isinstance(arg, dict):
-        self._binaries.update(arg)
-    self._binaries.update(kw)
-    return self
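Because has('name') and has('version') run at construction time, every setup_py artifact had to carry at least those two keywords. A minimal hypothetical sketch:

    provides=setup_py(
      name='example-lib',        # required, enforced by has('name')
      version='0.1.0',           # required, enforced by has('version')
      description='Example library published via pants.',
    ).with_binaries(
      example_tool=pants('src/python/example:bin'),  # hypothetical python_binary target
    )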
diff --git a/src/python/twitter/pants/targets/python_binary.py b/src/python/twitter/pants/targets/python_binary.py
deleted file mode 100644
index d5041526a..000000000
--- a/src/python/twitter/pants/targets/python_binary.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-from twitter.common.collections import maybe_list
-from twitter.common.lang import Compatibility
-from twitter.common.python.pex_info import PexInfo
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.target import Target, TargetDefinitionException
-
-from .python_target import PythonTarget
-
-
-@manual.builddict(tags=['python'])
-class PythonBinary(PythonTarget):
-  """Produces a Python binary.
-
-  Python binaries are pex files, self-contained executable shell
-  scripts that contain a complete Python environment capable of
-  running the target. For more information about pex files see
-  https://github.com/twitter/commons/blob/master/src/python/twitter/pants/python/README.md"""
-
-  # TODO(wickman) Consider splitting pex options out into a separate PexInfo builder that can be
-  # attached to the binary target. Ideally the PythonBinary target is agnostic about pex mechanics.
-  def __init__(self,
-               name,
-               source=None,
-               dependencies=None,
-               entry_point=None,
-               inherit_path=False,        # pex option
-               zip_safe=True,             # pex option
-               always_write_cache=False,  # pex option
-               repositories=None,         # pex option
-               indices=None,              # pex option
-               ignore_errors=False,       # pex option
-               allow_pypi=False,          # pex option
-               platforms=(),
-               compatibility=None,
-               exclusives=None):
-    """
-    :param name: target name
-    :param source: the python source file that becomes this binary's __main__.
-      If unspecified, the binary drops into an interpreter by default.
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param entry_point: the default entry point for this binary. If None, the entry
-      point is derived from ``source``.
-    :param inherit_path: inherit the sys.path of the environment that this binary runs in
-    :param zip_safe: whether or not this binary is safe to run in compacted (zip-file) form
-    :param always_write_cache: whether or not the .deps cache of this PEX file should always
-      be written to disk.
-    :param repositories: a list of repositories to query for dependencies.
-    :param indices: a list of indices to use for packages.
-    :param platforms: extra platforms to target when building this binary.
-    :param compatibility: either a string or list of strings that represents
-      interpreter compatibility for this target, using the Requirement-style format,
-      e.g. ``'CPython>=3'``, or just ``['>=2.7', '<3']`` for requirements agnostic to
-      interpreter class.
-    :param dict exclusives: An optional dict of exclusives tags. See CheckExclusives for details.
-    """
-
-    # TODO(John Sirois): Fixup TargetDefinitionException - it has awkward Target base-class
-    # initialization requirements right now requiring this Target.__init__.
-    Target.__init__(self, name, exclusives=exclusives)
-
-    if source is None and entry_point is None:
-      raise TargetDefinitionException(self,
-          'A python binary target must specify either source or entry_point.')
-
-    PythonTarget.__init__(self,
-                          name,
-                          [] if source is None else [source],
-                          compatibility=compatibility,
-                          dependencies=dependencies,
-                          exclusives=exclusives,
-                         )
-
-    if not isinstance(platforms, (list, tuple)) and not isinstance(platforms, Compatibility.string):
-      raise TargetDefinitionException(self, 'platforms must be a list, tuple or string.')
-
-    self._entry_point = entry_point
-    self._inherit_path = bool(inherit_path)
-    self._zip_safe = bool(zip_safe)
-    self._always_write_cache = bool(always_write_cache)
-    self._repositories = maybe_list(repositories or [])
-    self._indices = maybe_list(indices or [])
-    self._ignore_errors = bool(ignore_errors)
-    self._platforms = tuple(maybe_list(platforms or []))
-
-    if source and entry_point:
-      entry_point_module = entry_point.split(':', 1)[0]
-      source_entry_point = self._translate_to_entry_point(self.sources[0])
-      if entry_point_module != source_entry_point:
-        raise TargetDefinitionException(self,
-            'Specified both source and entry_point but they do not agree: %s vs %s' % (
-            source_entry_point, entry_point_module))
-
-  @property
-  def platforms(self):
-    return self._platforms
-
-  # TODO(wickman) These should likely be attributes on PythonLibrary targets
-  # and not PythonBinary targets, or at the very worst, both.
-  @property
-  def repositories(self):
-    return self._repositories
-
-  @property
-  def indices(self):
-    return self._indices
-
-  def _translate_to_entry_point(self, source):
-    source_base, _ = os.path.splitext(source)
-    return source_base.replace(os.path.sep, '.')
-
-  @property
-  def entry_point(self):
-    if self._entry_point:
-      return self._entry_point
-    elif self.sources:
-      assert len(self.sources) == 1
-      return self._translate_to_entry_point(self.sources[0])
-    else:
-      return None
-
-  @property
-  def pexinfo(self):
-    info = PexInfo.default()
-    for repo in self._repositories:
-      info.add_repository(repo)
-    for index in self._indices:
-      info.add_index(index)
-    info.zip_safe = self._zip_safe
-    info.always_write_cache = self._always_write_cache
-    info.inherit_path = self._inherit_path
-    info.entry_point = self.entry_point
-    info.ignore_errors = self._ignore_errors
-    return info
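The source/entry_point agreement check above permits either style; two hypothetical, roughly equivalent declarations:

    python_binary(name='main-by-source',
      source='main.py',                # entry point derived as 'main' by _translate_to_entry_point
      dependencies=[pants('src/python/example:lib')],
    )

    python_binary(name='main-by-entry-point',
      entry_point='example.main:run',  # no source; the pex jumps straight to run()
      dependencies=[pants('src/python/example:lib')],
      zip_safe=False,                  # pex option plumbed through to PexInfo above
    )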
diff --git a/src/python/twitter/pants/targets/python_egg.py b/src/python/twitter/pants/targets/python_egg.py
deleted file mode 100644
index ac99fcd18..000000000
--- a/src/python/twitter/pants/targets/python_egg.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-# XXX(wickman) This should probably die?
-
-import os
-
-from glob import glob as fsglob
-from zipimport import zipimporter
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.parse_context import ParseContext
-
-from .python_requirement import PythonRequirement
-
-from pkg_resources import Distribution, EggMetadata, PathMetadata
-
-
-@manual.builddict(tags=["python"])
-def PythonEgg(glob, name=None):
-  """Refers to pre-built Python eggs in the file system. (To instead fetch
-  eggs in a ``pip``/``easy_install`` way, use ``python_requirement``.)
-
-  E.g., ``egg(name='foo', glob='foo-0.1-py2.6.egg')`` would pick up the
-  file ``foo-0.1-py2.6.egg`` from the ``BUILD`` file's directory; targets
-  could depend on it by name ``foo``.
-
-  :param string glob: File glob pattern.
-  :param string name: Target name; by default uses the egg's project name.
-  """
-  # TODO(John Sirois): Rationalize with globs handling in ParseContext
-  eggs = fsglob(ParseContext.path(glob))
-
-  requirements = set()
-  for egg in eggs:
-    if os.path.isdir(egg):
-      metadata = PathMetadata(egg, os.path.join(egg, 'EGG-INFO'))
-    else:
-      metadata = EggMetadata(zipimporter(egg))
-    dist = Distribution.from_filename(egg, metadata=metadata)
-    requirements.add(dist.as_requirement())
-
-  if len(requirements) > 1:
-    raise ValueError('Got multiple egg versions! => %s' % requirements)
-
-  return PythonRequirement(str(requirements.pop()), name=name)
diff --git a/src/python/twitter/pants/targets/python_library.py b/src/python/twitter/pants/targets/python_library.py
deleted file mode 100644
index b0cc389c5..000000000
--- a/src/python/twitter/pants/targets/python_library.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.targets.python_target import PythonTarget
-
-
-@manual.builddict(tags=["python"])
-class PythonLibrary(PythonTarget):
-  """Produces a Python library."""
-
-  def __init__(self,
-               name,
-               sources=(),
-               resources=(),
-               dependencies=(),
-               provides=None,
-               compatibility=None,
-               exclusives=None):
-    """
-    :param name: Name of library.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param resources: non-Python resources, e.g. templates, keys, other data (it is
-      recommended that your application uses the pkgutil package to access these
-      resources in a .zip-module friendly way.)
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param provides:
-      The :ref:`setup_py <bdict_setup_py>` artifact (implemented by
-      :class:`twitter.pants.targets.python_artifact.PythonArtifact`)
-      to publish that represents this target outside the repo.
-    :param dict exclusives: An optional dict of exclusives tags. See CheckExclusives for details.
-    """
-    PythonTarget.__init__(self,
-                          name,
-                          sources=sources,
-                          resources=resources,
-                          dependencies=dependencies,
-                          provides=provides,
-                          compatibility=compatibility,
-                          exclusives=exclusives,
-                         )
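A python_library combining the pieces above; names and paths are hypothetical, and provides takes the setup_py artifact type from python_artifact.py:

    python_library(name='lib',
      sources=globs('*.py'),
      resources=['templates/default.mustache'],  # non-Python data, accessed via pkgutil
      dependencies=[pants('src/python/example/common:lib')],
      provides=setup_py(name='example-lib', version='0.1.0'),
    )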
diff --git a/src/python/twitter/pants/targets/python_requirement.py b/src/python/twitter/pants/targets/python_requirement.py
deleted file mode 100644
index a7f22c05d..000000000
--- a/src/python/twitter/pants/targets/python_requirement.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-from twitter.pants.base.target import Target
-
-from .external_dependency import ExternalDependency
-
-from pkg_resources import Requirement
-
-
-@manual.builddict(tags=["python"])
-class PythonRequirement(Target, ExternalDependency):
-  """Pants wrapper around pkg_resources.Requirement"""
-
-  def __init__(self, requirement, name=None, repository=None, version_filter=None, use_2to3=False,
-               compatibility=None, exclusives=None):
-    # TODO(wickman) Allow PythonRequirements to be specified using pip-style vcs or url identifiers,
-    # e.g. git+https or just http://...
-    self._requirement = Requirement.parse(requirement)
-    self._repository = repository
-    self._name = name or self._requirement.project_name
-    self._use_2to3 = use_2to3
-    self._version_filter = version_filter or (lambda py, pl: True)
-    # TODO(wickman) Unify this with PythonTarget .compatibility
-    self.compatibility = compatibility or ['']
-    Target.__init__(self, self._name, exclusives=exclusives)
-
-  def should_build(self, python, platform):
-    return self._version_filter(python, platform)
-
-  @property
-  def use_2to3(self):
-    return self._use_2to3
-
-  @property
-  def repository(self):
-    return self._repository
-
-  # duck-typing Requirement interface for Resolver, since Requirement cannot be
-  # subclassed (curses!)
-  @property
-  def key(self):
-    return self._requirement.key
-
-  @property
-  def extras(self):
-    return self._requirement.extras
-
-  @property
-  def specs(self):
-    return self._requirement.specs
-
-  @property
-  def project_name(self):
-    return self._requirement.project_name
-
-  @property
-  def requirement(self):
-    return self._requirement
-
-  def __contains__(self, item):
-    return item in self._requirement
-
-  def cache_key(self):
-    return str(self._requirement)
-
-  def __repr__(self):
-    return 'PythonRequirement(%s)' % self._requirement
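Requirement strings here are ordinary pkg_resources specifiers, so usage in a 3rdparty BUILD file was as simple as (hypothetical distributions and versions):

    python_requirement('ansicolors==1.0.2')
    python_requirement('pytest>=2.3,<2.4', name='pytest')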
diff --git a/src/python/twitter/pants/targets/python_target.py b/src/python/twitter/pants/targets/python_target.py
deleted file mode 100644
index 7c3573dbe..000000000
--- a/src/python/twitter/pants/targets/python_target.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from collections import defaultdict
-
-from twitter.common.collections import OrderedSet
-from twitter.common.python.interpreter import PythonIdentity
-
-from twitter.pants.base.target import Target, TargetDefinitionException
-
-from .with_dependencies import TargetWithDependencies
-from .with_sources import TargetWithSources
-
-from twitter.pants.targets.python_artifact import PythonArtifact
-
-
-class PythonTarget(TargetWithDependencies, TargetWithSources):
-  """Base class for all Python targets."""
-
-  def __init__(self,
-               name,
-               sources,
-               resources=None,
-               dependencies=None,
-               provides=None,
-               compatibility=None,
-               exclusives=None):
-    TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives)
-    TargetWithDependencies.__init__(self, name, dependencies=dependencies, exclusives=exclusives)
-
-    self.add_labels('python')
-    self.resources = self._resolve_paths(resources) if resources else OrderedSet()
-
-    if provides and not isinstance(provides, PythonArtifact):
-      raise TargetDefinitionException(self,
-          "Target must provide a valid pants setup_py object. Received a '%s' object instead." %
-          provides.__class__.__name__)
-    self.provides = provides
-
-    self.compatibility = compatibility or ['']
-    for req in self.compatibility:
-      try:
-        PythonIdentity.parse_requirement(req)
-      except ValueError as e:
-        raise TargetDefinitionException(self, str(e))
-
-  def _walk(self, walked, work, predicate=None):
-    super(PythonTarget, self)._walk(walked, work, predicate)
-    if self.provides and self.provides.binaries:
-      for binary in self.provides.binaries.values():
-        binary._walk(walked, work, predicate)
-
-  def _propagate_exclusives(self):
-    self.exclusives = defaultdict(set)
-    for k in self.declared_exclusives:
-      self.exclusives[k] = self.declared_exclusives[k]
-    for t in self.dependencies:
-      if isinstance(t, Target):
-        t._propagate_exclusives()
-        self.add_to_exclusives(t.exclusives)
-      elif hasattr(t, "declared_exclusives"):
-        self.add_to_exclusives(t.declared_exclusives)
diff --git a/src/python/twitter/pants/targets/python_tests.py b/src/python/twitter/pants/targets/python_tests.py
deleted file mode 100644
index c1ee7fb5f..000000000
--- a/src/python/twitter/pants/targets/python_tests.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.common.collections import maybe_list
-from twitter.common.quantity import Amount, Time
-from twitter.pants.base.build_manual import manual
-from twitter.pants.targets.python_target import PythonTarget
-
-
-@manual.builddict(tags=["python"])
-class PythonTests(PythonTarget):
-  """Tests a Python library."""
-
-  def __init__(self,
-               name,
-               sources,
-               resources=None,
-               dependencies=None,
-               timeout=Amount(2, Time.MINUTES),
-               coverage=None,
-               soft_dependencies=False,
-               entry_point='pytest',
-               exclusives=None):
-    """
-    :param name: See PythonLibrary target.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param resources: See PythonLibrary target.
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param timeout: Amount of time before this test should be considered timed-out.
-    :param coverage: the module(s) whose coverage should be generated, e.g.
-      'twitter.common.log' or ['twitter.common.log', 'twitter.common.http']
-    :param soft_dependencies: Whether or not we should ignore dependency resolution
-      errors for this test.
-    :param entry_point: The entry point to use to run the tests.
-    :param dict exclusives: An optional dict of exclusives tags. See CheckExclusives for details.
-    """
-    self._timeout = timeout
-    self._soft_dependencies = bool(soft_dependencies)
-    self._coverage = maybe_list(coverage) if coverage is not None else []
-    self._entry_point = entry_point
-    super(PythonTests, self).__init__(name, sources, resources, dependencies, exclusives=exclusives)
-    self.add_labels('python', 'tests')
-
-  @property
-  def timeout(self):
-    return self._timeout
-
-  @property
-  def coverage(self):
-    return self._coverage
-
-  @property
-  def entry_point(self):
-    return self._entry_point
-
-
-class PythonTestSuite(PythonTarget):
-  """Tests one or more python test targets."""
-
-  def __init__(self, name, dependencies=None):
-    super(PythonTestSuite, self).__init__(name, (), (), dependencies)
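A hypothetical python_tests declaration exercising the timeout and coverage knobs above; this assumes the Amount/Time quantity types (used for the default above) were importable in the BUILD context:

    python_tests(name='tests',
      sources=globs('test_*.py'),
      dependencies=[pants('src/python/example:lib')],
      coverage=['example.lib'],        # module(s) to report coverage for
      timeout=Amount(5, Time.MINUTES),
    )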
diff --git a/src/python/twitter/pants/targets/python_thrift_library.py b/src/python/twitter/pants/targets/python_thrift_library.py
deleted file mode 100644
index d8482b2ad..000000000
--- a/src/python/twitter/pants/targets/python_thrift_library.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-from .python_target import PythonTarget
-
-
-@manual.builddict(tags=["python"])
-class PythonThriftLibrary(PythonTarget):
-  """Generates a stub Python library from thrift IDL files."""
-
-  def __init__(self, name,
-               sources=None,
-               resources=None,
-               dependencies=None,
-               provides=None,
-               exclusives=None):
-    """
-    :param name: Name of library.
-    :param sources: thrift source files (If more than one tries to use the same
-      namespace, beware https://issues.apache.org/jira/browse/THRIFT-515)
-    :param resources: non-Python resources, e.g. templates, keys, other data (it is
-      recommended that your application uses the pkgutil package to access these
-      resources in a .zip-module friendly way.)
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param dict exclusives: An optional dict of exclusives tags. See CheckExclusives for details.
-    """
-    super(PythonThriftLibrary, self).__init__(name, sources, resources, dependencies, provides,
-                                              exclusives=exclusives)
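The Python thrift analogue of the JVM target earlier in this diff; a hypothetical declaration (mind the THRIFT-515 namespace caveat noted above):

    python_thrift_library(name='thrift-py',
      sources=['service.thrift'],
      dependencies=[pants('src/thrift/com/example:common')],
    )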
- """ - - super(Repository, self).__init__(name, exclusives=exclusives) - - self.name = name - self.url = url - self.push_db = push_db - - def __eq__(self, other): - result = other and ( - type(other) == Repository) and ( - self.name == other.name) - return result - - def __hash__(self): - return hash(self.name) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return "%s -> %s (%s)" % (self.name, self.url, self.push_db) diff --git a/src/python/twitter/pants/targets/resources.py b/src/python/twitter/pants/targets/resources.py deleted file mode 100644 index 3e7a507d2..000000000 --- a/src/python/twitter/pants/targets/resources.py +++ /dev/null @@ -1,79 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.build_manual import manual - -from . import util -from .internal import InternalTarget -from .with_sources import TargetWithSources - - -@manual.builddict(tags=['jvm']) -class Resources(InternalTarget, TargetWithSources): - """A set of files accessible as resources from the JVM classpath. - - Looking for loose files in your application bundle? Those are :ref:`bdict_bundle`\ s. - - Resources are Java-style resources accessible via the ``Class.getResource`` - and friends API. In the ``jar`` goal, the resource files are placed in the resulting `.jar`. - """ - - def __init__(self, name, sources, exclusives=None): - """ - :param string name: The name of this target, which combined with this - build file defines the target :class:`twitter.pants.base.address.Address`. - :param sources: A list of filenames representing the resources - this library provides. - """ - # TODO(John Sirois): XXX Review why this is an InternalTarget - InternalTarget.__init__(self, name, dependencies=None, exclusives=exclusives) - TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives) - - def has_sources(self, extension=None): - """``Resources`` never own sources of any particular native type, like for example - ``JavaLibrary``. - """ - # TODO(John Sirois): track down the reason for this hack and kill or explain better. 
diff --git a/src/python/twitter/pants/targets/resources.py b/src/python/twitter/pants/targets/resources.py
deleted file mode 100644
index 3e7a507d2..000000000
--- a/src/python/twitter/pants/targets/resources.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-from . import util
-from .internal import InternalTarget
-from .with_sources import TargetWithSources
-
-
-@manual.builddict(tags=['jvm'])
-class Resources(InternalTarget, TargetWithSources):
-  """A set of files accessible as resources from the JVM classpath.
-
-  Looking for loose files in your application bundle? Those are :ref:`bdict_bundle`\ s.
-
-  Resources are Java-style resources accessible via the ``Class.getResource``
-  and friends API. In the ``jar`` goal, the resource files are placed in the resulting ``.jar``.
-  """
-
-  def __init__(self, name, sources, exclusives=None):
-    """
-    :param string name: The name of this target, which combined with this
-      build file defines the target :class:`twitter.pants.base.address.Address`.
-    :param sources: A list of filenames representing the resources
-      this library provides.
-    """
-    # TODO(John Sirois): XXX Review why this is an InternalTarget
-    InternalTarget.__init__(self, name, dependencies=None, exclusives=exclusives)
-    TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives)
-
-  def has_sources(self, extension=None):
-    """``Resources`` never own sources of any particular native type, like for example
-    ``JavaLibrary``.
-    """
-    # TODO(John Sirois): track down the reason for this hack and kill or explain better.
-    return extension is None
-
-
-class WithResources(InternalTarget):
-  """A mixin for internal targets that have resources."""
-
-  def __init__(self, *args, **kwargs):
-    super(WithResources, self).__init__(*args, **kwargs)
-    self._resources = []
-    self._raw_resources = None
-
-  @property
-  def resources(self):
-    if self._raw_resources is not None:
-      self._resources = list(self.resolve_all(self._raw_resources, Resources))
-      self.update_dependencies(self._resources)
-      self._raw_resources = None
-    return self._resources
-
-  @resources.setter
-  def resources(self, resources):
-    self._resources = []
-    self._raw_resources = util.resolve(resources)
-
-  def resolve(self):
-    # TODO(John Sirois): Clean this up when BUILD parse refactoring is tackled.
-    unused_resolved_resources = self.resources
-
-    for resolved in super(WithResources, self).resolve():
-      yield resolved
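A resources target plus a consumer, sketched hypothetically; the lazy resources property in the mixin above is what turns the reference into a real dependency edge:

    resources(name='config',
      sources=globs('*.xml'),
    )

    java_library(name='lib',
      sources=globs('*.java'),
      resources=[pants('src/resources/com/example:config')],
    )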
- :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances - to filter this target's transitive dependencies against. - :param resources: An optional list of paths (DEPRECATED) or ``resources`` - targets containing resources that belong on this library's classpath. - :param exclusives: An optional list of exclusives tags. - """ - super(ScalaLibrary, self).__init__( - name, - sources, - provides, - dependencies, - excludes, - exclusives=exclusives) - - if (sources is None) and (resources is None): - raise TargetDefinitionException(self, 'Must specify sources and/or resources.') - - self.resources = resources - - self._java_sources = [] - self._raw_java_sources = util.resolve(java_sources) - - self.add_labels('scala') - - @property - def java_sources(self): - if self._raw_java_sources is not None: - self._java_sources = list(Target.resolve_all(maybe_list(self._raw_java_sources, Target), - JavaLibrary)) - - self._raw_java_sources = None - - # TODO(John Sirois): reconsider doing this auto-linking. - # We have circular java/scala dep, add an inbound dependency edge from java to scala in this - # case to force scala compilation to precede java - since scalac supports generating java - # stubs for these cycles and javac does not this is both necessary and always correct. - for java_target in self._java_sources: - java_target.update_dependencies([self]) - return self._java_sources - - def resolve(self): - # TODO(John Sirois): Clean this up when BUILD parse refactoring is tackled. - unused_resolved_java_sources = self.java_sources - - for resolved in super(ScalaLibrary, self).resolve(): - yield resolved - diff --git a/src/python/twitter/pants/targets/scala_tests.py b/src/python/twitter/pants/targets/scala_tests.py deleted file mode 100644 index a71e028ef..000000000 --- a/src/python/twitter/pants/targets/scala_tests.py +++ /dev/null @@ -1,64 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
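
A note on the ``ScalaLibrary.java_sources`` property deleted above: when a java/scala circular dependency is declared, each resolved ``JavaLibrary`` gets an inbound dependency edge back onto the scala target, so scala compilation always precedes java (scalac can emit java stubs for the cycle; javac cannot). A toy reproduction of just that edge insertion, with an invented ``SimpleTarget`` class standing in for real pants targets:

# SimpleTarget is a hypothetical stand-in for a pants Target.
class SimpleTarget(object):
  def __init__(self, name):
    self.name = name
    self.dependencies = []

  def update_dependencies(self, deps):
    self.dependencies.extend(deps)

scala_lib = SimpleTarget('scala-core')
java_cycle_libs = [SimpleTarget('java-glue')]
for java_target in java_cycle_libs:
  # The auto-link: java now depends on scala, forcing scala to compile first.
  java_target.update_dependencies([scala_lib])

assert scala_lib in java_cycle_libs[0].dependencies
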
-# ==================================================================================================
-
-from twitter.pants.base.build_manual import manual
-
-from .jvm_target import JvmTarget
-from .resources import WithResources
-
-
-@manual.builddict(tags=['scala'])
-class ScalaTests(JvmTarget, WithResources):
-  """Tests a Scala library."""
-
-  def __init__(self,
-               name,
-               sources=None,
-               java_sources=None,
-               dependencies=None,
-               excludes=None,
-               resources=None,
-               exclusives=None):
-
-    """
-    :param name: The name of this module target, addressable via pants by the portion of the spec
-      following the colon.
-    :param sources: A list of filenames representing the source code
-      this library is compiled from.
-    :type sources: list of strings
-    :param java_sources:
-      :class:`twitter.pants.targets.java_library.JavaLibrary` or list of
-      JavaLibrary targets this library has a circular dependency on.
-      Prefer using dependencies to express non-circular dependencies.
-    :param dependencies: List of :class:`twitter.pants.base.target.Target` instances
-      this target depends on.
-    :type dependencies: list of targets
-    :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances
-      to filter this target's transitive dependencies against.
-    :param resources: An optional list of Resources that should be in this target's classpath.
-    :param exclusives: An optional map of exclusives tags. See CheckExclusives for details.
-    """
-    super(ScalaTests, self).__init__(name, sources, dependencies, excludes, exclusives=exclusives)
-
-    # TODO(John Sirois): Merge handling with ScalaLibrary.java_sources - which is different and
-    # likely more correct.
-    self.java_sources = java_sources
-
-    self.resources = resources
-    self.add_labels('scala', 'tests')
diff --git a/src/python/twitter/pants/targets/scalac_plugin.py b/src/python/twitter/pants/targets/scalac_plugin.py
deleted file mode 100644
index 61b5c1d2a..000000000
--- a/src/python/twitter/pants/targets/scalac_plugin.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ================================================================================================== - -from twitter.pants.base.build_manual import manual - -from .scala_library import ScalaLibrary - - -@manual.builddict(tags=['scala']) -class ScalacPlugin(ScalaLibrary): - """Defines a target that produces a scalac_plugin.""" - - def __init__(self, - name, - classname, - plugin=None, - sources=None, - java_sources=None, - provides=None, - dependencies=None, - excludes=None, - resources=None, - exclusives=None): - - """ - :param name: The name of this module target, addressable via pants via the portion of the - spec following the colon - required. - :param classname: The fully qualified plugin class name - required. - :param plugin: The name of the plugin which defaults to name if not supplied. - :param sources: A list of filenames representing the source code - this library is compiled from. - :type sources: list of strings - :param java_sources: - :class:`twitter.pants.targets.java_library.JavaLibrary` or list of - JavaLibrary targets this library has a circular dependency on. - Prefer using dependencies to express non-circular dependencies. - :param Artifact provides: - The :class:`twitter.pants.targets.artifact.Artifact` - to publish that represents this target outside the repo. - :param dependencies: List of :class:`twitter.pants.base.target.Target` instances - this target depends on. - :type dependencies: list of targets - :param excludes: List of :class:`twitter.pants.targets.exclude.Exclude` instances - to filter this target's transitive dependencies against - :param resources: An optional list of paths (DEPRECATED) or ``resources`` - targets containing resources that belong on this library's classpath. - :param exclusives: An optional map of exclusives tags. See CheckExclusives for details. - """ - - super(ScalacPlugin, self).__init__( - name, - sources, - java_sources, - provides, - dependencies, - excludes, - resources, - exclusives=exclusives) - - self.plugin = plugin or name - self.classname = classname - self.add_labels('scalac_plugin') diff --git a/src/python/twitter/pants/targets/sources.py b/src/python/twitter/pants/targets/sources.py deleted file mode 100644 index 6ce70e607..000000000 --- a/src/python/twitter/pants/targets/sources.py +++ /dev/null @@ -1,148 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.build_manual import manual -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import TargetDefinitionException - - -@manual.builddict() -class SourceRoot(object): - """Allows registration of a source root for a set of targets. - - A source root is the base path sources for a particular language are found relative to. - Generally compilers or interpreters for the source will expect sources relative to a base path - and a source root allows calculation of the correct relative paths. - - E.g., a Java compiler probably expects to find ``.java`` files for - ``package com.twitter.common.net`` in ``*something*/com/twitter/common/net``. - The ``source_root`` command specifies that *something*. - - It is illegal to have nested source roots. - """ - _ROOTS_BY_TYPE = {} - _TYPES_BY_ROOT = {} - _SEARCHED = set() - - @classmethod - def reset(cls): - """Reset all source roots to empty. Only intended for testing.""" - cls._ROOTS_BY_TYPE = {} - cls._TYPES_BY_ROOT = {} - cls._SEARCHED = set() - - @classmethod - def find(cls, target): - """Finds the source root for the given target. - - If none is registered, returns the parent directory of the target's BUILD file. - """ - target_path = os.path.relpath(target.address.buildfile.parent_path, get_buildroot()) - - def _find(): - for root_dir, types in cls._TYPES_BY_ROOT.items(): - if target_path.startswith(root_dir): # The only candidate root for this target. - # Validate the target type, if restrictions were specified. - if types and not isinstance(target, tuple(types)): - # TODO: Find a way to use the BUILD file aliases in the error message, instead - # of target.__class__.__name__. E.g., java_tests instead of JavaTests. - raise TargetDefinitionException(target, - 'Target type %s not allowed under %s' % (target.__class__.__name__, root_dir)) - return root_dir - return None - - # Try already registered roots - root = _find() - if root: - return root - - # Fall back to searching the ancestor path for a root. - # TODO(John Sirois): We currently allow for organic growth of maven multi-module layout style - # projects (for example) and do not require a global up-front registration of all source roots - # and instead do lazy resolution here. This allows for parse cycles that lead to surprising - # runtime errors. Re-consider allowing lazy source roots at all. - for buildfile in reversed(target.address.buildfile.ancestors()): - if buildfile not in cls._SEARCHED: - ParseContext(buildfile).parse() - cls._SEARCHED.add(buildfile) - root = _find() - if root: - return root - - # Finally, resolve files relative to the BUILD file parent dir as the target base - return target_path - - @classmethod - def types(cls, root): - """Returns the set of target types rooted at root.""" - return cls._TYPES_BY_ROOT[root] - - @classmethod - def roots(cls, target_type): - """Returns the set of roots for given target type.""" - return cls._ROOTS_BY_TYPE[target_type] - - @classmethod - def all_roots(cls): - """Returns a mapping from source roots to the associated target types.""" - return dict(cls._TYPES_BY_ROOT) - - @classmethod - def register(cls, basedir, *allowed_target_types): - """Registers the given basedir (relative to the buildroot) as a source root. 
-
-    :param string basedir: The base directory to resolve sources relative to.
-    :param list allowed_target_types: Optional list of target types. If specified, we enforce that
-      only targets of those types appear under this source root.
-    """
-    cls._register(basedir, *allowed_target_types)
-
-  @classmethod
-  def _register(cls, source_root_dir, *allowed_target_types):
-    """Registers a source root.
-
-    :param source_root_dir: The source root directory against which we resolve source paths,
-      relative to the build root.
-    :param allowed_target_types: Optional list of target types. If specified, we enforce that
-      only targets of those types appear under this source root.
-    """
-    # Verify that source_root_dir doesn't reach outside buildroot.
-    buildroot = get_buildroot()
-    if source_root_dir.startswith(buildroot):
-      abspath = os.path.normpath(source_root_dir)
-    else:
-      abspath = os.path.normpath(os.path.join(buildroot, source_root_dir))
-    if not abspath.startswith(buildroot):
-      raise ValueError('Source root %s is not under the build root %s' % (abspath, buildroot))
-    source_root_dir = os.path.relpath(abspath, buildroot)
-
-    types = cls._TYPES_BY_ROOT.get(source_root_dir)
-    if types is None:
-      types = OrderedSet()
-      cls._TYPES_BY_ROOT[source_root_dir] = types
-
-    for allowed_target_type in allowed_target_types:
-      types.add(allowed_target_type)
-      roots = cls._ROOTS_BY_TYPE.get(allowed_target_type)
-      if roots is None:
-        roots = OrderedSet()
-        cls._ROOTS_BY_TYPE[allowed_target_type] = roots
-      roots.add(source_root_dir)
diff --git a/src/python/twitter/pants/targets/util.py b/src/python/twitter/pants/targets/util.py
deleted file mode 100644
index 9514f4621..000000000
--- a/src/python/twitter/pants/targets/util.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Foursquare Labs, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-__author__ = 'Ryan Williams'
-
-from collections import Iterable
-
-from twitter.common.lang import Compatibility
-
-from .pants_target import Pants
-
-
-def resolve(arg, clazz=Pants):
-  """Wraps strings in Pants() targets, for BUILD file convenience.
-
-  - single string literal gets wrapped in Pants() target
-  - single object is left alone
-  - list of strings and other miscellaneous objects gets its strings wrapped in Pants() targets
-  """
-  if isinstance(arg, Compatibility.string):
-    return clazz(arg)
-  elif isinstance(arg, Iterable):
-    # If arg is iterable, recurse on its elements.
-    return [resolve(dependency, clazz=clazz) for dependency in arg]
-  else:
-    # NOTE(ryan): Ideally we'd check isinstance(arg, Target) here, but some things that Targets
-    # depend on are not themselves subclasses of Target, notably JarDependencies.
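
The ``util.resolve`` helper being deleted here is the glue that lets BUILD files mix bare spec strings with target objects. A rough, self-contained approximation of its behavior, using ``str`` and ``list`` in place of ``Compatibility.string`` and ``Iterable``, and an invented ``Spec`` wrapper instead of the real ``Pants`` target:

# Sketch under stated assumptions; Spec stands in for the Pants() wrapper.
class Spec(object):
  def __init__(self, spec):
    self.spec = spec

def resolve(arg, clazz=Spec):
  if isinstance(arg, str):
    return clazz(arg)                                     # bare string: wrap it
  elif isinstance(arg, list):
    return [resolve(item, clazz=clazz) for item in arg]   # recurse on elements
  return arg                                              # anything else passes through

deps = resolve(['src/java/foo:foo', Spec('src/scala/bar:bar')])
assert deps[0].spec == 'src/java/foo:foo'   # the string was wrapped
assert deps[1].spec == 'src/scala/bar:bar'  # the object was left alone
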
- return arg - diff --git a/src/python/twitter/pants/targets/with_dependencies.py b/src/python/twitter/pants/targets/with_dependencies.py deleted file mode 100644 index 10fc6c019..000000000 --- a/src/python/twitter/pants/targets/with_dependencies.py +++ /dev/null @@ -1,40 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.common.collections import OrderedSet -from twitter.pants.base.target import Target - -from .util import resolve - - -class TargetWithDependencies(Target): - def __init__(self, name, dependencies=None, exclusives=None): - Target.__init__(self, name, exclusives=exclusives) - self.dependencies = OrderedSet(resolve(dependencies)) if dependencies else OrderedSet() - - def _walk(self, walked, work, predicate=None): - Target._walk(self, walked, work, predicate) - for dependency in self.dependencies: - for dep in dependency.resolve(): - if isinstance(dep, Target) and not dep in walked: - walked.add(dep) - if not predicate or predicate(dep): - additional_targets = work(dep) - dep._walk(walked, work, predicate) - if additional_targets: - for additional_target in additional_targets: - if hasattr(additional_target, '_walk'): - additional_target._walk(walked, work, predicate) diff --git a/src/python/twitter/pants/targets/with_sources.py b/src/python/twitter/pants/targets/with_sources.py deleted file mode 100644 index 2cff98880..000000000 --- a/src/python/twitter/pants/targets/with_sources.py +++ /dev/null @@ -1,132 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os - -from collections import defaultdict - -from twitter.common.lang import Compatibility -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.target import Target -from twitter.pants.targets.sources import SourceRoot - - -class TargetWithSources(Target): - _source_to_targets = defaultdict(set) - - @classmethod - def register_source(cls, source, target): - cls._source_to_targets[source].add(target) - - def __init__(self, name, sources=None, exclusives=None): - Target.__init__(self, name, exclusives=exclusives) - - self.add_labels('sources') - self.target_base = SourceRoot.find(self) - self._unresolved_sources = sources or [] - self._resolved_sources = None - - def expand_files(self, recursive=True, include_buildfile=True): - """Expand files used to build this target to absolute paths. By default this expansion is done - recursively and target BUILD files are included. - """ - - files = [] - - def _expand(target): - files.extend([os.path.abspath(os.path.join(target.target_base, s)) - for s in (target.sources or [])]) - if include_buildfile: - files.append(target.address.buildfile.full_path) - if recursive: - for dep in target.dependencies: - if isinstance(dep, TargetWithSources): - _expand(dep) - elif hasattr(dep, 'address'): - # Don't know what it is, but we'll include the BUILD file to be paranoid - files.append(dep.address.buildfile.full_path) - - _expand(self) - return files - - @property - def sources(self): - if self._resolved_sources is None: - self._resolved_sources = self._resolve_paths(self._unresolved_sources or []) - return self._resolved_sources - - def sources_relative_to_buildroot(self): - """Returns this target's sources, relative to the buildroot. - - Prefer this over .sources unless you need to know about the target_base. - """ - for src in self.sources: - yield os.path.join(self.target_base, src) - - def sources_absolute_paths(self): - """Returns the absolute paths of this target's sources. - - Prefer this over .sources unless you need to know about the target_base. - """ - abs_target_base = os.path.join(get_buildroot(), self.target_base) - for src in self.sources: - yield os.path.join(abs_target_base, src) - - def set_resolved_sources(self, sources): - """Set resolved sources directly, skipping the resolution. - - Useful when synthesizing targets. - """ - self._resolved_sources = sources - - def _resolve_paths(self, paths): - """Resolves paths.""" - if not paths: - return [] - - def flatten_paths(*items): - """Flattens one or more items into a list. - - If the item is iterable each of its items is flattened. If an item is callable, it is called - and the result is flattened. Otherwise the atom is appended to the flattened list. These - rules are applied recursively such that the returned list will only contain non-iterable, - non-callable atoms. 
- """ - - flat = [] - - def flatmap(item): - if isinstance(item, Compatibility.string): - flat.append(item) - else: - try: - for i in iter(item): - flatmap(i) - except TypeError: - if callable(item): - flatmap(item()) - else: - flat.append(item) - - for item in items: - flatmap(item) - - return flat - - src_relpath = os.path.relpath(self.address.buildfile.parent_path, - os.path.join(get_buildroot(), self.target_base)) - - return [os.path.normpath(os.path.join(src_relpath, path)) for path in flatten_paths(paths)] diff --git a/src/python/twitter/pants/tasks/__init__.py b/src/python/twitter/pants/tasks/__init__.py deleted file mode 100644 index 22f7273dd..000000000 --- a/src/python/twitter/pants/tasks/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# ================================================================================================= -# Copyright 2011 Twitter, Inc. -# ------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================= - -# Must come before the import of Task, as things Task imports require it here. -from .task_error import TaskError -from .task import Task diff --git a/src/python/twitter/pants/tasks/antlr_gen.py b/src/python/twitter/pants/tasks/antlr_gen.py deleted file mode 100644 index 2d9c53cb3..000000000 --- a/src/python/twitter/pants/tasks/antlr_gen.py +++ /dev/null @@ -1,149 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ==================================================================================================
-
-import os
-
-from twitter.common.collections import OrderedSet
-from twitter.common.dirutil import safe_mkdir
-
-from twitter.pants.targets.java_antlr_library import JavaAntlrLibrary
-from twitter.pants.targets.java_library import JavaLibrary
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.code_gen import CodeGen
-from twitter.pants.tasks.nailgun_task import NailgunTask
-
-
-class AntlrGen(CodeGen, NailgunTask):
-
-  # Maps the compiler attribute of a target to the config key in pants.ini
-  _CONFIG_SECTION_BY_COMPILER = {
-    'antlr3': 'antlr-gen',
-    'antlr4': 'antlr4-gen',
-  }
-
-  def __init__(self, context):
-    CodeGen.__init__(self, context)
-    NailgunTask.__init__(self, context)
-
-    # TODO(John Sirois): kill if not needed by prepare_gen
-    self._classpath_by_compiler = {}
-
-    active_compilers = set(map(lambda t: t.compiler, context.targets(predicate=self.is_gentarget)))
-    for compiler, tools in self._all_possible_antlr_bootstrap_tools():
-      if compiler in active_compilers:
-        self._jvm_tool_bootstrapper.register_jvm_tool(compiler, tools)
-
-  def is_gentarget(self, target):
-    return isinstance(target, JavaAntlrLibrary)
-
-  def is_forced(self, lang):
-    return True
-
-  def genlangs(self):
-    return dict(java=lambda t: t.is_jvm)
-
-  def prepare_gen(self, targets):
-    compilers = set(map(lambda t: t.compiler, targets))
-    for compiler in compilers:
-      classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(compiler)
-      self._classpath_by_compiler[compiler] = classpath
-
-  def genlang(self, lang, targets):
-    if lang != 'java':
-      raise TaskError('Unrecognized antlr gen lang: %s' % lang)
-
-    # TODO: Instead of running the compiler for each target, collect the targets
-    # by type and invoke it twice, once for antlr3 and once for antlr4.
-
-    for target in targets:
-      java_out = self._java_out(target)
-      safe_mkdir(java_out)
-
-      antlr_classpath = self._classpath_by_compiler[target.compiler]
-      args = ["-o", java_out]
-
-      if target.compiler == 'antlr3':
-        java_main = 'org.antlr.Tool'
-      elif target.compiler == 'antlr4':
-        args.append("-visitor")  # Generate the parse tree visitor as well.
-        java_main = 'org.antlr.v4.Tool'
-      else:
-        raise TaskError("Unknown ANTLR compiler: {}".format(target.compiler))
-
-      sources = self._calculate_sources([target])
-      args.extend(sources)
-      result = self.runjava(classpath=antlr_classpath, main=java_main,
-                            args=args, workunit_name='antlr')
-      if result != 0:
-        raise TaskError('java %s ... exited non-zero (%i)' % (java_main, result))
-
-  def _calculate_sources(self, targets):
-    sources = set()
-
-    def collect_sources(target):
-      if self.is_gentarget(target):
-        sources.update(target.sources_relative_to_buildroot())
-    for target in targets:
-      target.walk(collect_sources)
-    return sources
-
-  def createtarget(self, lang, gentarget, dependees):
-    if lang != 'java':
-      raise TaskError('Unrecognized antlr gen lang: %s' % lang)
-    return self._create_java_target(gentarget, dependees)
-
-  def _create_java_target(self, target, dependees):
-    antlr_files_suffix = ["Lexer.java", "Parser.java"]
-    if target.compiler == 'antlr4':
-      antlr_files_suffix = ["BaseListener.java", "BaseVisitor.java",
-                            "Listener.java", "Visitor.java"] + antlr_files_suffix
-
-    generated_sources = []
-    for source in target.sources:
-      # Antlr enforces that generated sources are relative to the base filename, and that
-      # each grammar filename must match the resulting grammar Lexer and Parser classes.
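
The per-compiler dispatch in ``genlang`` above reduces to a small pure function. The sketch below re-expresses it for illustration only (``antlr_invocation`` is an invented helper name; the real task runs the tool via ``runjava`` against a bootstrapped classpath):

# Hypothetical restatement of the antlr3/antlr4 dispatch shown above.
def antlr_invocation(compiler, java_out, sources):
  args = ['-o', java_out]
  if compiler == 'antlr3':
    main = 'org.antlr.Tool'
  elif compiler == 'antlr4':
    args.append('-visitor')  # also generate a parse-tree visitor
    main = 'org.antlr.v4.Tool'
  else:
    raise ValueError('Unknown ANTLR compiler: %s' % compiler)
  return main, args + list(sources)

main, args = antlr_invocation('antlr4', 'gen-java', ['G.g4'])
assert main == 'org.antlr.v4.Tool'
assert args == ['-o', 'gen-java', '-visitor', 'G.g4']
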
- source_base, source_ext = os.path.splitext(source) - for suffix in antlr_files_suffix: - generated_sources.append(source_base + suffix) - - deps = self._resolve_java_deps(target) - - tgt = self.context.add_new_target(os.path.join(self._java_out(target), target.target_base), - JavaLibrary, - name=target.id, - sources=generated_sources, - provides=target.provides, - dependencies=deps, - excludes=target.excludes) - for dependee in dependees: - dependee.update_dependencies([tgt]) - return tgt - - def _resolve_java_deps(self, target): - key = self._CONFIG_SECTION_BY_COMPILER[target.compiler] - - deps = OrderedSet() - for dep in self.context.config.getlist(key, 'javadeps'): - deps.update(self.context.resolve(dep)) - return deps - - def _all_possible_antlr_bootstrap_tools(self): - for compiler, key in self._CONFIG_SECTION_BY_COMPILER.items(): - yield compiler, self.context.config.getlist(key, 'javadeps') - - def _java_out(self, target): - key = self._CONFIG_SECTION_BY_COMPILER[target.compiler] - return os.path.join(self.context.config.get(key, 'workdir'), 'gen-java') diff --git a/src/python/twitter/pants/tasks/args_resource_mapper.py b/src/python/twitter/pants/tasks/args_resource_mapper.py deleted file mode 100644 index 9736fe7b9..000000000 --- a/src/python/twitter/pants/tasks/args_resource_mapper.py +++ /dev/null @@ -1,153 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import os - -from twitter.pants.java.jar import open_jar -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.scala_library import ScalaLibrary - -from .task import Task - - -RESOURCE_RELDIR = 'com/twitter/common/args/apt' -RESOURCE_BASENAME = 'cmdline.arg.info.txt' - - -class ArgsResourceMapper(Task): - """Maps resource files generated by com.twitter.common#args-apt into a binary jar.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("include_all"), mkflag("include_all", negate=True), - dest="args_resource_mapper_include_all", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Include all arg fields resources.") - - def __init__(self, context, select_targets, transitive, main): - """ - :param context: The pants context. - :param select_targets: A predicate that selects the targets to create a trimmed cmdline args - resource file for. - :param transitive: If True, splits cmdline args resource info for all classes in the - transitive closure of classes depended on by the selected targets; otherwise, just - selects cmdline info for the classes owned by the selected targets directly. 
- :param main: True if the split cmdline arg resource info is for a main; False otherwise. - """ - Task.__init__(self, context) - - self.select_targets = select_targets - self.transitive = transitive - - # The args apt plugin uses a sequential suffix scheme to detect a family of cmdline args - # resource files available on a classpath. The 0th slot is normally skipped and reserved to - # the cmdline arg resource file of a main. - self.resource_index = 0 if main else 1 - - context.products.require('jars', self.select_targets) - context.products.require_data('classes_by_target') - default_args_resource_mapper = [ - os.path.join(self.get_workdir(key='java_workdir', workdir='javac'), 'classes')] - self.classdirs = context.config.getlist('args-resource-mapper', 'classdirs', - default=default_args_resource_mapper) - self.include_all = context.options.args_resource_mapper_include_all - - def execute(self, targets): - if self.classdirs: - jarmap = self.context.products.get('jars') - for target in filter(self.select_targets, targets): - mapping = jarmap.get(target) - if mapping: - for basedir, jars in mapping.items(): - for jar in jars: - self._addargsresources(os.path.join(basedir, jar), target) - else: - self.context.log.warn('No classes found for target %s' % target) - - def _addargsresources(self, jar, target): - lines = set() - for resourcedir in [os.path.join(classdir, RESOURCE_RELDIR) for classdir in self.classdirs]: - if os.path.exists(resourcedir): - for path in os.listdir(resourcedir): - if path.startswith(RESOURCE_BASENAME): - with open(os.path.join(resourcedir, path)) as resource: - lines.update(resource.readlines()) - - if lines: - class Args(object): - def __init__(self, context, transitive, classes_by_target): - self.context = context - self.classnames = set() - - def add_classnames(target): - if target.has_sources('.java'): - target_products = classes_by_target.get(target) - if target_products: - for _, classes in target_products.rel_paths(): - for cls in classes: - self.classnames.add(cls.replace('.class', '').replace('/', '.')) - else: - self.context.log.debug('No mapping for %s' % target) - - if transitive: - target.walk(add_classnames, lambda t: t.is_internal) - else: - add_classnames(target) - - def matches(self, line): - line = line.strip() - if not line: - return False - components = line.split(' ') - keyname = components[0] - if keyname in ('positional', 'field'): - # Line format: [key] class field - return components[1] in self.classnames - elif keyname == 'parser': - # Line format: [key] parsed-class parser-class - return components[2] in self.classnames - elif keyname == 'verifier': - # Line format: [key] verified-class verification-annotation-class verifier-class - return components[2] in self.classnames and components[3] in self.classnames - else: - # Unknown line (comments, ws, unknown configuration types - return True - - classes_by_target = self.context.products.get_data('classes_by_target') - self._addargs(lines if self.include_all - else filter(Args(self.context, - self.transitive, - classes_by_target).matches, lines), - jar, - target) - - def _addargs(self, lines, jarfile, target): - def is_configurationinfo(line): - line = line.strip() - return line and not line.startswith('#') - - if any(filter(is_configurationinfo, lines)): - resource = os.path.join(RESOURCE_RELDIR, '%s.%d' % (RESOURCE_BASENAME, self.resource_index)) - - content = '# Created by pants goal args-apt\n' - content += ''.join(sorted(lines)) - - with open_jar(jarfile, 'a') as jar: - 
jar.writestr(resource, content) - self.context.log.debug('Added args-apt resource file %s for %s:' - '\n%s' % (resource, target, content)) diff --git a/src/python/twitter/pants/tasks/benchmark_run.py b/src/python/twitter/pants/tasks/benchmark_run.py deleted file mode 100644 index 39933bdae..000000000 --- a/src/python/twitter/pants/tasks/benchmark_run.py +++ /dev/null @@ -1,111 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import shutil - -from twitter.pants.java.util import execute_java - -from .jvm_task import JvmTask - -from . import TaskError - - -class BenchmarkRun(JvmTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("target"), dest="target_class", action="append", - help="Name of the benchmark class.") - - option_group.add_option(mkflag("memory"), mkflag("memory", negate=True), - dest="memory_profiling", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Enable memory profiling.") - - option_group.add_option(mkflag("debug"), mkflag("debug", negate=True), - dest="debug", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Enable caliper debug mode.") - - option_group.add_option(mkflag("caliper-args"), dest="extra_caliper_args", default=[], - action="append", - help="Allows the user to pass additional command line options to " - "caliper. Can be used multiple times and arguments will be " - "concatenated. 
Example use: --bench-caliper-args='-Dsize=10,20 " - "-Dcomplex=true,false' --bench-caliper-args=-Dmem=1,2,3") - - def __init__(self, context): - super(BenchmarkRun, self).__init__(context) - - config = context.config - self.confs = config.getlist('benchmark-run', 'confs', default=['default']) - self.jvm_args = config.getlist('benchmark-run', 'jvm_args', - default=['-Xmx1g', '-XX:MaxPermSize=256m']) - - self._benchmark_bootstrap_key = 'benchmark-tool' - benchmark_bootstrap_tools = config.getlist('benchmark-run', 'bootstrap-tools', - default=[':benchmark-caliper-0.5']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._benchmark_bootstrap_key, - benchmark_bootstrap_tools) - self._agent_bootstrap_key = 'benchmark-agent' - agent_bootstrap_tools = config.getlist('benchmark-run', 'agent_profile', - default=[':benchmark-java-allocation-instrumenter-2.1']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._agent_bootstrap_key, agent_bootstrap_tools) - - # TODO(Steve Gury): - # Find all the target classes from the Benchmark target itself - # https://jira.twitter.biz/browse/AWESOME-1938 - self.caliper_args = context.options.target_class - - if context.options.memory_profiling: - self.caliper_args += ['--measureMemory'] - - if context.options.debug: - self.jvm_args.extend(context.config.getlist('jvm', 'debug_args')) - self.caliper_args += ['--debug'] - - self.caliper_args.extend(context.options.extra_caliper_args) - - def execute(self, targets): - # For rewriting JDK classes to work, the JAR file has to be listed specifically in - # the JAR manifest as something that goes in the bootclasspath. - # The MANIFEST list a jar 'allocation.jar' this is why we have to rename it - agent_tools_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._agent_bootstrap_key) - agent_jar = agent_tools_classpath[0] - allocation_jar = os.path.join(os.path.dirname(agent_jar), "allocation.jar") - - # TODO(Steve Gury): Find a solution to avoid copying the jar every run and being resilient - # to version upgrade - shutil.copyfile(agent_jar, allocation_jar) - os.environ['ALLOCATION_JAR'] = str(allocation_jar) - - benchmark_tools_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._benchmark_bootstrap_key) - - classpath = self.classpath(benchmark_tools_classpath, - confs=self.confs, - exclusives_classpath=self.get_base_classpath_for_target(targets[0])) - - caliper_main = 'com.google.caliper.Runner' - exit_code = execute_java(classpath=classpath, - main=caliper_main, - jvm_options=self.jvm_args, - args=self.caliper_args, - workunit_factory=self.context.new_workunit, - workunit_name='caliper') - if exit_code != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (caliper_main, exit_code)) diff --git a/src/python/twitter/pants/tasks/binary_create.py b/src/python/twitter/pants/tasks/binary_create.py deleted file mode 100644 index bcf8e054c..000000000 --- a/src/python/twitter/pants/tasks/binary_create.py +++ /dev/null @@ -1,133 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from zipfile import ZIP_STORED, ZIP_DEFLATED -import zipfile - -from twitter.common.contextutil import temporary_dir -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.base.build_environment import get_buildroot, get_version -from twitter.pants.tasks import TaskError -from twitter.pants.fs.archive import ZIP -from twitter.pants.java.jar import open_jar, Manifest -from twitter.pants.tasks.jvm_binary_task import JvmBinaryTask - - -class BinaryCreate(JvmBinaryTask): - """Creates a runnable monolithic binary deploy jar.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - JvmBinaryTask.setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("compressed"), mkflag("compressed", negate=True), - dest="binary_create_compressed", default=True, - action="callback", callback=mkflag.set_bool, - help="[%default] Create a compressed binary jar.") - - option_group.add_option(mkflag("zip64"), mkflag("zip64", negate=True), - dest="binary_create_zip64", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Create the binary jar with zip64 extensions.") - - def __init__(self, context): - JvmBinaryTask.__init__(self, context) - - self.outdir = os.path.abspath( - context.options.jvm_binary_create_outdir or - context.config.get('binary-create', 'outdir', - default=context.config.getdefault('pants_distdir')) - ) - self.compression = ZIP_DEFLATED if context.options.binary_create_compressed else ZIP_STORED - self.zip64 = ( - context.options.binary_create_zip64 - or context.config.getbool('binary-create', 'zip64', default=False) - ) - self.deployjar = context.options.jvm_binary_create_deployjar - - context.products.require('jars', predicate=self.is_binary) - context.products.require_data('classes_by_target') - context.products.require_data('resources_by_target') - if self.deployjar: - self.require_jar_dependencies() - - def execute(self, targets): - for binary in filter(self.is_binary, targets): - self.create_binary(binary) - - def create_binary(self, binary): - import platform - safe_mkdir(self.outdir) - - jarmap = self.context.products.get('jars') - - binary_jarname = '%s.jar' % binary.basename - binaryjarpath = os.path.join(self.outdir, binary_jarname) - self.context.log.info('creating %s' % os.path.relpath(binaryjarpath, get_buildroot())) - - with open_jar(binaryjarpath, 'w', compression=self.compression, allowZip64=self.zip64) as jar: - def add_jars(target): - generated = jarmap.get(target) - if generated: - for basedir, jars in generated.items(): - for internaljar in jars: - self.dump(os.path.join(basedir, internaljar), jar) - - binary.walk(add_jars, lambda t: t.is_internal) - - if self.deployjar: - for basedir, externaljar in self.list_jar_dependencies(binary): - self.dump(os.path.join(basedir, externaljar), jar) - - def write_binary_data(product_type): - data = self.context.products.get_data(product_type).get(binary) - if data: - for root, rel_paths in data.rel_paths(): - for 
rel_path in rel_paths: - jar.write(os.path.join(root, rel_path), arcname=rel_path) - - write_binary_data('classes_by_target') - write_binary_data('resources_by_target') - - manifest = Manifest() - manifest.addentry(Manifest.MANIFEST_VERSION, '1.0') - manifest.addentry( - Manifest.CREATED_BY, - 'python %s pants %s (Twitter, Inc.)' % (platform.python_version(), get_version()) - ) - main = binary.main or '*** java -jar not supported, please use -cp and pick a main ***' - manifest.addentry(Manifest.MAIN_CLASS, main) - jar.writestr(Manifest.PATH, manifest.contents()) - - jarmap.add(binary, self.outdir, [binary_jarname]) - - def dump(self, jarpath, jarfile): - self.context.log.debug(' dumping %s' % jarpath) - - with temporary_dir() as tmpdir: - try: - ZIP.extract(jarpath, tmpdir) - except zipfile.BadZipfile: - raise TaskError('Bad JAR file, maybe empty: %s' % jarpath) - for root, dirs, files in os.walk(tmpdir): - for f in files: - path = os.path.join(root, f) - relpath = os.path.relpath(path, tmpdir) - if Manifest.PATH != relpath: - jarfile.write(path, relpath) - diff --git a/src/python/twitter/pants/tasks/bootstrap_jvm_tools.py b/src/python/twitter/pants/tasks/bootstrap_jvm_tools.py deleted file mode 100644 index c36c0ba80..000000000 --- a/src/python/twitter/pants/tasks/bootstrap_jvm_tools.py +++ /dev/null @@ -1,81 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# =================================================================================================== - -import threading - -from twitter.pants.base.workunit import WorkUnit - -from . import Task, TaskError - - -class BootstrapJvmTools(Task): - - def __init__(self, context): - super(BootstrapJvmTools, self).__init__(context) - context.products.require_data('jvm_build_tools') - - def execute(self, targets): - context = self.context - if context.products.is_required_data('jvm_build_tools_classpath_callbacks'): - tool_product_map = context.products.get_data('jvm_build_tools') or {} - callback_product_map = context.products.get_data('jvm_build_tools_classpath_callbacks') or {} - # We leave a callback in the products map because we want these Ivy calls - # to be done lazily (they might never actually get executed) and we want - # to hit Task.invalidated (called in Task.ivy_resolve) on the instance of - # BootstrapJvmTools rather than the instance of whatever class requires - # the bootstrap tools. It would be awkward and possibly incorrect to call - # self.invalidated twice on a Task that does meaningful invalidation on its - # targets. 
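
The laziness described in the comment above is implemented by ``cached_bootstrap_classpath_callback`` further down this hunk. A minimal standalone version of the lock-guarded memoization pattern, with ``resolve_once`` standing in for the Ivy resolve call:

# Sketch: resolve_once is a stand-in for ivy_resolve; runs at most once.
import threading

def cached_callback(resolve_once):
  cache = {}
  lock = threading.Lock()

  def callback():
    with lock:
      if 'classpath' not in cache:
        cache['classpath'] = resolve_once()  # expensive work happens lazily, once
    return cache['classpath']
  return callback

calls = []
cb = cached_callback(lambda: calls.append(1) or ['tool.jar'])
assert cb() == ['tool.jar'] and cb() == ['tool.jar'] and len(calls) == 1
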
-      for key, deplist in tool_product_map.iteritems():
-        callback_product_map[key] = self.cached_bootstrap_classpath_callback(key, deplist)
-      context.products.safe_create_data('jvm_build_tools_classpath_callbacks',
-                                        lambda: callback_product_map)
-
-  def resolve_tool_targets(self, tools):
-    if not tools:
-      raise TaskError("BootstrapJvmTools.resolve_tool_targets called with no tool"
-                      " dependency addresses. This probably means that you don't"
-                      " have an entry in your pants.ini for this tool.")
-    for tool in tools:
-      try:
-        targets = list(self.context.resolve(tool))
-        if not targets:
-          raise KeyError
-      except KeyError:
-        self.context.log.error("Failed to resolve target for bootstrap tool: %s. "
-                               "You probably need to add this dep to your tools "
-                               "BUILD file(s), usually located in the root of the build." %
-                               tool)
-        raise
-      for target in targets:
-        yield target
-
-  def cached_bootstrap_classpath_callback(self, key, tools):
-    cache = {}
-    cache_lock = threading.Lock()
-
-    def bootstrap_classpath(executor=None):
-      with cache_lock:
-        if 'classpath' not in cache:
-          targets = list(self.resolve_tool_targets(tools))
-          workunit_name = 'bootstrap-%s' % str(key)
-          cache['classpath'] = self.ivy_resolve(targets,
-                                                executor=executor,
-                                                silent=True,
-                                                workunit_name=workunit_name,
-                                                workunit_labels=[WorkUnit.BOOTSTRAP])
-      return cache['classpath']
-    return bootstrap_classpath
diff --git a/src/python/twitter/pants/tasks/build_lint.py b/src/python/twitter/pants/tasks/build_lint.py
deleted file mode 100644
index 88fb56ecd..000000000
--- a/src/python/twitter/pants/tasks/build_lint.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ================================================================================================== -import os - -__author__ = 'Benjy Weinberger' - -import difflib -import re - -from collections import defaultdict - -from twitter.pants.tasks import Task - - -class BuildLint(Task): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - Task.setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag("transitive"), mkflag("transitive", negate=True), - dest="buildlint_transitive", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] apply lint rules transitively to all dependency buildfiles.") - - option_group.add_option(mkflag("include-intransitive-deps"), - mkflag("include-intransitive-deps", negate=True), - dest="buildlint_include_intransitive", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] correct both simple missing dependencies and intransitive missing deps") - - - option_group.add_option(mkflag("action"), dest="buildlint_actions", default=[], - action="append", type="choice", choices=['diff', 'rewrite'], - help="diff=print out diffs, rewrite=apply changes to BUILD files directly.") - - def __init__(self, context): - Task.__init__(self, context) - context.products.require('missing_deps') - self.transitive = context.options.buildlint_transitive - self.actions = set(context.options.buildlint_actions) - self.include_intransitive = context.options.buildlint_include_intransitive - # Manually apply the default. Can't use flag default, because action is 'append', so - # diffs would always be printed, even if we only wanted to rewrite. - if not self.actions: - self.actions.add('diff') - - def execute(self, targets): - # Map from buildfile path to map of target name -> missing deps for that target. - buildfile_paths = defaultdict(lambda: defaultdict(list)) - genmap_trans = self.context.products.get('missing_deps') - genmap_intrans = self.context.products.get('missing_intransitive_deps') - - def add_buildfile_for_target(target, genmap): - missing_dep_map = genmap[target] - missing_deps = missing_dep_map[self.context._buildroot] if missing_dep_map else defaultdict(list) - buildfile_paths[target.address.buildfile.full_path][target.name] += missing_deps - - if self.transitive: - for target in targets: - add_buildfile_for_target(target, genmap_trans) - if self.include_intransitive: - add_buildfile_for_target(target, genmap_intrans) - else: - for target in self.context.target_roots: - add_buildfile_for_target(target, genmap_trans) - if self.include_intransitive: - add_buildfile_for_target(target, genmap_intrans) - - for buildfile_path, missing_dep_map in buildfile_paths.items(): - self._fix_lint(buildfile_path, missing_dep_map) - - - # We use heuristics to find target names and their list of dependencies. - # Attempts to use the Python AST proved to be extremely complex and not worth the trouble. 
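
The regexes and ``sort_deps`` logic below implement one notable ordering rule: dependency specs sort alphabetically, except that local specs (those starting with a colon) sort last. A toy version of just that rule (simplified; the real code also handles ``pants(':...')`` wrappers and trailing commas):

# Minimal sketch of the dependency-ordering rule used by sort_deps below.
def sort_build_deps(deps):
  def key(dep):
    # Prefixing 'zzz' pushes local ":name" specs to the end of the sort.
    return 'zzz' + dep if dep.startswith("':") else dep
  return sorted(deps, key=key)

assert sort_build_deps(["':local'", "'3rdparty:guava'", "'src/java/a'"]) == \
    ["'3rdparty:guava'", "'src/java/a'", "':local'"]
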
- NAMES_RE = re.compile('^\w+\(\s*name\s*=\s*["\']((?:\w|-)+)["\']', flags=re.DOTALL|re.MULTILINE) - DEPS_RE = re.compile(r'^\s*dependencies\s*=\s*\[([^\]]*)\s*\]', flags=re.DOTALL|re.MULTILINE) - INLINE_SINGLE_DEP_RE = re.compile(r'^ *dependencies *= *\[[^\n,\]]* *\]') - - def _fix_lint(self, buildfile_path, missing_dep_map): - if os.path.exists(buildfile_path): - with open(buildfile_path, 'r') as infile: - old_buildfile_source = infile.read() - names = [] - for m in BuildLint.NAMES_RE.finditer(old_buildfile_source): - names.append(m.group(1)) - - # We'll step through this to find the name of the target whose deps we're currently looking at. - nameiter = iter(names) - - def sort_deps(m): - try: - name = nameiter.next() - except StopIteration: - name = '-UNKNOWN-' - deps = m.group(1).split('\n') - deps = filter(lambda x: x, [x.strip().replace('"', "'") for x in deps]) - missing_deps = ["'%s'," % x for x in missing_dep_map[name]] - deps.extend(missing_deps) - if deps: # Add comma if needed. We must do this before sorting. - # Allow a single dep on a single line, if that's what the file already had. - # This is common in 3rdparty/BUILD files. - if len(deps) == 1 and BuildLint.INLINE_SINGLE_DEP_RE.match(m.group(0)): - return ' dependencies = [%s]' % deps[0] - parts = [x.strip() for x in deps[-1].split('#')] - if not parts[0].rstrip().endswith(','): - deps[-1] = '%s,%s' % (parts[0], ' #' + parts[1] if len(parts) > 1 else '') - - # The key hack is to make sure local imports (those starting with a colon) come last. - deps = sorted(deps, key=lambda x: 'zzz' + x if (x.startswith("':") or x.startswith("pants(':")) else x) - res = ' dependencies = [\n %s\n ]' % ('\n '.join(deps)) if deps else 'dependencies = []' - return res - - new_buildfile_source = BuildLint.DEPS_RE.sub(sort_deps, old_buildfile_source) - if new_buildfile_source != old_buildfile_source: - if 'rewrite' in self.actions: - with open(buildfile_path, 'w') as outfile: - outfile.write(new_buildfile_source) - if 'diff' in self.actions: - diff = '\n'.join(difflib.unified_diff(old_buildfile_source.split('\n'), - new_buildfile_source.split('\n'), buildfile_path)) - print(diff) diff --git a/src/python/twitter/pants/tasks/builddictionary.py b/src/python/twitter/pants/tasks/builddictionary.py deleted file mode 100644 index 08062f2a5..000000000 --- a/src/python/twitter/pants/tasks/builddictionary.py +++ /dev/null @@ -1,284 +0,0 @@ -# ============================================================================= -# Copyright 2013 Twitter, Inc. -# ----------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================= - -import inspect -import os - -from twitter.common.dirutil import Fileset, safe_open -from twitter.pants.base.build_file_helpers import maven_layout -from twitter.pants.base.build_manual import get_builddict_info -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.generator import Generator, TemplateData -from twitter.pants.goal.phase import Phase -from twitter.pants.tasks import Task, TaskError - -from pkg_resources import resource_string - - -def entry(nom, classdoc=None, msg_rst=None, argspec=None, funcdoc=None, methods=None): - """Create a struct that our template expects to see. - - :param nom: Symbol name, e.g. python_binary - :param classdoc: plain text appears above argspec - :param msg_rst: reST. useful in hand-crafted entries - :param argspec: arg string like (x, y="deflt") - :param funcdoc: function's __doc__, plain text - :param methods: list of entries for class' methods - """ - - def indent_docstring_by_1(s): - """Given a non-empty docstring, return a version indented by a space. - Given an empty thing, return the thing itself - """ - # In reST, it's useful to have strings that are similarly-indented. - # If we have a classdoc indented by 2 next to an __init__ funcdoc indented - # by 4, reST doesn't format things nicely. Oh, totally-dedenting doesn't - # format nicely either. - - # Docstring indentation: more gnarly than you'd think: - # http://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation - if not s: return s - # Convert tabs to spaces (following the normal Python rules) - # and split into a list of lines: - lines = s.expandtabs().splitlines() - # Determine minimum indentation (first line doesn't count): - indent = 999 - for line in lines[1:]: - stripped = line.lstrip() - if stripped: - indent = min(indent, len(line) - len(stripped)) - # Remove indentation (first line is special): - trimmed = [lines[0].strip()] - if indent < 999: - for line in lines[1:]: - trimmed.append(line[indent:].rstrip()) - # Strip off trailing and leading blank lines: - while trimmed and not trimmed[-1]: - trimmed.pop() - while trimmed and not trimmed[0]: - trimmed.pop(0) - # Return a single string: - return '\n'.join([" " + t for t in trimmed]) - - return TemplateData( - nom=nom.strip(), - classdoc=indent_docstring_by_1(classdoc), - msg_rst=indent_docstring_by_1(msg_rst), - argspec=argspec, - funcdoc=indent_docstring_by_1(funcdoc), - methods=methods, - showmethods=(methods and len(methods) > 0)) - - -def msg_entry(nom, defn): - """For hard-wired entries a la "See Instead" or other simple stuff""" - return entry(nom, msg_rst=defn) - - -def entry_for_one_func(nom, func): - """Generate a BUILD dictionary entry for a function - nom: name like 'python_binary' - func: function object""" - args, varargs, varkw, defaults = inspect.getargspec(func) - argspec = inspect.formatargspec(args, varargs, varkw, defaults) - return entry(nom, - argspec=argspec, - funcdoc=func.__doc__) - - -def entry_for_one_method(nom, method): - """Generate a BUILD dictionary entry for a method - nom: name like 'with_description' - method: method object""" - # TODO(lhosken) : This is darned similar to entry_for_one_func. Merge 'em? 
- # (Punted so far since funcdoc indentation made my head hurt) - assert inspect.ismethod(method) - args, varargs, varkw, defaults = inspect.getargspec(method) - # args[:1] instead of args to discard "self" arg - argspec = inspect.formatargspec(args[1:], varargs, varkw, defaults) - return entry(nom, - argspec=argspec, - funcdoc=(method.__doc__ or "").replace("\n", " ")) - - -def entry_for_one(nom, sym): - if inspect.isclass(sym): - return entry_for_one_class(nom, sym) - if inspect.ismethod(sym) or inspect.isfunction(sym): - return entry_for_one_func(nom, sym) - return msg_entry(nom, "TODO! no doc gen for %s %s" % ( - str(type(sym)), str(sym))) - - -PREDEFS = { # some hardwired entries - "Amount": {"defn": msg_entry("Amount", """ - `Amount from twitter.commons.quantity `_ - E.g., ``Amount(2, Time.MINUTES)``.""")}, - "__file__": {"defn": msg_entry("__file__", "Path to BUILD file (string).")}, - "globs": {"defn": entry_for_one("globs", Fileset.globs)}, - "jar_library": {"defn": msg_entry("jar_library", - """Old name for `dependencies`_""")}, - "java_tests": {"defn": msg_entry("java_tests", - """Old name for `junit_tests`_""")}, - "maven_layout": {"defn": entry_for_one("maven_layout", maven_layout)}, - "python_artifact": {"suppress": True}, # unused alias for PythonArtifact - "rglobs": {"defn": entry_for_one("rglobs", Fileset.rglobs)}, - "ROOT_DIR": {"defn": msg_entry("ROOT_DIR", - "Root directory of source code (string).")}, - "scala_tests": {"defn": msg_entry("scala_tests", - """Old name for `scala_specs`_""")}, - "Time": {"defn": msg_entry("Time", """ - `Amount from twitter.commons.quantity `_ - E.g., ``Amount(2, Time.MINUTES)``."""), }, -} - - -# Thingies like scala_library -# Returns list of duples [(name, object), (name, object), (name, object),...] -def get_syms(): - r = {} - vc = ParseContext.default_globals() - for s in vc: - if s in PREDEFS: continue - if s[0].isupper(): continue # REMIND see both jvm_binary and JvmBinary?? - o = vc[s] - r[s] = o - return r - - -def tocl(d): - """Generate TOC, in-page links to the IDs we're going to define below""" - anchors = sorted(d.keys(), key=str.lower) - return TemplateData(t="All The Things", e=[a for a in anchors]) - - -def tags_tocl(d, tag_list, title): - """Generate specialized TOC. - E.g., tags_tocl(d, ["python", "anylang"], "Python") - tag_list: if an entry's tags contains any of these, use it - title: pretty title - """ - filtered_anchors = [] - for anc in sorted(d.keys(), key=str.lower): - entry = d[anc] - if not "tags" in entry: continue - found = [t for t in tag_list if t in entry["tags"]] - if not found: continue - filtered_anchors.append(anc) - return TemplateData(t=title, e=filtered_anchors) - - -def entry_for_one_class(nom, klas): - """ Generate a BUILD dictionary entry for a class. 
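entry_for_one_func and entry_for_one_method both reduce to "format the callable's signature, dropping self for methods". The deleted code uses the since-deprecated inspect.getargspec/formatargspec pair; a rough modern equivalent might look like:

    import inspect

    def argspec_for(callable_, drop_self=False):
      # Render '(name, source=None, ...)' for a BUILD dictionary entry.
      params = list(inspect.signature(callable_).parameters.values())
      if drop_self and params and params[0].name == 'self':
        params = params[1:]
      return '(%s)' % ', '.join(str(p) for p in params)

    def python_binary(name, source=None, dependencies=()):
      pass

    print(argspec_for(python_binary))  # (name, source=None, dependencies=())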
- nom: name like 'python_binary' - klas: class like twitter.pants.python_binary""" - try: - args, varargs, varkw, defaults = inspect.getargspec(klas.__init__) - argspec = inspect.formatargspec(args[1:], varargs, varkw, defaults) - funcdoc = klas.__init__.__doc__ - - methods = [] - for attrname in dir(klas): - attr = getattr(klas, attrname) - attr_bdi = get_builddict_info(attr) - if not attr_bdi: continue - if inspect.ismethod(attr): - methods.append(entry_for_one_method(attrname, attr)) - continue - raise TaskError('@manual.builddict on non-method %s within class %s ' - 'but I only know what to do with methods' % - (attrname, nom)) - - except TypeError: # __init__ might not be a Python function - argspec = None - funcdoc = None - methods = None - - return entry(nom, - classdoc=klas.__doc__, - argspec=argspec, - funcdoc=funcdoc, - methods=methods) - - -def assemble(predefs=PREDEFS, symbol_hash=None): - """Assemble big hash of entries suitable for smushing into a template. - - predefs: Hash of "hard-wired" predefined entries. - symbol_hash: Python syms from which to generate more entries. Default: get from BUILD context""" - d = {} - for k in PREDEFS: - v = PREDEFS[k] - if "suppress" in v and v["suppress"]: continue - d[k] = v - if symbol_hash is None: - symbol_hash = get_syms() - for k in symbol_hash: - bdi = get_builddict_info(symbol_hash[k]) - if bdi is None: continue - d[k] = bdi.copy() - if not "defn" in d[k]: - d[k]["defn"] = entry_for_one(k, symbol_hash[k]) - return d - - -class BuildBuildDictionary(Task): - """Generate documentation for the Sphinx site.""" - - def __init__(self, context): - super(BuildBuildDictionary, self).__init__(context) - self._templates_dir = os.path.join('templates', 'builddictionary') - self._outdir = os.path.join(self.context.config.getdefault("pants_distdir"), "builddict") - - def execute(self, targets): - self._gen_goals_reference() - - d = assemble() - template = resource_string(__name__, os.path.join(self._templates_dir, 'page.mustache')) - tocs = [tocl(d), - tags_tocl(d, ["java", "scala", "jvm", "anylang"], "JVM"), - tags_tocl(d, ["python", "anylang"], "Python")] - defns = [d[t]["defn"] for t in sorted(d.keys(), key=str.lower)] - filename = os.path.join(self._outdir, 'build_dictionary.rst') - self.context.log.info('Generating %s' % filename) - with safe_open(filename, 'w') as outfile: - generator = Generator(template, - tocs=tocs, - defns=defns) - generator.write(outfile) - - def _gen_goals_reference(self): - """Generate the goals reference rst doc.""" - phase_dict = {} - phase_names = [] - for phase, raw_goals in Phase.all(): - goals = [] - for g in raw_goals: - # TODO(lahosken) generalize indent_docstring, use here - doc = (g.task_type.__doc__ or "").replace("\n", " ").strip() - goals.append(TemplateData(name=g.task_type.__name__, doc=doc)) - phase_dict[phase.name] = TemplateData(phase=phase, goals=goals) - phase_names.append(phase.name) - - phases = [phase_dict[name] for name in sorted(phase_names, key=str.lower)] - - template = resource_string(__name__, - os.path.join(self._templates_dir, 'goals_reference.mustache')) - filename = os.path.join(self._outdir, 'goals_reference.rst') - self.context.log.info('Generating %s' % filename) - with safe_open(filename, 'w') as outfile: - generator = Generator(template, phases=phases) - generator.write(outfile) diff --git a/src/python/twitter/pants/tasks/bundle_create.py b/src/python/twitter/pants/tasks/bundle_create.py deleted file mode 100644 index 88c241894..000000000 --- 
a/src/python/twitter/pants/tasks/bundle_create.py +++ /dev/null @@ -1,166 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from zipfile import ZIP_DEFLATED - -from twitter.common.collections import OrderedSet -from twitter.common.contextutil import open_zip -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.fs import archive -from twitter.pants.java.jar import Manifest -from twitter.pants.targets.jvm_binary import JvmApp, JvmBinary -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.jvm_binary_task import JvmBinaryTask - - -class BundleCreate(JvmBinaryTask): - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - JvmBinaryTask.setup_parser(option_group, args, mkflag) - - archive_flag = mkflag("archive") - option_group.add_option(archive_flag, dest="bundle_create_archive", - type="choice", choices=list(archive.TYPE_NAMES), - help="[%%default] Create an archive from the bundle. " - "Choose from %s" % sorted(archive.TYPE_NAMES)) - - option_group.add_option(mkflag("archive-prefix"), mkflag("archive-prefix", negate=True), - dest="bundle_create_prefix", default=False, - action="callback", callback=mkflag.set_bool, - help="[%%default] Used in conjunction with %s this packs the archive " - "with its basename as the path prefix." 
% archive_flag) - - def __init__(self, context): - JvmBinaryTask.__init__(self, context) - - self.outdir = ( - context.options.jvm_binary_create_outdir or - context.config.get('bundle-create', 'outdir', - default=context.config.getdefault('pants_distdir')) - ) - - self.prefix = context.options.bundle_create_prefix - - def fill_archiver_type(): - self.archiver_type = context.options.bundle_create_archive - # If no option specified, check if anyone is requiring it - if not self.archiver_type: - for archive_type in archive.TYPE_NAMES: - if context.products.isrequired(archive_type): - self.archiver_type = archive_type - - fill_archiver_type() - self.deployjar = context.options.jvm_binary_create_deployjar - if not self.deployjar: - self.context.products.require('jars', predicate=self.is_binary) - self.require_jar_dependencies() - - class App(object): - """A uniform interface to an app.""" - - @staticmethod - def is_app(target): - return isinstance(target, (JvmApp, JvmBinary)) - - def __init__(self, target): - assert self.is_app(target), "%s is not a valid app target" % target - - self.binary = target if isinstance(target, JvmBinary) else target.binary - self.bundles = [] if isinstance(target, JvmBinary) else target.bundles - self.basename = target.basename - - def execute(self, _): - archiver = archive.archiver(self.archiver_type) if self.archiver_type else None - for target in self.context.target_roots: - for app in map(self.App, filter(self.App.is_app, target.resolve())): - basedir = self.bundle(app) - if archiver: - archivemap = self.context.products.get(self.archiver_type) - archivepath = archiver.create( - basedir, - self.outdir, - app.basename, - prefix=app.basename if self.prefix else None - ) - archivemap.add(app, self.outdir, [archivepath]) - self.context.log.info('created %s' % os.path.relpath(archivepath, get_buildroot())) - - def bundle(self, app): - """Create a self-contained application bundle containing the target - classes, dependencies and resources. - """ - assert(isinstance(app, BundleCreate.App)) - - bundledir = os.path.join(self.outdir, '%s-bundle' % app.basename) - self.context.log.info('creating %s' % os.path.relpath(bundledir, get_buildroot())) - - safe_mkdir(bundledir, clean=True) - - classpath = OrderedSet() - if not self.deployjar: - libdir = os.path.join(bundledir, 'libs') - os.mkdir(libdir) - - # Add internal dependencies to the bundle. - def add_jars(target): - target_jars = self.context.products.get('jars').get(target) - if target_jars is not None: - for basedir, jars in target_jars.items(): - for internaljar in jars: - os.symlink(os.path.join(basedir, internaljar), - os.path.join(libdir, internaljar)) - classpath.add(internaljar) - app.binary.walk(add_jars, lambda t: t.is_internal) - - # Add external dependencies to the bundle. 
- for basedir, externaljar in self.list_jar_dependencies(app.binary): - path = os.path.join(basedir, externaljar) - os.symlink(path, os.path.join(libdir, externaljar)) - classpath.add(externaljar) - - for basedir, jars in self.context.products.get('jars').get(app.binary).items(): - if len(jars) != 1: - raise TaskError('Expected 1 mapped binary for %s but found: %s' % (app.binary, jars)) - - binary = jars[0] - binary_jar = os.path.join(basedir, binary) - bundle_jar = os.path.join(bundledir, binary) - if not classpath: - os.symlink(binary_jar, bundle_jar) - else: - with open_zip(binary_jar, 'r') as src: - with open_zip(bundle_jar, 'w', compression=ZIP_DEFLATED) as dest: - for item in src.infolist(): - buf = src.read(item.filename) - if Manifest.PATH == item.filename: - manifest = Manifest(buf) - manifest.addentry(Manifest.CLASS_PATH, - ' '.join(os.path.join('libs', jar) for jar in classpath)) - buf = manifest.contents() - dest.writestr(item, buf) - - for bundle in app.bundles: - for path, relpath in bundle.filemap.items(): - bundlepath = os.path.join(bundledir, relpath) - safe_mkdir(os.path.dirname(bundlepath)) - os.symlink(path, bundlepath) - - return bundledir diff --git a/src/python/twitter/pants/tasks/cache_manager.py b/src/python/twitter/pants/tasks/cache_manager.py deleted file mode 100644 index 4d1df110e..000000000 --- a/src/python/twitter/pants/tasks/cache_manager.py +++ /dev/null @@ -1,331 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -try: - import cPickle as pickle -except ImportError: - import pickle - -from twitter.pants.base.build_invalidator import ( - BuildInvalidator, - CacheKeyGenerator, - NO_SOURCES, - TARGET_SOURCES) -from twitter.pants.base.target import Target -from twitter.pants.targets.external_dependency import ExternalDependency -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.jar_library import JarLibrary -from twitter.pants.targets.pants_target import Pants -from twitter.pants.targets.with_sources import TargetWithSources - - -class VersionedTargetSet(object): - """Represents a list of targets, a corresponding CacheKey, and a flag determining whether the - list of targets is currently valid. - - When invalidating a single target, this can be used to represent that target as a singleton. - When checking the artifact cache, this can also be used to represent a list of targets that are - built together into a single artifact. - """ - - @staticmethod - def from_versioned_targets(versioned_targets): - first_target = versioned_targets[0] - cache_manager = first_target._cache_manager - - # Quick sanity check; all the versioned targets should have the same cache manager. 
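The zip-copy above preserves every entry of the binary jar except the manifest, into which it splices a Class-Path pointing into libs/. A simplified stdlib-only sketch of that rewrite (it appends a Class-Path line instead of using the Manifest helper, and ignores the manifest's 72-byte line-wrapping rule):

    import os
    import zipfile

    def rewrite_manifest_classpath(src_jar, dest_jar, lib_jars):
      classpath = ' '.join(os.path.join('libs', jar) for jar in lib_jars)
      with zipfile.ZipFile(src_jar) as src:
        with zipfile.ZipFile(dest_jar, 'w', zipfile.ZIP_DEFLATED) as dest:
          for item in src.infolist():
            buf = src.read(item.filename)
            if item.filename == 'META-INF/MANIFEST.MF':
              # Simplification: a real manifest edit must respect line wrapping.
              buf += b'Class-Path: ' + classpath.encode('ascii') + b'\r\n'
            dest.writestr(item, buf)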
- # TODO(ryan): the way VersionedTargets store their own links to a single CacheManager instance - # feels hacky; see if there's a cleaner way for callers to handle awareness of the CacheManager. - for versioned_target in versioned_targets: - if versioned_target._cache_manager != cache_manager: - raise ValueError("Attempting to combine versioned targets %s and %s with different" - " CacheManager instances: %s and %s" % (first_target, versioned_target, - cache_manager, - versioned_target._cache_manager)) - return VersionedTargetSet(cache_manager, versioned_targets) - - def __init__(self, cache_manager, versioned_targets): - self._cache_manager = cache_manager - self.versioned_targets = versioned_targets - self.targets = [vt.target for vt in versioned_targets] - # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method. - self.cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key - for vt in versioned_targets]) - self.num_sources = self.cache_key.num_sources - self.sources = self.cache_key.sources - self.valid = not cache_manager.needs_update(self.cache_key) - - def update(self): - self._cache_manager.update(self) - - def force_invalidate(self): - self._cache_manager.force_invalidate(self) - - def __repr__(self): - return 'VTS(%s, %s)' % (','.join(target.id for target in self.targets), - 'valid' if self.valid else 'invalid') - - -class VersionedTarget(VersionedTargetSet): - """This class represents a singleton VersionedTargetSet, and has links to VersionedTargets that - the wrapped target depends on (after having resolved through any "alias" targets). - """ - def __init__(self, cache_manager, target, cache_key): - if not isinstance(target, Target): - raise ValueError("The target %s must be an instance of Target but is not." % target.id) - - self.target = target - self.cache_key = cache_key - # Must come after the assignments above, as they are used in the parent's __init__. - VersionedTargetSet.__init__(self, cache_manager, [self]) - self.id = target.id - self.dependencies = set() - - -# The result of calling check() on a CacheManager. -# Each member is a list of VersionedTargetSet objects in topological order. -# Tasks may need to perform no, some or all operations on either of these, depending on how they -# are implemented. -class InvalidationCheck(object): - @classmethod - def _partition_versioned_targets(cls, versioned_targets, partition_size_hint): - """Groups versioned targets so that each group has roughly the same number of sources. - - versioned_targets is a list of VersionedTarget objects [vt1, vt2, vt3, vt4, vt5, vt6, ...]. - - Returns a list of VersionedTargetSet objects, e.g., [VT1, VT2, VT3, ...] representing the - same underlying targets. E.g., VT1 is the combination of [vt1, vt2, vt3], VT2 is the combination - of [vt4, vt5] and VT3 is [vt6]. - - The new versioned targets are chosen to have roughly partition_size_hint sources. - - This is useful as a compromise between flat mode, where we build all targets in a - single compiler invocation, and non-flat mode, where we invoke a compiler for each target, - which may lead to lots of compiler startup overhead. A task can choose instead to build one - group at a time. - """ - res = [] - - # Hack around the python outer scope problem.
- class VtGroup(object): - def __init__(self): - self.vts = [] - self.total_sources = 0 - - current_group = VtGroup() - - def add_to_current_group(vt): - current_group.vts.append(vt) - current_group.total_sources += vt.num_sources - - def close_current_group(): - if len(current_group.vts) > 0: - new_vt = VersionedTargetSet.from_versioned_targets(current_group.vts) - res.append(new_vt) - current_group.vts = [] - current_group.total_sources = 0 - - for vt in versioned_targets: - add_to_current_group(vt) - if current_group.total_sources > 1.5 * partition_size_hint and len(current_group.vts) > 1: - # Too big. Close the current group without this vt and add it to the next one. - current_group.vts.pop() - close_current_group() - add_to_current_group(vt) - elif current_group.total_sources > partition_size_hint: - close_current_group() - close_current_group() # Close the last group, if any. - - return res - - def __init__(self, all_vts, invalid_vts, partition_size_hint=None): - # All the targets, valid and invalid. - self.all_vts = all_vts - - # All the targets, partitioned if so requested. - self.all_vts_partitioned = self._partition_versioned_targets( - all_vts, partition_size_hint) if partition_size_hint else all_vts - - # Just the invalid targets. - self.invalid_vts = invalid_vts - - # Just the invalid targets, partitioned if so requested. - self.invalid_vts_partitioned = self._partition_versioned_targets( - invalid_vts, partition_size_hint) if partition_size_hint else invalid_vts - - -class CacheManager(object): - """Manages cache checks, updates and invalidation keeping track of basic change - and invalidation statistics. - Note that this is distinct from the ArtifactCache concept, and should probably be renamed. - """ - def __init__(self, cache_key_generator, build_invalidator_dir, - invalidate_dependents, extra_data, only_externaldeps): - self._cache_key_generator = cache_key_generator - self._invalidate_dependents = invalidate_dependents - self._extra_data = pickle.dumps(extra_data) # extra_data may be None. - self._sources = NO_SOURCES if only_externaldeps else TARGET_SOURCES - - self._invalidator = BuildInvalidator(build_invalidator_dir) - - def update(self, vts): - """Mark a changed or invalidated VersionedTargetSet as successfully processed.""" - for vt in vts.versioned_targets: - self._invalidator.update(vt.cache_key) - vt.valid = True - self._invalidator.update(vts.cache_key) - vts.valid = True - - def force_invalidate(self, vts): - """Force invalidation of a VersionedTargetSet.""" - for vt in vts.versioned_targets: - self._invalidator.force_invalidate(vt.cache_key) - vt.valid = False - self._invalidator.force_invalidate(vts.cache_key) - vts.valid = False - - def check(self, targets, partition_size_hint=None): - """Checks whether each of the targets has changed and invalidates it if so. - - Returns a list of VersionedTargetSet objects (either valid or invalid). The returned sets - 'cover' the input targets, possibly partitioning them, and are in topological order. - The caller can inspect these in order and, e.g., rebuild the invalid ones. - """ - all_vts = self._sort_and_validate_targets(targets) - invalid_vts = filter(lambda vt: not vt.valid, all_vts) - return InvalidationCheck(all_vts, invalid_vts, partition_size_hint) - - def _sort_and_validate_targets(self, targets): - """Validate each target. - - Returns a topologically ordered set of VersionedTargets, each representing one input target. 
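Reduced to plain numbers, the greedy grouping in _partition_versioned_targets behaves like the sketch below: accumulate until the hint is exceeded, but let a group grow to 1.5x the hint before evicting its newest member into the next group.

    def partition(source_counts, size_hint):
      groups, current, total = [], [], 0
      for n in source_counts:
        current.append(n)
        total += n
        if total > 1.5 * size_hint and len(current) > 1:
          current.pop()  # too big: evict n into the next group
          groups.append(current)
          current, total = [n], n
        elif total > size_hint:
          groups.append(current)
          current, total = [], 0
      if current:
        groups.append(current)
      return groups

    print(partition([10, 20, 500, 5, 80, 90], 100))
    # [[10, 20], [500], [5, 80], [90]]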
- """ - # We must check the targets in this order, to ensure correctness if invalidate_dependents=True, - # since we use earlier cache keys to compute later cache keys in this case. - ordered_targets = self._order_target_list(targets) - - # This will be a list of VersionedTargets that correspond to @targets. - versioned_targets = [] - - # This will be a mapping from each target to its corresponding VersionedTarget. - versioned_targets_by_target = {} - - # Map from id to current fingerprint of the target with that id. We update this as we iterate, - # in topological order, so when handling a target, this will already contain all its deps (in - # this round). - id_to_hash = {} - - for target in ordered_targets: - dependency_keys = set() - if self._invalidate_dependents and hasattr(target, 'dependencies'): - # Note that we only need to do this for the immediate deps, because those will already - # reflect changes in their own deps. - for dep in target.dependencies: - # We rely on the fact that any deps have already been processed, either in an earlier - # round or because they came first in ordered_targets. - # Note that only external deps (e.g., JarDependency) or targets with sources can - # affect invalidation. Other targets (JarLibrary, Pants) are just dependency scaffolding. - if isinstance(dep, ExternalDependency): - dependency_keys.add(dep.cache_key()) - elif isinstance(dep, TargetWithSources): - fprint = id_to_hash.get(dep.id, None) - if fprint is None: - # It may have been processed in a prior round, and therefore the fprint should - # have been written out by the invalidator. - fprint = self._invalidator.existing_hash(dep.id) - # Note that fprint may still be None here. E.g., a codegen target is in the list - # of deps, but its fprint is not visible to our self._invalidator (that of the - # target synthesized from it is visible, so invalidation will still be correct.) - # - # Another case where this can happen is a dep of a codegen target on, say, - # a java target that hasn't been built yet (again, the synthesized target will - # depend on that same java target, so invalidation will still be correct.) - # TODO(benjy): Make this simpler and more obviously correct. - if fprint is not None: - dependency_keys.add(fprint) - elif isinstance(dep, JarLibrary) or isinstance(dep, Pants): - pass - else: - raise ValueError('Cannot calculate a cache_key for a dependency: %s' % dep) - cache_key = self._key_for(target, dependency_keys) - id_to_hash[target.id] = cache_key.hash - - # Create a VersionedTarget corresponding to @target. - versioned_target = VersionedTarget(self, target, cache_key) - - # Add the new VersionedTarget to the list of computed VersionedTargets. - versioned_targets.append(versioned_target) - - # Add to the mapping from Targets to VersionedTargets, for use in hooking up VersionedTarget - # dependencies below. - versioned_targets_by_target[target] = versioned_target - - # Having created all applicable VersionedTargets, now we build the VersionedTarget dependency - # graph, looking through targets that don't correspond to VersionedTargets themselves. - versioned_target_deps_by_target = {} - - def get_versioned_target_deps_for_target(target): - # For every dependency of @target, we will store its corresponding VersionedTarget here. For - # dependencies that don't correspond to a VersionedTarget (e.g. pass-through dependency - # wrappers), we will resolve their actual dependencies and find VersionedTargets for them. 
- versioned_target_deps = set([]) - if hasattr(target, 'dependencies'): - for dep in target.dependencies: - for dependency in dep.resolve(): - if dependency in versioned_targets_by_target: - # If there exists a VersionedTarget corresponding to this Target, store it and - # continue. - versioned_target_deps.add(versioned_targets_by_target[dependency]) - elif dependency in versioned_target_deps_by_target: - # Otherwise, see if we've already resolved this dependency to the VersionedTargets it - # depends on, and use those. - versioned_target_deps.update(versioned_target_deps_by_target[dependency]) - else: - # Otherwise, compute the VersionedTargets that correspond to this dependency's - # dependencies, cache and use the computed result. - versioned_target_deps_by_target[dependency] = get_versioned_target_deps_for_target( - dependency) - versioned_target_deps.update(versioned_target_deps_by_target[dependency]) - - # Return the VersionedTarget dependencies that this target's VersionedTarget should depend on. - return versioned_target_deps - - # Initialize all VersionedTargets to point to the VersionedTargets they depend on. - for versioned_target in versioned_targets: - versioned_target.dependencies = get_versioned_target_deps_for_target(versioned_target.target) - - return versioned_targets - - def needs_update(self, cache_key): - return self._invalidator.needs_update(cache_key) - - def _order_target_list(self, targets): - """Orders the targets topologically, from least to most dependent.""" - targets = set(t for t in targets if isinstance(t, Target)) - return filter(targets.__contains__, reversed(InternalTarget.sort_targets(targets))) - - def _key_for(self, target, dependency_keys): - def fingerprint_extra(sha): - sha.update(self._extra_data) - for key in sorted(dependency_keys): # Sort to ensure hashing in a consistent order. - sha.update(key) - - return self._cache_key_generator.key_for_target( - target, - sources=self._sources, - fingerprint_extra=fingerprint_extra - ) - diff --git a/src/python/twitter/pants/tasks/check_exclusives.py b/src/python/twitter/pants/tasks/check_exclusives.py deleted file mode 100644 index 4830eebb5..000000000 --- a/src/python/twitter/pants/tasks/check_exclusives.py +++ /dev/null @@ -1,287 +0,0 @@ -from twitter.common.collections import OrderedSet - -__author__ = 'Mark Chu-Carroll (markcc@foursquare.com)' - - -from collections import defaultdict -from copy import copy -from twitter.pants.base.target import Target -from twitter.pants.tasks import Task, TaskError -from twitter.pants.targets.internal import InternalTarget - - -class CheckExclusives(Task): - """Computes transitive exclusive maps. - - This computes transitive exclusive tags for a dependency graph rooted - with a set of build targets specified by a user. If this process produces - any collisions where a single target contains multiple tag values for a single - exclusives key, then it generates an error and the compilation will fail. - - The syntax of the exclusives attribute is: :: - - exclusives = {"id": "value", ...} - - For example, suppose that we had two java targets, jliba and jlibb. jliba uses - slf4j, which includes in its jar package an implementation of log4j. jlibb uses - log4j directly. But the version of log4j that's packaged inside of slf4j is - different from the version used by jlibb. 
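The hashing discipline in _key_for is the part worth remembering: dependency keys are folded into the fingerprint in sorted order, so the result is stable regardless of iteration order. In isolation (names hypothetical; the real code delegates to CacheKeyGenerator):

    import hashlib

    def combined_fingerprint(target_hash, dependency_keys, extra_data=b''):
      sha = hashlib.sha1()
      sha.update(target_hash)
      sha.update(extra_data)
      for key in sorted(dependency_keys):  # stable order => stable fingerprint
        sha.update(key)
      return sha.hexdigest()

    print(combined_fingerprint(b'abc123', [b'dep2', b'dep1']))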
:: - - java_library(name='jliba', - dependencies = ['slf4j-with-log4j-2.4']) - java_library(name='jlibb', - dependencies=['log4j-1.9']) - java_binary(name='javabin', dependencies=[':jliba', ':jlibb']) - - In this case, the binary target 'javabin' depends on both slf4j with its - packaged log4j version 2.4, and on log4j-1.9. - Pants doesn't know that the slf4j and log4j jar_dependencies contain - incompatible versions of the same library, and so it can't detect the error. - - With exclusives, the jar_library targets for these libraries would declare - exclusives tags: :: - - jar_library(name='slf4j-with-log4j-2.4', exclusives={'log4j': '2.4'}) - jar_library(name='log4j-1.9', exclusives={'log4j': '1.9'}) - - With the exclusives declared, pants can recognize that 'javabin' has conflicting - dependencies, and can generate an appropriate error message. - - Data about exclusives is provided to other tasks via data build products. - If the build data product 'exclusives_groups' is required, then an - ExclusivesMapping object will be created. - """ - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - Task.setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag('error_on_collision'), - mkflag('error_on_collision', negate=True), - dest='exclusives_error_on_collision', default=True, - action='callback', callback=mkflag.set_bool, - help=("[%default] Signal an error and abort the build if an " + - "exclusives collision is detected")) - - def __init__(self, context, signal_error=None): - Task.__init__(self, context) - self.signal_error = (context.options.exclusives_error_on_collision - if signal_error is None else signal_error) - - def _compute_exclusives_conflicts(self, targets): - """Compute the set of distinct chunks of targets that are required based on exclusives. - If two targets have different values for a particular exclusives tag, - then those targets must end up in different chunks. - This method computes the exclusives values that define each chunk. - e.g.: if target a has exclusives {"x": "1", "z": "1"}, target b has {"x": "2"}, - target c has {"y": "1"}, and target d has {"y": "2", "z": "1"}, then we need to - divide into chunks on exclusives tags "x" and "y". We don't need to include - "z" in the chunk specification, because there are no conflicts on z. - - Parameters: - targets: a list of the targets being built. - Return: the set of exclusives tags that should be used for chunking. - """ - exclusives_map = defaultdict(set) - for t in targets: - if t.exclusives is not None: - for k in t.exclusives: - exclusives_map[k] |= t.exclusives[k] - conflicting_keys = defaultdict(set) - for k in exclusives_map: - if len(exclusives_map[k]) > 1: - conflicting_keys[k] = exclusives_map[k] - return conflicting_keys - - def execute(self, targets): - # compute transitive exclusives - for t in targets: - t._propagate_exclusives() - # Check for exclusives collision.
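Worked through the docstring's own example, _compute_exclusives_conflicts is a union-then-filter over tag values:

    from collections import defaultdict

    targets = [  # (name, exclusives) pairs standing in for real targets
      ('a', {'x': {'1'}, 'z': {'1'}}),
      ('b', {'x': {'2'}}),
      ('c', {'y': {'1'}}),
      ('d', {'y': {'2'}, 'z': {'1'}}),
    ]
    exclusives_map = defaultdict(set)
    for _, excl in targets:
      for k, vals in excl.items():
        exclusives_map[k] |= vals
    conflicts = {k: v for k, v in exclusives_map.items() if len(v) > 1}
    print(conflicts)  # {'x': {'1', '2'}, 'y': {'1', '2'}} -- no conflict on 'z'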
- for t in targets: - excl = t.get_all_exclusives() - for key in excl: - if len(excl[key]) > 1: - msg = 'target %s has more than one exclusives tag for key %s: %s' % \ - (t.address.reference(), key, list(excl[key])) - if self.signal_error: - raise TaskError(msg) - else: - print('Warning: %s' % msg) - - if self.context.products.is_required_data('exclusives_groups'): - mapping = ExclusivesMapping(self.context) - partition_keys = self._compute_exclusives_conflicts(targets) - for key in partition_keys: - mapping.add_conflict(key, partition_keys[key]) - mapping._populate_target_maps(targets) - self.context.products.safe_create_data('exclusives_groups', lambda: mapping) - - -class ExclusivesMapping(object): - def __init__(self, context): - self.context = context - self.conflicting_exclusives = {} - self.key_to_targets = defaultdict(set) - self.target_to_key = {} - self.ordering = None - self._group_classpaths = {} # key -> OrderedSet. - - def add_conflict(self, key, values): - """Register a conflict on an exclusives key. - Parameters: - key: the exclusives key on which a conflict was detected. - values: the different values used for the key in different targets. - """ - self.conflicting_exclusives[key] = values - - def get_targets_for_group_key(self, key): - """Gets the set of targets that share exclusives. - Parameters: - key: a key, generated by _get_exclusives_key, for the exclusives - settings shared by a group of targets. - Return: the set of targets that share the exclusives settings. Returns - an empty set if no targets have that key. - """ - - return self.key_to_targets[key] - - def get_group_key_for_target(self, target): - """ Get the exclusives key for a target """ - return self.target_to_key[target] - - def get_group_keys(self): - """Get the set of keys for all exclusives groups in the current build.""" - if len(self.conflicting_exclusives) == 0: - return ["<none>"] - else: - return self.key_to_targets.keys() - - def get_ordered_group_keys(self): - """Compute the correct order in which to compile exclusives groups. - - In group, we already do group-based ordering. But that ordering is done separately on - each exclusives group. If we have a grouping: - a(exclusives={x: 1, y:2}, dependencies=[ ':b', ':c' ]) - b(exclusives={x:"<none>", y: "<none>"}, dependencies=[]) - c(exclusives={x:<none>, y:2}, dependencies=[':b']) - - If we were to do grouping in the exclusives ordering {x:<none>, y:2}, {x: <none>, y:<none>}, - {x:1, y:2}, then we'd be compiling the group containing c before the group containing b; but - c depends on b. - """ - def number_of_emptys(key): - if key == "<none>": - return len(self.conflicting_exclusives) - return key.count("<none>") - - if self.ordering is not None: - return self.ordering - # The correct order is from least exclusives to most exclusives - a target can only depend on - # other targets with fewer exclusives than itself. - keys_by_empties = [ [] for l in range(len(self.key_to_targets)) ] - # Flag to indicate whether there are any groups without any exclusives. - no_exclusives = False - for k in self.key_to_targets: - if k == "<none>": - no_exclusives = True - else: - keys_by_empties[number_of_emptys(k)].append(k) - result = [ ] - for i in range(len(keys_by_empties)): - for j in range(len(keys_by_empties[i])): - result.append(keys_by_empties[i][j]) - if no_exclusives: - result.append("<none>") - result.reverse() - self.ordering = result - return self.ordering - - def _get_exclusives_key(self, target): - # compute an exclusives group key: a list of the exclusives values for the keys - # in the conflicting keys list.
- target_key = [] - for k in self.conflicting_exclusives: - excl = target.exclusives if isinstance(target, Target) else target.declared_exclusives - if len(excl[k]) > 0: - target_key.append("%s=%s" % (k, list(excl[k])[0])) - else: - target_key.append("%s=<none>" % k) - - if target_key == []: - return "<none>" - else: - return ','.join(target_key) - - def _populate_target_maps(self, targets): - """Populates maps of exclusive keys to targets, and vice versa.""" - all_targets = set() - workqueue = copy(targets) - while len(workqueue) > 0: - t = workqueue.pop() - if t not in all_targets: - all_targets.add(t) - if isinstance(t, InternalTarget): - workqueue += t.dependencies - - for t in all_targets: - key = self._get_exclusives_key(t) - if key == '': - raise TaskError('Invalid empty group key') - if key not in self._group_classpaths: - self._group_classpaths[key] = OrderedSet() - self.key_to_targets[key].add(t) - self.target_to_key[t] = key - - def get_classpath_for_group(self, group_key): - """Get the classpath to use for jvm compilations of a group. - - Each exclusives group requires a distinct classpath. We maintain - them here as a map from the exclusives key to a classpath. The - classpath is updated during compilations to add the results of - compiling a group to the classpaths of other groups that could depend on it. - """ - if group_key not in self._group_classpaths: - self._group_classpaths[group_key] = OrderedSet() - # get the classpath to use for compiling targets within the group specified by group_key. - return list(reversed(self._group_classpaths[group_key])) - - def _key_to_map(self, key): - result = {} - if key == '<none>' or key == '': - return result - pairs = key.split(',') - for p in pairs: - (k, v) = p.split("=") - result[k] = v - return result - - def _is_compatible(self, mod_key, other_key): - # Check if a set of classpath modifications produced by compiling elements of the group - # specified by mod_key should be added to the classpath of other_key's group. - - # A key is a list of comma separated name=value keys. - # keys match, if and only if for all pairs k=v1 from mod, and k=v2 from other, - # either v1 == v2 or v1 == <none>. - mod_map = self._key_to_map(mod_key) - other_map = self._key_to_map(other_key) - for k in mod_map: - vm = mod_map[k] - vo = other_map[k] - if not (vm == vo or vm == "<none>"): - return False - return True - - def update_compatible_classpaths(self, group_key, path_additions): - """Update the classpath of all groups compatible with group_key, adding path_additions to their - classpath. - """ - additions = list(reversed(path_additions)) - for key in self._group_classpaths: - if group_key is None or self._is_compatible(group_key, key): - group_classpath = self._group_classpaths[key] - group_classpath.update(additions) - - def set_base_classpath_for_group(self, group_key, classpath): - # set the initial classpath of the elements of group_key to classpath. - self._group_classpaths[group_key] = OrderedSet(reversed(classpath)) - diff --git a/src/python/twitter/pants/tasks/check_published_deps.py b/src/python/twitter/pants/tasks/check_published_deps.py deleted file mode 100644 index 216b1d628..000000000 --- a/src/python/twitter/pants/tasks/check_published_deps.py +++ /dev/null @@ -1,74 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc.
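Group keys are comma-joined k=v pairs over the conflicting keys, with <none> standing in for "takes no position", and _is_compatible treats <none> on the modifying side as a wildcard. A toy rendition of the pair:

    def group_key(exclusives, conflicting_keys):
      # '<none>' marks a target that takes no position on a conflicting key.
      pairs = ['%s=%s' % (k, exclusives.get(k, '<none>'))
               for k in sorted(conflicting_keys)]
      return ','.join(pairs) if pairs else '<none>'

    def is_compatible(mod_key, other_key):
      mod = dict(p.split('=') for p in mod_key.split(','))
      other = dict(p.split('=') for p in other_key.split(','))
      return all(v == other[k] or v == '<none>' for k, v in mod.items())

    print(group_key({'x': '1'}, ['x', 'y']))         # x=1,y=<none>
    print(is_compatible('x=<none>,y=2', 'x=1,y=2'))  # True
    print(is_compatible('x=1,y=2', 'x=2,y=2'))       # False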
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base.target import Target -from twitter.pants.targets.jar_dependency import JarDependency -from twitter.pants.tasks.console_task import ConsoleTask -from twitter.pants.tasks.jar_publish import PushDb - - -class CheckPublishedDeps(ConsoleTask): - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(CheckPublishedDeps, cls).setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag('print-uptodate'), mkflag('print-uptodate', negate=True), - dest='check_deps_print_uptodate', default=False, - action='callback', callback=mkflag.set_bool, - help='[%default] Also print up-to-date dependencies.') - - def __init__(self, context): - ConsoleTask.__init__(self, context) - - self._print_uptodate = context.options.check_deps_print_uptodate - self.repos = context.config.getdict('jar-publish', 'repos') - self._artifacts_to_targets = {} - all_addresses = (address for buildfile in BuildFile.scan_buildfiles(get_buildroot()) - for address in Target.get_all_addresses(buildfile)) - for address in all_addresses: - target = Target.get(address) - if target.is_exported: - provided_jar, _, _ = target.get_artifact_info() - artifact = (provided_jar.org, provided_jar.name) - if not artifact in self._artifacts_to_targets: - self._artifacts_to_targets[artifact] = target - - def console_output(self, targets): - push_dbs = {} - - def get_jar_with_version(target): - db = target.provides.repo.push_db - if db not in push_dbs: - push_dbs[db] = PushDb.load(db) - return push_dbs[db].as_jar_with_version(target) - - visited = set() - for target in targets: - for dependency in target.dependencies: - for dep in dependency.resolve(): - if isinstance(dep, JarDependency): - artifact = (dep.org, dep.name) - if artifact in self._artifacts_to_targets and not artifact in visited: - visited.add(artifact) - artifact_target = self._artifacts_to_targets[artifact] - _, semver, sha, _ = get_jar_with_version(artifact_target) - if semver.version() != dep.rev: - yield 'outdated %s#%s %s latest %s' % (dep.org, dep.name, dep.rev, semver.version()) - elif self._print_uptodate: - yield 'up-to-date %s#%s %s' % (dep.org, dep.name, semver.version()) diff --git a/src/python/twitter/pants/tasks/checkstyle.py b/src/python/twitter/pants/tasks/checkstyle.py deleted file mode 100644 index beaa40474..000000000 --- a/src/python/twitter/pants/tasks/checkstyle.py +++ /dev/null @@ -1,105 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. 
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from twitter.common.dirutil import safe_open - -from twitter.pants.process.xargs import Xargs - -from .nailgun_task import NailgunTask - -from . import TaskError - - -CHECKSTYLE_MAIN = 'com.puppycrawl.tools.checkstyle.Main' - - -class Checkstyle(NailgunTask): - @staticmethod - def _is_checked(target): - return target.is_java and not target.is_synthetic - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - NailgunTask.setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag("skip"), mkflag("skip", negate=True), - dest="checkstyle_skip", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Skip checkstyle.") - - def __init__(self, context): - super(Checkstyle, self).__init__(context) - - self._checkstyle_bootstrap_key = 'checkstyle' - bootstrap_tools = context.config.getlist('checkstyle', 'bootstrap-tools', - default=[':twitter-checkstyle']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._checkstyle_bootstrap_key, bootstrap_tools) - - self._configuration_file = context.config.get('checkstyle', 'configuration') - - self._work_dir = context.config.get('checkstyle', 'workdir') - self._properties = context.config.getdict('checkstyle', 'properties') - self._confs = context.config.getlist('checkstyle', 'confs', default=['default']) - self.context.products.require_data('exclusives_groups') - - def execute(self, targets): - if not self.context.options.checkstyle_skip: - with self.invalidated(filter(Checkstyle._is_checked, targets)) as invalidation_check: - invalid_targets = [] - for vt in invalidation_check.invalid_vts: - invalid_targets.extend(vt.targets) - sources = self.calculate_sources(invalid_targets) - if sources: - result = self.checkstyle(sources, invalid_targets) - if result != 0: - raise TaskError('java %s ... 
exited non-zero (%i)' % (CHECKSTYLE_MAIN, result)) - - def calculate_sources(self, targets): - sources = set() - for target in targets: - sources.update([os.path.join(target.target_base, source) - for source in target.sources if source.endswith('.java')]) - return sources - - def checkstyle(self, sources, targets): - egroups = self.context.products.get_data('exclusives_groups') - etag = egroups.get_group_key_for_target(targets[0]) - classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._checkstyle_bootstrap_key) - cp = egroups.get_classpath_for_group(etag) - classpath.extend(jar for conf, jar in cp if conf in self._confs) - - args = [ - '-c', self._configuration_file, - '-f', 'plain' - ] - - if self._properties: - properties_file = os.path.join(self._work_dir, 'checkstyle.properties') - with safe_open(properties_file, 'w') as pf: - for k, v in self._properties.items(): - pf.write('%s=%s\n' % (k, v)) - args.extend(['-p', properties_file]) - - # We've hit known cases of checkstyle command lines being too long for the system so we guard - # with Xargs since checkstyle does not accept, for example, @argfile style arguments. - def call(xargs): - return self.runjava(classpath=classpath, main=CHECKSTYLE_MAIN, - args=args + xargs, workunit_name='checkstyle') - checks = Xargs(call) - - return checks.execute(sources) diff --git a/src/python/twitter/pants/tasks/code_gen.py b/src/python/twitter/pants/tasks/code_gen.py deleted file mode 100644 index ebe65a476..000000000 --- a/src/python/twitter/pants/tasks/code_gen.py +++ /dev/null @@ -1,150 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from collections import defaultdict - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.tasks import Task - - -class CodeGen(Task): - """Encapsulates the common machinery for codegen targets that support multiple output languages. - - This Task will only invoke code generation for changed targets and for the set of languages - in the active context that require codegen unless forced. - """ - - def is_gentarget(self, target): - """Subclass must return True if it handles generating for the target.""" - raise NotImplementedError - - def is_forced(self, lang): - """Subclass may return True to force code generation for the given language.""" - return False - - def genlangs(self): - """Subclass must use this to identify the targets consuming each language it generates for. - - Return value is a dict mapping supported generation target language names - to a predicate that can select targets consuming that language. 
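Because checkstyle cannot read arguments from a file, the Xargs wrapper's only job is to split one logical invocation into several bounded ones. The real twitter.pants.process.xargs retries on E2BIG; this fixed-chunk sketch only conveys the shape:

    def xargs(run_cmd, args, chunk_size=100):
      # Run run_cmd over bounded chunks of args; report the first failure.
      for i in range(0, len(args), chunk_size):
        result = run_cmd(args[i:i + chunk_size])
        if result != 0:
          return result
      return 0

    # usage sketch: xargs(lambda srcs: call(base_args + srcs), sorted(sources))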
- """ - raise NotImplementedError - - def prepare_gen(self, targets): - """ - Subclasses should override if they need to prepare for potential upcoming calls to genlang. - - Note that this does not mean genlang will necessarily be called. - """ - pass - - def genlang(self, lang, targets): - """Subclass must override and generate code in :lang for the given targets. - - May return a list of pairs (target, files) where files is a list of files - to be cached against the target. - """ - raise NotImplementedError - - def createtarget(self, lang, gentarget, dependees): - """Subclass must override and create a synthetic target. - - The target must contain the sources generated for the given gentarget. - """ - raise NotImplementedError - - def getdependencies(self, gentarget): - # TODO(John Sirois): fix python/jvm dependencies handling to be uniform - if hasattr(gentarget, 'internal_dependencies'): - return gentarget.internal_dependencies - else: - return gentarget.dependencies - - def updatedependencies(self, target, dependency): - if hasattr(target, 'update_dependencies'): - target.update_dependencies([dependency]) - else: - target.dependencies.add(dependency) - - def execute(self, targets): - gentargets = [t for t in targets if self.is_gentarget(t)] - capabilities = self.genlangs() # lang_name => predicate - gentargets_by_dependee = self.context.dependents( - on_predicate=self.is_gentarget, - from_predicate=lambda t: not self.is_gentarget(t) - ) - dependees_by_gentarget = defaultdict(set) - for dependee, tgts in gentargets_by_dependee.items(): - for gentarget in tgts: - dependees_by_gentarget[gentarget].add(dependee) - - def find_gentargets(predicate): - tgts = set() - for dependee in gentargets_by_dependee.keys(): - if predicate(dependee): - for tgt in gentargets_by_dependee.pop(dependee): - tgt.walk(tgts.add, self.is_gentarget) - return tgts.intersection(set(gentargets)) - - gentargets_bylang = {} - for lang, predicate in capabilities.items(): - gentargets_bylang[lang] = gentargets if self.is_forced(lang) else find_gentargets(predicate) - if gentargets_by_dependee: - self.context.log.warn('Left with unexpected unconsumed gen targets:\n\t%s' % '\n\t'.join( - '%s -> %s' % (dependee, gentargets) - for dependee, gentargets in gentargets_by_dependee.items() - )) - - if gentargets: - self.prepare_gen(gentargets) - with self.invalidated(gentargets, invalidate_dependents=True) as invalidation_check: - for vts in invalidation_check.invalid_vts_partitioned: - invalid_targets = set(vts.targets) - for lang, tgts in gentargets_bylang.items(): - invalid_lang_tgts = invalid_targets.intersection(tgts) - if invalid_lang_tgts: - self.genlang(lang, invalid_lang_tgts) - - # Link synthetic targets for all in-play gen targets. 
- invalid_vts_by_target = dict([(vt.target, vt) for vt in invalidation_check.invalid_vts]) - vts_artifactfiles_pairs = [] - write_to_artifact_cache = self.artifact_cache_writes_enabled() if invalid_vts_by_target else False - for lang, tgts in gentargets_bylang.items(): - if tgts: - langtarget_by_gentarget = {} - for target in tgts: - syn_target = self.createtarget( - lang, - target, - dependees_by_gentarget.get(target, []) - ) - syn_target.derived_from = target - syn_target.add_labels('codegen', 'synthetic') - if write_to_artifact_cache and target in invalid_vts_by_target: - generated_sources = list(syn_target.sources_absolute_paths()) - vts_artifactfiles_pairs.append((invalid_vts_by_target[target], generated_sources)) - langtarget_by_gentarget[target] = syn_target - genmap = self.context.products.get(lang) - for gentarget, langtarget in langtarget_by_gentarget.items(): - genmap.add(gentarget, get_buildroot(), [langtarget]) - # Transfer dependencies from gentarget to its synthetic counterpart. - for dep in self.getdependencies(gentarget): - if self.is_gentarget(dep): # Translate the dep to its synthetic counterpart. - self.updatedependencies(langtarget, langtarget_by_gentarget[dep]) - else: # Depend directly on the dep. - self.updatedependencies(langtarget, dep) - if write_to_artifact_cache: - self.update_artifact_cache(vts_artifactfiles_pairs) diff --git a/src/python/twitter/pants/tasks/confluence_publish.py b/src/python/twitter/pants/tasks/confluence_publish.py deleted file mode 100644 index 5b0d146a9..000000000 --- a/src/python/twitter/pants/tasks/confluence_publish.py +++ /dev/null @@ -1,143 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
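CodeGen.execute starts by inverting the dependee-to-gentargets map returned by context.dependents into the dependees_by_gentarget map it actually consumes; that inversion on its own:

    from collections import defaultdict

    def invert(gentargets_by_dependee):
      # Flip {dependee: {gentarget, ...}} into {gentarget: {dependee, ...}}.
      dependees_by_gentarget = defaultdict(set)
      for dependee, gentargets in gentargets_by_dependee.items():
        for gentarget in gentargets:
          dependees_by_gentarget[gentarget].add(dependee)
      return dependees_by_gentarget

    print(invert({'app': {'thrift1', 'thrift2'}, 'lib': {'thrift1'}}))
    # {'thrift1': {'app', 'lib'}, 'thrift2': {'app'}} (set ordering may vary)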
-# ================================================================================================== - -import textwrap - -import os - -from twitter.common.confluence import Confluence, ConfluenceError -from twitter.common.dirutil import safe_open - -from twitter.pants import binary_util -from twitter.pants.targets.doc import Page -from twitter.pants.tasks import Task, TaskError - -"""Classes to ease publishing Page targets to Confluence wikis.""" - -class ConfluencePublish(Task): - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - cls.url_option = option_group.add_option(mkflag("url"), dest="confluence_publish_url", - help="The url of the confluence site to post to.") - - option_group.add_option(mkflag("force"), mkflag("force", negate=True), - dest = "confluence_publish_force", - action="callback", callback=mkflag.set_bool, default=False, - help = "[%default] Force publish the page even if its contents are " - "identical to the contents on confluence.") - - option_group.add_option(mkflag("open"), mkflag("open", negate=True), - dest="confluence_publish_open", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Attempt to open the published confluence wiki page " - "in a browser.") - - option_group.add_option(mkflag("user"), dest="confluence_user", - help="Confluence user name, defaults to unix user.") - - def __init__(self, context): - Task.__init__(self, context) - - self.url = ( - context.options.confluence_publish_url - or context.config.get('confluence-publish', 'url') - ) - - if not self.url: - raise TaskError("Unable to proceed publishing to confluence. Please configure a 'url' under " - "the 'confluence-publish' heading in pants.ini or using the %s command line " - "option." % self.url_option) - - self.force = context.options.confluence_publish_force - self.open = context.options.confluence_publish_open - self.context.products.require('wiki_html') - self._wiki = None - self.user = context.options.confluence_user - - def wiki(self): - raise NotImplementedError('Subclasses must provide the wiki target they are associated with') - - def api(self): - return 'confluence1' - - def execute(self, targets): - pages = [] - for target in targets: - if isinstance(target, Page): - wikiconfig = target.wiki_config(self.wiki()) - if wikiconfig: - pages.append((target, wikiconfig)) - - urls = list() - - genmap = self.context.products.get('wiki_html') - for page, wikiconfig in pages: - html_info = genmap.get((self.wiki(), page)) - if len(html_info) > 1: - raise TaskError('Unexpected resources for %s: %s' % (page, html_info)) - basedir, htmls = html_info.items()[0] - if len(htmls) != 1: - raise TaskError('Unexpected resources for %s: %s' % (page, htmls)) - with safe_open(os.path.join(basedir, htmls[0])) as contents: - url = self.publish_page( - page.address, - wikiconfig['space'], - wikiconfig['title'], - contents.read(), - parent=wikiconfig.get('parent') - ) - if url: - urls.append(url) - self.context.log.info('Published %s to %s' % (page, url)) - - if self.open and urls: - binary_util.ui_open(*urls) - - def publish_page(self, address, space, title, content, parent=None): - body = textwrap.dedent(''' - - <!-- DO NOT EDIT - generated by pants from %s --> - - %s - ''').strip() % (address, content) - - pageopts = dict( - versionComment = 'updated by pants!'
- ) - wiki = self.login() - existing = wiki.getpage(space, title) - if existing: - if not self.force and existing['content'].strip() == body.strip(): - self.context.log.warn("Skipping publish of '%s' - no changes" % title) - return - - pageopts['id'] = existing['id'] - pageopts['version'] = existing['version'] - - try: - page = wiki.create_html_page(space, title, body, parent, **pageopts) - return page['url'] - except ConfluenceError as e: - raise TaskError('Failed to update confluence: %s' % e) - - def login(self): - if not self._wiki: - try: - self._wiki = Confluence.login(self.url, self.user, self.api()) - except ConfluenceError as e: - raise TaskError('Failed to login to confluence: %s' % e) - return self._wiki diff --git a/src/python/twitter/pants/tasks/console_task.py b/src/python/twitter/pants/tasks/console_task.py deleted file mode 100644 index b0438d51d..000000000 --- a/src/python/twitter/pants/tasks/console_task.py +++ /dev/null @@ -1,57 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import errno -import sys - -from contextlib import contextmanager - -from . import Task - - -class ConsoleTask(Task): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("sep"), dest="console_%s_separator" % cls.__name__, - default='\\n', help="String to use to separate results.") - - def __init__(self, context, outstream=sys.stdout): - super(ConsoleTask, self).__init__(context) - separator_option = "console_%s_separator" % self.__class__.__name__ - self._console_separator = getattr(context.options, separator_option).decode('string-escape') - self._outstream = outstream - - @contextmanager - def _guard_sigpipe(self): - try: - yield - except IOError as e: - # If the pipeline only wants to read so much, that's fine; otherwise, this error is probably - # legitimate. - if e.errno != errno.EPIPE: - raise e - - def execute(self, targets): - with self._guard_sigpipe(): - try: - for value in self.console_output(targets): - self._outstream.write(str(value)) - self._outstream.write(self._console_separator) - finally: - self._outstream.flush() - - def console_output(self, targets): - raise NotImplementedError('console_output must be implemented by subclasses of ConsoleTask') diff --git a/src/python/twitter/pants/tasks/dependees.py b/src/python/twitter/pants/tasks/dependees.py deleted file mode 100644 index 056ef6896..000000000 --- a/src/python/twitter/pants/tasks/dependees.py +++ /dev/null @@ -1,123 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. 
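Stripped of the Task machinery, ConsoleTask's SIGPIPE guard amounts to: write each result plus the separator, flush, and swallow only EPIPE so that piping into e.g. head exits cleanly. A sketch:

    import errno
    import sys

    def print_results(values, sep='\n'):
      try:
        for value in values:
          sys.stdout.write(str(value))
          sys.stdout.write(sep)
        sys.stdout.flush()
      except IOError as e:
        if e.errno != errno.EPIPE:  # a closed pipe is a normal early exit
          raise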
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from collections import defaultdict - -from twitter.common.collections import OrderedSet - -import twitter.pants.base.build_file_context - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.target import Target -from twitter.pants.base.build_file import BuildFile -from twitter.pants.targets.sources import SourceRoot - -from .console_task import ConsoleTask - -from . import TaskError - - -class ReverseDepmap(ConsoleTask): - """Outputs all targets whose dependencies include at least one of the input targets.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(ReverseDepmap, cls).setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag("transitive"), mkflag("transitive", negate=True), - dest="reverse_depmap_transitive", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] List transitive dependees.") - - option_group.add_option(mkflag("closed"), mkflag("closed", negate=True), - dest="reverse_depmap_closed", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Include the input targets in the output along with " - "the dependees.") - - option_group.add_option(mkflag('type'), dest='dependees_type', action='append', default=[], - help="Identifies target types to include. Multiple type inclusions " - "can be specified at once in a comma separated list or else by " - "using multiple instances of this flag.") - - def __init__(self, context): - ConsoleTask.__init__(self, context) - - self._transitive = context.options.reverse_depmap_transitive - self._closed = context.options.reverse_depmap_closed - self._dependees_type = context.options.dependees_type - - def console_output(self, _): - buildfiles = OrderedSet() - if self._dependees_type: - base_paths = OrderedSet() - for dependees_type in self._dependees_type: - try: - # Try to do a fully qualified import 1st for filtering on custom types. - from_list, module, type_name = dependees_type.rsplit('.', 2) - __import__('%s.%s' % (from_list, module), fromlist=[from_list]) - except (ImportError, ValueError): - # Fall back on pants provided target types. - if hasattr(twitter.pants.base.build_file_context, dependees_type): - type_name = getattr(twitter.pants.base.build_file_context, dependees_type) - else: - raise TaskError('Invalid type name: %s' % dependees_type) - # Find the SourceRoot for the given input type - base_paths.update(SourceRoot.roots(type_name)) - if not base_paths: - raise TaskError('No SourceRoot set for any target type in %s.' 
% self._dependees_type + - '\nPlease define a source root in BUILD file as:' + - '\n\tsource_root(\'\', %s)' % ', '.join(self._dependees_type)) - for base_path in base_paths: - buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path)) - else: - buildfiles = BuildFile.scan_buildfiles(get_buildroot()) - - dependees_by_target = defaultdict(set) - for buildfile in buildfiles: - for address in Target.get_all_addresses(buildfile): - for target in Target.get(address).resolve(): - # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a - # user vs. targets created by pants at runtime. - target = self.get_concrete_target(target) - if hasattr(target, 'dependencies'): - for dependencies in target.dependencies: - for dependency in dependencies.resolve(): - dependency = self.get_concrete_target(dependency) - dependees_by_target[dependency].add(target) - - roots = set(self.context.target_roots) - if self._closed: - for root in roots: - yield str(root.address) - - for dependant in self.get_dependants(dependees_by_target, roots): - yield str(dependant.address) - - def get_dependants(self, dependees_by_target, roots): - check = set(roots) - known_dependants = set() - while True: - dependants = set(known_dependants) - for target in check: - dependants.update(dependees_by_target[target]) - check = dependants - known_dependants - if not check or not self._transitive: - return dependants - set(roots) - known_dependants = dependants - - def get_concrete_target(self, target): - return target.derived_from if isinstance(target, Target) else target diff --git a/src/python/twitter/pants/tasks/dependencies.py b/src/python/twitter/pants/tasks/dependencies.py deleted file mode 100644 index 99688c508..000000000 --- a/src/python/twitter/pants/tasks/dependencies.py +++ /dev/null @@ -1,130 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
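The `get_dependants` method above is an iterative fixpoint: keep folding each frontier's direct dependees into the known set until nothing new appears (or stop after one round when not transitive). The same loop, lifted out of pants onto a plain dict-of-sets graph; the three-node graph below is made up for illustration:

```python
def dependees_closure(dependees_by_target, roots, transitive=True):
    """Return every target that (transitively) depends on any root."""
    check = set(roots)
    known = set()
    while True:
        dependants = set(known)
        for target in check:
            dependants.update(dependees_by_target.get(target, ()))
        check = dependants - known
        if not check or not transitive:
            return dependants - set(roots)
        known = dependants

# Direct dependees: b depends on a, c depends on b.
direct = {'a': {'b'}, 'b': {'c'}}
assert dependees_closure(direct, {'a'}) == {'b', 'c'}
assert dependees_closure(direct, {'a'}, transitive=False) == {'b'}
```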
-# ================================================================================================== - -from __future__ import print_function - -from twitter.pants.targets.jar_dependency import JarDependency -from twitter.pants.targets.python_requirement import PythonRequirement -from twitter.pants.tasks import TaskError -from .console_task import ConsoleTask - - -class Dependencies(ConsoleTask): - """Generates a textual list (using the target format) for the dependency set of a target.""" - - @staticmethod - def _is_jvm(target): - return target.is_jvm or target.is_jvm_app - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(Dependencies, cls).setup_parser(option_group, args, mkflag) - - cls.internal_only_flag = mkflag("internal-only") - cls.external_only_flag = mkflag("external-only") - - option_group.add_option(cls.internal_only_flag, - action="store_true", - dest="dependencies_is_internal_only", - default=False, - help='Specifies that only internal dependencies should' - ' be included in the graph output (no external jars).') - option_group.add_option(cls.external_only_flag, - action="store_true", - dest="dependencies_is_external_only", - default=False, - help='Specifies that only external dependencies should' - ' be included in the graph output (only external jars).') - - def __init__(self, context, **kwargs): - super(Dependencies, self).__init__(context, **kwargs) - - if (self.context.options.dependencies_is_internal_only and - self.context.options.dependencies_is_external_only): - - error_str = "At most one of %s or %s can be selected." % (self.internal_only_flag, - self.external_only_flag) - raise TaskError(error_str) - - self.is_internal_only = self.context.options.dependencies_is_internal_only - self.is_external_only = self.context.options.dependencies_is_external_only - - def console_output(self, unused_method_argument): - for target in self.context.target_roots: - if all(self._is_jvm(t) for t in target.resolve() if t.is_concrete): - for line in self._dependencies_list(target): - yield line - - elif target.is_python: - if self.is_internal_only: - raise TaskError('Unsupported option for Python target: is_internal_only: %s' % - self.is_internal_only) - if self.is_external_only: - raise TaskError('Unsupported option for Python target: is_external_only: %s' % - self.is_external_only) - for line in self._python_dependencies_list(target): - yield line - - def _dep_id(self, dep): - if isinstance(dep, JarDependency): - if dep.rev: - return False, '%s:%s:%s' % (dep.org, dep.name, dep.rev) - else: - return True, '%s:%s' % (dep.org, dep.name) - else: - return True, str(dep.address) - - def _python_dependencies_list(self, target): - if isinstance(target, PythonRequirement): - yield str(target._requirement) - else: - yield str(target.address) - - if hasattr(target, 'dependencies'): - for dep in target.dependencies: - for d in dep.resolve(): - for dep in self._python_dependencies_list(d): - yield dep - - def _dependencies_list(self, target): - def print_deps(visited, dep): - internal, address = self._dep_id(dep) - - if not dep in visited: - if internal and (not self.is_external_only or self.is_internal_only): - yield address - - visited.add(dep) - - if self._is_jvm(dep): - for internal_dependency in dep.internal_dependencies: - for line in print_deps(visited, internal_dependency): - yield line - - if not self.is_internal_only: - if self._is_jvm(dep): - for jar_dep in dep.jar_dependencies: - internal, address = self._dep_id(jar_dep) - if not internal: - if jar_dep not 
in visited: - if self.is_external_only or not self.is_internal_only: - yield address - visited.add(jar_dep) - - visited = set() - for t in target.resolve(): - for dep in print_deps(visited, t): - yield dep diff --git a/src/python/twitter/pants/tasks/depmap.py b/src/python/twitter/pants/tasks/depmap.py deleted file mode 100644 index e9cfc6efb..000000000 --- a/src/python/twitter/pants/tasks/depmap.py +++ /dev/null @@ -1,199 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -from twitter.pants.tasks.console_task import ConsoleTask -from twitter.pants.tasks import TaskError - -from twitter.pants.targets.jar_dependency import JarDependency - - -class Depmap(ConsoleTask): - """Generates either a textual dependency tree or a graphviz digraph dot file for the dependency - set of a target. - """ - - @staticmethod - def _is_jvm(dep): - return dep.is_jvm or dep.is_jvm_app - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(Depmap, cls).setup_parser(option_group, args, mkflag) - - cls.internal_only_flag = mkflag("internal-only") - cls.external_only_flag = mkflag("external-only") - option_group.add_option(cls.internal_only_flag, - action="store_true", - dest="depmap_is_internal_only", - default=False, - help='Specifies that only internal dependencies should' - ' be included in the graph output (no external jars).') - option_group.add_option(cls.external_only_flag, - action="store_true", - dest="depmap_is_external_only", - default=False, - help='Specifies that only external dependencies should' - ' be included in the graph output (only external jars).') - option_group.add_option(mkflag("minimal"), - action="store_true", - dest="depmap_is_minimal", - default=False, - help='For a textual dependency tree, only prints a dependency the 1st' - ' time it is encountered. For graph output this does nothing.') - option_group.add_option(mkflag("separator"), - dest="depmap_separator", - default="-", - help='Specifies the separator to use between the org/name/rev' - ' components of a dependency\'s fully qualified name.') - option_group.add_option(mkflag("graph"), - action="store_true", - dest="depmap_is_graph", - default=False, - help='Specifies the internal dependency graph should be' - ' output in the dot digraph format') - - def __init__(self, context): - ConsoleTask.__init__(self, context) - - if (self.context.options.depmap_is_internal_only - and self.context.options.depmap_is_external_only): - cls = self.__class__ - error_str = "At most one of %s or %s can be selected." 
% (cls.internal_only_flag, - cls.external_only_flag) - raise TaskError(error_str) - - self.is_internal_only = self.context.options.depmap_is_internal_only - self.is_external_only = self.context.options.depmap_is_external_only - self.is_minimal = self.context.options.depmap_is_minimal - self.is_graph = self.context.options.depmap_is_graph - self.separator = self.context.options.depmap_separator - - def console_output(self, targets): - if len(self.context.target_roots) == 0: - raise TaskError("One or more target addresses are required.") - - for target in self.context.target_roots: - if all(self._is_jvm(t) for t in target.resolve() if t.is_concrete): - if self.is_graph: - for line in self._output_digraph(target): - yield line - else: - for line in self._output_dependency_tree(target): - yield line - elif target.is_python: - raise TaskError('Unsupported for Python targets') - else: - raise TaskError('Unsupported for target %s' % target) - - def _dep_id(self, dependency): - """Returns a tuple of dependency_id , is_internal_dep.""" - - params = dict(sep=self.separator) - if isinstance(dependency, JarDependency): - params.update(org=dependency.org, name=dependency.name, rev=dependency.rev) - else: - params.update(org='internal', name=dependency.id) - - if params.get('rev'): - return "%(org)s%(sep)s%(name)s%(sep)s%(rev)s" % params, False - else: - return "%(org)s%(sep)s%(name)s" % params, True - - def _output_dependency_tree(self, target): - def output_dep(dep, indent): - return "%s%s" % (indent * " ", dep) - - def output_deps(dep, indent=0, outputted=set()): - dep_id, _ = self._dep_id(dep) - if dep_id in outputted: - return [output_dep("*%s" % dep_id, indent)] if not self.is_minimal else [] - else: - output = [] - if not self.is_external_only: - output += [output_dep(dep_id, indent)] - outputted.add(dep_id) - indent += 1 - - if self._is_jvm(dep): - for internal_dep in dep.internal_dependencies: - output += output_deps(internal_dep, indent, outputted) - - if not self.is_internal_only: - if self._is_jvm(dep): - for jar_dep in dep.jar_dependencies: - jar_dep_id, internal = self._dep_id(jar_dep) - if not internal: - if jar_dep_id not in outputted or (not self.is_minimal - and not self.is_external_only): - output += [output_dep(jar_dep_id, indent)] - outputted.add(jar_dep_id) - return output - - return [dependency for t in target.resolve() for dependency in output_deps(t)] - - def _output_digraph(self, target): - color_by_type = {} - - def output_candidate(internal): - return ((self.is_internal_only and internal) - or (self.is_external_only and not internal) - or (not self.is_internal_only and not self.is_external_only)) - - def output_dep(dep): - dep_id, internal = self._dep_id(dep) - if internal: - fmt = ' "%(id)s" [style=filled, fillcolor="%(color)d"];' - else: - fmt = ' "%(id)s" [style=filled, fillcolor="%(color)d", shape=ellipse];' - if not color_by_type.has_key(type(dep)): - color_by_type[type(dep)] = len(color_by_type.keys()) + 1 - return fmt % {'id': dep_id, 'color': color_by_type[type(dep)]} - - def output_deps(outputted, dep, parent=None): - output = [] - - if dep not in outputted: - outputted.add(dep) - output.append(output_dep(dep)) - if parent: - output.append(' "%s" -> "%s";' % (self._dep_id(parent)[0], self._dep_id(dep)[0])) - - for dependency in dep.resolve(): - if self._is_jvm(dependency): - for internal_dependency in dependency.internal_dependencies: - output += output_deps(outputted, internal_dependency, dependency) - - for jar in (dependency.jar_dependencies if 
self._is_jvm(dependency) else [dependency]): - jar_id, internal = self._dep_id(jar) - if output_candidate(internal): - if jar not in outputted: - output += [output_dep(jar)] - outputted.add(jar) - - target_id, _ = self._dep_id(target) - dep_id, _ = self._dep_id(dependency) - left_id = target_id if self.is_external_only else dep_id - if (left_id, jar_id) not in outputted: - styled = internal and not self.is_internal_only - output += [' "%s" -> "%s"%s;' % (left_id, jar_id, - ' [style="dashed"]' if styled else '')] - outputted.add((left_id, jar_id)) - return output - header = ['digraph "%s" {' % target.id] - graph_attr = [' node [shape=rectangle, colorscheme=set312;];', ' rankdir=LR;'] - return header + graph_attr + output_deps(set(), target) + ['}'] diff --git a/src/python/twitter/pants/tasks/detect_duplicates.py b/src/python/twitter/pants/tasks/detect_duplicates.py deleted file mode 100644 index 87afee0f1..000000000 --- a/src/python/twitter/pants/tasks/detect_duplicates.py +++ /dev/null @@ -1,95 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from collections import defaultdict -from contextlib import closing -from zipfile import ZipFile - -from twitter.pants.java.jar import Manifest - -from .jvm_binary_task import JvmBinaryTask - -from . import TaskError - - -class DuplicateDetector(JvmBinaryTask): - """ Detect classes and resources with the same qualified name on the classpath. 
""" - - @staticmethod - def _isdir(name): - return name[-1] == '/' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - JvmBinaryTask.setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("fail-fast"), mkflag("fail-fast", negate=True), - dest="fail_fast", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Fail fast if duplicate classes/resources are found.") - - def __init__(self, context): - JvmBinaryTask.__init__(self, context) - self.require_jar_dependencies() - self.fail_fast = context.options.fail_fast - - def execute(self, targets): - for binary in filter(self.is_binary, targets): - self.detect_duplicates_for_target(binary) - - def detect_duplicates_for_target(self, binary_target): - list_path = [] - for basedir, externaljar in self.list_jar_dependencies(binary_target): - list_path.append(os.path.join(basedir, externaljar)) - self._is_conflicts(list_path, binary_target) - - def _is_conflicts(self, jar_paths, binary_target): - artifacts_by_file_name = defaultdict(set) - for jarpath in jar_paths: - self.context.log.debug(' scanning %s' % jarpath) - with closing(ZipFile(jarpath)) as zip: - for file_name in zip.namelist(): - jar_name = os.path.basename(jarpath) - if (not self._isdir(file_name)) and Manifest.PATH != file_name: - artifacts_by_file_name[file_name].add(jar_name) - zip.close() - - conflicts_by_artifacts = self._get_conflicts_by_artifacts(artifacts_by_file_name) - - if len(conflicts_by_artifacts) > 0: - self._log_conflicts(conflicts_by_artifacts, binary_target) - if self.fail_fast: - raise TaskError('Failing build for target %s.' % binary_target) - return True - return False - - def _get_conflicts_by_artifacts(self, artifacts_by_file_name): - conflicts_by_artifacts = defaultdict(set) - for (file_name, artifacts) in artifacts_by_file_name.items(): - if (not artifacts) or len(artifacts) < 2: continue - conflicts_by_artifacts[tuple(sorted(artifacts))].add(file_name) - return conflicts_by_artifacts - - def _log_conflicts(self, conflicts_by_artifacts, target): - self.context.log.warn('\n ===== For target %s:' % target) - for artifacts, duplicate_files in conflicts_by_artifacts.items(): - if len(artifacts) < 2: continue - self.context.log.warn( - 'Duplicate classes and/or resources detected in artifacts: %s' % str(artifacts)) - for duplicate_file in list(duplicate_files)[:10]: - self.context.log.warn(' %s' % duplicate_file) - diff --git a/src/python/twitter/pants/tasks/eclipse_gen.py b/src/python/twitter/pants/tasks/eclipse_gen.py deleted file mode 100644 index ba1510763..000000000 --- a/src/python/twitter/pants/tasks/eclipse_gen.py +++ /dev/null @@ -1,190 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os -import pkgutil - -from collections import defaultdict - -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_delete, safe_mkdir, safe_open - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.generator import TemplateData, Generator -from twitter.pants.tasks.ide_gen import IdeGen - - -_TEMPLATE_BASEDIR = os.path.join('templates', 'eclipse') - - -_VERSIONS = { - '3.5': '3.7', # 3.5-3.7 are .project/.classpath compatible - '3.6': '3.7', - '3.7': '3.7', -} - - -_SETTINGS = ( - 'org.eclipse.core.resources.prefs', - 'org.eclipse.jdt.ui.prefs', -) - - -class EclipseGen(IdeGen): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - IdeGen.setup_parser(option_group, args, mkflag) - - supported_versions = sorted(list(_VERSIONS.keys())) - option_group.add_option(mkflag("eclipse-version"), dest="eclipse_gen_version", - default='3.6', type="choice", choices=supported_versions, - help="[%%default] The Eclipse version the project " - "configuration should be generated for; can be one of: " - "%s" % supported_versions) - - def __init__(self, context): - IdeGen.__init__(self, context) - - version = _VERSIONS[context.options.eclipse_gen_version] - self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-%s.mustache' % version) - self.classpath_template = os.path.join(_TEMPLATE_BASEDIR, 'classpath-%s.mustache' % version) - self.apt_template = os.path.join(_TEMPLATE_BASEDIR, 'factorypath-%s.mustache' % version) - self.pydev_template = os.path.join(_TEMPLATE_BASEDIR, 'pydevproject-%s.mustache' % version) - self.debug_template = os.path.join(_TEMPLATE_BASEDIR, 'debug-launcher-%s.mustache' % version) - self.coreprefs_template = os.path.join(_TEMPLATE_BASEDIR, - 'org.eclipse.jdt.core.prefs-%s.mustache' % version) - - self.project_filename = os.path.join(self.cwd, '.project') - self.classpath_filename = os.path.join(self.cwd, '.classpath') - self.apt_filename = os.path.join(self.cwd, '.factorypath') - self.pydev_filename = os.path.join(self.cwd, '.pydevproject') - self.coreprefs_filename = os.path.join(self.cwd, '.settings', 'org.eclipse.jdt.core.prefs') - - def generate_project(self, project): - def linked_folder_id(source_set): - return source_set.source_base.replace(os.path.sep, '.') - - def base_path(source_set): - return os.path.join(source_set.root_dir, source_set.source_base) - - def create_source_base_template(source_set): - source_base = base_path(source_set) - return source_base, TemplateData( - id=linked_folder_id(source_set), - path=source_base - ) - - source_bases = dict(map(create_source_base_template, project.sources)) - if project.has_python: - source_bases.update(map(create_source_base_template, project.py_sources)) - source_bases.update(map(create_source_base_template, project.py_libs)) - - def create_source_template(base_id, includes=None, excludes=None): - return TemplateData( - base=base_id, - includes='|'.join(OrderedSet(includes)) if includes else None, - excludes='|'.join(OrderedSet(excludes)) if excludes else None, - ) - - def create_sourcepath(base_id, sources): - def normalize_path_pattern(path): - return '%s/' % path if not path.endswith('/') else path - - includes = [normalize_path_pattern(src_set.path) for src_set in sources if src_set.path] - excludes = [] - for source_set in sources: - excludes.extend(normalize_path_pattern(exclude) for exclude in source_set.excludes) 
- - return create_source_template(base_id, includes, excludes) - - pythonpaths = [] - if project.has_python: - for source_set in project.py_sources: - pythonpaths.append(create_source_template(linked_folder_id(source_set))) - for source_set in project.py_libs: - lib_path = source_set.path if source_set.path.endswith('.egg') else '%s/' % source_set.path - pythonpaths.append(create_source_template(linked_folder_id(source_set), - includes=[lib_path])) - - configured_project = TemplateData( - name=self.project_name, - java=TemplateData( - jdk=self.java_jdk, - language_level=('1.%d' % self.java_language_level) - ), - python=project.has_python, - scala=project.has_scala and not project.skip_scala, - source_bases=source_bases.values(), - pythonpaths=pythonpaths, - debug_port=project.debug_port, - ) - - outdir = os.path.abspath(os.path.join(self.work_dir, 'bin')) - safe_mkdir(outdir) - - source_sets = defaultdict(OrderedSet) # base_id -> source_set - for source_set in project.sources: - source_sets[linked_folder_id(source_set)].add(source_set) - sourcepaths = [create_sourcepath(base_id, sources) for base_id, sources in source_sets.items()] - - libs = [] - - def add_jarlibs(classpath_entries): - for classpath_entry in classpath_entries: - # TODO(John Sirois): Plumb javadoc jars - libs.append((classpath_entry.jar, classpath_entry.source_jar)) - add_jarlibs(project.internal_jars) - add_jarlibs(project.external_jars) - - configured_classpath = TemplateData( - sourcepaths=sourcepaths, - has_tests=project.has_tests, - libs=libs, - scala=project.has_scala, - - # Eclipse insists the outdir be a relative path unlike other paths - outdir=os.path.relpath(outdir, get_buildroot()), - ) - - def apply_template(output_path, template_relpath, **template_data): - with safe_open(output_path, 'w') as output: - Generator(pkgutil.get_data(__name__, template_relpath), **template_data).write(output) - - apply_template(self.project_filename, self.project_template, project=configured_project) - apply_template(self.classpath_filename, self.classpath_template, classpath=configured_classpath) - apply_template(os.path.join(self.work_dir, 'Debug on port %d.launch' % project.debug_port), - self.debug_template, project=configured_project) - apply_template(self.coreprefs_filename, self.coreprefs_template, project=configured_project) - - for resource in _SETTINGS: - with safe_open(os.path.join(self.cwd, '.settings', resource), 'w') as prefs: - prefs.write(pkgutil.get_data(__name__, os.path.join('files', 'eclipse', resource))) - - factorypath = TemplateData( - project_name=self.project_name, - - # The easiest way to make sure eclipse sees all annotation processors is to put all libs on - # the apt factorypath - this does not seem to hurt eclipse performance in any noticeable way. - jarpaths=libs - ) - apply_template(self.apt_filename, self.apt_template, factorypath=factorypath) - - if project.has_python: - apply_template(self.pydev_filename, self.pydev_template, project=configured_project) - else: - safe_delete(self.pydev_filename) - - print('\nGenerated project at %s%s' % (self.work_dir, os.sep)) diff --git a/src/python/twitter/pants/tasks/filedeps.py b/src/python/twitter/pants/tasks/filedeps.py deleted file mode 100644 index 355492323..000000000 --- a/src/python/twitter/pants/tasks/filedeps.py +++ /dev/null @@ -1,36 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. 
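`EclipseGen.generate_project` above ends by pushing `TemplateData` through pants' mustache `Generator` for each output file. Both classes are pants internals; a rough stand-in using only `string.Template` shows the shape of `apply_template` — the inline template text and output name here are invented, not the real `.project` format:

```python
import os
import tempfile
from string import Template

def apply_template(output_path, template_text, **data):
    """Render template_text with data and write it to output_path."""
    with open(output_path, 'w') as output:
        output.write(Template(template_text).safe_substitute(**data))

project_template = '<projectDescription><name>$name</name></projectDescription>\n'
apply_template(os.path.join(tempfile.gettempdir(), 'example.project'),
               project_template, name='example-project')
```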
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import itertools -import os - -from twitter.pants.targets.jvm_binary import JvmApp -from twitter.pants.targets.with_sources import TargetWithSources - -from .console_task import ConsoleTask - -__author__ = 'Dave Buchfuhrer' - - -class FileDeps(ConsoleTask): - def console_output(self, targets): - files = set() - for target in targets: - if isinstance(target, TargetWithSources): - files.update(target.expand_files(recursive=False)) - if isinstance(target, JvmApp): - files.update(itertools.chain(*[bundle.filemap.keys() for bundle in target.bundles])) - return files diff --git a/src/python/twitter/pants/tasks/filemap.py b/src/python/twitter/pants/tasks/filemap.py deleted file mode 100644 index 2c695da46..000000000 --- a/src/python/twitter/pants/tasks/filemap.py +++ /dev/null @@ -1,47 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
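`FileDeps` above is the whole pattern in four lines: union every target's expanded source files with the bundle file maps of any `JvmApp`. A self-contained approximation with namedtuple stand-ins for the pants target types (field names are assumptions):

```python
import itertools
from collections import namedtuple

Bundle = namedtuple('Bundle', 'filemap')
Target = namedtuple('Target', 'sources bundles')

def file_deps(targets):
    """Union each target's sources with its bundles' filemap keys."""
    files = set()
    for target in targets:
        files.update(target.sources)
        files.update(itertools.chain.from_iterable(
            b.filemap.keys() for b in target.bundles))
    return files

t = Target(sources=['a/Foo.java'],
           bundles=[Bundle(filemap={'dist/app.cfg': 'src/app.cfg'})])
assert file_deps([t]) == {'a/Foo.java', 'dist/app.cfg'}
```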
-# ================================================================================================== - -import os - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base.target import Target - -from .console_task import ConsoleTask - - -class Filemap(ConsoleTask): - """Outputs a mapping from source file to the target that owns the source file.""" - - def console_output(self, _): - visited = set() - for target in self._find_targets(): - if target not in visited: - visited.add(target) - if hasattr(target, 'sources') and target.sources is not None: - for sourcefile in target.sources: - path = os.path.join(target.target_base, sourcefile) - yield '%s %s' % (path, target.address) - - def _find_targets(self): - if len(self.context.target_roots) > 0: - for target in self.context.target_roots: - yield target - else: - for buildfile in BuildFile.scan_buildfiles(get_buildroot()): - target_addresses = Target.get_all_addresses(buildfile) - for target_address in target_addresses: - yield Target.get(target_address) diff --git a/src/python/twitter/pants/tasks/filter.py b/src/python/twitter/pants/tasks/filter.py deleted file mode 100644 index 0d3bd99fc..000000000 --- a/src/python/twitter/pants/tasks/filter.py +++ /dev/null @@ -1,143 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
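`Filemap` above emits one `source-path target-address` line per owned source, joining each source onto its target's base directory and skipping targets it has already visited. The same generator over plain tuples; the example target is invented:

```python
import os

def filemap(targets):
    """Yield 'source-path target-address' lines, one per owned source."""
    seen = set()
    for target_base, address, sources in targets:
        if address not in seen:
            seen.add(address)
            for source in sources or ():
                yield '%s %s' % (os.path.join(target_base, source), address)

for line in filemap([('src/java', 'src/java/com/foo:lib', ['com/foo/Foo.java'])]):
    print(line)
```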
-# ================================================================================================== - -import operator -import re -import sys - -import twitter.pants.base.build_file_aliases - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.address import Address -from twitter.pants.base.target import Target - -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.console_task import ConsoleTask - - -_identity = lambda x: x - - -def _extract_modifier(value): - if value.startswith('+'): - return _identity, value[1:] - elif value.startswith('-'): - return operator.not_, value[1:] - else: - return _identity, value - - -def _create_filters(list_option, predicate): - for value in list_option: - modifier, value = _extract_modifier(value) - predicates = map(predicate, value.split(',')) - def filter(target): - return modifier(any(map(lambda predicate: predicate(target), predicates))) - yield filter - - -def _get_target(address): - try: - address = Address.parse(get_buildroot(), address, is_relative=False) - except IOError as e: - raise TaskError('Failed to parse address: %s: %s' % (address, e)) - match = Target.get(address) - if not match: - raise TaskError('Invalid target address: %s' % address) - return match - - -class Filter(ConsoleTask): - """Filters targets based on various criteria.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(Filter, cls).setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag('type'), dest='filter_type', action='append', default=[], - help="Identifies target types to include (optional '+' prefix) or " - "exclude ('-' prefix). Multiple type inclusions or exclusions " - "can be specified at once in a comma separated list or else by " - "using multiple instances of this flag.") - - option_group.add_option(mkflag('target'), dest='filter_target', action='append', default=[], - help="Identifies specific targets to include (optional '+' prefix) or " - "exclude ('-' prefix). Multiple target inclusions or exclusions " - "can be specified at once in a comma separated list or else by " - "using multiple instances of this flag.") - - option_group.add_option(mkflag('ancestor'), dest='filter_ancestor', action='append', default=[], - help="Identifies ancestor targets (containing targets) that make a " - "select child (contained) targets to include " - "(optional '+' prefix) or exclude ('-' prefix). Multiple " - "ancestor inclusions or exclusions can be specified at once in " - "a comma separated list or else by using multiple instances of " - "this flag.") - - option_group.add_option(mkflag('regex'), dest='filter_regex', action='append', default=[], - help="Identifies regexes of target addresses to include " - "(optional '+' prefix) or exclude ('-' prefix). Multiple target " - "inclusions or exclusions can be specified at once in a comma " - "separated list or else by using multiple instances of this flag.") - - def __init__(self, context, outstream=sys.stdout): - super(Filter, self).__init__(context, outstream) - - self._filters = [] - - def filter_for_address(address): - match = _get_target(address) - return lambda target: target == match - self._filters.extend(_create_filters(context.options.filter_target, filter_for_address)) - - def filter_for_type(name): - try: - # Try to do a fully qualified import 1st for filtering on custom types. 
- from_list, module, type_name = name.rsplit('.', 2) - module = __import__('%s.%s' % (from_list, module), fromlist=[from_list]) - target_type = getattr(module, type_name) - except (ImportError, ValueError): - # Fall back on pants provided target types. - if not hasattr(twitter.pants.base.build_file_aliases, name): - raise TaskError('Invalid type name: %s' % name) - target_type = getattr(twitter.pants.base.build_file_aliases, name) - if not issubclass(target_type, Target): - raise TaskError('Not a Target type: %s' % name) - return lambda target: isinstance(target, target_type) - self._filters.extend(_create_filters(context.options.filter_type, filter_for_type)) - - def filter_for_ancestor(address): - ancestor = _get_target(address) - children = set() - ancestor.walk(children.add) - return lambda target: target in children - self._filters.extend(_create_filters(context.options.filter_ancestor, filter_for_ancestor)) - - def filter_for_regex(regex): - parser = re.compile(regex) - return lambda target: parser.search(str(target.address)) - self._filters.extend(_create_filters(context.options.filter_regex, filter_for_regex)) - - def console_output(self, _): - filtered = set() - for target in self.context.target_roots: - if target not in filtered: - filtered.add(target) - for filter in self._filters: - if not filter(target): - break - else: - yield str(target.address) diff --git a/src/python/twitter/pants/tasks/ide_gen.py b/src/python/twitter/pants/tasks/ide_gen.py deleted file mode 100644 index d17ae4701..000000000 --- a/src/python/twitter/pants/tasks/ide_gen.py +++ /dev/null @@ -1,537 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import shutil - -from collections import defaultdict - -from twitter.common.collections.orderedset import OrderedSet -from twitter.common.dirutil import safe_mkdir - -from twitter.pants import binary_util -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.target import Target -from twitter.pants.goal.phase import Phase -from twitter.pants.targets.jvm_binary import JvmBinary -from twitter.pants.tasks.checkstyle import Checkstyle - -from .jvm_binary_task import JvmBinaryTask - -from . import TaskError - - -# We use custom checks for scala and java targets here for 2 reasons: -# 1.) jvm_binary could have either a scala or java source file attached so we can't do a pure -# target type test -# 2.) 
the target may be under development in which case it may not have sources yet - its pretty -# common to write a BUILD and ./pants goal idea the target inside to start development at which -# point there are no source files yet - and the developer intents to add them using the ide. - -def is_scala(target): - return target.has_sources('.scala') or target.is_scala - - -def is_java(target): - return target.has_sources('.java') or target.is_java - - -class IdeGen(JvmBinaryTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("project-name"), dest="ide_gen_project_name", default="project", - help="[%default] Specifies the name to use for the generated project.") - - gen_dir = mkflag("project-dir") - option_group.add_option(gen_dir, dest="ide_gen_project_dir", - help="[%default] Specifies the directory to output the generated " - "project files to.") - option_group.add_option(mkflag("project-cwd"), dest="ide_gen_project_cwd", - help="[%%default] Specifies the directory the generated project should " - "use as the cwd for processes it launches. Note that specifying " - "this trumps %s and not all project related files will be stored " - "there." % gen_dir) - - option_group.add_option(mkflag("intransitive"), default=False, - action="store_true", dest='ide_gen_intransitive', - help="Limits the sources included in the generated project to just " - "those owned by the targets specified on the command line") - - option_group.add_option(mkflag("python"), mkflag("python", negate=True), default=False, - action="callback", callback=mkflag.set_bool, dest='ide_gen_python', - help="[%default] Adds python support to the generated project " - "configuration.") - - option_group.add_option(mkflag("java"), mkflag("java", negate=True), default=True, - action="callback", callback=mkflag.set_bool, dest='ide_gen_java', - help="[%default] Includes java sources in the project; otherwise " - "compiles them and adds them to the project classpath.") - java_language_level = mkflag("java-language-level") - # TODO(John Sirois): Advance the default to 7 when 8 is released. - option_group.add_option(java_language_level, default=6, - dest="ide_gen_java_language_level", type="int", - help="[%default] Sets the java language and jdk used to compile the " - "project's java sources.") - option_group.add_option(mkflag("java-jdk-name"), default=None, - dest="ide_gen_java_jdk", - help="Sets the jdk used to compile the project's java sources. If " - "unset the default jdk name for the " - "%s is used." 
% java_language_level) - - option_group.add_option(mkflag("scala"), mkflag("scala", negate=True), default=True, - action="callback", callback=mkflag.set_bool, dest='ide_gen_scala', - help="[%default] Includes scala sources in the project; otherwise " - "compiles them and adds them to the project classpath.") - - def __init__(self, context): - super(IdeGen, self).__init__(context) - - self.project_name = context.options.ide_gen_project_name - self.python = context.options.ide_gen_python - self.skip_java = not context.options.ide_gen_java - self.skip_scala = not context.options.ide_gen_scala - - self.java_language_level = context.options.ide_gen_java_language_level - if context.options.ide_gen_java_jdk: - self.java_jdk = context.options.ide_gen_java_jdk - else: - self.java_jdk = '1.%d' % self.java_language_level - - self.work_dir = os.path.abspath( - context.options.ide_gen_project_dir - or os.path.join( - context.config.get('ide', 'workdir'), self.__class__.__name__, self.project_name - ) - ) - self.cwd = ( - os.path.abspath(context.options.ide_gen_project_cwd) if context.options.ide_gen_project_cwd - else self.work_dir - ) - - self.intransitive = context.options.ide_gen_intransitive - - self.checkstyle_suppression_files = context.config.getdefault( - 'checkstyle_suppression_files', type=list, default=[] - ) - self.debug_port = context.config.getint('ide', 'debug_port') - - self.checkstyle_bootstrap_key = 'checkstyle' - checkstyle = context.config.getlist('checkstyle', 'bootstrap-tools', - default=[':twitter-checkstyle']) - self._jvm_tool_bootstrapper.register_jvm_tool(self.checkstyle_bootstrap_key, checkstyle) - - self.scalac_bootstrap_key = None - if not self.skip_scala: - self.scalac_bootstrap_key = 'scalac' - scalac = context.config.getlist('scala-compile', 'compile-bootstrap-tools', - default=[':scala-compile-2.9.3']) - self._jvm_tool_bootstrapper.register_jvm_tool(self.scalac_bootstrap_key, scalac) - - targets, self._project = self.configure_project( - context.targets(), - self.checkstyle_suppression_files, - self.debug_port) - - self.configure_compile_context(targets) - - if self.python: - self.context.products.require('python') - if not self.skip_java: - self.context.products.require('java') - if not self.skip_scala: - self.context.products.require('scala') - - self.context.products.require('jars') - self.context.products.require('source_jars') - - def configure_project(self, targets, checkstyle_suppression_files, debug_port): - - jvm_targets = Target.extract_jvm_targets(targets) - if self.intransitive: - jvm_targets = set(self.context.target_roots).intersection(jvm_targets) - project = Project(self.project_name, - self.python, - self.skip_java, - self.skip_scala, - get_buildroot(), - checkstyle_suppression_files, - debug_port, - jvm_targets, - not self.intransitive, - self.context.new_workunit) - - if self.python: - python_source_paths = self.context.config.getlist('ide', 'python_source_paths', default=[]) - python_test_paths = self.context.config.getlist('ide', 'python_test_paths', default=[]) - python_lib_paths = self.context.config.getlist('ide', 'python_lib_paths', default=[]) - project.configure_python(python_source_paths, python_test_paths, python_lib_paths) - - extra_source_paths = self.context.config.getlist('ide', 'extra_jvm_source_paths', default=[]) - extra_test_paths = self.context.config.getlist('ide', 'extra_jvm_test_paths', default=[]) - all_targets = project.configure_jvm(extra_source_paths, extra_test_paths) - return all_targets, project - - def 
configure_compile_context(self, targets): - """ - Trims the context's target set to just those targets needed as jars on the IDE classpath. - All other targets only contribute their external jar dependencies and excludes to the - classpath definition. - """ - def is_cp(target): - return ( - target.is_codegen or - - # Some IDEs need annotation processors pre-compiled, others are smart enough to detect and - # proceed in 2 compile rounds - target.is_apt or - - (self.skip_java and is_java(target)) or - (self.skip_scala and is_scala(target)) or - (self.intransitive and target not in self.context.target_roots) - ) - - jars = OrderedSet() - excludes = OrderedSet() - compiles = OrderedSet() - def prune(target): - if target.is_jvm: - if target.excludes: - excludes.update(target.excludes) - jars.update(jar for jar in target.jar_dependencies if jar.rev) - if is_cp(target): - target.walk(compiles.add) - - for target in targets: - target.walk(prune) - - self.context.replace_targets(compiles) - - self.binary = self.context.add_new_target(self.work_dir, - JvmBinary, - name='%s-external-jars' % self.project_name, - dependencies=jars, - excludes=excludes, - configurations=('default', 'sources', 'javadoc')) - self.require_jar_dependencies(predicate=lambda t: t == self.binary) - - self.context.log.debug('pruned to cp:\n\t%s' % '\n\t'.join( - str(t) for t in self.context.targets()) - ) - - def map_internal_jars(self, targets): - internal_jar_dir = os.path.join(self.work_dir, 'internal-libs') - safe_mkdir(internal_jar_dir, clean=True) - - internal_source_jar_dir = os.path.join(self.work_dir, 'internal-libsources') - safe_mkdir(internal_source_jar_dir, clean=True) - - internal_jars = self.context.products.get('jars') - internal_source_jars = self.context.products.get('source_jars') - for target in targets: - mappings = internal_jars.get(target) - if mappings: - for base, jars in mappings.items(): - if len(jars) != 1: - raise TaskError('Unexpected mapping, multiple jars for %s: %s' % (target, jars)) - - jar = jars[0] - cp_jar = os.path.join(internal_jar_dir, jar) - shutil.copy(os.path.join(base, jar), cp_jar) - - cp_source_jar = None - mappings = internal_source_jars.get(target) - if mappings: - for base, jars in mappings.items(): - if len(jars) != 1: - raise TaskError( - 'Unexpected mapping, multiple source jars for %s: %s' % (target, jars) - ) - jar = jars[0] - cp_source_jar = os.path.join(internal_source_jar_dir, jar) - shutil.copy(os.path.join(base, jar), cp_source_jar) - - self._project.internal_jars.add(ClasspathEntry(cp_jar, source_jar=cp_source_jar)) - - def map_external_jars(self): - external_jar_dir = os.path.join(self.work_dir, 'external-libs') - safe_mkdir(external_jar_dir, clean=True) - - external_source_jar_dir = os.path.join(self.work_dir, 'external-libsources') - safe_mkdir(external_source_jar_dir, clean=True) - - external_javadoc_jar_dir = os.path.join(self.work_dir, 'external-libjavadoc') - safe_mkdir(external_javadoc_jar_dir, clean=True) - - confs = ['default', 'sources', 'javadoc'] - for entry in self.list_jar_dependencies(self.binary, confs=confs): - jar = entry.get('default') - if jar: - cp_jar = os.path.join(external_jar_dir, os.path.basename(jar)) - shutil.copy(jar, cp_jar) - - cp_source_jar = None - source_jar = entry.get('sources') - if source_jar: - cp_source_jar = os.path.join(external_source_jar_dir, os.path.basename(source_jar)) - shutil.copy(source_jar, cp_source_jar) - - cp_javadoc_jar = None - javadoc_jar = entry.get('javadoc') - if javadoc_jar: - cp_javadoc_jar = 
os.path.join(external_javadoc_jar_dir, os.path.basename(javadoc_jar)) - shutil.copy(javadoc_jar, cp_javadoc_jar) - - self._project.external_jars.add(ClasspathEntry(cp_jar, - source_jar=cp_source_jar, - javadoc_jar=cp_javadoc_jar)) - - def execute(self, targets): - """Stages IDE project artifacts to a project directory and generates IDE configuration files.""" - checkstyle_enabled = len(Phase.goals_of_type(Checkstyle)) > 0 - if checkstyle_enabled: - checkstyle_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self.checkstyle_bootstrap_key) - else: - checkstyle_classpath = [] - - if self.scalac_bootstrap_key: - scalac_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self.scalac_bootstrap_key) - else: - scalac_classpath = [] - - self._project.set_tool_classpaths(checkstyle_classpath, scalac_classpath) - - self.map_internal_jars(targets) - self.map_external_jars() - - idefile = self.generate_project(self._project) - if idefile: - binary_util.ui_open(idefile) - - def generate_project(self, project): - raise NotImplementedError('Subclasses must generate a project for an ide') - - -class ClasspathEntry(object): - """Represents a classpath entry that may have sources available.""" - def __init__(self, jar, source_jar=None, javadoc_jar=None): - self.jar = jar - self.source_jar = source_jar - self.javadoc_jar = javadoc_jar - - -class SourceSet(object): - """Models a set of source files.""" - - TEST_BASES = set() - - def __init__(self, root_dir, source_base, path, is_test): - """ - root_dir: the full path to the root directory of the project containing this source set - source_base: the relative path from root_dir to the base of this source set - path: the relative path from the source_base to the base of the sources in this set - is_test: true iff the sources contained by this set implement test cases - """ - - self.root_dir = root_dir - self.source_base = source_base - self.path = path - self.is_test = is_test - self._excludes = [] - if is_test: - SourceSet.TEST_BASES.add(self.source_base) - - @property - def excludes(self): - """Paths relative to self.path that are excluded from this source set.""" - - return self._excludes - - -class Project(object): - """Models a generic IDE project that is comprised of a set of BUILD targets.""" - - @staticmethod - def extract_resource_extensions(resources): - """Returns the set of unique extensions (including the .) from the given resource files.""" - - if resources: - for resource in resources: - _, ext = os.path.splitext(resource) - yield ext - - def __init__(self, name, has_python, skip_java, skip_scala, root_dir, - checkstyle_suppression_files, debug_port, targets, transitive, workunit_factory): - """Creates a new, unconfigured, Project based at root_dir and comprised of the sources visible - to the given targets.""" - - self.name = name - self.root_dir = root_dir - self.targets = OrderedSet(targets) - self.transitive = transitive - self.workunit_factory = workunit_factory - - self.sources = [] - self.py_sources = [] - self.py_libs = [] - self.resource_extensions = set() - - self.has_python = has_python - self.skip_java = skip_java - self.skip_scala = skip_scala - self.has_scala = False - self.has_tests = False - - self.checkstyle_suppression_files = checkstyle_suppression_files # Absolute paths. 
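`configure_compile_context` above is essentially one graph walk: hoist every external jar and exclude out of the target closure, and collect any target that must stay pre-compiled (codegen, apt, skipped languages) onto the classpath. A toy version of that walk under invented field names, not the real pants target model:

```python
from collections import namedtuple

Target = namedtuple('Target', 'name jars must_compile deps')

def prune(roots):
    """Split a target closure into external jar deps vs. targets to compile."""
    jars, compiles, seen = set(), set(), set()
    def walk(target):
        if target in seen:
            return
        seen.add(target)
        jars.update(target.jars)
        if target.must_compile:
            compiles.add(target)
        for dep in target.deps:
            walk(dep)
    for root in roots:
        walk(root)
    return jars, compiles

lib = Target('lib', jars=('guava-14.0.1',), must_compile=True, deps=())
app = Target('app', jars=(), must_compile=False, deps=(lib,))
assert prune([app]) == ({'guava-14.0.1'}, {lib})
```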
- self.debug_port = debug_port - - self.internal_jars = OrderedSet() - self.external_jars = OrderedSet() - - def configure_python(self, source_roots, test_roots, lib_roots): - self.py_sources.extend(SourceSet(get_buildroot(), root, None, False) for root in source_roots) - self.py_sources.extend(SourceSet(get_buildroot(), root, None, True) for root in test_roots) - for root in lib_roots: - for path in os.listdir(os.path.join(get_buildroot(), root)): - if os.path.isdir(os.path.join(get_buildroot(), root, path)) or path.endswith('.egg'): - self.py_libs.append(SourceSet(get_buildroot(), root, path, False)) - - def configure_jvm(self, extra_source_paths, extra_test_paths): - """ - Configures this project's source sets returning the full set of targets the project is - comprised of. The full set can be larger than the initial set of targets when any of the - initial targets only has partial ownership of its source set's directories. - """ - - # TODO(John Sirois): much waste lies here, revisit structuring for more readable and efficient - # construction of source sets and excludes ... and add a test! - - analyzed = OrderedSet() - targeted = set() - - def source_target(target): - return ((self.transitive or target in self.targets) and - target.has_sources() and - (not target.is_codegen and - not (self.skip_java and is_java(target)) and - not (self.skip_scala and is_scala(target)))) - - def configure_source_sets(relative_base, sources, is_test): - absolute_base = os.path.join(self.root_dir, relative_base) - paths = set([ os.path.dirname(source) for source in sources]) - for path in paths: - absolute_path = os.path.join(absolute_base, path) - if absolute_path not in targeted: - targeted.add(absolute_path) - self.sources.append(SourceSet(self.root_dir, relative_base, path, is_test)) - - def find_source_basedirs(target): - dirs = set() - if source_target(target): - absolute_base = os.path.join(self.root_dir, target.target_base) - dirs.update([ os.path.join(absolute_base, os.path.dirname(source)) - for source in target.sources ]) - return dirs - - def configure_target(target): - if target not in analyzed: - analyzed.add(target) - - self.has_scala = not self.skip_scala and (self.has_scala or is_scala(target)) - - if target.has_resources: - resources_by_basedir = defaultdict(set) - for resources in target.resources: - resources_by_basedir[resources.target_base].update(resources.sources) - for basedir, resources in resources_by_basedir.items(): - self.resource_extensions.update(Project.extract_resource_extensions(resources)) - configure_source_sets(basedir, resources, is_test=False) - - if target.sources: - test = target.is_test - self.has_tests = self.has_tests or test - configure_source_sets(target.target_base, target.sources, is_test = test) - - # Other BUILD files may specify sources in the same directory as this target. Those BUILD - # files might be in parent directories (globs('a/b/*.java')) or even children directories if - # this target globs children as well. Gather all these candidate BUILD files to test for - # sources they own that live in the directories this targets sources live in. 
- target_dirset = find_source_basedirs(target) - candidates = Target.get_all_addresses(target.address.buildfile) - for ancestor in target.address.buildfile.ancestors(): - candidates.update(Target.get_all_addresses(ancestor)) - for sibling in target.address.buildfile.siblings(): - candidates.update(Target.get_all_addresses(sibling)) - for descendant in target.address.buildfile.descendants(): - candidates.update(Target.get_all_addresses(descendant)) - - def is_sibling(target): - return source_target(target) and target_dirset.intersection(find_source_basedirs(target)) - - return filter(is_sibling, [ Target.get(a) for a in candidates if a != target.address ]) - - for target in self.targets: - target.walk(configure_target, predicate = source_target) - - # We need to figure out excludes, in doing so there are 2 cases we should not exclude: - # 1.) targets depend on A only should lead to an exclude of B - # A/BUILD - # A/B/BUILD - # - # 2.) targets depend on A and C should not lead to an exclude of B (would wipe out C) - # A/BUILD - # A/B - # A/B/C/BUILD - # - # 1 approach: build set of all paths and parent paths containing BUILDs our targets depend on - - # these are unexcludable - - unexcludable_paths = set() - for source_set in self.sources: - parent = os.path.join(self.root_dir, source_set.source_base, source_set.path) - while True: - unexcludable_paths.add(parent) - parent, _ = os.path.split(parent) - # no need to add the repo root or above, all source paths and extra paths are children - if parent == self.root_dir: - break - - for source_set in self.sources: - paths = set() - source_base = os.path.join(self.root_dir, source_set.source_base) - for root, dirs, _ in os.walk(os.path.join(source_base, source_set.path)): - if dirs: - paths.update([os.path.join(root, directory) for directory in dirs]) - unused_children = paths - targeted - if unused_children: - for child in unused_children: - if child not in unexcludable_paths: - source_set.excludes.append(os.path.relpath(child, source_base)) - - targets = OrderedSet() - for target in self.targets: - target.walk(lambda target: targets.add(target), source_target) - targets.update(analyzed - targets) - - self.sources.extend(SourceSet(get_buildroot(), p, None, False) for p in extra_source_paths) - self.sources.extend(SourceSet(get_buildroot(), p, None, True) for p in extra_test_paths) - - return targets - - def set_tool_classpaths(self, checkstyle_classpath, scalac_classpath): - self.checkstyle_classpath = checkstyle_classpath - self.scala_compiler_classpath = scalac_classpath diff --git a/src/python/twitter/pants/tasks/idea_gen.py b/src/python/twitter/pants/tasks/idea_gen.py deleted file mode 100644 index 6cc83f076..000000000 --- a/src/python/twitter/pants/tasks/idea_gen.py +++ /dev/null @@ -1,280 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
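The exclude computation in `configure_jvm` above is pure set arithmetic: walk every directory under each source set, subtract the directories some target claimed, and spare any directory that sits on the path down to a claimed one (the "unexcludable" set) — exactly the two numbered cases in the comment. A filesystem-free sketch of that arithmetic with invented paths:

```python
import os

def compute_excludes(walked_dirs, targeted, root):
    """Exclude walked dirs nobody targets, sparing ancestors of targeted dirs."""
    unexcludable = set()
    for path in targeted:
        parent = path
        while parent != root:
            unexcludable.add(parent)
            parent = os.path.dirname(parent)
    return {d for d in walked_dirs if d not in targeted and d not in unexcludable}

root = '/repo'
walked = {'/repo/a', '/repo/a/b', '/repo/a/b/c'}
targeted = {'/repo/a', '/repo/a/b/c'}
# /repo/a/b is untargeted, but /repo/a/b/c beneath it is, so nothing is excluded.
assert compute_excludes(walked, targeted, root) == set()
```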
-# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import pkgutil -import shutil -import tempfile - -from xml.dom import minidom - -from twitter.common.dirutil import safe_mkdir -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.generator import TemplateData, Generator -from twitter.pants.targets.java_tests import JavaTests -from twitter.pants.targets.scala_tests import ScalaTests -from twitter.pants.targets.sources import SourceRoot -from twitter.pants.tasks.ide_gen import IdeGen, Project, SourceSet - - -_TEMPLATE_BASEDIR = 'templates/idea' - - -_VERSIONS = { - '9': '12', # 9 and 12 are ipr/iml compatible - '10': '12', # 10 and 12 are ipr/iml compatible - '11': '12', # 11 and 12 are ipr/iml compatible - '12': '12' -} - - -_SCALA_VERSION_DEFAULT = '2.9' -_SCALA_VERSIONS = { - '2.8': 'Scala 2.8', - _SCALA_VERSION_DEFAULT: 'Scala 2.9', - '2.10': 'Scala 2.10', - '2.10-virt': 'Scala 2.10 virtualized' -} - - -class IdeaGen(IdeGen): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - IdeGen.setup_parser(option_group, args, mkflag) - - supported_versions = sorted(list(_VERSIONS.keys())) - option_group.add_option(mkflag("idea-version"), dest="idea_gen_version", - default='11', type="choice", choices=supported_versions, - help="[%%default] The IntelliJ IDEA version the project " - "configuration should be generated for; can be one of: " \ - "%s" % supported_versions) - - option_group.add_option(mkflag("merge"), mkflag("merge", negate=True), default=True, - action="callback", callback=mkflag.set_bool, dest="idea_gen_merge", - help="[%default] Merge any manual customizations in existing " - "Intellij IDEA configuration. 
If False, manual customizations " - "will be overwritten.") - - option_group.add_option(mkflag("open"), mkflag("open", negate=True), default=True, - action="callback", callback=mkflag.set_bool, dest="idea_gen_open", - help="[%default] Attempts to open the generated project in IDEA.") - - option_group.add_option(mkflag("bash"), mkflag("bash", negate=True), default=False, - action="callback", callback=mkflag.set_bool, dest="idea_gen_bash", - help="Adds a bash facet to the generated project configuration.") - - option_group.add_option(mkflag("scala-language-level"), default=_SCALA_VERSION_DEFAULT, - type="choice", choices=_SCALA_VERSIONS.keys(), - dest="idea_scala_language_level", - help="[%default] Set the scala language level used for IDEA linting.") - option_group.add_option(mkflag("scala-maximum-heap-size"), - dest="idea_gen_scala_maximum_heap_size", - help="[%default] Sets the maximum heap size (in megabytes) for scalac.") - option_group.add_option(mkflag("fsc"), mkflag("fsc", negate=True), default=False, - action="callback", callback=mkflag.set_bool, dest="idea_gen_fsc", - help="If the project contains any scala targets this specifies that the " - "fsc compiler should be enabled.") - - option_group.add_option(mkflag("java-encoding"), default="UTF-8", - dest="idea_gen_java_encoding", - help="[%default] Sets the file encoding for java files in this " - "project.") - option_group.add_option(mkflag("java-maximum-heap-size"), - dest="idea_gen_java_maximum_heap_size", - help="[%default] Sets the maximum heap size (in megabytes) for javac.") - - def __init__(self, context): - IdeGen.__init__(self, context) - - - self.intellij_output_dir = os.path.join(self.work_dir, 'out') - self.nomerge = not context.options.idea_gen_merge - self.open = context.options.idea_gen_open - self.bash = context.options.idea_gen_bash - - self.scala_language_level = _SCALA_VERSIONS.get(context.options.idea_scala_language_level, None) - self.scala_maximum_heap_size = ( - context.options.idea_gen_scala_maximum_heap_size - or context.config.getint('idea', 'scala_maximum_heap_size_mb', default=512) - ) - self.fsc = context.options.idea_gen_fsc - - self.java_encoding = context.options.idea_gen_java_encoding - self.java_maximum_heap_size = ( - context.options.idea_gen_java_maximum_heap_size - or context.config.getint('idea', 'java_maximum_heap_size_mb', default=128) - ) - - idea_version = _VERSIONS[context.options.idea_gen_version] - self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-%s.mustache' % idea_version) - self.module_template = os.path.join(_TEMPLATE_BASEDIR, 'module-%s.mustache' % idea_version) - - self.project_filename = os.path.join(self.cwd, '%s.ipr' % self.project_name) - self.module_filename = os.path.join(self.work_dir, '%s.iml' % self.project_name) - - def generate_project(self, project): - def is_test(source_set): - # Non test targets that otherwise live in test target roots (say a java_library) must - # be marked as test for IDEA to correctly link the targets with the test code that uses - # them. Therefore we check the base instead of the is_test flag.
- return source_set.source_base in SourceSet.TEST_BASES - - def create_content_root(source_set): - root_relative_path = os.path.join(source_set.source_base, source_set.path) \ - if source_set.path else source_set.source_base - - sources = TemplateData( - path=root_relative_path, - package_prefix=source_set.path.replace('/', '.') if source_set.path else None, - is_test=is_test(source_set) - ) - - return TemplateData( - path=root_relative_path, - sources=[sources], - exclude_paths=[os.path.join(source_set.source_base, x) for x in source_set.excludes], - ) - - content_roots = [create_content_root(source_set) for source_set in project.sources] - if project.has_python: - content_roots.extend(create_content_root(source_set) for source_set in project.py_sources) - - scala = None - if project.has_scala: - scala = TemplateData( - language_level=self.scala_language_level, - maximum_heap_size=self.scala_maximum_heap_size, - fsc=self.fsc, - compiler_classpath=project.scala_compiler_classpath - ) - - configured_module = TemplateData( - root_dir=get_buildroot(), - path=self.module_filename, - content_roots=content_roots, - bash=self.bash, - python=project.has_python, - scala=scala, - internal_jars=[cp_entry.jar for cp_entry in project.internal_jars], - internal_source_jars=[cp_entry.source_jar for cp_entry in project.internal_jars - if cp_entry.source_jar], - external_jars=[cp_entry.jar for cp_entry in project.external_jars], - external_javadoc_jars=[cp_entry.javadoc_jar for cp_entry in project.external_jars - if cp_entry.javadoc_jar], - external_source_jars=[cp_entry.source_jar for cp_entry in project.external_jars - if cp_entry.source_jar], - extra_components=[], - ) - - outdir = os.path.abspath(self.intellij_output_dir) - if not os.path.exists(outdir): - os.makedirs(outdir) - - configured_project = TemplateData( - root_dir=get_buildroot(), - outdir=outdir, - modules=[ configured_module ], - java=TemplateData( - encoding=self.java_encoding, - maximum_heap_size=self.java_maximum_heap_size, - jdk=self.java_jdk, - language_level = 'JDK_1_%d' % self.java_language_level - ), - resource_extensions=list(project.resource_extensions), - scala=scala, - checkstyle_suppression_files=','.join(project.checkstyle_suppression_files), - checkstyle_classpath=';'.join(project.checkstyle_classpath), - debug_port=project.debug_port, - extra_components=[], - ) - - existing_project_components = None - existing_module_components = None - if not self.nomerge: - # Grab the existing components, which may include customized ones. - existing_project_components = self._parse_xml_component_elements(self.project_filename) - existing_module_components = self._parse_xml_component_elements(self.module_filename) - - # Generate (without merging in any extra components). - safe_mkdir(os.path.abspath(self.intellij_output_dir)) - - ipr = self._generate_to_tempfile( - Generator(pkgutil.get_data(__name__, self.project_template), project = configured_project)) - iml = self._generate_to_tempfile( - Generator(pkgutil.get_data(__name__, self.module_template), module = configured_module)) - - if not self.nomerge: - # Get the names of the components we generated, and then delete the - # generated files. Clunky, but performance is not an issue, and this - # is an easy way to get those component names from the templates. 
- extra_project_components = self._get_components_to_merge(existing_project_components, ipr) - extra_module_components = self._get_components_to_merge(existing_module_components, iml) - os.remove(ipr) - os.remove(iml) - - # Generate again, with the extra components. - ipr = self._generate_to_tempfile(Generator(pkgutil.get_data(__name__, self.project_template), - project = configured_project.extend(extra_components = extra_project_components))) - iml = self._generate_to_tempfile(Generator(pkgutil.get_data(__name__, self.module_template), - module = configured_module.extend(extra_components = extra_module_components))) - - shutil.move(ipr, self.project_filename) - shutil.move(iml, self.module_filename) - - print('\nGenerated project at %s%s' % (self.work_dir, os.sep)) - - return self.project_filename if self.open else None - - def _generate_to_tempfile(self, generator): - """Applies the specified generator to a temp file and returns the path to that file. - We generate into a temp file so that we don't lose any manual customizations on error.""" - (output_fd, output_path) = tempfile.mkstemp() - with os.fdopen(output_fd, 'w') as output: - generator.write(output) - return output_path - - def _get_resource_extensions(self, project): - resource_extensions = set() - resource_extensions.update(project.resource_extensions) - - # TODO(John Sirois): make test resources 1st class in ant build and punch this through to pants - # model - for _, _, files in os.walk(os.path.join(get_buildroot(), 'tests', 'resources')): - resource_extensions.update(Project.extract_resource_extensions(files)) - - return resource_extensions - - def _parse_xml_component_elements(self, path): - """Returns a list of pairs (component_name, xml_fragment) where xml_fragment is the xml text of - that <component> element in the specified xml file.""" - if not os.path.exists(path): - return [] # No existing components. - dom = minidom.parse(path) - # .ipr and .iml files both consist of <component> elements directly under a root element. - return [ (x.getAttribute('name'), x.toxml()) for x in dom.getElementsByTagName('component') ] - - def _get_components_to_merge(self, mergable_components, path): - """Returns a list of the <component> fragments in mergable_components that are not - superseded by a <component> element in the specified xml file. - mergable_components is a list of (name, xml_fragment) pairs.""" - - # As a convenience, we use _parse_xml_component_elements to get the - # superseding component names, ignoring the generated xml fragments. - # This is fine, since performance is not an issue. - generated_component_names = set( - [ name for (name, _) in self._parse_xml_component_elements(path) ]) - return [ x[1] for x in mergable_components if x[0] not in generated_component_names] - diff --git a/src/python/twitter/pants/tasks/ivy_resolve.py b/src/python/twitter/pants/tasks/ivy_resolve.py deleted file mode 100644 index 13d444563..000000000 --- a/src/python/twitter/pants/tasks/ivy_resolve.py +++ /dev/null @@ -1,220 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function -from collections import defaultdict - -import os -import shutil -import time - -from twitter.common.dirutil import safe_mkdir -from twitter.pants import binary_util -from twitter.pants.ivy.bootstrapper import Bootstrapper -from .cache_manager import VersionedTargetSet -from .ivy_utils import IvyUtils -from .nailgun_task import NailgunTask -from . import TaskError - - -class IvyResolve(NailgunTask): - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - NailgunTask.setup_parser(option_group, args, mkflag) - - flag = mkflag('override') - option_group.add_option(flag, action='append', dest='ivy_resolve_overrides', - help="""Specifies a jar dependency override in the form: - [org]#[name]=(revision|url) - - For example, to specify 2 overrides: - %(flag)s=com.foo#bar=0.1.2 \\ - %(flag)s=com.baz#spam=file:///tmp/spam.jar - """ % dict(flag=flag)) - - report = mkflag("report") - option_group.add_option(report, mkflag("report", negate=True), dest = "ivy_resolve_report", - action="callback", callback=mkflag.set_bool, default=False, - help = "[%default] Generate an ivy resolve html report") - - option_group.add_option(mkflag("open"), mkflag("open", negate=True), - dest="ivy_resolve_open", default=False, - action="callback", callback=mkflag.set_bool, - help="[%%default] Attempt to open the generated ivy resolve report " - "in a browser (implies %s)." % report) - - option_group.add_option(mkflag("outdir"), dest="ivy_resolve_outdir", - help="Emit ivy report outputs into this directory.") - - option_group.add_option(mkflag("args"), dest="ivy_args", action="append", default=[], - help = "Pass these extra args to ivy.") - - option_group.add_option(mkflag("mutable-pattern"), dest="ivy_mutable_pattern", - help="If specified, all artifact revisions matching this pattern will " - "be treated as mutable unless a matching artifact explicitly " - "marks mutable as False.") - - def __init__(self, context, confs=None): - super(IvyResolve, self).__init__(context) - work_dir = context.config.get('ivy-resolve', 'workdir') - - self._ivy_bootstrapper = Bootstrapper.instance() - self._cachedir = self._ivy_bootstrapper.ivy_cache_dir - self._confs = confs or context.config.getlist('ivy-resolve', 'confs', default=['default']) - self._classpath_dir = os.path.join(work_dir, 'mapped') - - self._outdir = context.options.ivy_resolve_outdir or os.path.join(work_dir, 'reports') - self._open = context.options.ivy_resolve_open - self._report = self._open or context.options.ivy_resolve_report - - self._ivy_bootstrap_key = 'ivy' - ivy_bootstrap_tools = context.config.getlist('ivy-resolve', 'bootstrap-tools', ':xalan') - self._jvm_tool_bootstrapper.register_jvm_tool(self._ivy_bootstrap_key, ivy_bootstrap_tools) - - self._ivy_utils = IvyUtils(config=context.config, - options=context.options, - log=context.log) - context.products.require_data('exclusives_groups') - - # Typically this should be a local cache only, since classpaths aren't portable.
- self.setup_artifact_cache_from_config(config_section='ivy-resolve') - - def invalidate_for(self): - return self.context.options.ivy_resolve_overrides - - def execute(self, targets): - """Resolves the specified confs for the configured targets and returns an iterator over - tuples of (conf, jar path). - """ - groups = self.context.products.get_data('exclusives_groups') - executor = self.create_java_executor() - - # Below, we need to take the code that actually execs ivy, and invoke it once for each - # group. Then after running ivy, we need to take the resulting classpath, and load it into - # the build products. - - # The set of groups we need to consider is complicated: - # - If there are no conflicting exclusives (ie, there's only one entry in the map), - # then we just do the one. - # - If there are conflicts, then there will be at least three entries in the groups map: - # - the group with no exclusives (X) - # - the two groups that are in conflict (A and B). - # In the latter case, we need to do the resolve twice: Once for A+X, and once for B+X, - # because things in A and B can depend on things in X; and so they can indirectly depend - # on the dependencies of X. - # (I think this will be covered by the computed transitive dependencies of - # A and B. But before pushing this change, review this comment, and make sure that this is - # working correctly.) - for group_key in groups.get_group_keys(): - # Narrow the group's target set to just the set of targets that we're supposed to build. - # Normally, this shouldn't be different from the contents of the group. - group_targets = groups.get_targets_for_group_key(group_key) & set(targets) - - # NOTE(pl): The symlinked ivy.xml (for IDEs, particularly IntelliJ) in the presence of - # multiple exclusives groups will end up as the last exclusives group run. I'd like to - # deprecate this eventually, but some people rely on it, and it's not clear to me right now - # whether telling them to use IdeaGen instead is feasible. - classpath = self.ivy_resolve(group_targets, - executor=executor, - symlink_ivyxml=True, - workunit_name='ivy-resolve') - if self.context.products.is_required_data('ivy_jar_products'): - self._populate_ivy_jar_products(group_targets) - for conf in self._confs: - # It's important we add the full classpath as an (ordered) unit for code that is classpath - # order sensitive. - classpath_entries = map(lambda entry: (conf, entry), classpath) - groups.update_compatible_classpaths(group_key, classpath_entries) - - if self._report: - self._generate_ivy_report(group_targets) - - create_jardeps_for = self.context.products.isrequired('jar_dependencies') - if create_jardeps_for: - genmap = self.context.products.get('jar_dependencies') - for target in filter(create_jardeps_for, targets): - self._ivy_utils.mapjars(genmap, target, executor=executor, - workunit_factory=self.context.new_workunit) - - def check_artifact_cache_for(self, invalidation_check): - # Ivy resolution is an output dependent on the entire target set, and is not divisible - # by target. So we can only cache it keyed by the entire target set.
- global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts) - return [global_vts] - - def _populate_ivy_jar_products(self, targets): - """Populate the build products with an IvyInfo object for each generated ivy report.""" - ivy_products = self.context.products.get_data('ivy_jar_products') or defaultdict(list) - for conf in self._confs: - ivyinfo = self._ivy_utils.parse_xml_report(targets, conf) - if ivyinfo: - # Value is a list, to accommodate multiple exclusives groups. - ivy_products[conf].append(ivyinfo) - self.context.products.safe_create_data('ivy_jar_products', lambda: ivy_products) - - def _generate_ivy_report(self, targets): - def make_empty_report(report, organisation, module, conf): - no_deps_xml_template = """<?xml version="1.0" encoding="UTF-8"?> - <?xml-stylesheet type="text/xsl" href="ivy-report.xsl"?> - <ivy-report version="1.0"> - <info organisation="%(organisation)s" module="%(module)s" revision="latest.integration" conf="%(conf)s" confs="%(conf)s" date="%(timestamp)s"/> - </ivy-report>""" - no_deps_xml = no_deps_xml_template % dict(organisation=organisation, - module=module, - conf=conf, - timestamp=time.strftime('%Y%m%d%H%M%S')) - with open(report, 'w') as report_handle: - print(no_deps_xml, file=report_handle) - - classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._ivy_bootstrap_key, - self.create_java_executor()) - - reports = [] - org, name = self._ivy_utils.identify(targets) - xsl = os.path.join(self._cachedir, 'ivy-report.xsl') - - # Xalan needs this dir to exist - ensure that, but do no more - we have no clue where this - # points. - safe_mkdir(self._outdir, clean=False) - - for conf in self._confs: - params = dict(org=org, name=name, conf=conf) - xml = self._ivy_utils.xml_report_path(targets, conf) - if not os.path.exists(xml): - make_empty_report(xml, org, name, conf) - out = os.path.join(self._outdir, '%(org)s-%(name)s-%(conf)s.html' % params) - args = ['-IN', xml, '-XSL', xsl, '-OUT', out] - if 0 != self.runjava(classpath=classpath, main='org.apache.xalan.xslt.Process', - args=args, workunit_name='report'): - raise TaskError - reports.append(out) - - css = os.path.join(self._outdir, 'ivy-report.css') - if os.path.exists(css): - os.unlink(css) - shutil.copy(os.path.join(self._cachedir, 'ivy-report.css'), self._outdir) - - if self._open: - binary_util.ui_open(*reports) diff --git a/src/python/twitter/pants/tasks/ivy_utils.py b/src/python/twitter/pants/tasks/ivy_utils.py deleted file mode 100644 index be2f54e51..000000000 --- a/src/python/twitter/pants/tasks/ivy_utils.py +++ /dev/null @@ -1,423 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ================================================================================================== - -from __future__ import print_function - -import os -import xml.etree.ElementTree -import pkgutil -import re -import threading -import errno - -from collections import namedtuple, defaultdict -from contextlib import contextmanager - -from twitter.common.collections import OrderedDict, OrderedSet -from twitter.common.dirutil import safe_mkdir, safe_open - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.generator import Generator, TemplateData -from twitter.pants.base.revision import Revision -from twitter.pants.base.target import Target -from twitter.pants.ivy.bootstrapper import Bootstrapper -from twitter.pants.ivy.ivy import Ivy -from twitter.pants.java import util -from twitter.pants.tasks.task_error import TaskError - - -IvyModuleRef = namedtuple('IvyModuleRef', ['org', 'name', 'rev']) -IvyArtifact = namedtuple('IvyArtifact', ['path', 'classifier']) -IvyModule = namedtuple('IvyModule', ['ref', 'artifacts', 'callers']) - - -class IvyInfo(object): - def __init__(self): - self.modules_by_ref = {} # Map from ref to referenced module. - # Map from ref of caller to refs of modules required by that caller. - self.deps_by_caller = defaultdict(OrderedSet) - - def add_module(self, module): - self.modules_by_ref[module.ref] = module - for caller in module.callers: - self.deps_by_caller[caller].add(module.ref) - - -class IvyUtils(object): - """Useful methods related to interaction with ivy.""" - def __init__(self, config, options, log): - self._log = log - self._config = config - self._options = options - - # TODO(pl): This is super awful, but options doesn't have a nice way to get out - # attributes that might not be there, and even then the attribute value might be - # None, which we still want to override - # Benjy thinks we should probably hoist these options to the global set of options, - # rather than just keeping them within IvyResolve.setup_parser - self._mutable_pattern = (getattr(options, 'ivy_mutable_pattern', None) or - config.get('ivy-resolve', 'mutable_pattern', default=None)) - - self._transitive = config.getbool('ivy-resolve', 'transitive', default=True) - self._args = config.getlist('ivy-resolve', 'args', default=[]) - self._jvm_options = config.getlist('ivy-resolve', 'jvm_args', default=[]) - # Disable cache in File.getCanonicalPath(), makes Ivy work with -symlink option properly on ng.
- self._jvm_options.append('-Dsun.io.useCanonCaches=false') - self._work_dir = config.get('ivy-resolve', 'workdir') - self._template_path = os.path.join('templates', 'ivy_resolve', 'ivy.mustache') - - if self._mutable_pattern: - try: - self._mutable_pattern = re.compile(self._mutable_pattern) - except re.error as e: - raise TaskError('Invalid mutable pattern specified: %s %s' % (self._mutable_pattern, e)) - - def parse_override(override): - match = re.match(r'^([^#]+)#([^=]+)=([^\s]+)$', override) - if not match: - raise TaskError('Invalid dependency override: %s' % override) - - org, name, rev_or_url = match.groups() - - def fmt_message(message, template): - return message % dict( - overridden='%s#%s;%s' % (template.org, template.module, template.version), - rev=rev_or_url, - url=rev_or_url) - - def replace_rev(template): - self._log.info(fmt_message('Overrode %(overridden)s with rev %(rev)s', template)) - return template.extend(version=rev_or_url, url=None, force=True) - - def replace_url(template): - self._log.info(fmt_message('Overrode %(overridden)s with snapshot at %(url)s', template)) - return template.extend(version='SNAPSHOT', url=rev_or_url, force=True) - - replace = replace_url if re.match(r'^\w+://.+', rev_or_url) else replace_rev - return (org, name), replace - self._overrides = {} - # TODO(pl): See above comment wrt options - if hasattr(options, 'ivy_resolve_overrides') and options.ivy_resolve_overrides: - self._overrides.update(parse_override(o) for o in options.ivy_resolve_overrides) - - @staticmethod - @contextmanager - def cachepath(path): - if not os.path.exists(path): - yield () - else: - with safe_open(path, 'r') as cp: - yield (path.strip() for path in cp.read().split(os.pathsep) if path.strip()) - - @staticmethod - def symlink_cachepath(ivy_home, inpath, symlink_dir, outpath): - """Symlinks all paths listed in inpath that are under ivy_home into symlink_dir. - - Preserves all other paths. Writes the resulting paths to outpath. - Returns a map of path -> symlink to that path. - """ - safe_mkdir(symlink_dir) - with safe_open(inpath, 'r') as infile: - paths = filter(None, infile.read().strip().split(os.pathsep)) - new_paths = [] - for path in paths: - if not path.startswith(ivy_home): - new_paths.append(path) - continue - symlink = os.path.join(symlink_dir, os.path.relpath(path, ivy_home)) - try: - os.makedirs(os.path.dirname(symlink)) - except OSError as e: - if e.errno != errno.EEXIST: - raise - # Note: The try blocks cannot be combined. It may be that the dir exists but the link doesn't. - try: - os.symlink(path, symlink) - except OSError as e: - # We don't delete and recreate the symlink, as this may break concurrently executing code. 
- if e.errno != errno.EEXIST: - raise - new_paths.append(symlink) - with safe_open(outpath, 'w') as outfile: - outfile.write(':'.join(new_paths)) - symlink_map = dict(zip(paths, new_paths)) - return symlink_map - - def identify(self, targets): - targets = list(targets) - if len(targets) == 1 and hasattr(targets[0], 'provides') and targets[0].provides: - return targets[0].provides.org, targets[0].provides.name - else: - return 'internal', Target.maybe_readable_identify(targets) - - def xml_report_path(self, targets, conf): - """The path to the xml report ivy creates after a retrieve.""" - org, name = self.identify(targets) - cachedir = Bootstrapper.instance().ivy_cache_dir - return os.path.join(cachedir, '%s-%s-%s.xml' % (org, name, conf)) - - def parse_xml_report(self, targets, conf): - """Returns the IvyInfo representing the info in the xml report, or None if no report exists.""" - path = self.xml_report_path(targets, conf) - if not os.path.exists(path): - return None - - ret = IvyInfo() - etree = xml.etree.ElementTree.parse(self.xml_report_path(targets, conf)) - doc = etree.getroot() - for module in doc.findall('dependencies/module'): - org = module.get('organisation') - name = module.get('name') - for revision in module.findall('revision'): - rev = revision.get('name') - artifacts = [] - for artifact in revision.findall('artifacts/artifact'): - artifacts.append(IvyArtifact(path=artifact.get('location'), - classifier=artifact.get('extra-classifier'))) - callers = [] - for caller in revision.findall('caller'): - callers.append(IvyModuleRef(caller.get('organisation'), - caller.get('name'), - caller.get('callerrev'))) - ret.add_module(IvyModule(IvyModuleRef(org, name, rev), artifacts, callers)) - return ret - - def _extract_classpathdeps(self, targets): - """Subclasses can override to filter out a set of targets that should be resolved for classpath - dependencies. - """ - def is_classpath(target): - return (target.is_jar or - target.is_internal and any(jar for jar in target.jar_dependencies if jar.rev)) - - classpath_deps = OrderedSet() - for target in targets: - classpath_deps.update(t for t in target.resolve() if t.is_concrete and is_classpath(t)) - return classpath_deps - - def _generate_ivy(self, targets, jars, excludes, ivyxml, confs): - org, name = self.identify(targets) - template_data = TemplateData( - org=org, - module=name, - version='latest.integration', - publications=None, - configurations=confs, - dependencies=[self._generate_jar_template(jar, confs) for jar in jars], - excludes=[self._generate_exclude_template(exclude) for exclude in excludes]) - - safe_mkdir(os.path.dirname(ivyxml)) - with open(ivyxml, 'w') as output: - generator = Generator(pkgutil.get_data(__name__, self._template_path), - root_dir=get_buildroot(), - lib=template_data) - generator.write(output) - - def _calculate_classpath(self, targets): - - def is_jardependant(target): - return target.is_jar or target.is_jvm - - jars = OrderedDict() - excludes = set() - - # Support the ivy force concept when we sanely can for internal dep conflicts. - # TODO(John Sirois): Consider supporting / implementing the configured ivy revision picking - # strategy generally. 
- def add_jar(jar): - coordinate = (jar.org, jar.name) - existing = jars.get(coordinate) - jars[coordinate] = jar if not existing else ( - self._resolve_conflict(existing=existing, proposed=jar) - ) - - def collect_jars(target): - if target.is_jar: - add_jar(target) - elif target.jar_dependencies: - for jar in target.jar_dependencies: - if jar.rev: - add_jar(jar) - - # Lift jvm target-level excludes up to the global excludes set - if target.is_jvm and target.excludes: - excludes.update(target.excludes) - - for target in targets: - target.walk(collect_jars, is_jardependant) - - return jars.values(), excludes - - def _resolve_conflict(self, existing, proposed): - if proposed == existing: - return existing - elif existing.force and proposed.force: - raise TaskError('Cannot force %s#%s to both rev %s and %s' % ( - proposed.org, proposed.name, existing.rev, proposed.rev - )) - elif existing.force: - self._log.debug('Ignoring rev %s for %s#%s already forced to %s' % ( - proposed.rev, proposed.org, proposed.name, existing.rev - )) - return existing - elif proposed.force: - self._log.debug('Forcing %s#%s from %s to %s' % ( - proposed.org, proposed.name, existing.rev, proposed.rev - )) - return proposed - else: - try: - if Revision.lenient(proposed.rev) > Revision.lenient(existing.rev): - self._log.debug('Upgrading %s#%s from rev %s to %s' % ( - proposed.org, proposed.name, existing.rev, proposed.rev, - )) - return proposed - else: - return existing - except Revision.BadRevision as e: - raise TaskError('Failed to parse jar revision', e) - - def _is_mutable(self, jar): - if jar.mutable is not None: - return jar.mutable - if self._mutable_pattern: - return self._mutable_pattern.match(jar.rev) - return False - - def _generate_jar_template(self, jar, confs): - template = TemplateData( - org=jar.org, - module=jar.name, - version=jar.rev, - mutable=self._is_mutable(jar), - force=jar.force, - excludes=[self._generate_exclude_template(exclude) for exclude in jar.excludes], - transitive=jar.transitive, - artifacts=jar.artifacts, - configurations=[conf for conf in jar.configurations if conf in confs]) - override = self._overrides.get((jar.org, jar.name)) - return override(template) if override else template - - def _generate_exclude_template(self, exclude): - return TemplateData(org=exclude.org, name=exclude.name) - - def is_classpath_artifact(self, path): - """Subclasses can override to determine whether a given artifact represents a classpath - artifact.""" - return path.endswith('.jar') or path.endswith('.war') - - def is_mappable_artifact(self, org, name, path): - """Subclasses can override to determine whether a given artifact represents a mappable - artifact.""" - return self.is_classpath_artifact(path) - - def mapto_dir(self): - """Subclasses can override to establish an isolated jar mapping directory.""" - return os.path.join(self._work_dir, 'mapped-jars') - - def mapjars(self, genmap, target, executor, workunit_factory=None): - """ - Parameters: - genmap: the jar_dependencies ProductMapping entry for the required products. - target: the target whose jar dependencies are being retrieved. 
- """ - mapdir = os.path.join(self.mapto_dir(), target.id) - safe_mkdir(mapdir, clean=True) - ivyargs = [ - '-retrieve', '%s/[organisation]/[artifact]/[conf]/' - '[organisation]-[artifact]-[revision](-[classifier]).[ext]' % mapdir, - '-symlink', - ] - self.exec_ivy(mapdir, - [target], - ivyargs, - confs=target.configurations, - ivy=Bootstrapper.default_ivy(executor), - workunit_factory=workunit_factory, - workunit_name='map-jars') - - for org in os.listdir(mapdir): - orgdir = os.path.join(mapdir, org) - if os.path.isdir(orgdir): - for name in os.listdir(orgdir): - artifactdir = os.path.join(orgdir, name) - if os.path.isdir(artifactdir): - for conf in os.listdir(artifactdir): - confdir = os.path.join(artifactdir, conf) - for f in os.listdir(confdir): - if self.is_mappable_artifact(org, name, f): - # TODO(John Sirois): kill the org and (org, name) exclude mappings in favor of a - # conf whitelist - genmap.add(org, confdir).append(f) - genmap.add((org, name), confdir).append(f) - - genmap.add(target, confdir).append(f) - genmap.add((target, conf), confdir).append(f) - genmap.add((org, name, conf), confdir).append(f) - - ivy_lock = threading.RLock() - - def exec_ivy(self, - target_workdir, - targets, - args, - confs=None, - ivy=None, - workunit_name='ivy', - workunit_factory=None, - symlink_ivyxml=False): - - ivy = ivy or Bootstrapper.default_ivy() - if not isinstance(ivy, Ivy): - raise ValueError('The ivy argument supplied must be an Ivy instance, given %s of type %s' - % (ivy, type(ivy))) - - ivyxml = os.path.join(target_workdir, 'ivy.xml') - jars, excludes = self._calculate_classpath(targets) - - ivy_args = ['-ivy', ivyxml] - - confs_to_resolve = confs or ['default'] - ivy_args.append('-confs') - ivy_args.extend(confs_to_resolve) - - ivy_args.extend(args) - if not self._transitive: - ivy_args.append('-notransitive') - ivy_args.extend(self._args) - - def safe_link(src, dest): - if os.path.exists(dest): - os.unlink(dest) - os.symlink(src, dest) - - with IvyUtils.ivy_lock: - self._generate_ivy(targets, jars, excludes, ivyxml, confs_to_resolve) - runner = ivy.runner(jvm_options=self._jvm_options, args=ivy_args) - try: - result = util.execute_runner(runner, - workunit_factory=workunit_factory, - workunit_name=workunit_name) - - # Symlink to the current ivy.xml file (useful for IDEs that read it). - if symlink_ivyxml: - ivyxml_symlink = os.path.join(self._work_dir, 'ivy.xml') - safe_link(ivyxml, ivyxml_symlink) - - if result != 0: - raise TaskError('Ivy returned %d' % result) - except runner.executor.Error as e: - raise TaskError(e) diff --git a/src/python/twitter/pants/tasks/jar_create.py b/src/python/twitter/pants/tasks/jar_create.py deleted file mode 100644 index d01610875..000000000 --- a/src/python/twitter/pants/tasks/jar_create.py +++ /dev/null @@ -1,235 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import functools -import os - -from contextlib import contextmanager -from zipfile import ZIP_DEFLATED, ZIP_STORED - -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.fs.fs import safe_filename -from twitter.pants.java.jar import Manifest, open_jar -from twitter.pants.targets.jvm_binary import JvmBinary - -from .javadoc_gen import javadoc -from .scaladoc_gen import scaladoc - -from . import Task, TaskError - - -DEFAULT_CONFS = ['default'] - - -def is_binary(target): - return isinstance(target, JvmBinary) - - -def is_java_library(target): - return target.has_sources('.java') and not is_binary(target) - - -def is_scala_library(target): - return target.has_sources('.scala') and not is_binary(target) - - -def is_jvm_library(target): - return is_java_library(target) or is_scala_library(target) - - -def jarname(target, extension='.jar'): - # TODO(John Sirois): incorporate version - _, id_, _ = target.get_artifact_info() - # Cap jar names quite a bit lower than the standard fs limit of 255 characters since these - # artifacts will often be used outside pants and those uses may manipulate (expand) the jar - # filenames blindly. - return safe_filename(id_, extension, max_length=200) - - -class JarCreate(Task): - """Jars jvm libraries and optionally their sources and their docs.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag('outdir'), dest='jar_create_outdir', - help='Emit jars into this directory.') - - option_group.add_option(mkflag('compressed'), mkflag('compressed', negate=True), - dest='jar_create_compressed', default=True, - action='callback', callback=mkflag.set_bool, - help='[%default] Create compressed jars.') - - option_group.add_option(mkflag('transitive'), mkflag('transitive', negate=True), - dest='jar_create_transitive', default=True, - action='callback', callback=mkflag.set_bool, - help='[%default] Create jars for the transitive closure of internal ' - 'targets reachable from the roots specified on the command line.') - - option_group.add_option(mkflag('classes'), mkflag('classes', negate=True), - dest='jar_create_classes', default=True, - action='callback', callback=mkflag.set_bool, - help='[%default] Create class jars.') - option_group.add_option(mkflag('sources'), mkflag('sources', negate=True), - dest='jar_create_sources', default=False, - action='callback', callback=mkflag.set_bool, - help='[%default] Create source jars.') - # TODO(tdesai): Think about a better way to set defaults on a per-goal basis.
- javadoc_defaults = True if option_group.title.split(':')[0] == 'publish' else False - option_group.add_option(mkflag('javadoc'), mkflag('javadoc', negate=True), - dest='jar_create_javadoc', - default=javadoc_defaults, - action='callback', callback=mkflag.set_bool, - help='[%default] Create javadoc jars.') - - def __init__(self, context): - Task.__init__(self, context) - - options = context.options - products = context.products - - self._output_dir = (options.jar_create_outdir or - self.get_workdir(section='jar-create', workdir='jars')) - self.transitive = options.jar_create_transitive - self.confs = context.config.getlist('jar-create', 'confs', default=DEFAULT_CONFS) - self.compression = ZIP_DEFLATED if options.jar_create_compressed else ZIP_STORED - - self.jar_classes = options.jar_create_classes or products.isrequired('jars') - if self.jar_classes: - products.require_data('classes_by_target') - products.require_data('resources_by_target') - - definitely_create_javadoc = options.jar_create_javadoc or products.isrequired('javadoc_jars') - definitely_dont_create_javadoc = options.jar_create_javadoc is False - create_javadoc = options.jar_create_javadoc - if definitely_create_javadoc and definitely_dont_create_javadoc: - self.context.log.warn('javadoc jars are required but you have requested they not be created, ' - 'creating anyway') - self.jar_javadoc = (True if definitely_create_javadoc else - False if definitely_dont_create_javadoc else - create_javadoc) - if self.jar_javadoc: - products.require(javadoc.product_type) - products.require(scaladoc.product_type) - - self.jar_sources = products.isrequired('source_jars') or options.jar_create_sources - - self._jars = {} - - def execute(self, targets): - safe_mkdir(self._output_dir) - - def jar_targets(predicate): - return filter(predicate, (targets if self.transitive else self.context.target_roots)) - - def add_genjar(typename, target, name): - self.context.products.get(typename).add(target, self._output_dir).append(name) - - if self.jar_classes: - self._jar(jar_targets(is_jvm_library), functools.partial(add_genjar, 'jars')) - - if self.jar_sources: - self.sourcejar(jar_targets(is_jvm_library), functools.partial(add_genjar, 'source_jars')) - - if self.jar_javadoc: - javadoc_add_genjar = functools.partial(add_genjar, 'javadoc_jars') - self.javadocjar(jar_targets(is_java_library), - self.context.products.get(javadoc.product_type), - javadoc_add_genjar) - self.javadocjar(jar_targets(is_scala_library), - self.context.products.get(scaladoc.product_type), - javadoc_add_genjar) - - @contextmanager - def create_jar(self, target, path): - existing = self._jars.setdefault(path, target) - if target != existing: - raise TaskError('Duplicate name: target %s tried to write %s already mapped to target %s' % ( - target, path, existing - )) - self._jars[path] = target - with open_jar(path, 'w', compression=self.compression) as jar: - yield jar - - def _jar(self, jvm_targets, add_genjar): - classes_by_target = self.context.products.get_data('classes_by_target') - resources_by_target = self.context.products.get_data('resources_by_target') - - for target in jvm_targets: - target_classes = classes_by_target.get(target) - - target_resources = [] - if target.has_resources: - target_resources.extend(resources_by_target.get(r) for r in target.resources) - - if target_classes or target_resources: - jar_name = jarname(target) - add_genjar(target, jar_name) - jar_path = os.path.join(self._output_dir, jar_name) - with self.create_jar(target, jar_path) as jarfile: - def 
add_to_jar(target_products): - if target_products: - for root, products in target_products.rel_paths(): - for prod in products: - jarfile.write(os.path.join(root, prod), prod) - add_to_jar(target_classes) - for resources_target in target_resources: - add_to_jar(resources_target) - if target.is_java_agent: - self.write_agent_manifest(target, jarfile) - - def sourcejar(self, jvm_targets, add_genjar): - for target in jvm_targets: - jar_name = jarname(target, '-sources.jar') - add_genjar(target, jar_name) - jar_path = os.path.join(self._output_dir, jar_name) - with self.create_jar(target, jar_path) as jar: - for source in target.sources: - jar.write(os.path.join(get_buildroot(), target.target_base, source), source) - - if target.has_resources: - for resources in target.resources: - for resource in resources.sources: - jar.write(os.path.join(get_buildroot(), resources.target_base, resource), resource) - - def javadocjar(self, java_targets, genmap, add_genjar): - for target in java_targets: - generated = genmap.get(target) - if generated: - jar_name = jarname(target, '-javadoc.jar') - add_genjar(target, jar_name) - jar_path = os.path.join(self._output_dir, jar_name) - with self.create_jar(target, jar_path) as jar: - for basedir, javadocfiles in generated.items(): - for javadocfile in javadocfiles: - jar.write(os.path.join(basedir, javadocfile), javadocfile) - - def write_agent_manifest(self, agent, jarfile): - # TODO(John Sirois): refactor an agent model to support 'Boot-Class-Path' properly. - manifest = Manifest() - manifest.addentry(Manifest.MANIFEST_VERSION, '1.0') - if agent.premain: - manifest.addentry('Premain-Class', agent.premain) - if agent.agent_class: - manifest.addentry('Agent-Class', agent.agent_class) - if agent.can_redefine: - manifest.addentry('Can-Redefine-Classes', 'true') - if agent.can_retransform: - manifest.addentry('Can-Retransform-Classes', 'true') - if agent.can_set_native_method_prefix: - manifest.addentry('Can-Set-Native-Method-Prefix', 'true') - jarfile.writestr(Manifest.PATH, manifest.contents()) diff --git a/src/python/twitter/pants/tasks/jar_publish.py b/src/python/twitter/pants/tasks/jar_publish.py deleted file mode 100644 index 8e6b55191..000000000 --- a/src/python/twitter/pants/tasks/jar_publish.py +++ /dev/null @@ -1,761 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ================================================================================================== - -from __future__ import print_function - -import functools -import getpass -import hashlib -import logging -import os -import pkgutil -import shutil -import sys - -from collections import defaultdict - -from twitter.common.collections import OrderedDict, OrderedSet -from twitter.common.config import Properties -from twitter.common.dirutil import safe_open, safe_rmtree -from twitter.common.log.options import LogOptions - -from twitter.pants.base.build_environment import get_buildroot, get_scm -from twitter.pants.base.address import Address -from twitter.pants.base.target import Target -from twitter.pants.base.generator import Generator, TemplateData -from twitter.pants.ivy.bootstrapper import Bootstrapper -from twitter.pants.ivy.ivy import Ivy -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.resources import Resources -from twitter.pants.tasks.scm_publish import ScmPublish, Semver -from . import Task, TaskError - - -class PushDb(object): - @staticmethod - def load(path): - """Loads a pushdb maintained in a properties file at the given path.""" - with open(path, 'r') as props: - properties = Properties.load(props) - return PushDb(properties) - - def __init__(self, props): - self._props = props - - def as_jar_with_version(self, target): - """ - Given an internal target, return a JarDependency with the last published revision filled in. - """ - jar_dep, db_get, _ = self._accessors_for_target(target) - - major = int(db_get('revision.major', '0')) - minor = int(db_get('revision.minor', '0')) - patch = int(db_get('revision.patch', '0')) - snapshot = db_get('revision.snapshot', 'false').lower() == 'true' - sha = db_get('revision.sha', None) - fingerprint = db_get('revision.fingerprint', None) - semver = Semver(major, minor, patch, snapshot=snapshot) - jar_dep.rev = semver.version() - return jar_dep, semver, sha, fingerprint - - def set_version(self, target, version, sha, fingerprint): - version = version if isinstance(version, Semver) else Semver.parse(version) - _, _, db_set = self._accessors_for_target(target) - db_set('revision.major', version.major) - db_set('revision.minor', version.minor) - db_set('revision.patch', version.patch) - db_set('revision.snapshot', str(version.snapshot).lower()) - db_set('revision.sha', sha) - db_set('revision.fingerprint', fingerprint) - - def _accessors_for_target(self, target): - jar_dep, _, exported = target.get_artifact_info() - if not exported: - raise ValueError - - def key(prefix): - return '%s.%s%%%s' % (prefix, jar_dep.org, jar_dep.name) - - def getter(prefix, default=None): - return self._props.get(key(prefix), default) - - def setter(prefix, value): - self._props[key(prefix)] = value - - return jar_dep, getter, setter - - def dump(self, path): - """Saves the pushdb as a properties file to the given path.""" - with open(path, 'w') as props: - Properties.dump(self._props, props) - - -class DependencyWriter(object): - """ - Builds up template data representing a target and applies it to a template to produce a - dependency descriptor.
- """ - - @staticmethod - def create_exclude(exclude): - return TemplateData(org=exclude.org, name=exclude.name) - - def __init__(self, get_db, template_relpath): - self.get_db = get_db - self.template_relpath = template_relpath - - def write(self, target, path, confs=None): - def as_jar(internal_target): - jar, _, _, _ = self.get_db(internal_target).as_jar_with_version(internal_target) - return jar - - # TODO(John Sirois): a dict is used here to de-dup codegen targets which have both the original - # codegen target - say java_thrift_library - and the synthetic generated target (java_library) - # Consider reworking codegen tasks to add removal of the original codegen targets when rewriting - # the graph - dependencies = OrderedDict() - internal_codegen = {} - configurations = set() - for dep in target_internal_dependencies(target): - jar = as_jar(dep) - dependencies[(jar.org, jar.name)] = self.internaldep(jar, dep) - if dep.is_codegen: - internal_codegen[jar.name] = jar.name - for jar in target.jar_dependencies: - if jar.rev: - dependencies[(jar.org, jar.name)] = self.jardep(jar) - configurations |= set(jar._configurations) - - target_jar = self.internaldep( - as_jar(target), - configurations=list(configurations)).extend(dependencies=dependencies.values()) - - template_kwargs = self.templateargs(target_jar, confs) - with safe_open(path, 'w') as output: - template = pkgutil.get_data(__name__, self.template_relpath) - Generator(template, **template_kwargs).write(output) - - def templateargs(self, target_jar, confs=None): - """ - Subclasses must return a dict for use by their template given the target jar template data - and optional specific ivy configurations. - """ - raise NotImplementedError() - - def internaldep(self, jar_dependency, dep=None, configurations=None): - """ - Subclasses must return a template data for the given internal target (provided in jar - dependency form). 
- """ - raise NotImplementedError() - - def jardep(self, jar_dependency): - """Subclasses must return a template data for the given external jar dependency.""" - raise NotImplementedError() - - -class PomWriter(DependencyWriter): - def __init__(self, get_db): - super(PomWriter, self).__init__( - get_db, - os.path.join('templates', 'jar_publish', 'pom.mustache')) - - def templateargs(self, target_jar, confs=None): - return dict(artifact=target_jar) - - def jardep(self, jar): - return TemplateData( - org=jar.org, - name=jar.name, - rev=jar.rev, - scope='compile', - excludes=[self.create_exclude(exclude) for exclude in jar.excludes if exclude.name]) - - def internaldep(self, jar_dependency, dep=None, configurations=None): - return self.jardep(jar_dependency) - - -class IvyWriter(DependencyWriter): - def __init__(self, get_db): - super(IvyWriter, self).__init__( - get_db, - os.path.join('templates', 'ivy_resolve', 'ivy.mustache')) - - def templateargs(self, target_jar, confs=None): - return dict(lib=target_jar.extend( - publications=set(confs) if confs else set(), - overrides=None)) - - def _jardep(self, jar, transitive=True, configurations='default'): - return TemplateData( - org=jar.org, - module=jar.name, - version=jar.rev, - mutable=False, - force=jar.force, - excludes=[self.create_exclude(exclude) for exclude in jar.excludes], - transitive=transitive, - artifacts=jar.artifacts, - configurations=configurations) - - def jardep(self, jar): - return self._jardep(jar, - transitive=jar.transitive, - configurations=jar._configurations) - - def internaldep(self, jar_dependency, dep=None, configurations=None): - return self._jardep(jar_dependency, configurations=configurations) - - -def coordinate(org, name, rev=None): - return '%s#%s;%s' % (org, name, rev) if rev else '%s#%s' % (org, name) - - -def jar_coordinate(jar, rev=None): - return coordinate(jar.org, jar.name, rev or jar.rev) - - -def target_internal_dependencies(target): - return filter(lambda tgt: not isinstance(tgt, Resources), target.internal_dependencies) - - -class JarPublish(ScmPublish, Task): - """Publish jars to a maven repository. - - At a high-level, pants uses `Apache Ivy `_ to - publish artifacts to Maven-style repositories. Pants performs prerequisite - tasks like compiling, creating jars, and generating ``pom.xml`` files then - invokes Ivy to actually publish the artifacts, so publishing is largely - configured in ``ivysettings.xml``. ``BUILD`` and ``pants.ini`` files - primarily provide linkage between publishable targets and the - Ivy ``resolvers`` used to publish them. - - The following target types are publishable: :ref:`bdict_java_library`, - :ref:`bdict_scala_library`, :ref:`bdict_java_thrift_library`, - :ref:`bdict_annotation_processor`. - Targets to publish and their dependencies must be publishable target - types and specify the ``provides`` argument. One exception is - :ref:`bdict_jar`\s - pants will generate a pom file that - depends on the already-published jar. - - Example usage: :: - - # By default pants will perform a dry-run. - ./pants goal clean-all publish src/java/com/twitter/mybird - - # Actually publish. - ./pants goal clean-all publish src/java/com/twitter/mybird --no-publish-dryrun - - Please see ``./pants goal publish -h`` for a detailed description of all - publishing options. - - Publishing can be configured in ``pants.ini`` as follows. - - ``jar-publish`` section: - - * ``repos`` - Required dictionary of settings for repos that may be pushed to. 
- * ``ivy_jvmargs`` - Optional list of JVM command-line args when invoking Ivy. - * ``restrict_push_branches`` - Optional list of branches to restrict publishing to. - - Example pants.ini jar-publish repos dictionary: :: - - repos = { - # repository target name is paired with this key - 'myrepo': { - # ivysettings.xml resolver to use for publishing - 'resolver': 'maven.twttr.com', - # ivy configurations to publish - 'confs': ['default', 'sources', 'docs'], - # address of a Credentials target to use when publishing - 'auth': 'address/of/credentials/BUILD:target', - # help message if unable to initialize the Credentials target. - 'help': 'Please check your credentials and try again.', - }, - } - - Additionally, the ``ivy`` section ``ivy_settings`` property specifies which - Ivy settings file to use when publishing is required. - """ - - _CONFIG_SECTION = 'jar-publish' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - # TODO(John Sirois): Support a preview mode that outputs a file with entries like: - # artifact id: - # revision: - # publish: (true|false) - # changelog: - # - # Allow re-running this goal with the file as input to support forcing an arbitrary set of - # revisions and supplying hand-edited changelogs. - - option_group.add_option(mkflag("dryrun"), mkflag("dryrun", negate=True), - dest="jar_publish_dryrun", default=True, - action="callback", callback=mkflag.set_bool, - help="[%default] Runs through a push without actually pushing " - "artifacts, editing publish dbs or otherwise writing data") - - option_group.add_option(mkflag("commit", negate=True), - dest="jar_publish_commit", default=True, - action="callback", callback=mkflag.set_bool, - help="Turns off commits of the push db for local testing.") - - local_flag = mkflag("local") - option_group.add_option(local_flag, dest="jar_publish_local", - help="Publishes jars to a maven repository on the local filesystem at " - "the specified path.") - - option_group.add_option(mkflag("local-snapshot"), mkflag("local-snapshot", negate=True), - dest="jar_publish_local_snapshot", default=True, - action="callback", callback=mkflag.set_bool, - help="[%%default] If %s is specified, publishes jars with '-SNAPSHOT' " - "revisions." % local_flag) - - option_group.add_option(mkflag("transitive"), mkflag("transitive", negate=True), - dest="jar_publish_transitive", default=True, - action="callback", callback=mkflag.set_bool, - help="[%default] Publishes the specified targets and all their " - "internal dependencies transitively.") - - option_group.add_option(mkflag("force"), mkflag("force", negate=True), - dest="jar_publish_force", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Forces pushing jars even if there have been no " - "changes since the last push.") - - flag = mkflag('override') - option_group.add_option(flag, action='append', dest='jar_publish_override', - help='''Specifies a published jar revision override in the form: - ([org]#[name]|[target spec])=[new revision] - - For example, to specify 2 overrides: - %(flag)s=com.twitter.common#quantity=0.1.2 \\ - %(flag)s=src/java/com/twitter/common/base=1.0.0 \\ - ''' % dict(flag=flag)) - - flag = mkflag("restart-at") - option_group.add_option(flag, dest="jar_publish_restart_at", - help='''Restart a failed push at the given jar. Jars can be identified by - maven coordinate [org]#[name] or target.
- - For example: - %(flag)s=com.twitter.common#quantity - - Or: - %(flag)s=src/java/com/twitter/common/base - ''' % dict(flag=flag)) - - def __init__(self, context, scm=None): - Task.__init__(self, context) - ScmPublish.__init__(self, scm or get_scm(), - self.context.config.getlist( - JarPublish._CONFIG_SECTION, 'restrict_push_branches')) - self.outdir = os.path.join(context.config.getdefault('pants_workdir'), 'publish') - self.cachedir = os.path.join(self.outdir, 'cache') - - self._jvmargs = context.config.getlist(JarPublish._CONFIG_SECTION, 'ivy_jvmargs', default=[]) - - if context.options.jar_publish_local: - local_repo = dict( - resolver='publish_local', - path=os.path.abspath(os.path.expanduser(context.options.jar_publish_local)), - confs=['*'], - auth=None - ) - self.repos = defaultdict(lambda: local_repo) - self.commit = False - self.snapshot = context.options.jar_publish_local_snapshot - else: - self.repos = context.config.getdict(JarPublish._CONFIG_SECTION, 'repos') - if not self.repos: - raise TaskError("This repo is not yet set for publishing to the world! Please re-run with --publish-local") - for repo, data in self.repos.items(): - auth = data.get('auth') - if auth: - credentials = context.resolve(auth).next() - user = credentials.username(data['resolver']) - password = credentials.password(data['resolver']) - self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user)) - self.repos[repo]['username'] = user - self.repos[repo]['password'] = password - self.commit = context.options.jar_publish_commit - self.snapshot = False - - self.ivycp = context.config.getlist('ivy', 'classpath') - self.ivysettings = context.config.get('jar-publish', 'ivy_settings') - - self.dryrun = context.options.jar_publish_dryrun - self.transitive = context.options.jar_publish_transitive - self.force = context.options.jar_publish_force - - def parse_jarcoordinate(coordinate): - components = coordinate.split('#', 1) - if len(components) == 2: - org, name = components - return org, name - else: - try: - address = Address.parse(get_buildroot(), coordinate) - try: - target = Target.get(address) - if not target: - siblings = Target.get_all_addresses(address.buildfile) - prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these' - raise TaskError('%s => %s?:\n %s' % (address, prompt, - '\n '.join(str(a) for a in siblings))) - if not target.is_exported: - raise TaskError('%s is not an exported target' % coordinate) - return target.provides.org, target.provides.name - except (ImportError, SyntaxError, TypeError): - raise TaskError('Failed to parse %s' % address.buildfile.relpath) - except IOError: - raise TaskError('No BUILD file could be found at %s' % coordinate) - - self.overrides = {} - if context.options.jar_publish_override: - def parse_override(override): - try: - coordinate, rev = override.split('=', 1) - try: - rev = Semver.parse(rev) - except ValueError as e: - raise TaskError('Invalid version %s: %s' % (rev, e)) - return parse_jarcoordinate(coordinate), rev - except ValueError: - raise TaskError('Invalid override: %s' % override) - - self.overrides.update(parse_override(o) for o in context.options.jar_publish_override) - - self.restart_at = None - if context.options.jar_publish_restart_at: - self.restart_at = parse_jarcoordinate(context.options.jar_publish_restart_at) - - context.products.require('jars') - context.products.require('source_jars') - - def execute(self, targets): - self.check_clean_master(commit=(not self.dryrun and self.commit)) - - exported_targets = 
self.exported_targets() - self.check_targets(exported_targets) - - pushdbs = {} - - def get_db(tgt): - # TODO(tdesai) Handle resource type in get_db. - if tgt.provides is None: - raise TaskError('trying to publish target %r which does not provide an artifact' % tgt) - dbfile = tgt.provides.repo.push_db - result = pushdbs.get(dbfile) - if not result: - db = PushDb.load(dbfile) - repo = self.repos[tgt.provides.repo.name] - result = (db, dbfile, repo) - pushdbs[dbfile] = result - return result - - def get_pushdb(tgt): - return get_db(tgt)[0] - - def fingerprint_internal(tgt): - if not tgt.is_internal: - raise ValueError('Expected an internal target for fingerprinting, got %s' % tgt) - pushdb, _, _ = get_db(tgt) - _, _, _, fingerprint = pushdb.as_jar_with_version(tgt) - return fingerprint or '0.0.0' - - def artifact_path(jar, version, name=None, suffix='', extension='jar', artifact_ext=''): - return os.path.join(self.outdir, jar.org, jar.name + artifact_ext, - '%s%s-%s%s.%s' % ((name or jar.name), - artifact_ext if name != 'ivy' else '', - version, - suffix, - extension)) - - def stage_artifact(tgt, jar, version, changelog, confs=None, artifact_ext=''): - def path(name=None, suffix='', extension='jar'): - return artifact_path(jar, version, name=name, suffix=suffix, extension=extension, - artifact_ext=artifact_ext) - - with safe_open(path(suffix='-CHANGELOG', extension='txt'), 'w') as changelog_file: - changelog_file.write(changelog) - ivyxml = path(name='ivy', extension='xml') - - IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs) - PomWriter(get_pushdb).write(tgt, path(extension='pom')) - - return ivyxml - - def copy_artifact(tgt, version, typename, suffix='', artifact_ext=''): - genmap = self.context.products.get(typename) - for basedir, jars in genmap.get(tgt).items(): - for artifact in jars: - path = artifact_path(jar, version, suffix=suffix, artifact_ext=artifact_ext) - shutil.copy(os.path.join(basedir, artifact), path) - - def stage_artifacts(tgt, jar, version, changelog, confs=None): - ivyxml_path = stage_artifact(tgt, jar, version, changelog, confs) - copy_artifact(tgt, version, typename='jars') - copy_artifact(tgt, version, typename='source_jars', suffix='-sources') - - jarmap = self.context.products.get('javadoc_jars') - if not jarmap.empty() and (tgt.is_java or tgt.is_scala): - copy_artifact(tgt, version, typename='javadoc_jars', suffix='-javadoc') - - return ivyxml_path - - if self.overrides: - print('Publishing with revision overrides:\n %s' % '\n '.join( - '%s=%s' % (coordinate(org, name), rev) for (org, name), rev in self.overrides.items() - )) - - head_sha = self.scm.commit_id - - safe_rmtree(self.outdir) - published = [] - skip = (self.restart_at is not None) - for target in exported_targets: - pushdb, dbfile, repo = get_db(target) - jar, semver, sha, fingerprint = pushdb.as_jar_with_version(target) - - published.append(jar) - - if skip and (jar.org, jar.name) == self.restart_at: - skip = False - - newver = self.overrides.get((jar.org, jar.name)) or semver.bump() - if self.snapshot: - newver = newver.make_snapshot() - - if newver <= semver: - raise TaskError('Requested version %s must be greater than the current version %s' % ( - newver.version(), semver.version() - )) - - newfingerprint = self.fingerprint(target, fingerprint_internal) - no_changes = newfingerprint == fingerprint - - if no_changes: - changelog = 'No changes for %s - forced push.\n' % jar_coordinate(jar, semver.version()) - else: - changelog = self.changelog(target, sha) or 'Direct dependencies changed.\n' - 
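# For illustration: the push/no-push decision above reduces to comparing a content
# fingerprint of a target's sources and jar coordinates against the fingerprint
# recorded in the push db (see fingerprint() further below). A minimal,
# self-contained sketch of that hashing idea; `source_paths` is a hypothetical
# input, not a pants API:

import hashlib

def content_fingerprint(source_paths):
  # Hash names and contents in a stable, sorted order so the digest is
  # insensitive to filesystem iteration order.
  sha = hashlib.sha1()
  for path in sorted(source_paths):
    sha.update(path.encode('utf-8'))
    with open(path, 'rb') as fd:
      sha.update(fd.read())
  return sha.hexdigest()

# When the digest matches the fingerprint stored in the push db there is nothing
# new to publish, and the target is skipped unless the force flag is set.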
- if no_changes and not self.force: - print('No changes for %s' % jar_coordinate(jar, semver.version())) - stage_artifacts(target, jar, (newver if self.force else semver).version(), changelog) - elif skip: - print('Skipping %s to resume at %s' % ( - jar_coordinate(jar, (newver if self.force else semver).version()), - coordinate(self.restart_at[0], self.restart_at[1]) - )) - stage_artifacts(target, jar, semver.version(), changelog) - else: - if not self.dryrun: - # Confirm push looks good - if no_changes: - print(changelog) - else: - print('\nChanges for %s since %s @ %s:\n\n%s' % ( - coordinate(jar.org, jar.name), semver.version(), sha, changelog - )) - if os.isatty(sys.stdin.fileno()): - push = raw_input('Publish %s with revision %s ? [y|N] ' % ( - coordinate(jar.org, jar.name), newver.version() - )) - print('\n') - if push.strip().lower() != 'y': - raise TaskError('User aborted push') - - pushdb.set_version(target, newver, head_sha, newfingerprint) - - ivyxml = stage_artifacts(target, jar, newver.version(), changelog, confs=repo['confs']) - - if self.dryrun: - print('Skipping publish of %s in test mode.' % jar_coordinate(jar, newver.version())) - else: - resolver = repo['resolver'] - path = repo.get('path') - - # Get authentication for the publish repo if needed - jvm_args = self._jvmargs - if repo.get('auth'): - user = repo.get('username') - password = repo.get('password') - if user and password: - jvm_args.append('-Dlogin=%s' % user) - jvm_args.append('-Dpassword=%s' % password) - else: - raise TaskError('Unable to publish to %s. %s' % - (repo['resolver'], repo.get('help', ''))) - - # Do the publish - def publish(ivyxml_path): - ivysettings = self.generate_ivysettings(published, publish_local=path) - args = [ - '-settings', ivysettings, - '-ivy', ivyxml_path, - '-deliverto', '%s/[organisation]/[module]/ivy-[revision].xml' % self.outdir, - '-publish', resolver, - '-publishpattern', '%s/[organisation]/[module]/' - '[artifact]-[revision](-[classifier]).[ext]' % self.outdir, - '-revision', newver.version(), - '-m2compatible', - ] - - if LogOptions.stderr_log_level() == logging.DEBUG: - args.append('-verbose') - - if self.snapshot: - args.append('-overwrite') - - try: - ivy = Bootstrapper.default_ivy() - ivy.execute(jvm_options=jvm_args, args=args, - workunit_factory=self.context.new_workunit, workunit_name='jar-publish') - except (Bootstrapper.Error, Ivy.Error) as e: - raise TaskError('Failed to push %s! 
%s' % (jar_coordinate(jar, newver.version()), e)) - - publish(ivyxml) - - if self.commit: - org = jar.org - name = jar.name - rev = newver.version() - args = dict( - org=org, - name=name, - rev=rev, - coordinate=coordinate(org, name, rev), - user=getpass.getuser(), - cause='with forced revision' if (org, name) in self.overrides else '(autoinc)' - ) - - pushdb.dump(dbfile) - self.commit_push(coordinate(org, name, rev)) - self.scm.refresh() - self.scm.tag('%(org)s-%(name)s-%(rev)s' % args, - message='Publish of %(coordinate)s initiated by %(user)s %(cause)s' % args) - - def check_targets(self, targets): - invalid = defaultdict(lambda: defaultdict(set)) - derived_by_target = dict() - - def collect(publish_target, walked_target): - derived_by_target[walked_target.derived_from] = walked_target - if not walked_target.has_sources() or not walked_target.sources: - invalid[publish_target][walked_target].add('No sources.') - if not walked_target.is_exported: - invalid[publish_target][walked_target].add('Does not provide an artifact.') - - for target in targets: - target.walk(functools.partial(collect, target), predicate=lambda t: t.is_concrete) - - # When walking the graph of a publishable target, we may encounter families of sibling targets - # that form a derivation chain. As long as one of these siblings is publishable, we can - # proceed and publish a valid graph. - # TODO(John Sirois): This does not actually handle derivation chains longer than 2 with the - # exported item in the most derived position - fix this. - for publish_target, invalid_targets in list(invalid.items()): - for invalid_target, reasons in list(invalid_targets.items()): - derived_target = derived_by_target[invalid_target] - if derived_target not in invalid_targets: - invalid_targets.pop(invalid_target) - if not invalid_targets: - invalid.pop(publish_target) - - if invalid: - msg = list() - - def first_address(pair): - first, _ = pair - return str(first.address) - - for publish_target, invalid_targets in sorted(invalid.items(), key=first_address): - msg.append('\n Cannot publish %s due to:' % publish_target.address) - for invalid_target, reasons in sorted(invalid_targets.items(), key=first_address): - for reason in sorted(reasons): - msg.append('\n %s - %s' % (invalid_target.address, reason)) - - raise TaskError('The following errors must be resolved to publish.%s' % ''.join(msg)) - - def exported_targets(self): - candidates = set() - if self.transitive: - candidates.update(self.context.targets()) - else: - candidates.update(self.context.target_roots) - - def get_synthetic(lang, target): - mappings = self.context.products.get(lang).get(target) - if mappings: - for key, generated in mappings.items(): - for synthetic in generated: - yield synthetic - - # Handle the case where a code gen target is in the listed roots and thus the publishable - # target is a synthetic twin generated by a code gen task upstream.
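# For illustration: the traversal described in the comment above. A code gen root
# (e.g. a thrift library) does not itself own the generated jar; its synthetic
# "twin", created by the upstream code gen task, does. A generic sketch of pulling
# those twins out of a product mapping of the shape used by get_synthetic();
# `mappings` is illustrative, not a pants API:

def synthetic_twins(mappings):
  # mappings: {key: [generated_targets, ...]} or None when nothing was generated.
  for generated in (mappings or {}).values():
    for synthetic in generated:
      yield synthetic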
- for candidate in self.context.target_roots: - candidates.update(get_synthetic('java', candidate)) - candidates.update(get_synthetic('scala', candidate)) - - def exportable(tgt): - return tgt in candidates and tgt.is_exported - - return OrderedSet(filter(exportable, - reversed(InternalTarget.sort_targets(filter(exportable, candidates))))) - - def fingerprint(self, target, fingerprint_internal): - sha = hashlib.sha1() - - for source in sorted(target.sources): - path = os.path.join(target.target_base, source) - with open(path) as fd: - sha.update(source) - sha.update(fd.read()) - - # TODO(John Sirois): handle resources and circular dep scala_library java_sources - - for jarsig in sorted([jar_coordinate(j) for j in target.jar_dependencies if j.rev]): - sha.update(jarsig) - - # TODO(tdesai) Handle resource type in get_db. - internal_dependencies = sorted(target_internal_dependencies(target), key=lambda t: t.id) - for internal_target in internal_dependencies: - fingerprint = fingerprint_internal(internal_target) - sha.update(fingerprint) - - return sha.hexdigest() - - def changelog(self, target, sha): - return self.scm.changelog(from_commit=sha, - files=[os.path.join(target.target_base, source) - for source in target.sources]) - - def generate_ivysettings(self, publishedjars, publish_local=None): - template_relpath = os.path.join('templates', 'jar_publish', 'ivysettings.mustache') - template = pkgutil.get_data(__name__, template_relpath) - with safe_open(os.path.join(self.outdir, 'ivysettings.xml'), 'w') as wrapper: - generator = Generator(template, - ivysettings=self.ivysettings, - dir=self.outdir, - cachedir=self.cachedir, - published=[TemplateData(org=jar.org, name=jar.name) - for jar in publishedjars], - publish_local=publish_local) - generator.write(wrapper) - return wrapper.name diff --git a/src/python/twitter/pants/tasks/jar_publish/ivysettings.mustache b/src/python/twitter/pants/tasks/jar_publish/ivysettings.mustache deleted file mode 100644 index 58543573e..000000000 --- a/src/python/twitter/pants/tasks/jar_publish/ivysettings.mustache +++ /dev/null @@ -1,53 +0,0 @@ -[53 lines: Ivy settings XML template; the markup was not preserved in this extract. The surviving mustache bindings show a {{#publish_local}} local-filesystem resolver section and a {{#published}} list of published modules.] diff --git a/src/python/twitter/pants/tasks/jar_publish/pom.mustache b/src/python/twitter/pants/tasks/jar_publish/pom.mustache deleted file mode 100644 index 56353182a..000000000 --- a/src/python/twitter/pants/tasks/jar_publish/pom.mustache +++ /dev/null @@ -1,35 +0,0 @@ -[35 lines: Maven pom.xml template; the markup was not preserved in this extract. Surviving bindings: modelVersion 4.0.0, {{artifact.org}}, {{artifact.name}}, jar packaging, {{artifact.rev}}, and a dependencies section emitting {{org}}, {{name}}, {{rev}}, {{scope}} and optional excludes per dependency.] diff --git a/src/python/twitter/pants/tasks/javadoc/__init__.py b/src/python/twitter/pants/tasks/javadoc/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/tasks/javadoc/assets/__init__.py b/src/python/twitter/pants/tasks/javadoc/assets/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/python/twitter/pants/tasks/javadoc/assets/favicon.ico b/src/python/twitter/pants/tasks/javadoc/assets/favicon.ico deleted file mode 100644 index 8a66efe93..000000000 Binary files a/src/python/twitter/pants/tasks/javadoc/assets/favicon.ico and /dev/null differ diff --git
a/src/python/twitter/pants/tasks/javadoc/assets/index.html b/src/python/twitter/pants/tasks/javadoc/assets/index.html deleted file mode 100644 index 5c5d94209..000000000 --- a/src/python/twitter/pants/tasks/javadoc/assets/index.html +++ /dev/null @@ -1,256 +0,0 @@ -[256 lines: HTML page titled "API Javadoc"; the markup was not preserved in this extract.]
diff --git a/src/python/twitter/pants/tasks/javadoc/assets/javadoc-single-package.html b/src/python/twitter/pants/tasks/javadoc/assets/javadoc-single-package.html deleted file mode 100644 index 6755df50f..000000000 --- a/src/python/twitter/pants/tasks/javadoc/assets/javadoc-single-package.html +++ /dev/null @@ -1,35 +0,0 @@ -[35 lines: HTML page; markup not preserved in this extract.] diff --git a/src/python/twitter/pants/tasks/javadoc/assets/javadoc.html b/src/python/twitter/pants/tasks/javadoc/assets/javadoc.html deleted file mode 100644 index 818fafc37..000000000 --- a/src/python/twitter/pants/tasks/javadoc/assets/javadoc.html +++ /dev/null @@ -1,40 +0,0 @@ -[40 lines: HTML page; markup not preserved in this extract.] diff --git a/src/python/twitter/pants/tasks/javadoc_gen.py b/src/python/twitter/pants/tasks/javadoc_gen.py deleted file mode 100644 index fde8ba3f5..000000000 --- a/src/python/twitter/pants/tasks/javadoc_gen.py +++ /dev/null @@ -1,72 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.tasks.jvmdoc_gen import Jvmdoc, JvmdocGen - - -javadoc = Jvmdoc(tool_name='javadoc', product_type='javadoc') - - -def is_java(target): - return target.has_sources('.java') - - -class JavadocGen(JvmdocGen): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - cls.generate_setup_parser(option_group, args, mkflag, javadoc) - - def __init__(self, context, output_dir=None, confs=None, active=True): - super(JavadocGen, self).__init__(context, javadoc, output_dir, confs, active) - - def execute(self, targets): - self.generate_execute(targets, is_java, create_javadoc_command) - - -def create_javadoc_command(classpath, gendir, *targets): - sources = [] - for target in targets: - sources.extend(target.sources_relative_to_buildroot()) - - if not sources: - return None - - # TODO(John Sirois): try com.sun.tools.javadoc.Main via ng - command = [ - 'javadoc', - '-quiet', - '-encoding', 'UTF-8', - '-notimestamp', - '-use', - '-classpath', ':'.join(classpath), - '-d', gendir, - ] - - # Always provide external linking for java API - offlinelinks = set(['http://download.oracle.com/javase/6/docs/api/']) - - def link(target): - for jar in target.jar_dependencies: - if jar.apidocs: - offlinelinks.add(jar.apidocs) - for target in targets: - target.walk(link, lambda t: t.is_jvm) - - for link in offlinelinks: - command.extend(['-linkoffline', link, link]) - - command.extend(sources) - return command diff --git a/src/python/twitter/pants/tasks/junit_run.py b/src/python/twitter/pants/tasks/junit_run.py deleted file mode 100644 index 7cbb4f2cc..000000000 --- a/src/python/twitter/pants/tasks/junit_run.py +++ /dev/null @@ -1,436 +0,0 @@ -#
================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import re -import sys - -from twitter.common.dirutil import safe_mkdir, safe_open -from twitter.pants import binary_util -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.java.util import execute_java -from twitter.pants.targets.java_tests import JavaTests as junit_tests - -from .jvm_task import JvmTask -from . import TaskError - - -class JUnitRun(JvmTask): - _MAIN = 'com.twitter.common.junit.runner.ConsoleRunner' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag('skip'), mkflag('skip', negate=True), dest = 'junit_run_skip', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Skip running tests') - - option_group.add_option(mkflag('debug'), mkflag('debug', negate=True), dest = 'junit_run_debug', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Run junit tests with a debugger') - - option_group.add_option(mkflag('fail-fast'), mkflag('fail-fast', negate=True), - dest = 'junit_run_fail_fast', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Fail fast on the first test failure in a suite') - - option_group.add_option(mkflag('batch-size'), type = 'int', default=sys.maxint, - dest = 'junit_run_batch_size', - help = '[ALL] Runs at most this many tests in a single test process.') - - # TODO: Rename flag to jvm-options. - option_group.add_option(mkflag('jvmargs'), dest = 'junit_run_jvmargs', action='append', - help = 'Runs junit tests in a jvm with these extra jvm args.') - - option_group.add_option(mkflag('test'), dest = 'junit_run_tests', action='append', - help = '[%default] Force running of just these tests. 
Tests can be ' - 'specified using any of: [classname], [classname]#[methodname], ' - '[filename] or [filename]#[methodname]') - - outdir = mkflag('outdir') - option_group.add_option(outdir, dest='junit_run_outdir', - help='Emit output in to this directory.') - - xmlreport = mkflag('xmlreport') - option_group.add_option(xmlreport, mkflag('xmlreport', negate=True), - dest = 'junit_run_xmlreport', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Causes an xml report to be output for each test ' - 'class that is run.') - - option_group.add_option(mkflag('per-test-timer'), mkflag('per-test-timer', negate=True), - dest = 'junit_run_per_test_timer', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Shows progress and timer for each test ' - 'class that is run.') - - option_group.add_option(mkflag('default-parallel'), mkflag('default-parallel', negate=True), - dest = 'junit_run_default_parallel', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Whether to run classes without @TestParallel or ' - '@TestSerial annotations in parallel.') - - option_group.add_option(mkflag('parallel-threads'), type = 'int', default=0, - dest = 'junit_run_parallel_threads', - help = 'Number of threads to run tests in parallel. 0 for autoset.') - - option_group.add_option(mkflag("test-shard"), dest = "junit_run_test_shard", - help = "Subset of tests to run, in the form M/N, 0 <= M < N." - "For example, 1/3 means run tests number 2, 5, 8, 11, ...") - - option_group.add_option(mkflag('coverage'), mkflag('coverage', negate=True), - dest = 'junit_run_coverage', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%default] Collects code coverage data') - - coverage_patterns = mkflag('coverage-patterns') - option_group.add_option(coverage_patterns, dest='junit_run_coverage_patterns', - action='append', - help='By default all non-test code depended on by the selected tests ' - 'is measured for coverage during the test run. By specifying ' - 'coverage patterns you can select which classes and packages ' - 'should be counted. Values should be class name prefixes in ' - 'dotted form with ? and * wildcard support. If preceded with a - ' - 'the pattern is excluded. ' - 'For example, to include all code in com.twitter.raven except ' - 'claws and the eye you would use: ' - '%(flag)s=com.twitter.raven.* ' - '%(flag)s=-com.twitter.raven.claw ' - '%(flag)s=-com.twitter.raven.Eye' - 'This option can be specified multiple times. ' % dict( - flag=coverage_patterns - )) - - option_group.add_option(mkflag('coverage-console'), mkflag('coverage-console', negate=True), - dest = 'junit_run_coverage_console', - action='callback', callback=mkflag.set_bool, default=True, - help = '[%default] Outputs a simple coverage report to the console.') - - option_group.add_option(mkflag('coverage-xml'), mkflag('coverage-xml', negate=True), - dest = 'junit_run_coverage_xml', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%%default] Produces an xml coverage report in %s.' % outdir) - - coverage_html_flag = mkflag('coverage-html') - option_group.add_option(coverage_html_flag, mkflag('coverage-html', negate=True), - dest = 'junit_run_coverage_html', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%%default] Produces an html coverage report in %s.' 
% outdir) - - option_group.add_option(mkflag('coverage-html-open'), mkflag('coverage-html-open', negate=True), - dest = 'junit_run_coverage_html_open', - action='callback', callback=mkflag.set_bool, default=False, - help = '[%%default] Tries to open the generated html coverage report, ' - 'implies %s.' % coverage_html_flag) - - option_group.add_option(mkflag('suppress-output'), mkflag('suppress-output', negate=True), - dest = 'junit_run_suppress_output', - action='callback', callback=mkflag.set_bool, default=True, - help = '[%%default] Redirects test output to files in %s. ' - 'Implied by %s' % (outdir, xmlreport)) - - option_group.add_option(mkflag("arg"), dest="junit_run_arg", - action="append", - help = "An arbitrary argument to pass directly to the test runner. " - "This option can be specified multiple times.") - - def __init__(self, context): - super(JUnitRun, self).__init__(context) - - context.products.require_data('exclusives_groups') - - self.confs = context.config.getlist('junit-run', 'confs', default=['default']) - - self._junit_bootstrap_key = 'junit' - junit_bootstrap_tools = context.config.getlist('junit-run', 'junit-bootstrap-tools', - default=[':junit']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._junit_bootstrap_key, junit_bootstrap_tools) - - self._emma_bootstrap_key = 'emma' - emma_bootstrap_tools = context.config.getlist('junit-run', 'emma-bootstrap-tools', - default=[':emma']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._emma_bootstrap_key, emma_bootstrap_tools) - - self.jvm_args = context.config.getlist('junit-run', 'jvm_args', default=[]) - if context.options.junit_run_jvmargs: - self.jvm_args.extend(context.options.junit_run_jvmargs) - if context.options.junit_run_debug: - self.jvm_args.extend(context.config.getlist('jvm', 'debug_args')) - - # List of FQCN, FQCN#method, sourcefile or sourcefile#method. 
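# For illustration, the spec formats the comment above refers to (all values
# hypothetical):

test_specs = [
  'com.twitter.foo.BarTest',                          # a class name (FQCN)
  'com.twitter.foo.BarTest#testBaz',                  # FQCN plus one method
  'tests/java/com/twitter/foo/BarTest.java',          # a source file
  'tests/java/com/twitter/foo/BarTest.java#testBaz',  # source file plus method
]

# interpret_test_spec() further below resolves the file forms to the classes
# compiled from that file.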
- self.tests_to_run = context.options.junit_run_tests - self.context.products.require_data('classes_by_target') - self.context.products.require_data('classes_by_source') - - self.outdir = ( - context.options.junit_run_outdir - or context.config.get('junit-run', 'workdir') - ) - - self.batch_size = context.options.junit_run_batch_size - self.fail_fast = context.options.junit_run_fail_fast - - self.coverage = context.options.junit_run_coverage - self.coverage_filters = context.options.junit_run_coverage_patterns or [] - self.coverage_dir = os.path.join(self.outdir, 'coverage') - self.coverage_instrument_dir = os.path.join(self.coverage_dir, 'classes') - self.coverage_metadata_file = os.path.join(self.coverage_dir, 'coverage.em') - self.coverage_file = os.path.join(self.coverage_dir, 'coverage.ec') - - self.coverage_report_console = context.options.junit_run_coverage_console - self.coverage_console_file = os.path.join(self.coverage_dir, 'coverage.txt') - - self.coverage_report_xml = context.options.junit_run_coverage_xml - self.coverage_xml_file = os.path.join(self.coverage_dir, 'coverage.xml') - - self.coverage_report_html_open = context.options.junit_run_coverage_html_open - self.coverage_report_html = ( - self.coverage_report_html_open - or context.options.junit_run_coverage_html - ) - self.coverage = self.coverage or self.coverage_report_html_open - self.coverage_html_file = os.path.join(self.coverage_dir, 'html', 'index.html') - - self.opts = [] - if context.options.junit_run_xmlreport or context.options.junit_run_suppress_output: - if self.fail_fast: - self.opts.append('-fail-fast') - if context.options.junit_run_xmlreport: - self.opts.append('-xmlreport') - self.opts.append('-suppress-output') - self.opts.append('-outdir') - self.opts.append(self.outdir) - - if context.options.junit_run_per_test_timer: - self.opts.append('-per-test-timer') - if context.options.junit_run_default_parallel: - self.opts.append('-default-parallel') - self.opts.append('-parallel-threads') - self.opts.append(str(context.options.junit_run_parallel_threads)) - - if context.options.junit_run_test_shard: - self.opts.append('-test-shard') - self.opts.append(context.options.junit_run_test_shard) - - if context.options.junit_run_arg: - self.opts.extend(context.options.junit_run_arg) - - def _partition(self, tests): - stride = min(self.batch_size, len(tests)) - for i in xrange(0, len(tests), stride): - yield tests[i:i+stride] - - def execute(self, targets): - if not self.context.options.junit_run_skip: - tests = list(self.get_tests_to_run() if self.tests_to_run - else self.calculate_tests_from_targets(targets)) - if tests: - bootstrapped_cp = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._junit_bootstrap_key) - junit_classpath = self.classpath( - bootstrapped_cp, - confs=self.confs, - exclusives_classpath=self.get_base_classpath_for_target(targets[0])) - - def run_tests(classpath, main, jvm_args=None): - # TODO(John Sirois): Integrated batching with the test runner. As things stand we get - # results summaries for example for each batch but no overall summary. 
- # http://jira.local.twitter.com/browse/AWESOME-1114 - result = 0 - for batch in self._partition(tests): - with binary_util.safe_args(batch) as batch_tests: - result += abs(execute_java( - classpath=classpath, - main=main, - jvm_options=(jvm_args or []) + self.jvm_args, - args=self.opts + batch_tests, - workunit_factory=self.context.new_workunit, - workunit_name='run', - workunit_labels=[WorkUnit.TEST] - )) - if result != 0 and self.fail_fast: - break - if result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (main, result)) - - if self.coverage: - emma_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._emma_bootstrap_key) - - def instrument_code(): - safe_mkdir(self.coverage_instrument_dir, clean=True) - with binary_util.safe_args(self.get_coverage_patterns(targets)) as patterns: - args = [ - 'instr', - '-out', self.coverage_metadata_file, - '-d', self.coverage_instrument_dir, - '-cp', os.pathsep.join(junit_classpath), - '-exit' - ] - for pattern in patterns: - args.extend(['-filter', pattern]) - main = 'emma' - result = execute_java(classpath=emma_classpath, main=main, args=args, - workunit_factory=self.context.new_workunit, - workunit_name='emma-instrument') - if result != 0: - raise TaskError("java %s ... exited non-zero (%i)" - " 'failed to instrument'" % (main, result)) - - def generate_reports(): - args = [ - 'report', - '-in', self.coverage_metadata_file, - '-in', self.coverage_file, - '-exit' - ] - source_bases = set() - def collect_source_base(target): - if self.is_coverage_target(target): - source_bases.add(target.target_base) - for target in self.test_target_candidates(targets): - target.walk(collect_source_base) - for source_base in source_bases: - args.extend(['-sp', source_base]) - - sorting = ['-Dreport.sort', '+name,+class,+method,+block'] - if self.coverage_report_console: - args.extend(['-r', 'txt', - '-Dreport.txt.out.file=%s' % self.coverage_console_file] + sorting) - if self.coverage_report_xml: - args.extend(['-r', 'xml','-Dreport.xml.out.file=%s' % self.coverage_xml_file]) - if self.coverage_report_html: - args.extend(['-r', 'html', - '-Dreport.html.out.file=%s' % self.coverage_html_file, - '-Dreport.out.encoding=UTF-8'] + sorting) - - main = 'emma' - result = execute_java(classpath=emma_classpath, main=main, args=args, - workunit_factory=self.context.new_workunit, - workunit_name='emma-report') - if result != 0: - raise TaskError("java %s ... exited non-zero (%i)" - " 'failed to generate code coverage reports'" % (main, result)) - - if self.coverage_report_console: - with safe_open(self.coverage_console_file) as console_report: - sys.stdout.write(console_report.read()) - if self.coverage_report_html_open: - binary_util.ui_open(self.coverage_html_file) - - instrument_code() - try: - # Coverage runs over instrumented classes require the instrumented classes come 1st in - # the classpath followed by the normal classpath. The instrumentation also adds a - # dependency on emma libs that must be satisfied on the classpath. 
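# For illustration: the classpath ordering constraint just described, isolated
# into a helper (argument names are illustrative):

def emma_run_classpath(instrument_dir, junit_classpath, emma_classpath):
  # Instrumented classes must come first so they shadow the uninstrumented
  # originals, and emma's runtime must be present to record coverage data.
  return [instrument_dir] + junit_classpath + emma_classpath

# The run_tests() call below passes exactly this ordering.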
- run_tests([self.coverage_instrument_dir] + junit_classpath + emma_classpath, - JUnitRun._MAIN, - jvm_args=['-Demma.coverage.out.file=%s' % self.coverage_file]) - finally: - generate_reports() - else: - self.context.lock.release() - run_tests(junit_classpath, JUnitRun._MAIN) - - def is_coverage_target(self, tgt): - return (tgt.is_java or tgt.is_scala) and not tgt.is_test and not tgt.is_codegen - - def get_coverage_patterns(self, targets): - if self.coverage_filters: - return self.coverage_filters - else: - classes_under_test = set() - classes_by_source = self.context.products.get_data('classes_by_source') - def add_sources_under_test(tgt): - if self.is_coverage_target(tgt): - for source in tgt.sources_relative_to_buildroot(): - source_products = classes_by_source.get(source) - if source_products: - for _, classes in source_products.rel_paths(): - classes_under_test.update(JUnitRun.classfile_to_classname(cls) for cls in classes) - - for target in targets: - target.walk(add_sources_under_test) - return classes_under_test - - def get_tests_to_run(self): - for test_spec in self.tests_to_run: - for c in self.interpret_test_spec(test_spec): - yield c - - def test_target_candidates(self, targets): - for target in targets: - if isinstance(target, junit_tests): - yield target - - def calculate_tests_from_targets(self, targets): - targets_to_classes = self.context.products.get_data('classes_by_target') - for target in self.test_target_candidates(targets): - target_products = targets_to_classes.get(target) - if target_products: - for _, classes in target_products.rel_paths(): - for cls in classes: - yield JUnitRun.classfile_to_classname(cls) - - def classnames_from_source_file(self, srcfile): - relsrc = os.path.relpath(srcfile, get_buildroot()) if os.path.isabs(srcfile) else srcfile - source_products = self.context.products.get_data('classes_by_source').get(relsrc) - if not source_products: - # It's valid - if questionable - to have a source file with no classes when, for - # example, the source file has all its code commented out. - self.context.log.warn('Source file %s generated no classes' % srcfile) - else: - for _, classes in source_products.rel_paths(): - for cls in classes: - yield JUnitRun.classfile_to_classname(cls) - - @staticmethod - def classfile_to_classname(cls): - clsname, _ = os.path.splitext(cls.replace('/', '.')) - return clsname - - def interpret_test_spec(self, test_spec): - components = test_spec.split('#', 2) - classname_or_srcfile = components[0] - methodname = '#' + components[1] if len(components) == 2 else '' - - if os.path.exists(classname_or_srcfile): # It's a source file. - srcfile = classname_or_srcfile # Alias for clarity. - for cls in self.classnames_from_source_file(srcfile): - # Tack the methodname onto all classes in the source file, as we - # can't know which method the user intended. - yield cls + methodname - else: # It's a classname. 
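# For illustration: how interpret_test_spec() above splits a spec (a minimal
# sketch of the same logic, with a hypothetical value):

spec = 'com.twitter.foo.BarTest#testBaz'
classname_or_srcfile, _, method = spec.partition('#')
methodname = '#' + method if method else ''
assert (classname_or_srcfile, methodname) == ('com.twitter.foo.BarTest', '#testBaz')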
- classname = classname_or_srcfile - yield classname + methodname - - -PACKAGE_PARSER = re.compile(r'^\s*package\s+([\w.]+)\s*;?\s*') - - -def calculate_basedir(filepath): - with open(filepath, 'r') as source: - for line in source: - match = PACKAGE_PARSER.match(line) - if match: - package = match.group(1) - packagedir = package.replace('.', '/') - dirname = os.path.dirname(filepath) - if not dirname.endswith(packagedir): - raise TaskError('File %s declares a mismatching package %s' % (filepath, package)) - return dirname[:-len(packagedir)] - - raise TaskError('Could not calculate a base dir for: %s' % filepath) diff --git a/src/python/twitter/pants/tasks/jvm_binary_task.py b/src/python/twitter/pants/tasks/jvm_binary_task.py deleted file mode 100644 index 0a20d2242..000000000 --- a/src/python/twitter/pants/tasks/jvm_binary_task.py +++ /dev/null @@ -1,107 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'John Sirois' - -import os - -from twitter.common.collections.ordereddict import OrderedDict -from twitter.common.collections.orderedset import OrderedSet - -from twitter.pants.targets.jvm_binary import JvmBinary -from twitter.pants.tasks import Task - - -class JvmBinaryTask(Task): - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("outdir"), dest="jvm_binary_create_outdir", - help="Create bundles and archives in this directory.") - - option_group.add_option(mkflag("deployjar"), mkflag("deployjar", negate=True), - dest="jvm_binary_create_deployjar", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Create a monolithic deploy jar containing this " - "binary's classfiles as well as all classfiles it depends on " - "transitively.") - - def __init__(self, context): - Task.__init__(self, context) - - def is_binary(self, target): - return isinstance(target, JvmBinary) - - def require_jar_dependencies(self, predicate=None): - self.context.products.require('jar_dependencies', predicate=predicate or self.is_binary) - - def list_jar_dependencies(self, binary, confs=None): - jardepmap = self.context.products.get('jar_dependencies') or {} - - if confs: - return self._mapped_dependencies(jardepmap, binary, confs) - else: - return self._unexcluded_dependencies(jardepmap, binary) - - def _mapped_dependencies(self, jardepmap, binary, confs): - # TODO(John Sirois): rework product mapping towards well known types - - # Generate a map of jars for each unique artifact (org, name) - externaljars = OrderedDict() - visited = set() - for conf in confs: - mapped = jardepmap.get((binary, conf)) - if mapped: - for basedir, jars in mapped.items(): - for externaljar in jars:
- if (basedir, externaljar) not in visited: - visited.add((basedir, externaljar)) - keys = jardepmap.keys_for(basedir, externaljar) - for key in keys: - if isinstance(key, tuple) and len(key) == 3: - org, name, configuration = key - classpath_entry = externaljars.get((org, name)) - if not classpath_entry: - classpath_entry = {} - externaljars[(org, name)] = classpath_entry - classpath_entry[conf] = os.path.join(basedir, externaljar) - return externaljars.values() - - def _unexcluded_dependencies(self, jardepmap, binary): - # TODO(John Sirois): Kill this and move jar exclusion to use confs - excludes = set() - for exclude_key in ((e.org, e.name) if e.name else e.org for e in binary.deploy_excludes): - exclude = jardepmap.get(exclude_key) - if exclude: - for basedir, jars in exclude.items(): - for jar in jars: - excludes.add((basedir, jar)) - self.context.log.debug('Calculated excludes:\n\t%s' % '\n\t'.join(str(e) for e in excludes)) - - externaljars = OrderedSet() - - def add_jars(target): - mapped = jardepmap.get(target) - if mapped: - for basedir, jars in mapped.items(): - for externaljar in jars: - if (basedir, externaljar) not in excludes: - externaljars.add((basedir, externaljar)) - else: - self.context.log.debug('Excluding %s from binary' % externaljar) - - binary.walk(add_jars, lambda t: t.is_internal) - return externaljars diff --git a/src/python/twitter/pants/tasks/jvm_compile/__init__.py b/src/python/twitter/pants/tasks/jvm_compile/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/python/twitter/pants/tasks/jvm_compile/analysis.py b/src/python/twitter/pants/tasks/jvm_compile/analysis.py deleted file mode 100644 index fadd9970a..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/analysis.py +++ /dev/null @@ -1,32 +0,0 @@ - -class Analysis(object): - """Parsed representation of an analysis for some JVM language. - - An analysis provides information on the src -> class product mappings - and on the src -> {src|class|jar} file dependency mappings. - """ - @classmethod - def merge(cls, analyses): - """Merge multiple analysis instances into one.""" - raise NotImplementedError() - - def split(self, splits, catchall=False): - """Split the analysis according to splits, which is a list of K iterables of source files. - - If catchall is False, returns a list of K ZincAnalysis objects, one for each of the splits, in order. - If catchall is True, returns K+1 ZincAnalysis objects, the last one containing the analysis for any - remainder sources not mentioned in the K splits. - """ - raise NotImplementedError() - - def write_to_path(self, outfile_path, rebasings=None): - with open(outfile_path, 'w') as outfile: - self.write(outfile, rebasings) - - def write(self, outfile, rebasings=None): - """Write this Analysis to outfile. - - rebasings: A list of path prefix pairs [from_prefix, to_prefix] to rewrite. - to_prefix may be None, in which case matching paths are removed entirely. 
- """ - raise NotImplementedError() diff --git a/src/python/twitter/pants/tasks/jvm_compile/analysis_parser.py b/src/python/twitter/pants/tasks/jvm_compile/analysis_parser.py deleted file mode 100644 index c3d1eb3a0..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/analysis_parser.py +++ /dev/null @@ -1,78 +0,0 @@ -import os - -from twitter.pants.tasks.task_error import TaskError - - -class ParseError(TaskError): - pass - - -class AnalysisParser(object): - """Parse a file containing representation of an analysis for some JVM language.""" - def __init__(self, classes_dir): - self.classes_dir = classes_dir # The output dir for classes in this analysis. - - def is_nonempty_analysis(self, path): - """Returns whether an analysis at a specified path is nontrivial.""" - if not os.path.exists(path): - return False - empty_prefix = self.empty_prefix() - with open(path, 'r') as infile: - prefix = infile.read(len(empty_prefix)) - return prefix != empty_prefix - - def empty_prefix(self): - """Returns a prefix indicating a trivial analysis file. - - I.e., this prefix is present at the begnning of an analysis file iff the analysis is trivial. - """ - raise NotImplementedError() - - def parse_from_path(self, infile_path): - """Parse an analysis instance from a text file.""" - with open(infile_path, 'r') as infile: - return self.parse(infile) - - def parse(self, infile): - """Parse an analysis instance from an open file.""" - raise NotImplementedError() - - def parse_products_from_path(self, infile_path): - """An efficient parser of just the src->class mappings. - - Returns a map of src -> list of classfiles. All paths are absolute. - """ - with open(infile_path, 'r') as infile: - return self.parse_products(infile) - - def parse_products(self, infile): - """An efficient parser of just the src->class mappings. - - Returns a map of src -> list of classfiles. All paths are absolute. - """ - raise NotImplementedError() - - def parse_deps_from_path(self, infile_path, classpath_indexer): - """An efficient parser of just the src->dep mappings. - - classpath_indexer - a no-arg method that an implementation may call if it needs a mapping - of class->element on the classpath that provides that class. - We use this indirection to avoid unnecessary precomputation. - """ - with open(infile_path, 'r') as infile: - return self.parse_deps(infile, classpath_indexer) - - def parse_deps(self, infile, classpath_indexer): - """An efficient parser of just the binary, source and external deps sections. - - classpath_indexer - a no-arg method that an implementation may call if it needs a mapping - of class->element on the classpath that provides that class. - We use this indirection to avoid unnecessary precomputation. - - Returns a dict of src -> iterable of deps, where each item in deps is either a binary dep, - source dep or external dep, i.e., either a source file, a class file or a jar file. - - All paths are absolute. 
- """ - raise NotImplementedError() - diff --git a/src/python/twitter/pants/tasks/jvm_compile/analysis_tools.py b/src/python/twitter/pants/tasks/jvm_compile/analysis_tools.py deleted file mode 100644 index 9f70299a3..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/analysis_tools.py +++ /dev/null @@ -1,81 +0,0 @@ -import os -import shutil -from twitter.common.contextutil import temporary_dir -from twitter.pants.base.build_environment import get_buildroot - - -class AnalysisTools(object): - """Analysis manipulation methods required by JvmCompile.""" - _IVY_HOME_PLACEHOLDER = '/_IVY_HOME_PLACEHOLDER' - _PANTS_HOME_PLACEHOLDER = '/_PANTS_HOME_PLACEHOLDER' - - def __init__(self, context, parser, analysis_cls): - self.parser = parser - self._java_home = context.java_home - self._ivy_home = context.ivy_home - self._pants_home = get_buildroot() - self._analysis_cls = analysis_cls - - def split_to_paths(self, analysis_path, split_path_pairs, catchall_path=None): - """Split an analysis file. - - split_path_pairs: A list of pairs (split, output_path) where split is a list of source files - whose analysis is to be split out into output_path. The source files may either be - absolute paths, or relative to the build root. - - If catchall_path is specified, the analysis for any sources not mentioned in the splits is - split out to that path. - """ - analysis = self.parser.parse_from_path(analysis_path) - splits, output_paths = zip(*split_path_pairs) - split_analyses = analysis.split(splits, catchall_path is not None) - if catchall_path is not None: - output_paths = output_paths + (catchall_path, ) - for analysis, path in zip(split_analyses, output_paths): - analysis.write_to_path(path) - - def merge_from_paths(self, analysis_paths, merged_analysis_path): - """Merge multiple analysis files into one.""" - analyses = [self.parser.parse_from_path(path) for path in analysis_paths] - merged_analysis = self._analysis_cls.merge(analyses) - merged_analysis.write_to_path(merged_analysis_path) - - def relativize(self, src_analysis, relativized_analysis): - with temporary_dir() as tmp_analysis_dir: - tmp_analysis_file = os.path.join(tmp_analysis_dir, 'analysis.relativized') - - # NOTE: We can't port references to deps on the Java home. This is because different JVM - # implementations on different systems have different structures, and there's not - # necessarily a 1-1 mapping between Java jars on different systems. Instead we simply - # drop those references from the analysis file. - # - # In practice the JVM changes rarely, and it should be fine to require a full rebuild - # in those rare cases. - rebasings = [ - (self._java_home, None), - (self._ivy_home, self._IVY_HOME_PLACEHOLDER), - (self._pants_home, self._PANTS_HOME_PLACEHOLDER), - ] - # Work on a tmpfile, for safety. - self._rebase_from_path(src_analysis, tmp_analysis_file, rebasings) - shutil.move(tmp_analysis_file, relativized_analysis) - - def localize(self, src_analysis, localized_analysis): - with temporary_dir() as tmp_analysis_dir: - tmp_analysis_file = os.path.join(tmp_analysis_dir, 'analysis') - rebasings = [ - (AnalysisTools._IVY_HOME_PLACEHOLDER, self._ivy_home), - (AnalysisTools._PANTS_HOME_PLACEHOLDER, self._pants_home), - ] - # Work on a tmpfile, for safety. - self._rebase_from_path(src_analysis, tmp_analysis_file, rebasings) - shutil.move(tmp_analysis_file, localized_analysis) - - def _rebase_from_path(self, input_analysis_path, output_analysis_path, rebasings): - """Rebase file paths in an analysis file. 
- - rebasings: A list of path prefix pairs [from_prefix, to_prefix] to rewrite. - to_prefix may be None, in which case matching paths are removed entirely. - """ - analysis = self.parser.parse_from_path(input_analysis_path) - analysis.write_to_path(output_analysis_path, rebasings=rebasings) diff --git a/src/python/twitter/pants/tasks/jvm_compile/java/__init__.py b/src/python/twitter/pants/tasks/jvm_compile/java/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/java/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/python/twitter/pants/tasks/jvm_compile/java/java_compile.py b/src/python/twitter/pants/tasks/jvm_compile/java/java_compile.py deleted file mode 100644 index d08f261fb..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/java/java_compile.py +++ /dev/null @@ -1,137 +0,0 @@ - -import os -import shlex - -from twitter.common.dirutil import safe_open - -from twitter.pants.base.target import Target -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.jvm_compile.analysis_tools import AnalysisTools -from twitter.pants.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis -from twitter.pants.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser -from twitter.pants.tasks.jvm_compile.jvm_compile import JvmCompile - - -# From http://kenai.com/projects/jmake/sources/mercurial/content/src/com/sun/tools/jmake/Main.java?rev=26 -# Main.mainExternal docs. -_JMAKE_ERROR_CODES = { - -1: 'invalid command line option detected', - -2: 'error reading command file', - -3: 'project database corrupted', - -4: 'error initializing or calling the compiler', - -5: 'compilation error', - -6: 'error parsing a class file', - -7: 'file not found', - -8: 'I/O exception', - -9: 'internal jmake exception', - -10: 'deduced and actual class name mismatch', - -11: 'invalid source file extension', - -12: 'a class in a JAR is found dependent on a class with the .java source', - -13: 'more than one entry for the same class is found in the project', - -20: 'internal Java error (caused by java.lang.InternalError)', - -30: 'internal Java error (caused by java.lang.RuntimeException).' 
-} -# When executed via a subprocess return codes will be treated as unsigned -_JMAKE_ERROR_CODES.update((256+code, msg) for code, msg in _JMAKE_ERROR_CODES.items()) - - -class JavaCompile(JvmCompile): - _language = 'java' - _file_suffix = '.java' - _config_section = 'java-compile' - - # Well known metadata file to auto-register annotation processors with a java 1.6+ compiler - _PROCESSOR_INFO_FILE = 'META-INF/services/javax.annotation.processing.Processor' - - - _JMAKE_MAIN = 'com.sun.tools.jmake.Main' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - JvmCompile.setup_parser(JavaCompile, option_group, args, mkflag) - - option_group.add_option(mkflag("args"), dest="java_compile_args", action="append", - help="Pass these extra args to javac.") - - def __init__(self, context): - super(JavaCompile, self).__init__(context, jdk=True) - - self._depfile = os.path.join(self._analysis_dir, 'global_depfile') - - self._jmake_bootstrap_key = 'jmake' - external_tools = context.config.getlist('java-compile', 'jmake-bootstrap-tools', default=[':jmake']) - self.register_jvm_tool(self._jmake_bootstrap_key, external_tools) - - self._compiler_bootstrap_key = 'java-compiler' - compiler_bootstrap_tools = context.config.getlist('java-compile', 'compiler-bootstrap-tools', - default=[':java-compiler']) - self.register_jvm_tool(self._compiler_bootstrap_key, compiler_bootstrap_tools) - - self._javac_opts = [] - if context.options.java_compile_args: - for arg in context.options.java_compile_args: - self._javac_opts.extend(shlex.split(arg)) - else: - self._javac_opts.extend(context.config.getlist('java-compile', 'javac_args', default=[])) - - def create_analysis_tools(self): - return AnalysisTools(self.context, JMakeAnalysisParser(self._classes_dir), JMakeAnalysis) - - def extra_products(self, target): - ret = [] - if target.is_apt and target.processors: - root = os.path.join(self._resources_dir, Target.maybe_readable_identify([target])) - processor_info_file = os.path.join(root, JavaCompile._PROCESSOR_INFO_FILE) - self._write_processor_info(processor_info_file, target.processors) - ret.append((root, [processor_info_file])) - return ret - - def compile(self, args, classpath, sources, classes_output_dir, analysis_file): - jmake_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._jmake_bootstrap_key) - args = [ - '-classpath', ':'.join(classpath + [self._classes_dir]), - '-d', self._classes_dir, - '-pdb', analysis_file, - '-pdb-text-format', - ] - - compiler_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._compiler_bootstrap_key) - args.extend([ - '-jcpath', ':'.join(compiler_classpath), - '-jcmainclass', 'com.twitter.common.tools.Compiler', - ]) - args.extend(map(lambda arg: '-C%s' % arg, self._javac_opts)) - - args.extend(self._args) - args.extend(sources) - result = self.runjava(classpath=jmake_classpath, - main=JavaCompile._JMAKE_MAIN, - jvm_options=self._jvm_options, - args=args, - workunit_name='jmake', - workunit_labels=[WorkUnit.COMPILER]) - if result: - default_message = 'Unexpected error - JMake returned %d' % result - raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message)) - - def post_process(self, relevant_targets): - # Produce a monolithic apt processor service info file for further compilation rounds - # and the unit test classpath. - # This is distinct from the per-target ones we create in extra_products(). 
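# For illustration: the "processor info file" described above is a standard
# java.util.ServiceLoader registration file, one fully-qualified annotation
# processor class name per line, at
# META-INF/services/javax.annotation.processing.Processor. A minimal sketch
# (it assumes the parent directory already exists; the real code uses safe_open):

def write_service_file(path, processor_classnames):
  with open(path, 'w') as f:
    for name in sorted(processor_classnames):
      f.write('%s\n' % name.strip())

# javac discovers processors on the compile classpath through this file, which is
# why a monolithic copy is produced for later compilation rounds.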
- all_processors = set() - for target in relevant_targets: - if target.is_apt and target.processors: - all_processors.update(target.processors) - processor_info_file = os.path.join(self._classes_dir, JavaCompile._PROCESSOR_INFO_FILE) - if os.path.exists(processor_info_file): - with safe_open(processor_info_file, 'r') as f: - for processor in f: - all_processors.add(processor) - self._write_processor_info(processor_info_file, all_processors) - - def _write_processor_info(self, processor_info_file, processors): - with safe_open(processor_info_file, 'w') as f: - for processor in processors: - f.write('%s\n' % processor.strip()) diff --git a/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis.py b/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis.py deleted file mode 100644 index 37c6387c5..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis.py +++ /dev/null @@ -1,102 +0,0 @@ -import os - -from collections import defaultdict -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.tasks.jvm_compile.analysis import Analysis - - -class JMakeAnalysis(Analysis): - """Parsed representation of a jmake pdb. - - We use the term 'analysis' for uniformity with Zinc etc. - """ - @classmethod - def merge(cls, analyses): - merged_pcd_entries = [] - merged_src_to_deps = {} - for analysis in analyses: - merged_pcd_entries.extend(analysis.pcd_entries) - merged_src_to_deps.update(analysis.src_to_deps) - return JMakeAnalysis(merged_pcd_entries, merged_src_to_deps) - - def __init__(self, pcd_entries, src_to_deps): - self.pcd_entries = pcd_entries # Note that second item in tuple is the source file. - self.src_to_deps = src_to_deps - - def split(self, splits, catchall=False): - buildroot = get_buildroot() - src_to_split_idx = {} - for i, split in enumerate(splits): - for s in split: - src_to_split_idx[s if os.path.isabs(s) else os.path.join(buildroot, s)] = i - num_outputs = len(splits) + 1 if catchall else len(splits) - catchall_idx = len(splits) if catchall else -1 - - split_pcd_entries = [] - split_src_to_deps = [] - for _ in xrange(0, num_outputs): - split_pcd_entries.append([]) - split_src_to_deps.append({}) - - for pcd_entry in self.pcd_entries: - split_idx = src_to_split_idx.get(pcd_entry[1], catchall_idx) - if split_idx != -1: - split_pcd_entries[split_idx].append(pcd_entry) - for src, deps in self.src_to_deps.items(): - split_idx = src_to_split_idx.get(src, catchall_idx) - if split_idx != -1: - split_src_to_deps[split_idx][src] = deps - - return [JMakeAnalysis(x, y) for x, y in zip(split_pcd_entries, split_src_to_deps)] - - def write(self, outfile, rebasings=None): - # Note that the only paths in a jmake analysis are source files. - def rebase_path(path): - if rebasings: - for rebase_from, rebase_to in rebasings: - if rebase_to is None: - if path.startswith(rebase_from): - return None - else: - path = path.replace(rebase_from, rebase_to) - return path - - outfile.write('pcd entries:\n') - outfile.write('%d items\n' % len(self.pcd_entries)) - for pcd_entry in self.pcd_entries: - rebased_src = rebase_path(pcd_entry[1]) - if rebased_src: - outfile.write(pcd_entry[0]) - outfile.write('\t') - outfile.write(rebased_src) - for x in pcd_entry[2:]: - outfile.write('\t') - outfile.write(x) - # Note that last element already includes \n. 
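# For illustration, the on-disk text shape this writer emits and the parser below
# consumes (values hypothetical; tabs shown as \t):
#
#   pcd entries:
#   1 items
#   com/foo/Bar\tsrc/com/foo/Bar.java\t...\t...\t...
#   dependencies:
#   1 items
#   src/com/foo/Bar.java\tcom/foo/Baz
#
# Each pcd entry must carry exactly 5 tab-separated fields, which the parser checks:

example_pcd_entry = 'com/foo/Bar\tsrc/com/foo/Bar.java\tf1\tf2\tf3\n'
assert len(example_pcd_entry.split('\t')) == 5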
-
-    outfile.write('dependencies:\n')
-    outfile.write('%d items\n' % len(self.src_to_deps))
-    for src, deps in self.src_to_deps.items():
-      rebased_src = rebase_path(src)
-      if rebased_src:
-        outfile.write(rebased_src)
-        for dep in deps:
-          outfile.write('\t')
-          outfile.write(dep)
-        outfile.write('\n')
-
-  def compute_products(self):
-    """Returns the products in this analysis.
-
-    Returns a map of <src file> -> list of classfiles, relative to the classes dir.
-
-    Note that we don't currently use this method: We use JMakeAnalysisParser.parse_products()
-    to more efficiently read just the products out of the file. However we leave this
-    here for documentation of the meaning of the useful fields in pcd_entries.
-    """
-    src_to_classfiles = defaultdict(list)
-    for pcd_entry in self.pcd_entries:
-      srcfile = pcd_entry[1]
-      # In the file classes are represented with slashes, not dots. E.g., com/foo/bar/Baz.
-      src_to_classfiles[srcfile].append(pcd_entry[0] + '.class')
-    return src_to_classfiles
diff --git a/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis_parser.py b/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis_parser.py
deleted file mode 100644
index 6efdd21f4..000000000
--- a/src/python/twitter/pants/tasks/jvm_compile/java/jmake_analysis_parser.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from collections import defaultdict
-import os
-import re
-
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.tasks.jvm_compile.analysis_parser import ParseError, AnalysisParser
-from twitter.pants.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis
-
-
-class JMakeAnalysisParser(AnalysisParser):
-  """Parse a file containing a representation of an analysis for some JVM language."""
-
-  def empty_prefix(self):
-    return 'pcd entries:\n0 items\n'
-
-  def parse(self, infile):
-    self._expect_header(infile.readline(), 'pcd entries')
-    num_pcd_entries = self._parse_num_items(infile.readline())
-    pcd_entries = []
-    for i in xrange(0, num_pcd_entries):
-      line = infile.readline()
-      tpl = line.split('\t')
-      if len(tpl) != 5:
-        raise ParseError('Line must contain 5 tab-separated fields: %s' % line)
-      pcd_entries.append(tpl)  # Note: we preserve the \n on the last entry.
-    src_to_deps = self._parse_deps_at_position(infile)
-    return JMakeAnalysis(pcd_entries, src_to_deps)
-
-  def parse_products(self, infile):
-    self._expect_header(infile.readline(), 'pcd entries')
-    num_pcd_entries = self._parse_num_items(infile.readline())
-    ret = defaultdict(list)
-    # Parse more efficiently than above, since we only care about
-    # the first two elements in the line.
-    for _ in xrange(0, num_pcd_entries):
-      line = infile.readline()
-      p1 = line.find('\t')
-      clsfile = os.path.join(self.classes_dir, line[0:p1] + '.class')
-      p2 = line.find('\t', p1 + 1)
-      src = line[p1+1:p2]
-      ret[src].append(clsfile)
-    return ret
-
-  def parse_deps(self, infile, classpath_indexer):
-    buildroot = get_buildroot()
-    classpath_elements_by_class = classpath_indexer()
-    self._expect_header(infile.readline(), 'pcd entries')
-    num_pcd_entries = self._parse_num_items(infile.readline())
-    for _ in xrange(0, num_pcd_entries):
-      infile.readline()  # Skip these lines.
-    src_to_deps = self._parse_deps_at_position(infile)
-    ret = defaultdict(set)
-    for src, deps in src_to_deps.items():
-      for dep in deps:
-        rel_classfile = dep + '.class'
-        classpath_element = classpath_elements_by_class.get(rel_classfile, None)
-        if classpath_element:  # Dep is on an external jar/classes dir.
-          ret[src].add(classpath_element)
-        else:  # Dep is on an internal class.
-          classfile = os.path.join(buildroot, self.classes_dir, rel_classfile)
-          ret[src].add(classfile)
-    return ret
-
-  def _parse_deps_at_position(self, infile):
-    self._expect_header(infile.readline(), 'dependencies')
-    num_deps = self._parse_num_items(infile.readline())
-    src_to_deps = {}
-    for i in xrange(0, num_deps):
-      tpl = infile.readline().split('\t')
-      src = tpl[0]
-      deps = tpl[1:]
-      deps[-1] = deps[-1][0:-1]  # Trim off the \n.
-      src_to_deps[src] = deps
-    return src_to_deps
-
-  num_items_re = re.compile(r'(\d+) items\n')
-
-  def _parse_num_items(self, line):
-    """Parse a line of the form '<num> items' and return <num> as an int."""
-    matchobj = JMakeAnalysisParser.num_items_re.match(line)
-    if not matchobj:
-      raise ParseError('Expected: "<num> items". Found: "%s"' % line)
-    return int(matchobj.group(1))
-
-  def _expect_header(self, line, header):
-    expected = header + ':\n'
-    if line != expected:
-      raise ParseError('Expected: %s. Found: %s' % (expected, line))
diff --git a/src/python/twitter/pants/tasks/jvm_compile/jvm_compile.py b/src/python/twitter/pants/tasks/jvm_compile/jvm_compile.py
deleted file mode 100644
index ff0d05915..000000000
--- a/src/python/twitter/pants/tasks/jvm_compile/jvm_compile.py
+++ /dev/null
@@ -1,655 +0,0 @@
-import itertools
-import os
-import shutil
-import uuid
-
-from collections import defaultdict
-from itertools import groupby
-
-from twitter.common import contextutil
-from twitter.common.collections import OrderedSet
-from twitter.common.contextutil import open_zip
-from twitter.common.dirutil import safe_rmtree, safe_mkdir
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.target import Target
-from twitter.pants.base.worker_pool import Work
-from twitter.pants.goal.products import MultipleRootedProducts
-from twitter.pants.reporting.reporting_utils import items_to_report_element
-from twitter.pants.tasks.jvm_compile.jvm_dependency_analyzer import JvmDependencyAnalyzer
-from twitter.pants.tasks.nailgun_task import NailgunTask
-from twitter.pants.tasks import Task
-
-
-class JvmCompile(NailgunTask):
-  """A common framework for JVM compilation.
-
-  To subclass for a specific JVM language, implement the static values and methods
-  mentioned below under "Subclasses must implement".
-  """
-
-  @staticmethod
-  def setup_parser(subcls, option_group, args, mkflag):
-    NailgunTask.setup_parser(option_group, args, mkflag)
-
-    option_group.add_option(mkflag('warnings'), mkflag('warnings', negate=True),
-                            dest=subcls._language+'_compile_warnings',
-                            default=True,
-                            action='callback',
-                            callback=mkflag.set_bool,
-                            help='[%default] Compile with all configured warnings enabled.')
-
-    option_group.add_option(mkflag('partition-size-hint'),
-                            dest=subcls._language+'_partition_size_hint',
-                            action='store',
-                            type='int',
-                            default=-1,
-                            help='Roughly how many source files to attempt to compile together. '
-                                 'Set to a large number to compile all sources together. Set this '
-                                 'to 0 to compile target-by-target. Default is set in pants.ini.')
-
-    option_group.add_option(mkflag('missing-deps'),
-                            dest=subcls._language+'_missing_deps',
-                            choices=['off', 'warn', 'fatal'],
-                            default='warn',
-                            help='[%default] One of off, warn, fatal. '
-                                 'Check for missing dependencies in ' + subcls._language + ' code. '
-                                 'Reports actual dependencies A -> B where there is no '
-                                 'transitive BUILD file dependency path from A to B.'
-                                 'If fatal, missing deps are treated as a build error.')
-
-    option_group.add_option(mkflag('missing-direct-deps'),
-                            dest=subcls._language+'_missing_direct_deps',
-                            choices=['off', 'warn', 'fatal'],
-                            default='off',
-                            help='[%default] One of off, warn, fatal. '
-                                 'Check for missing direct dependencies in ' + subcls._language +
-                                 ' code. Reports actual dependencies A -> B where there is no '
-                                 'direct BUILD file dependency path from A to B. This is a very '
-                                 'strict check, as in practice it is common to rely on transitive, '
-                                 'non-direct dependencies, e.g., due to type inference or when the '
-                                 'main target in a BUILD file is modified to depend on other '
-                                 'targets in the same BUILD file as an implementation detail. It '
-                                 'may still be useful to set it to fatal temporarily, to detect '
-                                 'these.')
-
-    option_group.add_option(mkflag('unnecessary-deps'),
-                            dest=subcls._language+'_unnecessary_deps',
-                            choices=['off', 'warn', 'fatal'],
-                            default='off',
-                            help='[%default] One of off, warn, fatal. Check for declared '
-                                 'dependencies in ' + subcls._language + ' code that are not '
-                                 'needed. This is a very strict check. For example, generated code '
-                                 'will often legitimately have BUILD dependencies that are unused '
-                                 'in practice.')
-
-    option_group.add_option(mkflag('delete-scratch'), mkflag('delete-scratch', negate=True),
-                            dest=subcls._language+'_delete_scratch',
-                            default=True,
-                            action='callback',
-                            callback=mkflag.set_bool,
-                            help='[%default] Leave intermediate scratch files around, '
-                                 'for debugging build problems.')
-
-  # Subclasses must implement.
-  # --------------------------
-
-  _language = None
-  _file_suffix = None
-  _config_section = None
-
-  def create_analysis_tools(self):
-    """Returns an AnalysisTools implementation.
-
-    Subclasses must implement.
-    """
-    raise NotImplementedError()
-
-  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
-    """Invoke the compiler.
-
-    Must raise TaskError on compile failure.
-
-    Subclasses must implement."""
-    raise NotImplementedError()
-
-
-  # Subclasses may override.
-  # ------------------------
-
-  def extra_classpath_elements(self):
-    """Extra classpath elements common to all compiler invocations.
-
-    E.g., jars for compiler plugins.
-    """
-    return []
-
-  def extra_products(self, target):
-    """Any extra, out-of-band products created for a target.
-
-    E.g., targets that produce scala compiler plugins produce an info file.
-    Returns a list of pairs (root, [absolute paths of files under root]).
-    """
-    return []
-
-  def post_process(self, relevant_targets):
-    """Any extra post-execute work."""
-    pass
-
-
-  # Common code.
-  # ------------
-
-  @staticmethod
-  def _analysis_for_target(analysis_dir, target):
-    return os.path.join(analysis_dir, target.id + '.analysis')
-
-  @staticmethod
-  def _portable_analysis_for_target(analysis_dir, target):
-    return JvmCompile._analysis_for_target(analysis_dir, target) + '.portable'
-
-  def __init__(self, context, minimum_version=None, jdk=False):
-    # TODO(John Sirois): XXX plumb minimum_version via config or flags
-    super(JvmCompile, self).__init__(context, minimum_version=minimum_version, jdk=jdk)
-    concrete_class = type(self)
-    config_section = concrete_class._config_section
-
-    def get_lang_specific_option(opt):
-      full_opt_name = self._language + '_' + opt
-      return getattr(context.options, full_opt_name, None)
-
-    # Global workdir.
-    self._pants_workdir = context.config.getdefault('pants_workdir')
-
-    # Various working directories.
-    workdir = context.config.get(config_section, 'workdir')
-    self._classes_dir = os.path.join(workdir, 'classes')
-    self._resources_dir = os.path.join(workdir, 'resources')
-    self._analysis_dir = os.path.join(workdir, 'analysis')
-
-    self._delete_scratch = get_lang_specific_option('delete_scratch')
-
-    safe_mkdir(self._classes_dir)
-    safe_mkdir(self._analysis_dir)
-
-    self._analysis_file = os.path.join(self._analysis_dir, 'global_analysis.valid')
-    self._invalid_analysis_file = os.path.join(self._analysis_dir, 'global_analysis.invalid')
-
-    # A temporary, but well-known, dir in which to munge analysis/dependency files before
-    # caching. It must be well-known so we know where to find the files when we retrieve them from
-    # the cache.
-    self._analysis_tmpdir = os.path.join(self._analysis_dir, 'artifact_cache_tmpdir')
-
-    # We can't create analysis tools until after construction.
-    self._lazy_analysis_tools = None
-
-    # Compiler options.
-    self._args = context.config.getlist(config_section, 'args')
-    if get_lang_specific_option('compile_warnings'):
-      self._args.extend(context.config.getlist(config_section, 'warning_args'))
-    else:
-      self._args.extend(context.config.getlist(config_section, 'no_warning_args'))
-
-    # The rough number of source files to build in each compiler pass.
-    self._partition_size_hint = get_lang_specific_option('partition_size_hint')
-    if self._partition_size_hint == -1:
-      self._partition_size_hint = context.config.getint(config_section, 'partition_size_hint',
-                                                        default=1000)
-
-    # JVM options for running the compiler.
-    self._jvm_options = context.config.getlist(config_section, 'jvm_args')
-
-    # The ivy confs for which we're building.
-    self._confs = context.config.getlist(config_section, 'confs', default=['default'])
-
-    # Set up dep checking if needed.
-    def munge_flag(flag):
-      return None if flag == 'off' else flag
-    check_missing_deps = munge_flag(get_lang_specific_option('missing_deps'))
-    check_missing_direct_deps = munge_flag(get_lang_specific_option('missing_direct_deps'))
-    check_unnecessary_deps = munge_flag(get_lang_specific_option('unnecessary_deps'))
-
-    if check_missing_deps or check_missing_direct_deps or check_unnecessary_deps:
-      # Must init it here, so it can set requirements on the context.
-      self._dep_analyzer = JvmDependencyAnalyzer(self.context,
-                                                 check_missing_deps,
-                                                 check_missing_direct_deps,
-                                                 check_unnecessary_deps)
-    else:
-      self._dep_analyzer = None
-
-    self._class_to_jarfile = None  # Computed lazily as needed.
-
-    self.context.products.require_data('exclusives_groups')
-    self.setup_artifact_cache_from_config(config_section=config_section)
-
-    # Sources (relative to buildroot) present in the last analysis that have since been deleted.
-    # Generated lazily, so do not access directly. Call self._deleted_sources().
-    self._lazy_deleted_sources = None
-
-  def product_type(self):
-    return 'classes'
-
-  def can_dry_run(self):
-    return True
-
-  def move(self, src, dst):
-    if self._delete_scratch:
-      shutil.move(src, dst)
-    else:
-      shutil.copy(src, dst)
-
-  # TODO(benjy): Break this monstrosity up? Previous attempts to do so
-  # turned out to be more trouble than it was worth.
-  def execute(self, targets):
-    # TODO(benjy): Add a pre-execute phase for injecting deps into targets, so e.g.,
-    # we can inject a dep on the scala runtime library and still have it ivy-resolve.
-
-    # In case we have no relevant targets and return early.
-    self._create_empty_products()
-
-    relevant_targets = [t for t in targets if t.has_sources(self._file_suffix)]
-
-    if not relevant_targets:
-      return
-
-    # Get the exclusives group for the targets to compile.
-    # Group guarantees that there'll be a single exclusives key for them.
-    egroups = self.context.products.get_data('exclusives_groups')
-    group_id = egroups.get_group_key_for_target(relevant_targets[0])
-
-    # Add resource dirs to the classpath for us and for downstream tasks.
-    for conf in self._confs:
-      egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])
-
-    # Get the classpath generated by upstream JVM tasks (including previous calls to execute()).
-    classpath = egroups.get_classpath_for_group(group_id)
-
-    # Add any extra classpath elements.
-    for conf in self._confs:
-      for jar in self.extra_classpath_elements():
-        classpath.insert(0, (conf, jar))
-
-    # Target -> sources (relative to buildroot).
-    sources_by_target = self._compute_sources_by_target(relevant_targets)
-
-    # Invalidation check. Everything inside the with block must succeed for the
-    # invalid targets to become valid.
-    with self.invalidated(relevant_targets,
-                          invalidate_dependents=True,
-                          partition_size_hint=self._partition_size_hint) as invalidation_check:
-      if invalidation_check.invalid_vts and not self.dry_run:
-        # The analysis for invalid and deleted sources is no longer valid.
-        invalid_targets = [vt.target for vt in invalidation_check.invalid_vts]
-        invalid_sources_by_target = {}
-        for tgt in invalid_targets:
-          invalid_sources_by_target[tgt] = sources_by_target[tgt]
-        invalid_sources = list(itertools.chain.from_iterable(invalid_sources_by_target.values()))
-        deleted_sources = self._deleted_sources()
-
-        # Work in a tmpdir so we don't stomp the main analysis files on error.
-        # The tmpdir is cleaned up in a shutdown hook, because background work
-        # may need to access files we create here even after this method returns.
-        self._ensure_analysis_tmpdir()
-        tmpdir = os.path.join(self._analysis_tmpdir, str(uuid.uuid4()))
-        os.mkdir(tmpdir)
-        valid_analysis_tmp = os.path.join(tmpdir, 'valid_analysis')
-        newly_invalid_analysis_tmp = os.path.join(tmpdir, 'newly_invalid_analysis')
-        invalid_analysis_tmp = os.path.join(tmpdir, 'invalid_analysis')
-        if self._analysis_parser.is_nonempty_analysis(self._analysis_file):
-          with self.context.new_workunit(name='prepare-analysis'):
-            self._analysis_tools.split_to_paths(self._analysis_file,
-                [(invalid_sources + deleted_sources, newly_invalid_analysis_tmp)], valid_analysis_tmp)
-            if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file):
-              self._analysis_tools.merge_from_paths(
-                  [self._invalid_analysis_file, newly_invalid_analysis_tmp], invalid_analysis_tmp)
-            else:
-              invalid_analysis_tmp = newly_invalid_analysis_tmp
-
-            # Now it's OK to overwrite the main analysis files with the new state.
-            self.move(valid_analysis_tmp, self._analysis_file)
-            self.move(invalid_analysis_tmp, self._invalid_analysis_file)
-
-        # Register products for all the valid targets.
-        # We register as we go, so dependency checking code can use this data.
-        valid_targets = list(set(relevant_targets) - set(invalid_targets))
-        self._register_products(valid_targets, sources_by_target, self._analysis_file)
-
-        # Figure out the sources and analysis belonging to each partition.
-        partitions = []  # Each element is a triple (vts, sources, analysis_file).
-        for vts in invalidation_check.invalid_vts_partitioned:
-          partition_tmpdir = os.path.join(tmpdir, Target.maybe_readable_identify(vts.targets))
-          os.mkdir(partition_tmpdir)
-          sources = list(itertools.chain.from_iterable(
-              [invalid_sources_by_target.get(t, []) for t in vts.targets]))
-          de_duped_sources = list(OrderedSet(sources))
-          if len(sources) != len(de_duped_sources):
-            counts = [(src, len(list(srcs))) for src, srcs in groupby(sorted(sources))]
-            self.context.log.warn(
-                'De-duped the following sources:\n\t%s' %
-                '\n\t'.join(sorted('%d %s' % (cnt, src) for src, cnt in counts if cnt > 1)))
-          analysis_file = os.path.join(partition_tmpdir, 'analysis')
-          partitions.append((vts, de_duped_sources, analysis_file))
-
-        # Split per-partition files out of the global invalid analysis.
-        if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file) and partitions:
-          with self.context.new_workunit(name='partition-analysis'):
-            splits = [(x[1], x[2]) for x in partitions]
-            # We have to pass the analysis for any deleted files through zinc, to give it
-            # a chance to delete the relevant class files.
-            if splits:
-              splits[0] = (splits[0][0] + deleted_sources, splits[0][1])
-            self._analysis_tools.split_to_paths(self._invalid_analysis_file, splits)
-
-        # Now compile partitions one by one.
-        for partition in partitions:
-          (vts, sources, analysis_file) = partition
-          cp_entries = [entry for conf, entry in classpath if conf in self._confs]
-          self._process_target_partition(partition, cp_entries)
-          # No exception was thrown, therefore the compile succeeded and analysis_file is now valid.
-          if os.path.exists(analysis_file):  # The compilation created an analysis.
-            # Merge the newly-valid analysis with our global valid analysis.
-            new_valid_analysis = analysis_file + '.valid.new'
-            if self._analysis_parser.is_nonempty_analysis(self._analysis_file):
-              with self.context.new_workunit(name='update-upstream-analysis'):
-                self._analysis_tools.merge_from_paths([self._analysis_file, analysis_file],
-                                                      new_valid_analysis)
-            else:  # We need to keep analysis_file around. Background tasks may need it.
-              shutil.copy(analysis_file, new_valid_analysis)
-
-            # Move the merged valid analysis to its proper location.
-            # We do this before checking for missing dependencies, so that we can still
-            # enjoy an incremental compile after fixing missing deps.
-            self.move(new_valid_analysis, self._analysis_file)
-
-            # Update the products with the latest classes. Must happen before the
-            # missing dependencies check.
-            self._register_products(vts.targets, sources_by_target, analysis_file)
-            if self._dep_analyzer:
-              # Check for missing dependencies.
-              actual_deps = self._analysis_parser.parse_deps_from_path(analysis_file,
-                  lambda: self._compute_classpath_elements_by_class(cp_entries))
-              with self.context.new_workunit(name='find-missing-dependencies'):
-                self._dep_analyzer.check(sources, actual_deps)
-
-            # Kick off the background artifact cache write.
-            if self.artifact_cache_writes_enabled():
-              self._write_to_artifact_cache(analysis_file, vts, invalid_sources_by_target)
-
-            if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file):
-              with self.context.new_workunit(name='trim-downstream-analysis'):
-                # Trim out the newly-valid sources from our global invalid analysis.
-                new_invalid_analysis = analysis_file + '.invalid.new'
-                discarded_invalid_analysis = analysis_file + '.invalid.discard'
-                self._analysis_tools.split_to_paths(self._invalid_analysis_file,
-                    [(sources, discarded_invalid_analysis)], new_invalid_analysis)
-                self.move(new_invalid_analysis, self._invalid_analysis_file)
-
-            # Now that all the analysis accounting is complete, and we have no missing deps,
-            # we can safely mark the targets as valid.
-            vts.update()
-      else:
-        # Nothing to build. Register products for all the targets in one go.
-        self._register_products(relevant_targets, sources_by_target, self._analysis_file)
-
-    # Update the classpath for downstream tasks.
-    for conf in self._confs:
-      egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])
-
-    self.post_process(relevant_targets)
-
-  def _process_target_partition(self, partition, classpath):
-    """Needs invoking only on invalid targets.
-
-    partition - a triple (vts, sources, analysis_file).
-    classpath - a list of classpath entries.
-
-    May be invoked concurrently on independent target sets.
-
-    Postcondition: The individual targets in vts are up-to-date, as if each were
-    compiled individually.
-    """
-    (vts, sources, analysis_file) = partition
-
-    if not sources:
-      self.context.log.warn('Skipping %s compile for targets with no sources:\n  %s'
-                            % (self._language, vts.targets))
-    else:
-      # Do some reporting.
-      self.context.log.info(
-          'Compiling a partition containing ',
-          items_to_report_element(sources, 'source'),
-          ' in ',
-          items_to_report_element([t.address.reference() for t in vts.targets], 'target'), '.')
-      with self.context.new_workunit('compile'):
-        # The compiler may delete classfiles, then later exit on a compilation error. Then if the
-        # change triggering the error is reverted, we won't rebuild to restore the missing
-        # classfiles. So we force-invalidate here, to be on the safe side.
-        vts.force_invalidate()
-        self.compile(self._args, classpath, sources, self._classes_dir, analysis_file)
-
-  def check_artifact_cache(self, vts):
-    # Special handling for scala analysis files. Class files are retrieved directly into their
-    # final locations in the global classes dir.
-
-    def post_process_cached_vts(cached_vts):
-      # Merge the localized analysis with the global one (if any).
- analyses_to_merge = [] - for vt in cached_vts: - for target in vt.targets: - analysis_file = JvmCompile._analysis_for_target(self._analysis_tmpdir, target) - portable_analysis_file = JvmCompile._portable_analysis_for_target(self._analysis_tmpdir, - target) - if os.path.exists(portable_analysis_file): - self._analysis_tools.localize(portable_analysis_file, analysis_file) - if os.path.exists(analysis_file): - analyses_to_merge.append(analysis_file) - - if len(analyses_to_merge) > 0: - if os.path.exists(self._analysis_file): - analyses_to_merge.append(self._analysis_file) - with contextutil.temporary_dir() as tmpdir: - tmp_analysis = os.path.join(tmpdir, 'analysis') - with self.context.new_workunit(name='merge_analysis'): - self._analysis_tools.merge_from_paths(analyses_to_merge, tmp_analysis) - self.move(tmp_analysis, self._analysis_file) - - self._ensure_analysis_tmpdir() - return Task.do_check_artifact_cache(self, vts, post_process_cached_vts=post_process_cached_vts) - - def _write_to_artifact_cache(self, analysis_file, vts, sources_by_target): - vt_by_target = dict([(vt.target, vt) for vt in vts.versioned_targets]) - - split_analysis_files = [ - JvmCompile._analysis_for_target(self._analysis_tmpdir, t) for t in vts.targets] - portable_split_analysis_files = [ - JvmCompile._portable_analysis_for_target(self._analysis_tmpdir, t) for t in vts.targets] - - # Set up args for splitting the analysis into per-target files. - splits = zip([sources_by_target.get(t, []) for t in vts.targets], split_analysis_files) - splits_args_tuples = [(analysis_file, splits)] - - # Set up args for rebasing the splits. - relativize_args_tuples = zip(split_analysis_files, portable_split_analysis_files) - - # Set up args for artifact cache updating. - vts_artifactfiles_pairs = [] - classes_by_source = self._compute_classes_by_source(analysis_file) - for target, sources in sources_by_target.items(): - artifacts = [] - for source in sources: - artifacts.extend(classes_by_source.get(source, [])) - vt = vt_by_target.get(target) - if vt is not None: - # NOTE: analysis_file doesn't exist yet. - vts_artifactfiles_pairs.append( - (vt, - artifacts + [JvmCompile._portable_analysis_for_target(self._analysis_tmpdir, target)])) - - update_artifact_cache_work = self.get_update_artifact_cache_work(vts_artifactfiles_pairs) - if update_artifact_cache_work: - work_chain = [ - Work(self._analysis_tools.split_to_paths, splits_args_tuples, 'split'), - Work(self._analysis_tools.relativize, relativize_args_tuples, 'relativize'), - update_artifact_cache_work - ] - self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache') - - def _compute_classes_by_source(self, analysis_file=None): - """Compute src->classes. - - Srcs are relative to buildroot. Classes are absolute paths. - """ - if analysis_file is None: - analysis_file = self._analysis_file - - if not os.path.exists(analysis_file): - return {} - buildroot = get_buildroot() - products = self._analysis_parser.parse_products_from_path(analysis_file) - classes_by_src = {} - for src, classes in products.items(): - relsrc = os.path.relpath(src, buildroot) - classes_by_src[relsrc] = classes - return classes_by_src - - def _deleted_sources(self): - """Returns the list of sources present in the last analysis that have since been deleted. - - This is a global list. We have no way of associating them to individual targets. - Paths are relative to buildroot. - """ - # We compute the list lazily. 
- if self._lazy_deleted_sources is None: - with self.context.new_workunit('find-deleted-sources'): - if os.path.exists(self._analysis_file): - products = self._analysis_parser.parse_products_from_path(self._analysis_file) - buildroot = get_buildroot() - old_sources = products.keys() # Absolute paths. - self._lazy_deleted_sources = [os.path.relpath(src, buildroot) for src in old_sources - if not os.path.exists(src)] - else: - self._lazy_deleted_sources = [] - return self._lazy_deleted_sources - - def _compute_sources_by_target(self, targets): - """Returns map target -> list of sources (relative to buildroot).""" - def calculate_sources(target): - sources = [s for s in target.sources_relative_to_buildroot() if s.endswith(self._file_suffix)] - # TODO: Make this less hacky. Ideally target.java_sources will point to sources, not targets. - if hasattr(target, 'java_sources') and target.java_sources: - sources.extend(self._resolve_target_sources(target.java_sources, '.java')) - return sources - return dict([(t, calculate_sources(t)) for t in targets]) - - def _resolve_target_sources(self, target_sources, extension=None, relative_to_target_base=False): - """Given a list of pants targets, extract their sources as a list. - - Filters against the extension if given and optionally returns the paths relative to the target - base. - """ - resolved_sources = [] - for resolved in Target.resolve_all(target_sources): - if hasattr(resolved, 'sources'): - resolved_sources.extend( - source if relative_to_target_base else os.path.join(resolved.target_base, source) - for source in resolved.sources if not extension or source.endswith(extension) - ) - return resolved_sources - - def _compute_classpath_elements_by_class(self, classpath): - # Don't consider loose classes dirs in our classpath. Those will be considered - # separately, by looking at products. - def non_product(path): - return not (path.startswith(self._pants_workdir) and os.path.isdir(path)) - classpath_jars = filter(non_product, classpath) - if self._class_to_jarfile is None: - self._class_to_jarfile = {} - for jarpath in self.find_all_bootstrap_jars() + classpath_jars: - # Per the classloading spec, a 'jar' in this context can also be a .zip file. - if os.path.isfile(jarpath) and ((jarpath.endswith('.jar') or jarpath.endswith('.zip'))): - with open_zip(jarpath, 'r') as jar: - for cls in jar.namelist(): - # First jar with a given class wins, just like when classloading. - if cls.endswith('.class') and not cls in self._class_to_jarfile: - self._class_to_jarfile[cls] = jarpath - elif os.path.isdir(jarpath): - for dirpath, _, filenames in os.walk(jarpath, followlinks=True): - for f in filter(lambda x: x.endswith('.class'), filenames): - cls = os.path.relpath(os.path.join(dirpath, f), jarpath) - if not cls in self._class_to_jarfile: - self._class_to_jarfile[cls] = jarpath - return self._class_to_jarfile - - def find_all_bootstrap_jars(self): - def get_path(key): - return self.context.java_sysprops.get(key, '').split(':') - - def find_jars_in_dirs(dirs): - ret = [] - for d in dirs: - if os.path.isdir(d): - ret.extend(filter(lambda s: s.endswith('.jar'), os.listdir(d))) - return ret - - # Note: assumes HotSpot, or some JVM that supports sun.boot.class.path. - # TODO: Support other JVMs? Not clear if there's a standard way to do so. - # May include loose classes dirs. - boot_classpath = get_path('sun.boot.class.path') - - # Note that per the specs, overrides and extensions must be in jars. - # Loose class files will not be found by the JVM. 
- override_jars = find_jars_in_dirs(get_path('java.endorsed.dirs')) - extension_jars = find_jars_in_dirs(get_path('java.ext.dirs')) - - # Note that this order matters: it reflects the classloading order. - bootstrap_jars = filter(os.path.isfile, override_jars + boot_classpath + extension_jars) - return bootstrap_jars # Technically, may include loose class dirs from boot_classpath. - - @property - def _analysis_tools(self): - if self._lazy_analysis_tools is None: - self._lazy_analysis_tools = self.create_analysis_tools() - return self._lazy_analysis_tools - - @property - def _analysis_parser(self): - return self._analysis_tools.parser - - def _ensure_analysis_tmpdir(self): - # Do this lazily, so we don't trigger creation of a worker pool unless we need it. - if not os.path.exists(self._analysis_tmpdir): - os.makedirs(self._analysis_tmpdir) - if self._delete_scratch: - self.context.background_worker_pool().add_shutdown_hook( - lambda: safe_rmtree(self._analysis_tmpdir)) - - def _create_empty_products(self): - make_products = lambda: defaultdict(MultipleRootedProducts) - if self.context.products.is_required_data('classes_by_source'): - self.context.products.safe_create_data('classes_by_source', make_products) - if self.context.products.is_required_data('classes_by_target'): - self.context.products.safe_create_data('classes_by_target', make_products) - if self.context.products.is_required_data('resources_by_target'): - self.context.products.safe_create_data('resources_by_target', make_products) - - def _register_products(self, targets, sources_by_target, analysis_file): - classes_by_source = self.context.products.get_data('classes_by_source') - classes_by_target = self.context.products.get_data('classes_by_target') - resources_by_target = self.context.products.get_data('resources_by_target') - - if classes_by_source is not None or classes_by_target is not None: - computed_classes_by_source = self._compute_classes_by_source(analysis_file) - for target in targets: - target_products = classes_by_target[target] if classes_by_target is not None else None - for source in sources_by_target[target]: # Source is relative to buildroot. - classes = computed_classes_by_source.get(source, []) # Classes are absolute paths. 
- if classes_by_target is not None: - target_products.add_abs_paths(self._classes_dir, classes) - if classes_by_source is not None: - classes_by_source[source].add_abs_paths(self._classes_dir, classes) - - if resources_by_target is not None: - for target in targets: - target_resources = resources_by_target[target] - for root, abs_paths in self.extra_products(target): - target_resources.add_abs_paths(root, abs_paths) diff --git a/src/python/twitter/pants/tasks/jvm_compile/jvm_dependency_analyzer.py b/src/python/twitter/pants/tasks/jvm_compile/jvm_dependency_analyzer.py deleted file mode 100644 index 0b6b11ed3..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/jvm_dependency_analyzer.py +++ /dev/null @@ -1,235 +0,0 @@ - -import os -from collections import defaultdict - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.jar_dependency import JarDependency -from twitter.pants.targets.jar_library import JarLibrary -from twitter.pants.targets.jvm_target import JvmTarget -from twitter.pants.tasks import TaskError, Task - - -class JvmDependencyAnalyzer(object): - def __init__(self, - context, - check_missing_deps, - check_missing_direct_deps, - check_unnecessary_deps): - - self._context = context - self._context.products.require_data('classes_by_target') - self._context.products.require_data('ivy_jar_products') - - self._check_missing_deps = check_missing_deps - self._check_missing_direct_deps = check_missing_direct_deps - self._check_unnecessary_deps = check_unnecessary_deps - - def _compute_targets_by_file(self): - """Returns a map from abs path of source, class or jar file to an OrderedSet of targets. - - The value is usually a singleton, because a source or class file belongs to a single target. - However a single jar may be provided (transitively or intransitively) by multiple JarLibrary - targets. But if there is a JarLibrary target that depends on a jar directly, then that - "canonical" target will be the first one in the list of targets. - """ - targets_by_file = defaultdict(OrderedSet) - - # Multiple JarLibrary targets can provide the same (org, name). - jarlibs_by_id = defaultdict(set) - - # Compute src -> target. - with self._context.new_workunit(name='map_sources'): - buildroot = get_buildroot() - # Look at all targets in-play for this pants run. Does not include synthetic targets, - for target in self._context.targets(): - if isinstance(target, JvmTarget): - for src in target.sources_relative_to_buildroot(): - targets_by_file[os.path.join(buildroot, src)].add(target) - elif isinstance(target, JarLibrary): - for jardep in target.dependencies: - if isinstance(jardep, JarDependency): - jarlibs_by_id[(jardep.org, jardep.name)].add(target) - - # Compute class -> target. - with self._context.new_workunit(name='map_classes'): - classes_by_target = self._context.products.get_data('classes_by_target') - for tgt, target_products in classes_by_target.items(): - for _, classes in target_products.abs_paths(): - for cls in classes: - targets_by_file[cls].add(tgt) - - # Compute jar -> target. - with self._context.new_workunit(name='map_jars'): - with Task.symlink_map_lock: - all_symlinks_map = self._context.products.get_data('symlink_map').copy() - # We make a copy, so it's safe to use outside the lock. 
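# A minimal sketch of the 'canonical target first' ordering described in the
# docstring above. A plain list stands in for twitter.common's OrderedSet here;
# only its first-insertion-wins ordering matters. All names are hypothetical.
from collections import defaultdict

targets_by_file_sketch = defaultdict(list)

def add_provider(path, target):
  if target not in targets_by_file_sketch[path]:
    targets_by_file_sketch[path].append(target)

add_provider('/ivy/guava.jar', 'jarlib_direct')      # a direct dep registers first
add_provider('/ivy/guava.jar', 'jarlib_transitive')  # later providers queue behind it
assert next(iter(targets_by_file_sketch['/ivy/guava.jar'])) == 'jarlib_direct'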
- - def register_transitive_jars_for_ref(ivyinfo, ref): - deps_by_ref_memo = {} - - def get_transitive_jars_by_ref(ref1, visited=None): - if ref1 in deps_by_ref_memo: - return deps_by_ref_memo[ref1] - else: - visited = visited or set() - if ref1 in visited: - return set() # Ivy allows circular deps. - visited.add(ref1) - jars = set() - jars.update(ivyinfo.modules_by_ref[ref1].artifacts) - for dep in ivyinfo.deps_by_caller.get(ref1, []): - jars.update(get_transitive_jars_by_ref(dep, visited)) - deps_by_ref_memo[ref1] = jars - return jars - - target_key = (ref.org, ref.name) - if target_key in jarlibs_by_id: - # These targets provide all the jars in ref, and all the jars ref transitively depends on. - jarlib_targets = jarlibs_by_id[target_key] - - for jar in get_transitive_jars_by_ref(ref): - # Register that each jarlib_target provides jar (via all its symlinks). - symlinks = all_symlinks_map.get(os.path.realpath(jar.path), []) - for symlink in symlinks: - for jarlib_target in jarlib_targets: - targets_by_file[symlink].add(jarlib_target) - - ivy_products = self._context.products.get_data('ivy_jar_products') - if ivy_products: - for ivyinfos in ivy_products.values(): - for ivyinfo in ivyinfos: - for ref in ivyinfo.modules_by_ref: - register_transitive_jars_for_ref(ivyinfo, ref) - - return targets_by_file - - def _compute_transitive_deps_by_target(self): - """Map from target to all the targets it depends on, transitively.""" - # Sort from least to most dependent. - sorted_targets = reversed(InternalTarget.sort_targets(self._context.targets())) - transitive_deps_by_target = defaultdict(set) - # Iterate in dep order, to accumulate the transitive deps for each target. - for target in sorted_targets: - transitive_deps = set() - if hasattr(target, 'dependencies'): - for dep in target.dependencies: - transitive_deps.update(transitive_deps_by_target.get(dep, [])) - transitive_deps.add(dep) - transitive_deps_by_target[target] = transitive_deps - return transitive_deps_by_target - - def check(self, srcs, actual_deps): - """Check for missing deps. - - See docstring for _compute_missing_deps for details. - """ - if self._check_missing_deps or self._check_missing_direct_deps or self._check_unnecessary_deps: - missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps = \ - self._compute_missing_deps(srcs, actual_deps) - - buildroot = get_buildroot() - def shorten(path): # Make the output easier to read. 
-        for prefix in [buildroot, self._context.ivy_home]:
-          if path.startswith(prefix):
-            return os.path.relpath(path, prefix)
-        return path
-
-      if self._check_missing_deps and (missing_file_deps or missing_tgt_deps):
-        for (tgt_pair, evidence) in missing_tgt_deps:
-          evidence_str = '\n'.join(['  %s uses %s' % (shorten(e[0]), shorten(e[1]))
-                                    for e in evidence])
-          self._context.log.error(
-              'Missing BUILD dependency %s -> %s because:\n%s'
-              % (tgt_pair[0].address.reference(), tgt_pair[1].address.reference(), evidence_str))
-        for (src_tgt, dep) in missing_file_deps:
-          self._context.log.error('Missing BUILD dependency %s -> %s'
-                                  % (src_tgt.address.reference(), shorten(dep)))
-        if self._check_missing_deps == 'fatal':
-          raise TaskError('Missing deps.')
-
-      if self._check_missing_direct_deps:
-        for (tgt_pair, evidence) in missing_direct_tgt_deps:
-          evidence_str = '\n'.join(['  %s uses %s' % (shorten(e[0]), shorten(e[1]))
-                                    for e in evidence])
-          self._context.log.warn('Missing direct BUILD dependency %s -> %s because:\n%s' %
-                                 (tgt_pair[0].address, tgt_pair[1].address, evidence_str))
-        if self._check_missing_direct_deps == 'fatal':
-          raise TaskError('Missing direct deps.')
-
-      if self._check_unnecessary_deps:
-        raise TaskError('Unnecessary dep warnings not implemented yet.')
-
-  def _compute_missing_deps(self, srcs, actual_deps):
-    """Computes deps that are used by the compiler but not specified in a BUILD file.
-
-    These deps are bugs waiting to happen: the code may happen to compile because the dep was
-    brought in some other way (e.g., by some other root target), but that is obviously fragile.
-
-    Note that in practice we're OK with reliance on indirect deps that are only brought in
-    transitively. E.g., in Scala type inference can bring in such a dep subtly. Fortunately these
-    cases aren't as fragile as a completely missing dependency. It's still a good idea to have
-    explicit direct deps where relevant, so we optionally warn about indirect deps, to make them
-    easy to find and reason about.
-
-    - actual_deps: a map src -> list of actual deps (source, class or jar file) as noted by the
-      compiler.
-
-    Returns a triple (missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps) where:
-
-    - missing_file_deps: a list of pairs (src_tgt, dep_file) where src_tgt requires dep_file, and
-      we're unable to map it to a target (because its target isn't in the total set of targets in
-      play, and we don't want to parse every BUILD file in the workspace just to find it).
-
-    - missing_tgt_deps: a list of pairs (src_tgt, dep_tgt) where src_tgt is missing a necessary
-      transitive dependency on dep_tgt.
-
-    - missing_direct_tgt_deps: a list of pairs (src_tgt, dep_tgt) where src_tgt is missing a direct
-      dependency on dep_tgt but has a transitive dep on it.
-
-    All paths in the input and output are absolute.
-    """
-    def must_be_explicit_dep(dep):
-      # We don't require explicit deps on the java runtime, so we shouldn't consider that
-      # a missing dep.
-      return not dep.startswith(self._context.java_home)
-
-    # TODO: If recomputing these every time becomes a performance issue, memoize for
-    # already-seen targets and incrementally compute for new targets not seen in a previous
-    # partition, in this or a previous chunk.
-    targets_by_file = self._compute_targets_by_file()
-    transitive_deps_by_target = self._compute_transitive_deps_by_target()
-
-    # Find deps that are actual but not specified.
-    with self._context.new_workunit(name='scan_deps'):
-      missing_file_deps = OrderedSet()  # (src_tgt, dep_file).
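# A minimal sketch of the classification the scan below performs, assuming plain
# sets and a simplified direct_deps map in place of the real target objects and
# BUILD-file dependency lists used in this method:
def classify_dep(src_tgt, dep_file, targets_by_file, transitive_deps_by_target, direct_deps):
  dep_tgts = targets_by_file.get(dep_file)
  if dep_tgts is None:
    return 'missing_file_dep'          # Can't map the file to any known target.
  if src_tgt in dep_tgts:
    return None                        # Intra-target deps are always fine.
  if dep_tgts.isdisjoint(transitive_deps_by_target.get(src_tgt, set())):
    return 'missing_tgt_dep'           # No transitive BUILD path from src_tgt at all.
  if dep_tgts.isdisjoint(direct_deps.get(src_tgt, set())):
    return 'missing_direct_tgt_dep'    # Reachable, but only transitively.
  return None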
- missing_tgt_deps_map = defaultdict(list) # (tgt, tgt) -> a list of (src, src) as evidence. - missing_direct_tgt_deps_map = defaultdict(list) # The same, but for direct deps. - - buildroot = get_buildroot() - abs_srcs = [os.path.join(buildroot, src) for src in srcs] - for src in abs_srcs: - src_tgt = next(iter(targets_by_file.get(src))) - if src_tgt is not None: - for actual_dep in filter(must_be_explicit_dep, actual_deps.get(src, [])): - actual_dep_tgts = targets_by_file.get(actual_dep) - # actual_dep_tgts is usually a singleton. If it's not, we only need one of these - # to be in our declared deps to be OK. - if actual_dep_tgts is None: - missing_file_deps.add((src_tgt, actual_dep)) - elif src_tgt not in actual_dep_tgts: # Obviously intra-target deps are fine. - canonical_actual_dep_tgt = next(iter(actual_dep_tgts)) - if actual_dep_tgts.isdisjoint(transitive_deps_by_target.get(src_tgt, [])): - missing_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append((src, actual_dep)) - elif canonical_actual_dep_tgt not in src_tgt.dependencies: - # The canonical dep is the only one a direct dependency makes sense on. - missing_direct_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append( - (src, actual_dep)) - else: - raise TaskError('Requested dep info for unknown source file: %s' % src) - - return (list(missing_file_deps), - missing_tgt_deps_map.items(), - missing_direct_tgt_deps_map.items()) diff --git a/src/python/twitter/pants/tasks/jvm_compile/scala/__init__.py b/src/python/twitter/pants/tasks/jvm_compile/scala/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/scala/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/python/twitter/pants/tasks/jvm_compile/scala/scala_compile.py b/src/python/twitter/pants/tasks/jvm_compile/scala/scala_compile.py deleted file mode 100644 index 33427e1a9..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/scala/scala_compile.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.tasks.jvm_compile.analysis_tools import AnalysisTools -from twitter.pants.tasks.jvm_compile.jvm_compile import JvmCompile -from twitter.pants.tasks.jvm_compile.scala.zinc_analysis import ZincAnalysis -from twitter.pants.tasks.jvm_compile.scala.zinc_analysis_parser import ZincAnalysisParser -from twitter.pants.tasks.jvm_compile.scala.zinc_utils import ZincUtils - - -class ScalaCompile(JvmCompile): - _language = 'scala' - _file_suffix = '.scala' - _config_section = 'scala-compile' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - JvmCompile.setup_parser(ScalaCompile, option_group, args, mkflag) - - option_group.add_option(mkflag('plugins'), dest='plugins', default=None, - action='append', help='Use these scalac plugins. Default is set in pants.ini.') - - def __init__(self, context): - JvmCompile.__init__(self, context, jdk=False) - - # Set up the zinc utils. - color = not context.options.no_color - self._zinc_utils = ZincUtils(context=context, - nailgun_task=self, - jvm_options = self._jvm_options, - color=color, - jvm_tool_bootstrapper=self._jvm_tool_bootstrapper) - - # If we are compiling scala libraries with circular deps on java libraries we need to - # make sure those cycle deps are present. 
- for scala_target in self.context.targets(lambda t: isinstance(t, ScalaLibrary)): - for java_target in scala_target.java_sources: - self.context.add_target(java_target) - - def create_analysis_tools(self): - return AnalysisTools(self.context, ZincAnalysisParser(self._classes_dir), ZincAnalysis) - - def extra_classpath_elements(self): - # Classpath entries necessary for our compiler plugins. - return self._zinc_utils.plugin_jars() - - def extra_products(self, target): - ret = [] - if target.is_scalac_plugin and target.classname: - root, plugin_info_file = ZincUtils.write_plugin_info(self._resources_dir, target) - ret.append((root, [plugin_info_file])) - return ret - - def compile(self, args, classpath, sources, classes_output_dir, analysis_file): - # We have to treat our output dir as an upstream element, so zinc can find valid - # analysis for previous partitions. We use the global valid analysis for the upstream. - upstream = { classes_output_dir: self._analysis_file } if os.path.exists(self._analysis_file) else {} - return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources, - classes_output_dir, analysis_file, upstream) diff --git a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis.py b/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis.py deleted file mode 100644 index 4e91900ae..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis.py +++ /dev/null @@ -1,383 +0,0 @@ -from collections import defaultdict -import json -import os -import re -import itertools -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.tasks.jvm_compile.analysis import Analysis - - -class ZincAnalysisElement(object): - """Encapsulates one part of the analysis. - - Subclasses specify which section headers comprise this part. Note that data in these objects is - just text, possibly split on lines or '->'. - """ - headers = () # Override in subclasses. - - @classmethod - def from_json_obj(cls, obj): - return cls([obj[header] for header in cls.headers]) - - def __init__(self, args): - # Subclasses can alias the elements of self.args in their own __init__, for convenience. - self.args = args - - def write(self, outfile, inline_vals=True, rebasings=None): - self._write_multiple_sections(outfile, self.headers, self.args, inline_vals, rebasings) - - def _write_multiple_sections(self, outfile, headers, reps, inline_vals=True, rebasings=None): - """Write multiple sections.""" - for header, rep in zip(headers, reps): - self._write_section(outfile, header, rep, inline_vals, rebasings) - - def _write_section(self, outfile, header, rep, inline_vals=True, rebasings=None): - """Write a single section. - - Items are sorted, for ease of testing. - """ - def rebase(txt): - for rebase_from, rebase_to in rebasings: - if rebase_to is None: - if rebase_from in txt: - return None - else: - txt = txt.replace(rebase_from, rebase_to) - return txt - - rebasings = rebasings or [] - items = [] - for k, vals in rep.iteritems(): - for v in vals: - item = rebase('%s -> %s%s' % (k, '' if inline_vals else '\n', v)) - if item: - items.append(item) - items.sort() - outfile.write(header + ':\n') - outfile.write('%d items\n' % len(items)) - for item in items: - outfile.write(item) - outfile.write('\n') - - -class ZincAnalysis(Analysis): - """Parsed representation of a zinc analysis. - - Note also that all files in keys/values are full-path, just as they appear in the analysis file. 
- If you want paths relative to the build root or the classes dir or whatever, you must compute - those yourself. - """ - - # Implementation of class method required by Analysis. - - FORMAT_VERSION_LINE = 'format version: 4\n' - - @staticmethod - def merge_dicts(dicts): - """Merges multiple dicts into one. - - Assumes keys don't overlap. - """ - ret = defaultdict(list) - for d in dicts: - ret.update(d) - return ret - - @classmethod - def merge(cls, analyses): - # Note: correctly handles "internalizing" external deps that must be internal post-merge. - - # Merge relations. - src_prod = ZincAnalysis.merge_dicts([a.relations.src_prod for a in analyses]) - binary_dep = ZincAnalysis.merge_dicts([a.relations.binary_dep for a in analyses]) - classes = ZincAnalysis.merge_dicts([a.relations.classes for a in analyses]) - used = ZincAnalysis.merge_dicts([a.relations.used for a in analyses]) - - class_to_source = dict((v, k) for k, vs in classes.iteritems() for v in vs) - - def merge_dependencies(internals, externals): - internal = ZincAnalysis.merge_dicts(internals) - naive_external = ZincAnalysis.merge_dicts(externals) - external = defaultdict(list) - for k, vs in naive_external.iteritems(): - for v in vs: - vfile = class_to_source.get(v) - if vfile and vfile in src_prod: - internal[k].append(vfile) # Internalized. - else: - external[k].append(v) # Remains external. - return internal, external - - internal, external = merge_dependencies( - [a.relations.internal_src_dep for a in analyses], - [a.relations.external_dep for a in analyses]) - - internal_pi, external_pi = merge_dependencies( - [a.relations.internal_src_dep_pi for a in analyses], - [a.relations.external_dep_pi for a in analyses]) - - member_ref_internal, member_ref_external = merge_dependencies( - [a.relations.member_ref_internal_dep for a in analyses], - [a.relations.member_ref_external_dep for a in analyses]) - - inheritance_internal, inheritance_external = merge_dependencies( - [a.relations.inheritance_internal_dep for a in analyses], - [a.relations.inheritance_external_dep for a in analyses]) - - relations = Relations((src_prod, binary_dep, - internal, external, - internal_pi, external_pi, - member_ref_internal, member_ref_external, - inheritance_internal, inheritance_external, - classes, used)) - - # Merge stamps. - products = ZincAnalysis.merge_dicts([a.stamps.products for a in analyses]) - sources = ZincAnalysis.merge_dicts([a.stamps.sources for a in analyses]) - binaries = ZincAnalysis.merge_dicts([a.stamps.binaries for a in analyses]) - classnames = ZincAnalysis.merge_dicts([a.stamps.classnames for a in analyses]) - stamps = Stamps((products, sources, binaries, classnames)) - - # Merge APIs. - internal_apis = ZincAnalysis.merge_dicts([a.apis.internal for a in analyses]) - naive_external_apis = ZincAnalysis.merge_dicts([a.apis.external for a in analyses]) - external_apis = defaultdict(list) - for k, vs in naive_external_apis.iteritems(): - kfile = class_to_source.get(k) - if kfile and kfile in src_prod: - internal_apis[kfile] = vs # Internalized. - else: - external_apis[k] = vs # Remains external. - apis = APIs((internal_apis, external_apis)) - - # Merge source infos. - source_infos = SourceInfos((ZincAnalysis.merge_dicts([a.source_infos.source_infos for a in analyses]), )) - - # Merge compilations. 
-    compilation_vals = sorted(set([x[0] for a in analyses for x in a.compilations.compilations.itervalues()]))
-    compilations_dict = defaultdict(list)
-    for i, v in enumerate(compilation_vals):
-      compilations_dict['%03d' % i] = [v]
-    compilations = Compilations((compilations_dict, ))
-
-    compile_setup = analyses[0].compile_setup if len(analyses) > 0 else CompileSetup((defaultdict(list), ))
-    return ZincAnalysis(relations, stamps, apis, source_infos, compilations, compile_setup)
-
-  def __init__(self, relations, stamps, apis, source_infos, compilations, compile_setup):
-    (self.relations, self.stamps, self.apis, self.source_infos, self.compilations, self.compile_setup) = \
-      (relations, stamps, apis, source_infos, compilations, compile_setup)
-
-  # Implementation of methods required by Analysis.
-
-  def split(self, splits, catchall=False):
-    # Note: correctly handles "externalizing" internal deps that must be external post-split.
-    buildroot = get_buildroot()
-    splits = [set([s if os.path.isabs(s) else os.path.join(buildroot, s) for s in x]) for x in splits]
-    if catchall:
-      # Even empty sources with no products have stamps.
-      remainder_sources = set(self.stamps.sources.keys()).difference(*splits)
-      splits.append(remainder_sources)  # The catch-all
-
-    # Split relations.
-    src_prod_splits = self._split_dict(self.relations.src_prod, splits)
-    binary_dep_splits = self._split_dict(self.relations.binary_dep, splits)
-    classes_splits = self._split_dict(self.relations.classes, splits)
-
-    # For historical reasons, external deps are specified as src->class while internal deps are
-    # specified as src->src. So we pick a representative class for each src.
-    representatives = dict((k, min(vs)) for k, vs in self.relations.classes.iteritems())
-
-    def split_dependencies(all_internal, all_external):
-      naive_internals = self._split_dict(all_internal, splits)
-      naive_externals = self._split_dict(all_external, splits)
-
-      internals = []
-      externals = []
-      for naive_internal, external, split in zip(naive_internals, naive_externals, splits):
-        internal = defaultdict(list)
-        for k, vs in naive_internal.iteritems():
-          for v in vs:
-            if v in split:
-              internal[k].append(v)  # Remains internal.
-            else:
-              external[k].append(representatives[v])  # Externalized.
-        internals.append(internal)
-        externals.append(external)
-      return internals, externals
-
-    internal_splits, external_splits = \
-      split_dependencies(self.relations.internal_src_dep, self.relations.external_dep)
-    internal_pi_splits, external_pi_splits = \
-      split_dependencies(self.relations.internal_src_dep_pi, self.relations.external_dep_pi)
-
-    member_ref_internal_splits, member_ref_external_splits = \
-      split_dependencies(self.relations.member_ref_internal_dep, self.relations.member_ref_external_dep)
-    inheritance_internal_splits, inheritance_external_splits = \
-      split_dependencies(self.relations.inheritance_internal_dep, self.relations.inheritance_external_dep)
-    used_splits = self._split_dict(self.relations.used, splits)
-
-    relations_splits = []
-    for args in zip(src_prod_splits, binary_dep_splits,
-                    internal_splits, external_splits,
-                    internal_pi_splits, external_pi_splits,
-                    member_ref_internal_splits, member_ref_external_splits,
-                    inheritance_internal_splits, inheritance_external_splits,
-                    classes_splits, used_splits):
-      relations_splits.append(Relations(args))
-
-    # Split stamps.
- stamps_splits = [] - for src_prod, binary_dep, split in zip(src_prod_splits, binary_dep_splits, splits): - products_set = set(itertools.chain(*src_prod.values())) - binaries_set = set(itertools.chain(*binary_dep.values())) - products = dict((k, v) for k, v in self.stamps.products.iteritems() if k in products_set) - sources = dict((k, v) for k, v in self.stamps.sources.iteritems() if k in split) - binaries = dict((k, v) for k, v in self.stamps.binaries.iteritems() if k in binaries_set) - classnames = dict((k, v) for k, v in self.stamps.classnames.iteritems() if k in binaries_set) - stamps_splits.append(Stamps((products, sources, binaries, classnames))) - - # Split apis. - - # The splits, but expressed via class representatives of the sources (see above). - representative_splits = [filter(None, [representatives.get(s) for s in srcs]) for srcs in splits] - representative_to_internal_api = {} - for src, rep in representatives.items(): - representative_to_internal_api[rep] = self.apis.internal.get(src) - - # Note that the keys in self.apis.external are classes, not sources. - internal_api_splits = self._split_dict(self.apis.internal, splits) - external_api_splits = self._split_dict(self.apis.external, representative_splits) - - # All externalized deps require a copy of the relevant api. - for external, external_api in zip(external_splits, external_api_splits): - for vs in external.values(): - for v in vs: - if v in representative_to_internal_api: - external_api[v] = representative_to_internal_api[v] - - apis_splits = [] - for args in zip(internal_api_splits, external_api_splits): - apis_splits.append(APIs(args)) - - # Split source infos. - source_info_splits = \ - [SourceInfos((x, )) for x in self._split_dict(self.source_infos.source_infos, splits)] - - analyses = [] - for relations, stamps, apis, source_infos in zip(relations_splits, stamps_splits, apis_splits, source_info_splits): - analyses.append(ZincAnalysis(relations, stamps, apis, source_infos, self.compilations, self.compile_setup)) - - return analyses - - def write(self, outfile, rebasings=None): - outfile.write(ZincAnalysis.FORMAT_VERSION_LINE) - self.relations.write(outfile, rebasings=rebasings) - self.stamps.write(outfile, rebasings=rebasings) - self.apis.write(outfile, inline_vals=False, rebasings=rebasings) - self.source_infos.write(outfile, inline_vals=False, rebasings=rebasings) - self.compilations.write(outfile, inline_vals=True, rebasings=rebasings) - self.compile_setup.write(outfile, inline_vals=True, rebasings=rebasings) - - # Extra methods re json. - - def write_json_to_path(self, outfile_path): - with open(outfile_path, 'w') as outfile: - self.write_json(outfile) - - def write_json(self, outfile): - obj = dict(zip(('relations', 'stamps', 'apis', 'source_infos', 'compilations', 'compile_setup'), - (self.relations, self.stamps, self.apis, self.source_infos, self.compilations, self.compile_setup))) - json.dump(obj, outfile, cls=ZincAnalysisJSONEncoder, sort_keys=True, indent=2) - - def _split_dict(self, d, splits): - """Split a dict by its keys. - - splits: A list of lists of keys. - Returns one dict per split. - """ - ret = [] - for split in splits: - dict_split = defaultdict(list) - for f in split: - if f in d: - dict_split[f] = d[f] - ret.append(dict_split) - return ret - - -class Relations(ZincAnalysisElement): - headers = ('products', 'binary dependencies', - # TODO: The following 4 headers will go away after SBT completes the - # transition to the new headers (the 4 after that). 
- 'direct source dependencies', 'direct external dependencies', - 'public inherited source dependencies', 'public inherited external dependencies', - 'member reference internal dependencies', 'member reference external dependencies', - 'inheritance internal dependencies', 'inheritance external dependencies', - 'class names', 'used names') - - def __init__(self, args): - super(Relations, self).__init__(args) - (self.src_prod, self.binary_dep, - self.internal_src_dep, self.external_dep, - self.internal_src_dep_pi, self.external_dep_pi, - self.member_ref_internal_dep, self.member_ref_external_dep, - self.inheritance_internal_dep, self.inheritance_external_dep, - self.classes, self.used) = self.args - - -class Stamps(ZincAnalysisElement): - headers = ('product stamps', 'source stamps', 'binary stamps', 'class names') - - def __init__(self, args): - super(Stamps, self).__init__(args) - (self.products, self.sources, self.binaries, self.classnames) = self.args - - -class APIs(ZincAnalysisElement): - headers = ('internal apis', 'external apis') - - def __init__(self, args): - super(APIs, self).__init__(args) - (self.internal, self.external) = self.args - - -class SourceInfos(ZincAnalysisElement): - headers = ("source infos", ) - - def __init__(self, args): - super(SourceInfos, self).__init__(args) - (self.source_infos, ) = self.args - - -class Compilations(ZincAnalysisElement): - headers = ('compilations', ) - - def __init__(self, args): - super(Compilations, self).__init__(args) - (self.compilations, ) = self.args - - -class CompileSetup(ZincAnalysisElement): - headers = ('output mode', 'output directories','compile options','javac options', - 'compiler version', 'compile order') - - def __init__(self, args): - super(CompileSetup, self).__init__(args) - (self.output_mode, self.output_dirs, self.compile_options, self.javac_options, - self.compiler_version, self.compile_order) = self.args - - -class ZincAnalysisJSONEncoder(json.JSONEncoder): - """A custom encoder for writing analysis elements as JSON. - - Not currently used, but might be useful in the future, e.g., for creating javascript-y - analysis browsing tools. 
- """ - def default(self, obj): - if isinstance(obj, ZincAnalysisElement): - ret = {} - for h, a in zip(type(obj).headers, obj.args): - ret[h] = a - return ret - else: - super(ZincAnalysisJSONEncoder, self).default(obj) diff --git a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis_parser.py b/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis_parser.py deleted file mode 100644 index 99f4502d9..000000000 --- a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_analysis_parser.py +++ /dev/null @@ -1,120 +0,0 @@ -from collections import defaultdict -import json -import os -import re - -from twitter.pants.tasks.jvm_compile.analysis_parser import AnalysisParser, ParseError -from twitter.pants.tasks.jvm_compile.scala.zinc_analysis import ( - APIs, - Compilations, - CompileSetup, - Relations, - SourceInfos, - Stamps, - ZincAnalysis, -) - - -class ZincAnalysisParser(AnalysisParser): - """Parses a zinc analysis file.""" - - def empty_prefix(self): - return 'products:\n0 items\n' - - def parse(self, infile): - """Parse a ZincAnalysis instance from an open text file.""" - def parse_element(cls): - parsed_sections = [self._parse_section(infile, header) for header in cls.headers] - return cls(parsed_sections) - - self._verify_version(infile) - relations = parse_element(Relations) - stamps = parse_element(Stamps) - apis = parse_element(APIs) - source_infos = parse_element(SourceInfos) - compilations = parse_element(Compilations) - compile_setup = parse_element(CompileSetup) - return ZincAnalysis(relations, stamps, apis, source_infos, compilations, compile_setup) - - def parse_products(self, infile): - """An efficient parser of just the products section.""" - self._verify_version(infile) - return self._find_repeated_at_header(infile, 'products') - - def parse_deps(self, infile, classpath_indexer): - self._verify_version(infile) - # Note: relies on the fact that these headers appear in this order in the file. - bin_deps = self._find_repeated_at_header(infile, 'binary dependencies') - src_deps = self._find_repeated_at_header(infile, 'direct source dependencies') - ext_deps = self._find_repeated_at_header(infile, 'direct external dependencies') - - # TODO(benjy): Temporary hack until we inject a dep on the scala runtime jar. - scalalib_re = re.compile(r'scala-library-\d+\.\d+\.\d+\.jar$') - filtered_bin_deps = defaultdict(list) - for src, deps in bin_deps.iteritems(): - filtered_bin_deps[src] = filter(lambda x: scalalib_re.search(x) is None, deps) - - transformed_ext_deps = {} - def fqcn_to_path(fqcn): - return os.path.join(self.classes_dir, fqcn.replace('.', os.sep) + '.class') - for src, fqcns in ext_deps.items(): - transformed_ext_deps[src] = [fqcn_to_path(fqcn) for fqcn in fqcns] - - ret = defaultdict(list) - for d in [filtered_bin_deps, src_deps, transformed_ext_deps]: - ret.update(d) - return ret - - # Extra zinc-specific methods re json. 
-
-  def parse_json_from_path(self, infile_path):
-    """Parse a ZincAnalysis instance from a JSON file."""
-    with open(infile_path, 'r') as infile:
-      return self.parse_from_json(infile)
-
-  def parse_from_json(self, infile):
-    """Parse a ZincAnalysis instance from an open JSON file."""
-    obj = json.load(infile)
-    relations = Relations.from_json_obj(obj['relations'])
-    stamps = Stamps.from_json_obj(obj['stamps'])
-    apis = APIs.from_json_obj(obj['apis'])
-    source_infos = SourceInfos.from_json_obj(obj['source_infos'])
-    compilations = Compilations.from_json_obj(obj['compilations'])
-    compile_setup = CompileSetup.from_json_obj(obj['compile_setup'])
-    return ZincAnalysis(relations, stamps, apis, source_infos, compilations, compile_setup)
-
-  def _find_repeated_at_header(self, lines_iter, header):
-    header_line = header + ':\n'
-    while lines_iter.next() != header_line:
-      pass
-    return self._parse_section(lines_iter, expected_header=None)
-
-  def _verify_version(self, lines_iter):
-    version_line = lines_iter.next()
-    if version_line != ZincAnalysis.FORMAT_VERSION_LINE:
-      raise ParseError('Unrecognized version line: ' + version_line)
-
-  _num_items_re = re.compile(r'(\d+) items\n')
-
-  def _parse_num_items(self, lines_iter):
-    """Parse a line of the form '<num> items' and return the count as an int."""
-    line = lines_iter.next()
-    matchobj = self._num_items_re.match(line)
-    if not matchobj:
-      raise ParseError('Expected: "<num> items". Found: "%s"' % line)
-    return int(matchobj.group(1))
-
-  def _parse_section(self, lines_iter, expected_header=None):
-    """Parse a single section."""
-    if expected_header:
-      line = lines_iter.next()
-      if expected_header + ':\n' != line:
-        raise ParseError('Expected: "%s:". Found: "%s"' % (expected_header, line))
-    n = self._parse_num_items(lines_iter)
-    relation = defaultdict(list)  # Values are lists, to accommodate relations.
-    for i in xrange(n):
-      k, _, v = lines_iter.next().partition(' -> ')
-      if len(v) == 1:  # Value on its own line.
-        v = lines_iter.next()
-      relation[k].append(v[:-1])
-    return relation
diff --git a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_utils.py b/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_utils.py
deleted file mode 100644
index 6c73869f5..000000000
--- a/src/python/twitter/pants/tasks/jvm_compile/scala/zinc_utils.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ================================================================================================== - -import os -import textwrap - -from contextlib import closing -from itertools import chain -from xml.etree import ElementTree - -from twitter.common.collections import OrderedDict -from twitter.common.contextutil import open_zip as open_jar -from twitter.common.dirutil import safe_open - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.hash_utils import hash_file -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.tasks import TaskError - - -# Well known metadata file required to register scalac plugins with nsc. -_PLUGIN_INFO_FILE = 'scalac-plugin.xml' - -class ZincUtils(object): - """Convenient wrapper around zinc invocations. - - Instances are immutable, and all methods are reentrant (assuming that the java_runner is). - """ - _ZINC_MAIN = 'com.typesafe.zinc.Main' - - def __init__(self, context, nailgun_task, jvm_options, color, jvm_tool_bootstrapper): - self.context = context - self._nailgun_task = nailgun_task # We run zinc on this task's behalf. - self._jvm_options = jvm_options - self._color = color - self._jvm_tool_bootstrapper = jvm_tool_bootstrapper - - # The target scala version. - self._compile_bootstrap_key = 'scalac' - compile_bootstrap_tools = context.config.getlist('scala-compile', 'compile-bootstrap-tools', - default=[':scala-compile-2.9.3']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._compile_bootstrap_key, compile_bootstrap_tools) - - # The zinc version (and the scala version it needs, which may differ from the target version). - self._zinc_bootstrap_key = 'zinc' - zinc_bootstrap_tools = context.config.getlist('scala-compile', 'zinc-bootstrap-tools', default=[':zinc']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._zinc_bootstrap_key, zinc_bootstrap_tools) - - # Compiler plugins. - plugins_bootstrap_tools = context.config.getlist('scala-compile', 'scalac-plugin-bootstrap-tools', - default=[]) - if plugins_bootstrap_tools: - self._plugins_bootstrap_key = 'plugins' - self._jvm_tool_bootstrapper.register_jvm_tool(self._plugins_bootstrap_key, plugins_bootstrap_tools) - else: - self._plugins_bootstrap_key = None - - @property - def _zinc_classpath(self): - return self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._zinc_bootstrap_key) - - @property - def _compiler_classpath(self): - return self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._compile_bootstrap_key) - - @property - def _plugin_jars(self): - if self._plugins_bootstrap_key: - return self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._plugins_bootstrap_key) - else: - return [] - - @property - def _zinc_jar_args(self): - zinc_jars = ZincUtils.identify_zinc_jars(self._zinc_classpath) - # The zinc jar names are also the flag names. - return (list(chain.from_iterable([['-%s' % name, jarpath] - for (name, jarpath) in zinc_jars.items()])) + - ['-scala-path', ':'.join(self._compiler_classpath)]) - - def _plugin_args(self): - # Allow multiple flags and also comma-separated values in a single flag. 
-    if self.context.options.plugins is not None:
-      plugin_names = [p for val in self.context.options.plugins for p in val.split(',')]
-    else:
-      plugin_names = self.context.config.getlist('scala-compile', 'scalac-plugins', default=[])
-
-    plugin_args = self.context.config.getdict('scala-compile', 'scalac-plugin-args', default={})
-    active_plugins = self.find_plugins(plugin_names)
-
-    ret = []
-    for name, jar in active_plugins.items():
-      ret.append('-S-Xplugin:%s' % jar)
-      for arg in plugin_args.get(name, []):
-        ret.append('-S-P:%s:%s' % (name, arg))
-    return ret
-
-  def plugin_jars(self):
-    """The jars containing code for enabled plugins."""
-    return self._plugin_jars
-
-  def _run_zinc(self, args, workunit_name='zinc', workunit_labels=None):
-    zinc_args = [
-      '-log-level', self.context.options.log_level or 'info',
-    ]
-    if not self._color:
-      zinc_args.append('-no-color')
-    zinc_args.extend(self._zinc_jar_args)
-    zinc_args.extend(args)
-    return self._nailgun_task.runjava(classpath=self._zinc_classpath,
-                                      main=ZincUtils._ZINC_MAIN,
-                                      jvm_options=self._jvm_options,
-                                      args=zinc_args,
-                                      workunit_name=workunit_name,
-                                      workunit_labels=workunit_labels)
-
-  def compile(self, opts, classpath, sources, output_dir, analysis_file, upstream_analysis_files):
-    args = list(opts)  # Make a copy
-
-    args.extend(self._plugin_args())
-
-    if upstream_analysis_files:
-      args.extend(
-        ['-analysis-map', ','.join(['%s:%s' % kv for kv in upstream_analysis_files.items()])])
-
-    args.extend([
-      '-analysis-cache', analysis_file,
-      # We add compiler_classpath to ensure the scala-library jar is on the classpath.
-      # TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
-      # usually need. Be more selective?
-      '-classpath', ':'.join(self._compiler_classpath + classpath),
-      '-d', output_dir
-    ])
-    args.extend(sources)
-    self.log_zinc_file(analysis_file)
-    if self._run_zinc(args, workunit_labels=[WorkUnit.COMPILER]):
-      raise TaskError('Zinc compile failed.')
-
-  @staticmethod
-  def write_plugin_info(resources_dir, target):
-    root = os.path.join(resources_dir, target.id)
-    plugin_info_file = os.path.join(root, _PLUGIN_INFO_FILE)
-    with safe_open(plugin_info_file, 'w') as f:
-      f.write(textwrap.dedent('''
-        <plugin>
-          <name>%s</name>
-          <classname>%s</classname>
-        </plugin>
-      ''' % (target.plugin, target.classname)).strip())
-    return root, plugin_info_file
-
-  # These are the names of the various jars zinc needs. They are, conveniently and
-  # non-coincidentally, the names of the flags used to pass the jar locations to zinc.
-  zinc_jar_names = ['compiler-interface', 'sbt-interface']
-
-  @staticmethod
-  def identify_zinc_jars(zinc_classpath):
-    """Find the named jars in the zinc classpath.
-
-    TODO: Make these mappings explicit instead of deriving them by jar name heuristics.
- """ - ret = OrderedDict() - ret.update(ZincUtils.identify_jars(ZincUtils.zinc_jar_names, zinc_classpath)) - return ret - - @staticmethod - def identify_jars(names, jars): - jars_by_name = {} - jars_and_filenames = [(x, os.path.basename(x)) for x in jars] - - for name in names: - jar_for_name = None - for jar, filename in jars_and_filenames: - if filename.startswith(name): - jar_for_name = jar - break - if jar_for_name is None: - raise TaskError('Couldn\'t find jar named %s' % name) - else: - jars_by_name[name] = jar_for_name - return jars_by_name - - def find_plugins(self, plugin_names): - """Returns a map from plugin name to plugin jar.""" - plugin_names = set(plugin_names) - plugins = {} - buildroot = get_buildroot() - # plugin_jars is the universe of all possible plugins and their transitive deps. - # Here we select the ones to actually use. - for jar in self.plugin_jars(): - with open_jar(jar, 'r') as jarfile: - try: - with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file: - plugin_info = ElementTree.parse(plugin_info_file).getroot() - if plugin_info.tag != 'plugin': - raise TaskError( - 'File %s in %s is not a valid scalac plugin descriptor' % (_PLUGIN_INFO_FILE, jar)) - name = plugin_info.find('name').text - if name in plugin_names: - if name in plugins: - raise TaskError('Plugin %s defined in %s and in %s' % (name, plugins[name], jar)) - # It's important to use relative paths, as the compiler flags get embedded in the zinc - # analysis file, and we port those between systems via the artifact cache. - plugins[name] = os.path.relpath(jar, buildroot) - except KeyError: - pass - - unresolved_plugins = plugin_names - set(plugins.keys()) - if unresolved_plugins: - raise TaskError('Could not find requested plugins: %s' % list(unresolved_plugins)) - return plugins - - def log_zinc_file(self, analysis_file): - self.context.log.debug('Calling zinc on: %s (%s)' % (analysis_file, hash_file(analysis_file).upper() if os.path.exists(analysis_file) else 'nonexistent')) diff --git a/src/python/twitter/pants/tasks/jvm_run.py b/src/python/twitter/pants/tasks/jvm_run.py deleted file mode 100644 index 50db2dcd6..000000000 --- a/src/python/twitter/pants/tasks/jvm_run.py +++ /dev/null @@ -1,104 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import shlex - -from twitter.common.dirutil import safe_open -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.java.executor import CommandLineGrabber -from twitter.pants.targets.jvm_binary import JvmBinary -from twitter.pants.java.util import execute_java - -from .jvm_task import JvmTask - -from . 
import Task, TaskError - - -def is_binary(target): - return isinstance(target, JvmBinary) - - -class JvmRun(JvmTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("jvmargs"), dest = "run_jvmargs", action="append", - help = "Run binary in a jvm with these extra jvm args.") - - option_group.add_option(mkflag("args"), dest = "run_args", action="append", - help = "Run binary with these main() args.") - - option_group.add_option(mkflag("debug"), mkflag("debug", negate=True), dest = "run_debug", - action="callback", callback=mkflag.set_bool, default=False, - help = "[%default] Run binary with a debugger") - - option_group.add_option(mkflag('only-write-cmd-line'), dest = 'only_write_cmd_line', - action='store', default=None, - help = '[%default] Instead of running, just write the cmd line to this file') - - def __init__(self, context): - Task.__init__(self, context) - self.jvm_args = context.config.getlist('jvm-run', 'jvm_args', default=[]) - if context.options.run_jvmargs: - for arg in context.options.run_jvmargs: - self.jvm_args.extend(shlex.split(arg)) - self.args = [] - if context.options.run_args: - for arg in context.options.run_args: - self.args.extend(shlex.split(arg)) - if context.options.run_debug: - self.jvm_args.extend(context.config.getlist('jvm', 'debug_args')) - self.confs = context.config.getlist('jvm-run', 'confs', default=['default']) - self.only_write_cmd_line = context.options.only_write_cmd_line - context.products.require_data('exclusives_groups') - - def execute(self, targets): - # The called binary may block for a while, allow concurrent pants activity during this pants - # idle period. - # - # TODO(John Sirois): refactor lock so that I can do: - # with self.context.lock.yield(): - # - blocking code - # - # Currently re-acquiring the lock requires a path argument that was set up by the goal - # execution engine. I do not want task code to learn the lock location. - # http://jira.local.twitter.com/browse/AWESOME-1317 - - self.context.lock.release() - # Run the first target that is a binary. - binaries = filter(is_binary, targets) - if len(binaries) > 0: # We only run the first one. - main = binaries[0].main - egroups = self.context.products.get_data('exclusives_groups') - group_key = egroups.get_group_key_for_target(binaries[0]) - group_classpath = egroups.get_classpath_for_group(group_key) - - executor = CommandLineGrabber() if self.only_write_cmd_line else None - result = execute_java( - classpath=(self.classpath(confs=self.confs, exclusives_classpath=group_classpath)), - main=main, - executor=executor, - jvm_options=self.jvm_args, - args=self.args, - workunit_factory=self.context.new_workunit, - workunit_name='run', - workunit_labels=[WorkUnit.RUN] - ) - - if self.only_write_cmd_line: - with safe_open(self.only_write_cmd_line, 'w') as outfile: - outfile.write(' '.join(executor.cmd)) - elif result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (main, result), exit_code=result) diff --git a/src/python/twitter/pants/tasks/jvm_task.py b/src/python/twitter/pants/tasks/jvm_task.py deleted file mode 100644 index 791d0f354..000000000 --- a/src/python/twitter/pants/tasks/jvm_task.py +++ /dev/null @@ -1,52 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. 
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.tasks import Task
-
-
-class JvmTask(Task):
-  def get_base_classpath_for_target(self, target):
-    """Note: to use this method, the exclusives_groups data product must be available. This should
-    have been set by the prerequisite java/scala compile."""
-    egroups = self.context.products.get_data('exclusives_groups')
-    group_key = egroups.get_group_key_for_target(target)
-    return egroups.get_classpath_for_group(group_key)
-
-  def classpath(self, cp=None, confs=None, exclusives_classpath=None):
-    classpath = list(cp) if cp else []
-    exclusives_classpath = exclusives_classpath or []
-
-    classpath.extend(path for conf, path in exclusives_classpath if not confs or conf in confs)
-
-    def add_resource_paths(predicate):
-      bases = set()
-      for target in self.context.targets():
-        if predicate(target):
-          if target.target_base not in bases:
-            sibling_resources_base = os.path.join(os.path.dirname(target.target_base), 'resources')
-            classpath.append(os.path.join(get_buildroot(), sibling_resources_base))
-            bases.add(target.target_base)
-
-    if self.context.config.getbool('jvm', 'parallel_src_paths', default=False):
-      add_resource_paths(lambda t: t.is_jvm and not t.is_test)
-
-    if self.context.config.getbool('jvm', 'parallel_test_paths', default=False):
-      add_resource_paths(lambda t: t.is_jvm and t.is_test)
-
-    return classpath
diff --git a/src/python/twitter/pants/tasks/jvm_tool_bootstrapper.py b/src/python/twitter/pants/tasks/jvm_tool_bootstrapper.py
deleted file mode 100644
index cfc546062..000000000
--- a/src/python/twitter/pants/tasks/jvm_tool_bootstrapper.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.tasks.task_error import TaskError
-
-
-class JvmToolBootstrapper(object):
-  def __init__(self, products):
-    self._products = products
-
-  def get_jvm_tool_classpath(self, key, executor=None):
-    """Get a classpath for the tool previously registered under the key.
-
-    Returns a list of paths.
-    """
-    return self.get_lazy_jvm_tool_classpath(key, executor)()
-
-  def get_lazy_jvm_tool_classpath(self, key, executor=None):
-    """Get a lazy classpath for the tool previously registered under the key.
-
-    Returns a no-arg callable. Invoking it returns a list of paths.
-    """
-    callback_product_map = self._products.get_data('jvm_build_tools_classpath_callbacks') or {}
-    callback = callback_product_map.get(key)
-    if not callback:
-      raise TaskError('No bootstrap callback registered for %s' % key)
-    return lambda: callback(executor=executor)
-
-  def register_jvm_tool(self, key, tools):
-    """Register a list of targets against a key.
-
-    We can later use this key to get a callback that will resolve these targets.
-    Note: Not reentrant. We assume that all registration is done in the main thread.
-    """
-    self._products.require_data('jvm_build_tools_classpath_callbacks')
-    tool_product_map = self._products.get_data('jvm_build_tools') or {}
-    existing = tool_product_map.get(key)
-    # It's OK to re-register with the same value, but not to change the value.
-    if existing is not None:
-      if existing != tools:
-        raise TaskError('Attempting to change tools under %s from %s to %s.'
-                        % (key, existing, tools))
-    else:
-      tool_product_map[key] = tools
-      self._products.safe_create_data('jvm_build_tools', lambda: tool_product_map)
-
diff --git a/src/python/twitter/pants/tasks/jvmdoc_gen.py b/src/python/twitter/pants/tasks/jvmdoc_gen.py
deleted file mode 100644
index 0d984f4f2..000000000
--- a/src/python/twitter/pants/tasks/jvmdoc_gen.py
+++ /dev/null
@@ -1,237 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ================================================================================================== - -import collections -import contextlib -import multiprocessing -import os -import subprocess - -from twitter.common.dirutil import safe_mkdir - -from twitter.pants import binary_util -from twitter.pants.tasks import Task, TaskError - -Jvmdoc = collections.namedtuple('Jvmdoc', ['tool_name', 'product_type']) - -ParserConfig = collections.namedtuple( - 'JvmdocGenParserConfig', - ['outdir_opt', 'include_codegen_opt', 'transitive_opt', 'open_opt', 'combined_opt', - 'ignore_failure_opt']) - - -class JvmdocGen(Task): - @classmethod - def setup_parser_config(cls): - opts = ['%s_%s' % (cls.__name__, opt) for opt in ParserConfig._fields] - return ParserConfig(*opts) - - @classmethod - def generate_setup_parser(cls, option_group, args, mkflag, jvmdoc): - parser_config = cls.setup_parser_config() - option_group.add_option( - mkflag('outdir'), - dest=parser_config.outdir_opt, - help='Emit %s in this directory.' % jvmdoc.tool_name) - - option_group.add_option( - mkflag('include-codegen'), - mkflag('include-codegen', negate=True), - dest=parser_config.include_codegen_opt, - default=None, - action='callback', - callback=mkflag.set_bool, - help='[%%default] Create %s for generated code.' % jvmdoc.tool_name) - - option_group.add_option( - mkflag('transitive'), - mkflag('transitive', negate=True), - dest=parser_config.transitive_opt, - default=True, - action='callback', - callback=mkflag.set_bool, - help='[%%default] Create %s for the transitive closure of internal ' - 'targets reachable from the roots specified on the command line.' - % jvmdoc.tool_name) - - combined_flag = mkflag('combined') - option_group.add_option( - combined_flag, - mkflag('combined', negate=True), - dest=parser_config.combined_opt, - default=False, - action='callback', - callback=mkflag.set_bool, - help='[%%default] Generate %s for all targets combined instead of ' - 'each target individually.' - % jvmdoc.tool_name) - - option_group.add_option( - mkflag('open'), - mkflag('open', negate=True), - dest=parser_config.open_opt, - default=False, - action='callback', - callback=mkflag.set_bool, - help='[%%default] Attempt to open the generated %s in a browser ' - '(implies %s).' 
           % (jvmdoc.tool_name, combined_flag))
-
-    option_group.add_option(
-      mkflag('ignore-failure'),
-      mkflag('ignore-failure', negate=True),
-      dest=parser_config.ignore_failure_opt,
-      default=False,
-      action='callback',
-      callback=mkflag.set_bool,
-      help='Specifies that %s errors should not cause build errors'
-           % jvmdoc.tool_name)
-
-  def __init__(self, context, jvmdoc, output_dir, confs, active):
-    def getattr_options(option):
-      return getattr(context.options, option)
-
-    super(JvmdocGen, self).__init__(context)
-
-    self._jvmdoc = jvmdoc
-    jvmdoc_tool_name = self._jvmdoc.tool_name
-
-    config_section = '%s-gen' % jvmdoc_tool_name
-    parser_config = self.setup_parser_config()
-
-    pants_workdir = context.config.getdefault('pants_workdir')
-    self._output_dir = (
-      output_dir
-      or getattr_options(parser_config.outdir_opt)
-      or context.config.get(config_section,
-                            'workdir',
-                            default=os.path.join(pants_workdir, jvmdoc_tool_name))
-    )
-
-    flagged_codegen = getattr_options(parser_config.include_codegen_opt)
-    self._include_codegen = (flagged_codegen if flagged_codegen is not None
-                             else context.config.getbool(config_section, 'include_codegen',
-                                                         default=False))
-
-    self.transitive = getattr_options(parser_config.transitive_opt)
-    self.confs = confs or context.config.getlist(config_section, 'confs', default=['default'])
-    self.active = active
-    self.open = getattr_options(parser_config.open_opt)
-    self.combined = self.open or getattr_options(parser_config.combined_opt)
-    self.ignore_failure = getattr_options(parser_config.ignore_failure_opt)
-
-  def invalidate_for(self):
-    return (self.combined, self.transitive, self._output_dir, self.confs, self._include_codegen)
-
-  def generate_execute(self, targets, language_predicate, create_jvmdoc_command):
-    """
-    Generate an execute method given a language predicate and command to create documentation
-
-    language_predicate: a function that accepts a target and returns True if the target is of that
-      language
-    create_jvmdoc_command: (classpath, directory, *targets) -> command (string) that will generate
-      documentation for targets
-    """
-    catalog = self.context.products.isrequired(self._jvmdoc.product_type)
-    if catalog and self.combined:
-      raise TaskError(
-        'Cannot provide %s target mappings for combined output' % self._jvmdoc.product_type)
-    elif catalog or self.active:
-      def docable(target):
-        return language_predicate(target) and (self._include_codegen or not target.is_codegen)
-
-      with self.invalidated(filter(docable, targets)) as invalidation_check:
-        safe_mkdir(self._output_dir)
-        with self.context.state('classpath', []) as cp:
-          classpath = [jar for conf, jar in cp if conf in self.confs]
-
-          def find_jvmdoc_targets():
-            invalid_targets = set()
-            for vt in invalidation_check.invalid_vts:
-              invalid_targets.update(vt.targets)
-
-            if self.transitive:
-              return invalid_targets
-            else:
-              return set(invalid_targets).intersection(set(self.context.target_roots))
-
-          jvmdoc_targets = list(filter(docable, find_jvmdoc_targets()))
-          if self.combined:
-            self._generate_combined(classpath, jvmdoc_targets, create_jvmdoc_command)
-          else:
-            self._generate_individual(classpath, jvmdoc_targets, create_jvmdoc_command)
-
-      if catalog:
-        for target in targets:
-          gendir = self._gendir(target)
-          jvmdocs = []
-          for root, dirs, files in os.walk(gendir):
-            jvmdocs.extend(os.path.relpath(os.path.join(root, f), gendir) for f in files)
-          self.context.products.get(self._jvmdoc.product_type).add(target, gendir, jvmdocs)
-
-  def _generate_combined(self, classpath, targets,
create_jvmdoc_command): - gendir = os.path.join(self._output_dir, 'combined') - if targets: - safe_mkdir(gendir, clean=True) - command = create_jvmdoc_command(classpath, gendir, *targets) - if command: - create_jvmdoc(command, gendir) - if self.open: - binary_util.ui_open(os.path.join(gendir, 'index.html')) - - def _generate_individual(self, classpath, targets, create_jvmdoc_command): - jobs = {} - for target in targets: - gendir = self._gendir(target) - command = create_jvmdoc_command(classpath, gendir, target) - if command: - jobs[gendir] = (target, command) - - if jobs: - with contextlib.closing( - multiprocessing.Pool(processes=min(len(jobs), multiprocessing.cpu_count()))) as pool: - # map would be a preferable api here but fails after the 1st batch with an internal: - # ... - # File "...src/python/twitter/pants/tasks/jar_create.py", line 170, in javadocjar - # pool.map(createjar, jobs) - # File "...lib/python2.6/multiprocessing/pool.py", line 148, in map - # return self.map_async(func, iterable, chunksize).get() - # File "...lib/python2.6/multiprocessing/pool.py", line 422, in get - # raise self._value - # NameError: global name 'self' is not defined - futures = [] - for gendir, (target, command) in jobs.items(): - futures.append(pool.apply_async(create_jvmdoc, args=(command, gendir))) - - for future in futures: - result, gendir = future.get() - target, command = jobs[gendir] - if result != 0: - message = 'Failed to process %s for %s [%d]: %s' % ( - self._jvmdoc.tool_name, target, result, command) - if self.ignore_failure: - self.context.log.warn(message) - else: - raise TaskError(message) - - def _gendir(self, target): - return os.path.join(self._output_dir, target.id) - - -def create_jvmdoc(command, gendir): - safe_mkdir(gendir, clean=True) - process = subprocess.Popen(command) - result = process.wait() - return result, gendir diff --git a/src/python/twitter/pants/tasks/list_goals.py b/src/python/twitter/pants/tasks/list_goals.py deleted file mode 100644 index db2f22f74..000000000 --- a/src/python/twitter/pants/tasks/list_goals.py +++ /dev/null @@ -1,98 +0,0 @@ -# ============================================================================= -# Copyright 2014 Twitter, Inc. -# ----------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================= - -from twitter.pants.goal import Phase - -from .console_task import ConsoleTask - - -class ListGoals(ConsoleTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(ListGoals, cls).setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("all"), - dest="goal_list_all", - default=False, - action="store_true", - help="[%default] List all goals even if no description is available.") - option_group.add_option(mkflag('graph'), - dest='goal_list_graph', - action='store_true', - help='[%default] Generate a graphviz graph of installed goals.') - - def console_output(self, targets): - def report(): - yield 'Installed goals:' - documented_rows = [] - undocumented = [] - max_width = 0 - for phase, _ in Phase.all(): - if phase.description: - documented_rows.append((phase.name, phase.description)) - max_width = max(max_width, len(phase.name)) - elif self.context.options.goal_list_all: - undocumented.append(phase.name) - for name, description in documented_rows: - yield ' %s: %s' % (name.rjust(max_width), description) - if undocumented: - yield '' - yield 'Undocumented goals:' - yield ' %s' % ' '.join(undocumented) - - def graph(): - def get_cluster_name(phase): - return 'cluster_%s' % phase.name.replace('-', '_') - - def get_goal_name(phase, goal): - name = '%s_%s' % (phase.name, goal.name) - return name.replace('-', '_') - - phase_by_phasename = {} - for phase, goals in Phase.all(): - phase_by_phasename[phase.name] = phase - - yield '\n'.join([ - 'digraph G {', - ' rankdir=LR;', - ' graph [compound=true];', - ]) - for phase, installed_goals in Phase.all(): - yield '\n'.join([ - ' subgraph %s {' % get_cluster_name(phase), - ' node [style=filled];', - ' color = blue;', - ' label = "%s";' % phase.name, - ]) - for installed_goal in installed_goals: - yield ' %s [label="%s"];' % (get_goal_name(phase, installed_goal), - installed_goal.name) - yield ' }' - - edges = set() - for phase, installed_goals in Phase.all(): - for installed_goal in installed_goals: - for dependency in installed_goal.dependencies: - tail_goal = phase_by_phasename.get(dependency.name).goals()[-1] - edge = 'ltail=%s lhead=%s' % (get_cluster_name(phase), - get_cluster_name(Phase.of(tail_goal))) - if edge not in edges: - yield ' %s -> %s [%s];' % (get_goal_name(phase, installed_goal), - get_goal_name(Phase.of(tail_goal), tail_goal), - edge) - edges.add(edge) - yield '}' - - return graph() if self.context.options.goal_list_graph else report() diff --git a/src/python/twitter/pants/tasks/listtargets.py b/src/python/twitter/pants/tasks/listtargets.py deleted file mode 100644 index 50e17a5a3..000000000 --- a/src/python/twitter/pants/tasks/listtargets.py +++ /dev/null @@ -1,116 +0,0 @@ -# ============================================================================= -# Copyright 2012 Twitter, Inc. -# ----------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.build_file import BuildFile
-from twitter.pants.base.target import Target
-
-from .console_task import ConsoleTask
-from .task_error import TaskError
-
-
-class ListTargets(ConsoleTask):
-  """
-  Lists all BUILD targets in the system with no arguments, otherwise lists all
-  the BUILD targets that reside in the BUILD files hosting the specified
-  targets.
-  """
-  @classmethod
-  def setup_parser(cls, option_group, args, mkflag):
-    super(ListTargets, cls).setup_parser(option_group, args, mkflag)
-
-    option_group.add_option(
-      mkflag("provides"),
-      action="store_true",
-      dest="list_provides", default=False,
-      help="Specifies only targets that provide an artifact should be "
-           "listed. The output will be 2 columns in this case: "
-           "[target address] [artifact id]")
-
-    option_group.add_option(
-      mkflag("provides-columns"),
-      dest="list_provides_columns",
-      default='address,artifact_id',
-      help="Specifies the columns to include in listing output when "
-           "restricting the listing to targets that provide an artifact. "
-           "Available columns are: address, artifact_id, repo_name, repo_url "
-           "and repo_db")
-
-    option_group.add_option(
-      mkflag("documented"),
-      action="store_true",
-      dest="list_documented",
-      default=False,
-      help="Prints only targets that are documented with a description.")
-
-  def __init__(self, context, **kwargs):
-    super(ListTargets, self).__init__(context, **kwargs)
-
-    self._provides = context.options.list_provides
-    self._provides_columns = context.options.list_provides_columns
-    self._documented = context.options.list_documented
-    self._root_dir = get_buildroot()
-
-  def console_output(self, targets):
-    if self._provides:
-      def extract_artifact_id(target):
-        provided_jar, _, _ = target.get_artifact_info()
-        return "%s%s%s" % (provided_jar.org, '#', provided_jar.name)
-
-      extractors = dict(
-        address=lambda target: str(target.address),
-        artifact_id=extract_artifact_id,
-        repo_name=lambda target: target.provides.repo.name,
-        repo_url=lambda target: target.provides.repo.url,
-        repo_db=lambda target: target.provides.repo.push_db,
-      )
-
-      def print_provides(column_extractors, address):
-        target = Target.get(address)
-        if target.is_exported:
-          return " ".join(extractor(target) for extractor in column_extractors)
-
-      try:
-        column_extractors = [extractors[col] for col in (self._provides_columns.split(','))]
-      except KeyError:
-        raise TaskError("Invalid columns specified %s. Valid ones include address, artifact_id, "
                        "repo_name, repo_url and repo_db."
% self._provides_columns) - - print_fn = lambda address: print_provides(column_extractors, address) - elif self._documented: - def print_documented(address): - target = Target.get(address) - if target.description: - return '%s\n %s' % (address, '\n '.join(target.description.strip().split('\n'))) - print_fn = print_documented - else: - print_fn = lambda addr: str(addr) - - visited = set() - for address in self._addresses(): - result = print_fn(address) - if result and result not in visited: - visited.add(result) - yield result - - def _addresses(self): - if self.context.target_roots: - for target in self.context.target_roots: - yield target.address - else: - for buildfile in BuildFile.scan_buildfiles(self._root_dir): - for address in Target.get_all_addresses(buildfile): - yield address diff --git a/src/python/twitter/pants/tasks/markdown_to_html.py b/src/python/twitter/pants/tasks/markdown_to_html.py deleted file mode 100644 index 1cdf2a6e4..000000000 --- a/src/python/twitter/pants/tasks/markdown_to_html.py +++ /dev/null @@ -1,266 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'John Sirois' - -try: - import markdown - - WIKILINKS_PATTERN = r'\[\[([^\]]+)\]\]' - - class WikilinksPattern(markdown.inlinepatterns.Pattern): - def __init__(self, build_url, markdown_instance=None): - markdown.inlinepatterns.Pattern.__init__(self, WIKILINKS_PATTERN, markdown_instance) - self.build_url = build_url - - def handleMatch(self, m): - alias, url = self.build_url(m.group(2).strip()) - el = markdown.util.etree.Element('a') - el.set('href', url) - el.text = markdown.util.AtomicString(alias) - return el - - class WikilinksExtension(markdown.Extension): - def __init__(self, build_url, configs=None): - markdown.Extension.__init__(self, configs or {}) - self.build_url = build_url - - def extendMarkdown(self, md, md_globals): - md.inlinePatterns['wikilinks'] = WikilinksPattern(self.build_url, md) - - HAS_MARKDOWN = True -except ImportError: - HAS_MARKDOWN = False - -try: - from pygments.formatters.html import HtmlFormatter - from pygments.styles import get_all_styles - - def configure_codehighlight_options(option_group, mkflag): - all_styles = list(get_all_styles()) - option_group.add_option(mkflag("code-style"), dest="markdown_to_html_code_style", - type="choice", choices=all_styles, - help="Selects the stylesheet to use for code highlights, one of: " - "%s." 
                                      "%s." % ' '.join(all_styles))
-
-  def emit_codehighlight_css(path, style):
-    with safe_open(path, 'w') as css:
-      css.write((HtmlFormatter(style=style)).get_style_defs('.codehilite'))
-    return path
-except ImportError:
-  def configure_codehighlight_options(option_group, mkflag): pass
-  def emit_codehighlight_css(path, style): pass
-
-
-import codecs
-import os
-import re
-import textwrap
-
-from twitter.common.dirutil import safe_mkdir, safe_open
-
-from twitter.pants import binary_util
-from twitter.pants.base.address import Address
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.target import Target
-from twitter.pants.targets.doc import Page
-from twitter.pants.tasks import Task, TaskError
-
-
-class MarkdownToHtml(Task):
-  AVAILABLE = HAS_MARKDOWN
-
-  @classmethod
-  def setup_parser(cls, option_group, args, mkflag):
-    configure_codehighlight_options(option_group, mkflag)
-
-    option_group.add_option(mkflag("open"), mkflag("open", negate=True),
                            dest = "markdown_to_html_open",
                            action="callback", callback=mkflag.set_bool, default=False,
                            help = "[%default] Open the generated documents in a browser.")
-
-    option_group.add_option(mkflag("fragment"), mkflag("fragment", negate=True),
                            dest = "markdown_to_html_fragment",
                            action="callback", callback=mkflag.set_bool, default=False,
                            help = "[%default] Generate a fragment of html to embed in a page.")
-
-    option_group.add_option(mkflag("outdir"), dest="markdown_to_html_outdir",
                            help="Emit generated html into this directory.")
-
-    option_group.add_option(mkflag("extension"), dest = "markdown_to_html_extensions",
                            action="append",
                            help = "Override the default markdown extensions and process pages "
                                   "whose sources have these extensions instead.")
-
-  def __init__(self, context):
-    Task.__init__(self, context)
-
-    self.open = context.options.markdown_to_html_open
-
-    pants_workdir = context.config.getdefault('pants_workdir')
-    self.outdir = (
-      context.options.markdown_to_html_outdir
-      or context.config.get('markdown-to-html',
                            'workdir',
                            default=os.path.join(pants_workdir, 'markdown'))
-    )
-
-    self.extensions = set(
-      context.options.markdown_to_html_extensions
-      or context.config.getlist('markdown-to-html', 'extensions', default=['.md', '.markdown'])
-    )
-
-    self.fragment = context.options.markdown_to_html_fragment
-
-    self.code_style = context.config.get('markdown-to-html', 'code-style', default='friendly')
-    if hasattr(context.options, 'markdown_to_html_code_style'):
-      if context.options.markdown_to_html_code_style:
-        self.code_style = context.options.markdown_to_html_code_style
-
-  def execute(self, targets):
-    if not MarkdownToHtml.AVAILABLE:
-      raise TaskError('Cannot process markdown - no markdown lib on the sys.path')
-
-    # TODO(John Sirois): consider adding change detection
-
-    css_relpath = os.path.join('css', 'codehighlight.css')
-    css = emit_codehighlight_css(os.path.join(self.outdir, css_relpath), self.code_style)
-    if css:
-      self.context.log.info('Emitted %s' % css)
-
-    def is_page(target):
-      return isinstance(target, Page)
-
-    roots = set()
-    interior_nodes = set()
-    if self.open:
-      dependencies_by_page = self.context.dependents(on_predicate=is_page, from_predicate=is_page)
-      roots.update(dependencies_by_page.keys())
-      for dependencies in dependencies_by_page.values():
-        interior_nodes.update(dependencies)
-        roots.difference_update(dependencies)
-      for page in self.context.targets(is_page):
-        # There are no in or out edges so we need to show this isolated page.
-        if not page.dependencies and page not in interior_nodes:
-          roots.add(page)
-
-    plaingenmap = self.context.products.get('markdown_html')
-    wikigenmap = self.context.products.get('wiki_html')
-    show = []
-    for page in filter(is_page, targets):
-      _, ext = os.path.splitext(page.source)
-      if ext in self.extensions:
-        def process_page(key, outdir, url_builder, config, genmap, fragment=False):
-          html_path = self.process(
-            outdir,
-            page.target_base,
-            page.source,
-            self.fragment or fragment,
-            url_builder,
-            config,
-            css=css
-          )
-          self.context.log.info('Processed %s to %s' % (page.source, html_path))
-          relpath = os.path.relpath(html_path, outdir)
-          genmap.add(key, outdir, [relpath])
-          return html_path
-
-        def url_builder(linked_page, config=None):
-          path, ext = os.path.splitext(linked_page.source)
-          return linked_page.name, os.path.relpath(path + '.html', os.path.dirname(page.source))
-
-        page_path = os.path.join(self.outdir, 'html')
-        html = process_page(page, page_path, url_builder, lambda p: None, plaingenmap)
-        if css and not self.fragment:
-          plaingenmap.add(page, self.outdir, [css_relpath])
-        if self.open and page in roots:
-          show.append(html)
-
-        for wiki in page.wikis():
-          def get_config(page):
-            return page.wiki_config(wiki)
-          basedir = os.path.join(self.outdir, wiki.id)
-          process_page((wiki, page), basedir, wiki.url_builder, get_config,
                       wikigenmap, fragment=True)
-
-    if show:
-      binary_util.ui_open(*show)
-
-  PANTS_LINK = re.compile(r'''pants\(['"]([^)]+)['"]\)(#.*)?''')
-
-  def process(self, outdir, base, source, fragmented, url_builder, get_config, css=None):
-    def parse_url(spec):
-      match = MarkdownToHtml.PANTS_LINK.match(spec)
-      if match:
-        page = Target.get(Address.parse(get_buildroot(), match.group(1)))
-        anchor = match.group(2) or ''
-        if not page:
-          raise TaskError('Invalid link %s' % match.group(1))
-        alias, url = url_builder(page, config=get_config(page))
-        return alias, url + anchor
-      else:
-        return spec, spec
-
-    def build_url(label):
-      components = label.split('|', 1)
-      if len(components) == 1:
-        return parse_url(label.strip())
-      else:
-        alias, link = components
-        _, url = parse_url(link.strip())
-        return alias, url
-
-    wikilinks = WikilinksExtension(build_url)
-
-    path, ext = os.path.splitext(source)
-    output_path = os.path.join(outdir, path + '.html')
-    safe_mkdir(os.path.dirname(output_path))
-    with codecs.open(output_path, 'w', 'utf-8') as output:
-      with codecs.open(os.path.join(get_buildroot(), base, source), 'r', 'utf-8') as input:
-        md_html = markdown.markdown(
-          input.read(),
-          extensions=['codehilite(guess_lang=False)', 'extra', 'tables', 'toc', wikilinks],
-        )
-        if fragmented:
-          if css:
-            with safe_open(css) as fd:
-              output.write(textwrap.dedent('''
-              <style type="text/css">%s</style>
-              ''').strip() % fd.read())
-              output.write('\n')
-          output.write(md_html)
-        else:
-          if css:
-            css_relpath = os.path.relpath(css, outdir)
-            out_relpath = os.path.dirname(source)
-            link_relpath = os.path.relpath(css_relpath, out_relpath)
-            css = '<link rel="stylesheet" type="text/css" href="%s"/>' % link_relpath
-          html = textwrap.dedent('''
-          <html>
-            <head>
-              %s
-            </head>
-            <body>
-              %s
-            </body>
-          </html>
-          ''').strip() % (css or '', md_html)
-          output.write(html)
-        return output.name
diff --git a/src/python/twitter/pants/tasks/minimal_cover.py b/src/python/twitter/pants/tasks/minimal_cover.py
deleted file mode 100644
index ab1317ffa..000000000
--- a/src/python/twitter/pants/tasks/minimal_cover.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.tasks.console_task import ConsoleTask - - -class MinimalCover(ConsoleTask): - """Outputs a minimal covering set of targets. - - For a given set of input targets, the output targets transitive dependency set will include all - the input targets without gaps. - """ - - def console_output(self, _): - internal_deps = set() - for target in self.context.target_roots: - internal_deps.update(self._collect_internal_deps(target)) - - minimal_cover = set() - for target in self.context.target_roots: - if target not in internal_deps and target not in minimal_cover: - minimal_cover.add(target) - yield str(target.address) - - def _collect_internal_deps(self, target): - internal_deps = set() - target.walk(internal_deps.add) - internal_deps.discard(target) - return internal_deps diff --git a/src/python/twitter/pants/tasks/nailgun_task.py b/src/python/twitter/pants/tasks/nailgun_task.py deleted file mode 100644 index 270a10163..000000000 --- a/src/python/twitter/pants/tasks/nailgun_task.py +++ /dev/null @@ -1,114 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import time - -from twitter.pants.java import util -from twitter.pants.java.distribution import Distribution -from twitter.pants.java.executor import SubprocessExecutor -from twitter.pants.java.nailgun_executor import NailgunExecutor - -from . import Task, TaskError - - -class NailgunTask(Task): - - _DAEMON_OPTION_PRESENT = False - - @staticmethod - def killall(logger=None, everywhere=False): - """Kills all nailgun servers launched by pants in the current repo. - - Returns ``True`` if all nailguns were successfully killed, ``False`` otherwise. 
- - :param logger: a callable that accepts a message string describing the killed nailgun process - :param bool everywhere: ``True`` to kill all nailguns servers launched by pants on this machine - """ - if not NailgunExecutor.killall: - return False - else: - return NailgunExecutor.killall(logger=logger, everywhere=everywhere) - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - if not NailgunTask._DAEMON_OPTION_PRESENT: - option_group.parser.add_option("--ng-daemons", "--no-ng-daemons", dest="nailgun_daemon", - default=True, action="callback", callback=mkflag.set_bool, - help="[%default] Use nailgun daemons to execute java tasks.") - NailgunTask._DAEMON_OPTION_PRESENT = True - - def __init__(self, context, minimum_version=None, jdk=False): - super(NailgunTask, self).__init__(context) - - default_workdir_root = os.path.join(context.config.getdefault('pants_workdir'), 'ng') - self._workdir = os.path.join( - context.config.get('nailgun', 'workdir', default=default_workdir_root), - self.__class__.__name__) - - self._nailgun_bootstrap_key = 'nailgun' - self._jvm_tool_bootstrapper.register_jvm_tool(self._nailgun_bootstrap_key, [':nailgun-server']) - - start = time.time() - try: - self._dist = Distribution.cached(minimum_version=minimum_version, jdk=jdk) - # TODO(John Sirois): Use a context timer when AWESOME-1265 gets merged. - context.log.debug('Located java distribution in %.3fs' % (time.time() - start)) - except Distribution.Error as e: - raise TaskError(e) - - def create_java_executor(self): - """Create java executor that uses this task's ng daemon, if allowed. - - Call only in execute() or later. TODO: Enforce this. - """ - if self.context.options.nailgun_daemon and not os.environ.get('PANTS_DEV'): - classpath = os.pathsep.join( - self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._nailgun_bootstrap_key)) - client = NailgunExecutor(self._workdir, classpath, distribution=self._dist) - else: - client = SubprocessExecutor(self._dist) - return client - - @property - def jvm_args(self): - """Default jvm args the nailgun will be launched with. - - By default no special jvm args are used. If a value for ``jvm_args`` is specified in pants.ini - globally in the ``DEFAULT`` section or in the ``nailgun`` section, then that list will be used. - """ - return self.context.config.getlist('nailgun', 'jvm_args', default=[]) - - def runjava(self, classpath, main, jvm_options=None, args=None, workunit_name=None, - workunit_labels=None): - """Runs the java main using the given classpath and args. - - If --no-ng-daemons is specified then the java main is run in a freshly spawned subprocess, - otherwise a persistent nailgun server dedicated to this Task subclass is used to speed up - amortized run times. - """ - executor = self.create_java_executor() - try: - return util.execute_java(classpath=classpath, - main=main, - jvm_options=jvm_options, - args=args, - executor=executor, - workunit_factory=self.context.new_workunit, - workunit_name=workunit_name, - workunit_labels=workunit_labels) - except executor.Error as e: - raise TaskError(e) diff --git a/src/python/twitter/pants/tasks/pathdeps.py b/src/python/twitter/pants/tasks/pathdeps.py deleted file mode 100644 index 38896290a..000000000 --- a/src/python/twitter/pants/tasks/pathdeps.py +++ /dev/null @@ -1,23 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. 
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.tasks.console_task import ConsoleTask
-
-__author__ = 'Dave Buchfuhrer'
-
-class PathDeps(ConsoleTask):
-  def console_output(self, targets):
-    return set(t.address.buildfile.parent_path for t in targets if hasattr(t, 'address'))
diff --git a/src/python/twitter/pants/tasks/paths.py b/src/python/twitter/pants/tasks/paths.py
deleted file mode 100644
index 976559d54..000000000
--- a/src/python/twitter/pants/tasks/paths.py
+++ /dev/null
@@ -1,121 +0,0 @@
-from __future__ import print_function
-
-from collections import defaultdict
-import copy
-
-from twitter.common.lang import Compatibility
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.base.address import Address
-from twitter.pants.base.target import Target
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.console_task import ConsoleTask
-
-
-class PathFinder(ConsoleTask):
-  def __init__(self, context):
-    ConsoleTask.__init__(self, context)
-    self.log = context.log
-    self.target_roots = context.target_roots
-
-  @classmethod
-  def _coerce_to_targets(cls, from_str, to_str):
-    if isinstance(from_str, Compatibility.string):
-      if not isinstance(to_str, Compatibility.string):
-        raise TaskError('Finding paths from string %s to non-string %s' % (from_str, str(to_str)))
-
-      from_address = Address.parse(get_buildroot(), from_str)
-      to_address = Address.parse(get_buildroot(), to_str)
-
-      from_target = Target.get(from_address)
-      to_target = Target.get(to_address)
-
-      if not from_target:
-        raise TaskError('Target %s doesn\'t exist' % from_address.reference())
-      if not to_target:
-        raise TaskError('Target %s doesn\'t exist' % to_address.reference())
-
-      return from_target, to_target
-
-    elif isinstance(to_str, Compatibility.string):
-      raise TaskError('Finding paths to string %s from non-string %s' % (to_str, str(from_str)))
-    return from_str, to_str
-
-  @classmethod
-  def _find_paths(cls, from_target, to_target, log):
-    from_target, to_target = cls._coerce_to_targets(from_target, to_target)
-
-    log.debug('Looking for all paths from %s to %s' % (from_target.address.reference(), to_target.address.reference()))
-
-    paths = cls._find_paths_rec(from_target, to_target)
-    print('Found %d paths' % len(paths))
-    print('')
-    for path in paths:
-      log.debug('\t[%s]' % ', '.join([target.address.reference() for target in path]))
-
-  all_paths = defaultdict(lambda: defaultdict(list))
-  @classmethod
-  def _find_paths_rec(cls, from_target, to_target):
-    if from_target == to_target:
-      return [[from_target]]
-
-    if from_target not in cls.all_paths or to_target not in cls.all_paths[from_target]:
-      paths = []
-      if hasattr(from_target, 'dependency_addresses'):
-        for address in from_target.dependency_addresses:
-          dep = 
Target.get(address) - for path in cls._find_paths_rec(dep, to_target): - new_path = copy.copy(path) - new_path.insert(0, from_target) - paths.append(new_path) - - cls.all_paths[from_target][to_target] = paths - - return cls.all_paths[from_target][to_target] - - examined_targets = set() - - @classmethod - def _find_path(cls, from_target, to_target, log): - from_target, to_target = cls._coerce_to_targets(from_target, to_target) - - log.debug('Looking for path from %s to %s' % (from_target.address.reference(), to_target.address.reference())) - - queue = [([from_target], 0)] - while True: - if not queue: - print('no path found from %s to %s!' % (from_target.address.reference(), to_target.address.reference())) - break - - path, indent = queue.pop(0) - next_target = path[-1] - if next_target in cls.examined_targets: - continue - cls.examined_targets.add(next_target) - - log.debug('%sexamining %s' % (' ' * indent, next_target)) - - if next_target == to_target: - print('') - for target in path: - print('%s' % target.address.reference()) - break - - if hasattr(next_target, 'dependency_addresses'): - for address in next_target.dependency_addresses: - dep = Target.get(address) - queue.append((path + [dep], indent + 1)) - - -class Path(PathFinder): - def execute(self, targets): - if len(self.target_roots) != 2: - raise TaskError('Specify two targets please (found %d)' % len(self.target_roots)) - - self._find_path(self.target_roots[0], self.target_roots[1], self.log) - -class Paths(PathFinder): - def execute(self, targets): - if len(self.target_roots) != 2: - raise TaskError('Specify two targets please (found %d)' % len(self.target_roots)) - - self._find_paths(self.target_roots[0], self.target_roots[1], self.log) diff --git a/src/python/twitter/pants/tasks/prepare_resources.py b/src/python/twitter/pants/tasks/prepare_resources.py deleted file mode 100644 index f6348667b..000000000 --- a/src/python/twitter/pants/tasks/prepare_resources.py +++ /dev/null @@ -1,78 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== -from collections import defaultdict - -import os -import shutil - -from twitter.common.dirutil import safe_mkdir -from twitter.pants.goal.products import MultipleRootedProducts - -from twitter.pants.tasks import Task - - -class PrepareResources(Task): - - def __init__(self, context): - Task.__init__(self, context) - - self.workdir = context.config.get('prepare-resources', 'workdir') - self.confs = context.config.getlist('prepare-resources', 'confs', default=['default']) - self.context.products.require_data('exclusives_groups') - - def execute(self, targets): - if self.context.products.is_required_data('resources_by_target'): - self.context.products.safe_create_data('resources_by_target', - lambda: defaultdict(MultipleRootedProducts)) - - if len(targets) == 0: - return - def extract_resources(target): - return target.resources if target.has_resources else () - all_resources_tgts = set() - for resources_tgts in map(extract_resources, targets): - all_resources_tgts.update(resources_tgts) - - def target_dir(resources_tgt): - return os.path.join(self.workdir, resources_tgt.id) - - with self.invalidated(all_resources_tgts) as invalidation_check: - invalid_targets = set() - for vt in invalidation_check.invalid_vts: - invalid_targets.update(vt.targets) - - for resources_tgt in invalid_targets: - resources_dir = target_dir(resources_tgt) - safe_mkdir(resources_dir, clean=True) - for resource_path in resources_tgt.sources: - basedir = os.path.dirname(resource_path) - destdir = os.path.join(resources_dir, basedir) - safe_mkdir(destdir) - # TODO: Symlink instead? - shutil.copy(os.path.join(resources_tgt.target_base, resource_path), - os.path.join(resources_dir, resource_path)) - - resources_by_target = self.context.products.get_data('resources_by_target') - egroups = self.context.products.get_data('exclusives_groups') - group_key = egroups.get_group_key_for_target(targets[0]) - - for resources_tgt in all_resources_tgts: - resources_dir = target_dir(resources_tgt) - for conf in self.confs: - egroups.update_compatible_classpaths(group_key, [(conf, resources_dir)]) - if resources_by_target is not None: - target_resources = resources_by_target[resources_tgt] - target_resources.add_rel_paths(resources_dir, resources_tgt.sources) diff --git a/src/python/twitter/pants/tasks/protobuf_gen.py b/src/python/twitter/pants/tasks/protobuf_gen.py deleted file mode 100644 index 035485f72..000000000 --- a/src/python/twitter/pants/tasks/protobuf_gen.py +++ /dev/null @@ -1,248 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os -import re -import subprocess - -from collections import defaultdict - -from twitter.common import log -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.binary_util import select_binary -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_protobuf_library import JavaProtobufLibrary -from twitter.pants.targets.python_library import PythonLibrary - -from .code_gen import CodeGen - -from . import TaskError - - -class ProtobufGen(CodeGen): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("outdir"), dest="protobuf_gen_create_outdir", - help="Emit generated code in to this directory.") - - option_group.add_option(mkflag("lang"), dest="protobuf_gen_langs", default=[], - action="append", type="choice", choices=['python', 'java'], - help="Force generation of protobuf code for these languages. Both " - "'python' and 'java' are supported") - - def __init__(self, context): - CodeGen.__init__(self, context) - - self.protoc_supportdir = self.context.config.get('protobuf-gen', 'supportdir') - self.protoc_version = self.context.config.get('protobuf-gen', 'version') - self.output_dir = (context.options.protobuf_gen_create_outdir or - context.config.get('protobuf-gen', 'workdir')) - - def resolve_deps(key): - deps = OrderedSet() - for dep in context.config.getlist('protobuf-gen', key): - deps.update(context.resolve(dep)) - return deps - - self.javadeps = resolve_deps('javadeps') - self.java_out = os.path.join(self.output_dir, 'gen-java') - - self.pythondeps = resolve_deps('pythondeps') - self.py_out = os.path.join(self.output_dir, 'gen-py') - - self.gen_langs = set(context.options.protobuf_gen_langs) - for lang in ('java', 'python'): - if self.context.products.isrequired(lang): - self.gen_langs.add(lang) - - self.protobuf_binary = select_binary( - self.protoc_supportdir, - self.protoc_version, - 'protoc', - context.config - ) - - def invalidate_for(self): - return self.gen_langs - - def invalidate_for_files(self): - return [self.protobuf_binary] - - def is_gentarget(self, target): - return isinstance(target, JavaProtobufLibrary) - - def is_forced(self, lang): - return lang in self.gen_langs - - def genlangs(self): - return dict(java=lambda t: t.is_jvm, python=lambda t: t.is_python) - - def genlang(self, lang, targets): - protobuf_binary = select_binary( - self.protoc_supportdir, - self.protoc_version, - 'protoc', - self.context.config - ) - - bases, sources = self._calculate_sources(targets) - - if lang == 'java': - safe_mkdir(self.java_out) - gen = '--java_out=%s' % self.java_out - elif lang == 'python': - safe_mkdir(self.py_out) - gen = '--python_out=%s' % self.py_out - else: - raise TaskError('Unrecognized protobuf gen lang: %s' % lang) - - args = [self.protobuf_binary, gen] - - for base in bases: - args.append('--proto_path=%s' % base) - - args.extend(sources) - log.debug('Executing: %s' % ' '.join(args)) - process = subprocess.Popen(args) - result = process.wait() - if result != 0: - raise TaskError('%s ... 
exited non-zero (%i)' % (self.protobuf_binary, result)) - - def _calculate_sources(self, targets): - bases = set() - sources = set() - - def collect_sources(target): - if self.is_gentarget(target): - bases.add(target.target_base) - sources.update(target.sources_relative_to_buildroot()) - - for target in targets: - target.walk(collect_sources) - return bases, sources - - def createtarget(self, lang, gentarget, dependees): - if lang == 'java': - return self._create_java_target(gentarget, dependees) - elif lang == 'python': - return self._create_python_target(gentarget, dependees) - else: - raise TaskError('Unrecognized protobuf gen lang: %s' % lang) - - def _create_java_target(self, target, dependees): - genfiles = [] - for source in target.sources: - path = os.path.join(target.target_base, source) - genfiles.extend(calculate_genfiles(path, source).get('java', [])) - tgt = self.context.add_new_target(self.java_out, - JavaLibrary, - name=target.id, - sources=genfiles, - provides=target.provides, - dependencies=self.javadeps, - excludes=target.excludes) - tgt.id = target.id + '.protobuf_gen' - for dependee in dependees: - dependee.update_dependencies([tgt]) - return tgt - - def _create_python_target(self, target, dependees): - genfiles = [] - for source in target.sources: - path = os.path.join(target.target_base, source) - genfiles.extend(calculate_genfiles(path, source).get('py', [])) - tgt = self.context.add_new_target(self.py_out, - PythonLibrary, - name=target.id, - sources=genfiles, - dependencies=self.pythondeps) - tgt.id = target.id - for dependee in dependees: - dependee.dependencies.add(tgt) - return tgt - - -DEFAULT_PACKAGE_PARSER = re.compile(r'^\s*package\s+([^;]+)\s*;\s*$') -OPTION_PARSER = re.compile(r'^\s*option\s+([^ =]+)\s*=\s*([^\s]+)\s*;\s*$') -TYPE_PARSER = re.compile(r'^\s*(enum|message)\s+([^\s{]+).*') - - -def camelcase(string): - """Convert snake casing where present to camel casing""" - return ''.join(word.capitalize() for word in string.split('_')) - - -def calculate_genfiles(path, source): - with open(path, 'r') as protobuf: - lines = protobuf.readlines() - package = '' - filename = re.sub(r'\.proto$', '', os.path.basename(source)) - outer_class_name = camelcase(filename) - multiple_files = False - types = set() - for line in lines: - match = DEFAULT_PACKAGE_PARSER.match(line) - if match: - package = match.group(1) - else: - match = OPTION_PARSER.match(line) - if match: - name = match.group(1) - value = match.group(2) - - def string_value(): - return value.lstrip('"').rstrip('"') - - def bool_value(): - return value == 'true' - - if 'java_package' == name: - package = string_value() - elif 'java_outer_classname' == name: - outer_class_name = string_value() - elif 'java_multiple_files' == name: - multiple_files = bool_value() - else: - match = TYPE_PARSER.match(line) - if match: - type_ = match.group(2) - types.add(type_) - if match.group(1) == 'message': - types.add('%sOrBuilder' % type_) - - genfiles = defaultdict(set) - genfiles['py'].update(calculate_python_genfiles(source)) - genfiles['java'].update(calculate_java_genfiles(package, - outer_class_name, - types if multiple_files else [])) - return genfiles - - -def calculate_python_genfiles(source): - yield re.sub(r'\.proto$', '_pb2.py', source) - - -def calculate_java_genfiles(package, outer_class_name, types): - basepath = package.replace('.', '/') - - def path(name): - return os.path.join(basepath, '%s.java' % name) - - yield path(outer_class_name) - for type_ in types: - yield path(type_) diff --git 
a/src/python/twitter/pants/tasks/provides.py b/src/python/twitter/pants/tasks/provides.py deleted file mode 100644 index d3b715d9f..000000000 --- a/src/python/twitter/pants/tasks/provides.py +++ /dev/null @@ -1,117 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'Benjy Weinberger' - -import os -import sys - -from twitter.common.collections import OrderedSet -from twitter.common.contextutil import open_zip as open_jar -from twitter.pants.tasks import Task -from twitter.pants.tasks.ivy_utils import IvyModuleRef, IvyUtils -from twitter.pants.targets.jvm_binary import JvmBinary - - -class Provides(Task): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("outdir"), dest="provides_outdir", - help="Emit provides outputs into this directory.") - option_group.add_option(mkflag("transitive"), default=False, - action="store_true", dest='provides_transitive', - help="Shows the symbols provided not just by the specified targets but by all their transitive dependencies.") - option_group.add_option(mkflag("also-write-to-stdout"), default=False, - action="store_true", dest='provides_also_write_to_stdout', - help="If set, also outputs the provides information to stdout.") - - def __init__(self, context): - Task.__init__(self, context) - self.ivy_utils = IvyUtils(config=context.config, - options=context.options, - log=context.log) - self.confs = context.config.getlist('ivy', 'confs', default=['default']) - self.target_roots = context.target_roots - self.transitive = context.options.provides_transitive - self.workdir = context.config.get('provides', 'workdir') - self.outdir = context.options.provides_outdir or self.workdir - self.also_write_to_stdout = context.options.provides_also_write_to_stdout or False - # Create a fake target, in case we were run directly on a JarLibrary containing nothing but JarDependencies. - # TODO(benjy): Get rid of this special-casing of jar dependencies. 
- context.add_new_target(self.workdir, - JvmBinary, - name='provides', - dependencies=self.target_roots, - configurations=self.confs) - context.products.require('jars') - - def execute(self, targets): - for conf in self.confs: - outpath = os.path.join(self.outdir, '%s.%s.provides' % - (self.ivy_utils.identify(targets)[1], conf)) - if self.transitive: - outpath += '.transitive' - ivyinfo = self.ivy_utils.parse_xml_report(self.context, conf) - jar_paths = OrderedSet() - for root in self.target_roots: - jar_paths.update(self.get_jar_paths(ivyinfo, root, conf)) - - with open(outpath, 'w') as outfile: - def do_write(s): - outfile.write(s) - if self.also_write_to_stdout: - sys.stdout.write(s) - for jar in jar_paths: - do_write('# from jar %s\n' % jar) - for line in self.list_jar(jar): - if line.endswith('.class'): - class_name = line[:-6].replace('/', '.') - do_write(class_name) - do_write('\n') - print('Wrote provides information to %s' % outpath) - - def get_jar_paths(self, ivyinfo, target, conf): - jar_paths = OrderedSet() - if target.is_jar_library: - # Jar library proxies jar dependencies or jvm targets, so the jars are just those of the - # dependencies. - for paths in [ self.get_jar_paths(ivyinfo, dep, conf) for dep in target.dependencies ]: - jar_paths.update(paths) - elif target.is_jar_dependency: - ref = IvyModuleRef(target.org, target.name, target.rev, conf) - jar_paths.update(self.get_jar_paths_for_ivy_module(ivyinfo, ref)) - elif target.is_jvm: - for basedir, jars in self.context.products.get('jars').get(target).items(): - jar_paths.update([os.path.join(basedir, jar) for jar in jars]) - if self.transitive: - for dep in target.dependencies: - jar_paths.update(self.get_jar_paths(ivyinfo, dep, conf)) - - return jar_paths - - def get_jar_paths_for_ivy_module(self, ivyinfo, ref): - jar_paths = OrderedSet() - module = ivyinfo.modules_by_ref[ref] - jar_paths.update([a.path for a in module.artifacts]) - if self.transitive: - for dep in ivyinfo.deps_by_caller.get(ref, []): - jar_paths.update(self.get_jar_paths_for_ivy_module(ivyinfo, dep)) - return jar_paths - - def list_jar(self, path): - with open_jar(path, 'r') as jar: - return jar.namelist() - diff --git a/src/python/twitter/pants/tasks/python/__init__.py b/src/python/twitter/pants/tasks/python/__init__.py deleted file mode 100644 index 1e805c925..000000000 --- a/src/python/twitter/pants/tasks/python/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ==================================================================================================
-
diff --git a/src/python/twitter/pants/tasks/python/setup.py b/src/python/twitter/pants/tasks/python/setup.py
deleted file mode 100644
index 049fef39a..000000000
--- a/src/python/twitter/pants/tasks/python/setup.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from functools import reduce
-
-from twitter.pants.tasks import Task, TaskError
-
-from twitter.pants.python.interpreter_cache import PythonInterpreterCache
-
-from .target import is_python_root
-
-
-class SetupPythonEnvironment(Task):
-  """
-  Establishes the python interpreter(s) for downstream Python tasks, e.g. Resolve, Run, PytestRun.
-
-  Populates the product namespace (for typename = 'python'):
-    'interpreters': ordered list of PythonInterpreter objects
-  """
-  @classmethod
-  def setup_parser(cls, option_group, args, mkflag):
-    option_group.add_option(mkflag("force"), dest="python_setup_force",
-                            action="store_true", default=False,
-                            help="Force clean and install.")
-    option_group.add_option(mkflag("path"), dest="python_setup_paths",
-                            action="append", default=[],
-                            help="Add a path to search for interpreters, by default PATH.")
-    option_group.add_option(mkflag("interpreter"), dest="python_interpreter",
-                            default=[], action='append',
-                            help="Constrain what Python interpreters to use. Uses Requirement "
                                 "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
                                 "By default, no constraints are used. Multiple constraints may "
                                 "be added. 
They will be ORed together.") - option_group.add_option(mkflag("multi"), dest="python_multi", - default=False, action='store_true', - help="Allow multiple interpreters to be bound to an upstream chroot.") - - def __init__(self, context): - context.products.require('python') - self._cache = PythonInterpreterCache(context.config, logger=context.log.debug) - super(SetupPythonEnvironment, self).__init__(context) - - def execute(self, _): - ifilters = self.context.options.python_interpreter - self._cache.setup(force=self.context.options.python_setup_force, - paths=self.context.options.python_setup_paths, - filters=ifilters or ['']) - all_interpreters = set(self._cache.interpreters) - for target in self.context.targets(is_python_root): - self.context.log.info('Setting up interpreters for %s' % target) - closure = target.closure() - self.context.log.debug(' - Target closure: %d targets' % len(closure)) - target_compatibilities = [ - set(self._cache.matches(getattr(closure_target, 'compatibility', ['']))) - for closure_target in closure] - target_compatibilities = reduce(set.intersection, target_compatibilities, all_interpreters) - self.context.log.debug(' - Target minimum compatibility: %s' % ( - ' '.join(interp.version_string for interp in target_compatibilities))) - interpreters = self._cache.select_interpreter(target_compatibilities, - allow_multiple=self.context.options.python_multi) - self.context.log.debug(' - Selected: %s' % interpreters) - if not interpreters: - raise TaskError('No compatible interpreters for %s' % target) - target.interpreters = interpreters diff --git a/src/python/twitter/pants/tasks/python/target.py b/src/python/twitter/pants/tasks/python/target.py deleted file mode 100644 index 89d82dc65..000000000 --- a/src/python/twitter/pants/tasks/python/target.py +++ /dev/null @@ -1,66 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import Target, TargetDefinitionException -from twitter.pants.targets.python_requirement import PythonRequirement - - -def is_python_root(target): - return isinstance(target, PythonRoot) - - -class PythonRoot(Target): - """ - Internal target for managing python chroot state. 
- """ - @classmethod - def synthetic_name(cls, targets): - return list(targets)[0].name if len(targets) > 0 else 'empty' - - @classmethod - def union(cls, targets, name=None): - name = name or (cls.synthetic_name(targets) + '-union') - with ParseContext.temp(): - return cls(name, dependencies=targets) - - @classmethod - def of(cls, target): - with ParseContext.temp(): - return cls(target.name, dependencies=[target]) - - def __init__(self, name, dependencies=None): - self.dependencies = OrderedSet(dependencies) if dependencies else OrderedSet() - self.internal_dependencies = OrderedSet() - self.interpreters = [] - self.distributions = {} # interpreter => distributions - self.chroots = {} # interpreter => chroots - super(PythonRoot, self).__init__(name) - - def closure(self): - os = OrderedSet() - for target in self.dependencies | self.internal_dependencies: - os.update(target.closure()) - return os - - def select(self, target_class): - return OrderedSet(target for target in self.closure() if isinstance(target, target_class)) - - @property - def requirements(self): - return self.select(PythonRequirement) diff --git a/src/python/twitter/pants/tasks/roots.py b/src/python/twitter/pants/tasks/roots.py deleted file mode 100644 index 3923c48f4..000000000 --- a/src/python/twitter/pants/tasks/roots.py +++ /dev/null @@ -1,18 +0,0 @@ -from twitter.pants.targets.sources import SourceRoot - -from .console_task import ConsoleTask - - -class ListRoots(ConsoleTask): - """ - List the registered source roots of the repo. - """ - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(ListRoots, cls).setup_parser(option_group, args, mkflag) - - def console_output(self, targets): - for src_root, targets in SourceRoot.all_roots().items(): - all_targets = ','.join(sorted([tgt.__name__ for tgt in targets])) - yield '%s: %s' % (src_root, all_targets or '*') diff --git a/src/python/twitter/pants/tasks/scala_repl.py b/src/python/twitter/pants/tasks/scala_repl.py deleted file mode 100644 index 9887e9455..000000000 --- a/src/python/twitter/pants/tasks/scala_repl.py +++ /dev/null @@ -1,92 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import shlex -import subprocess - -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.java.util import execute_java -from .jvm_task import JvmTask - -from . 
import Task
-
-
-class ScalaRepl(JvmTask):
-  @classmethod
-  def setup_parser(cls, option_group, args, mkflag):
-    option_group.add_option(mkflag("jvmargs"), dest = "run_jvmargs", action="append",
                            help = "Run the repl in a jvm with these extra jvm args.")
-    option_group.add_option(mkflag('args'), dest = 'run_args', action='append',
                            help = 'Run the repl in a jvm with these extra args.')
-
-  def __init__(self, context):
-    Task.__init__(self, context)
-    self.jvm_args = context.config.getlist('scala-repl', 'jvm_args', default=[])
-    if context.options.run_jvmargs:
-      for arg in context.options.run_jvmargs:
-        self.jvm_args.extend(shlex.split(arg))
-    self.confs = context.config.getlist('scala-repl', 'confs', default=['default'])
-    self._bootstrap_key = 'scala-repl'
-    bootstrap_tools = context.config.getlist('scala-repl', 'bootstrap-tools')
-    self._jvm_tool_bootstrapper.register_jvm_tool(self._bootstrap_key, bootstrap_tools)
-    self.main = context.config.get('scala-repl', 'main')
-    self.args = context.config.getlist('scala-repl', 'args', default=[])
-    if context.options.run_args:
-      for arg in context.options.run_args:
-        self.args.extend(shlex.split(arg))
-
-  def execute(self, targets):
-    # The repl session may last a while; allow concurrent pants activity during this pants idle
-    # period.
-    tools_classpath = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._bootstrap_key)
-
-    self.context.lock.release()
-    self.save_stty_options()
-
-    classpath = self.classpath(tools_classpath,
                               confs=self.confs,
                               exclusives_classpath=self.get_base_classpath_for_target(targets[0]))
-
-    print('')  # Start REPL output on a new line.
-    try:
-      execute_java(classpath=classpath,
                   main=self.main,
                   jvm_options=self.jvm_args,
                   args=self.args,
                   workunit_factory=self.context.new_workunit,
                   workunit_name='repl',
                   workunit_labels=[WorkUnit.REPL, WorkUnit.JVM])
-    except KeyboardInterrupt:
-      # TODO(John Sirois): Confirm with Steve Gury that finally does not work on mac and an
-      # explicit catch of KeyboardInterrupt is required.
-      pass
-    self.restore_stty_options()
-
-  def save_stty_options(self):
-    """
-    The scala REPL changes some stty parameters and doesn't save/restore them after
-    execution, so if you have a terminal with non-default stty options, you end
-    up with a broken terminal (you need to do a 'reset').
-    """
-    self.stty_options = self.run_cmd('stty -g 2>/dev/null')
-
-  def restore_stty_options(self):
-    self.run_cmd('stty ' + self.stty_options)
-
-  def run_cmd(self, cmd):
-    po = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
-    stdout, _ = po.communicate()
-    return stdout
diff --git a/src/python/twitter/pants/tasks/scaladoc_gen.py b/src/python/twitter/pants/tasks/scaladoc_gen.py
deleted file mode 100644
index 9f65666ae..000000000
--- a/src/python/twitter/pants/tasks/scaladoc_gen.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.tasks.jvmdoc_gen import Jvmdoc, JvmdocGen - - -scaladoc = Jvmdoc(tool_name='scaladoc', product_type='scaladoc') - - -def is_scala(target): - return target.has_sources('.scala') - - -class ScaladocGen(JvmdocGen): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - cls.generate_setup_parser(option_group, args, mkflag, scaladoc) - - def __init__(self, context, output_dir=None, confs=None, active=True): - super(ScaladocGen, self).__init__(context, scaladoc, output_dir, confs, active) - - def execute(self, targets): - self.generate_execute(targets, lambda t: t.is_scala, create_scaladoc_command) - - -def create_scaladoc_command(classpath, gendir, *targets): - sources = [] - for target in targets: - sources.extend(target.sources_relative_to_buildroot()) - - if not sources: - return None - - # TODO(John Chee): try scala.tools.nsc.ScalaDoc via ng - command = [ - 'scaladoc', - '-usejavacp', - '-classpath', ':'.join(classpath), - '-d', gendir, - ] - - command.extend(sources) - return command diff --git a/src/python/twitter/pants/tasks/scalastyle.py b/src/python/twitter/pants/tasks/scalastyle.py deleted file mode 100644 index d24b86a24..000000000 --- a/src/python/twitter/pants/tasks/scalastyle.py +++ /dev/null @@ -1,106 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import re - -from twitter.pants.base.config import Config -from twitter.pants.base.target import Target -from twitter.pants.process.xargs import Xargs -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.nailgun_task import NailgunTask - - -class Scalastyle(NailgunTask): - """Checks scala source files to ensure they're stylish. - - Scalastyle is configured via the 'scalastyle' pants.ini section. - - * ``config`` - Required path of the scalastyle configuration file. - * ``excludes`` - Optional path of an excludes file that contains - lines of regular expressions used to exclude matching files - from style checks. File names matched against these regular - expressions are relative to the repository root - (e.g.: com/twitter/mybird/MyBird.scala). 
- """ - - _CONFIG_SECTION = 'scalastyle' - _MAIN = 'org.scalastyle.Main' - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - NailgunTask.setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag("skip"), mkflag("skip", negate=True), - dest="scalastyle_skip", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Skip scalastyle.") - - def __init__(self, context): - NailgunTask.__init__(self, context) - self._scalastyle_config = self.context.config.get_required( - Scalastyle._CONFIG_SECTION, 'config') - if not os.path.exists(self._scalastyle_config): - raise Config.ConfigError( - 'Scalastyle config file does not exist: %s' % self._scalastyle_config) - - excludes_file = self.context.config.get(Scalastyle._CONFIG_SECTION, 'excludes') - self._excludes = set() - if excludes_file: - if not os.path.exists(excludes_file): - raise Config.ConfigError('Scalastyle excludes file does not exist: %s' % excludes_file) - self.context.log.debug('Using scalastyle excludes file %s' % excludes_file) - with open(excludes_file) as fh: - for pattern in fh.readlines(): - self._excludes.add(re.compile(pattern.strip())) - - self._scalastyle_bootstrap_key = 'scalastyle' - self.register_jvm_tool(self._scalastyle_bootstrap_key, [':scalastyle']) - - def execute(self, targets): - if self.context.options.scalastyle_skip: - self.context.log.debug('Skipping checkstyle.') - return - - check_targets = list() - for target in targets: - for tgt in target.resolve(): - if isinstance(tgt, Target) and tgt.has_sources('.scala'): - check_targets.append(tgt) - - def filter_excludes(filename): - if self._excludes: - for exclude in self._excludes: - if exclude.match(filename): - return False - return True - - scala_sources = list() - for target in check_targets: - def collect(filename): - if filename.endswith('.scala'): - scala_sources.append(os.path.join(target.target_base, filename)) - map(collect, filter(filter_excludes, target.sources)) - - if scala_sources: - def call(srcs): - cp = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._scalastyle_bootstrap_key) - return self.runjava(classpath=cp, - main=Scalastyle._MAIN, - args=['-c', self._scalastyle_config] + srcs) - result = Xargs(call).execute(scala_sources) - if result != 0: - raise TaskError('java %s ... exited non-zero (%i)' % (Scalastyle._MAIN, result)) diff --git a/src/python/twitter/pants/tasks/scm_publish.py b/src/python/twitter/pants/tasks/scm_publish.py deleted file mode 100644 index 0cd468ac4..000000000 --- a/src/python/twitter/pants/tasks/scm_publish.py +++ /dev/null @@ -1,101 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from . 
import Task, TaskError - - -class Semver(object): - @staticmethod - def parse(version): - components = version.split('.', 3) - if len(components) != 3: - raise ValueError - major, minor, patch = components - - def to_i(component): - try: - return int(component) - except (TypeError, ValueError): - raise ValueError('Invalid revision component %s in %s - ' - 'must be an integer' % (component, version)) - return Semver(to_i(major), to_i(minor), to_i(patch)) - - def __init__(self, major, minor, patch, snapshot=False): - self.major = major - self.minor = minor - self.patch = patch - self.snapshot = snapshot - - def bump(self): - # A bump of a snapshot discards snapshot status - return Semver(self.major, self.minor, self.patch + 1) - - def make_snapshot(self): - return Semver(self.major, self.minor, self.patch, snapshot=True) - - def version(self): - return '%s.%s.%s' % ( - self.major, - self.minor, - ('%s-SNAPSHOT' % self.patch) if self.snapshot else self.patch - ) - - def __eq__(self, other): - return self.__cmp__(other) == 0 - - def __cmp__(self, other): - diff = self.major - other.major - if not diff: - diff = self.minor - other.minor - if not diff: - diff = self.patch - other.patch - if not diff: - if self.snapshot and not other.snapshot: - diff = 1 - elif not self.snapshot and other.snapshot: - diff = -1 - else: - diff = 0 - return diff - - def __repr__(self): - return 'Semver(%s)' % self.version() - - -class ScmPublish(object): - def __init__(self, scm, restrict_push_branches): - self.restrict_push_branches = frozenset(restrict_push_branches or ()) - self.scm = scm - - def check_clean_master(self, commit=False): - if commit: - if self.restrict_push_branches: - branch = self.scm.branch_name - if branch not in self.restrict_push_branches: - raise TaskError('Can only push from %s, currently on branch: %s' % ( - ' '.join(sorted(self.restrict_push_branches)), branch - )) - - changed_files = self.scm.changed_files() - if changed_files: - raise TaskError('Can only push from a clean branch, found : %s' % ' '.join(changed_files)) - else: - print('Skipping check for a clean %s in test mode.' % self.scm.branch_name) - - def commit_push(self, coordinates): - self.scm.refresh() - self.scm.commit('pants build committing publish data for push of %s' % coordinates) - diff --git a/src/python/twitter/pants/tasks/scrooge_gen.py b/src/python/twitter/pants/tasks/scrooge_gen.py deleted file mode 100644 index 2abe87410..000000000 --- a/src/python/twitter/pants/tasks/scrooge_gen.py +++ /dev/null @@ -1,393 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from __future__ import print_function - -import hashlib -import os -import re -import tempfile - -from collections import defaultdict, namedtuple - -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_mkdir, safe_open - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_thrift_library import JavaThriftLibrary -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.nailgun_task import NailgunTask -from twitter.pants.thrift_util import ( - calculate_compile_sources, - calculate_compile_sources_HACK_FOR_SCROOGE_LEGACY) - -CompilerConfig = namedtuple('CompilerConfig', ['name', 'config_section', 'profile', - 'main', 'calc_srcs', 'langs']) - - -class Compiler(namedtuple('CompilerConfigWithContext', ('context',) + CompilerConfig._fields)): - @classmethod - def fromConfig(cls, context, config): - return cls(context, **config._asdict()) - - @property - def jvm_args(self): - args = self.context.config.getlist(self.config_section, 'jvm_args', default=[]) - args.append('-Dfile.encoding=UTF-8') - return args - - @property - def outdir(self): - pants_workdir_fallback = os.path.join(get_buildroot(), '.pants.d') - workdir_fallback = os.path.join(self.context.config.getdefault('pants_workdir', - default=pants_workdir_fallback), - self.name) - outdir = (self.context.options.scrooge_gen_create_outdir - or self.context.config.get(self.config_section, 'workdir', default=workdir_fallback)) - return os.path.relpath(outdir) - - @property - def verbose(self): - if self.context.options.scrooge_gen_quiet is not None: - return not self.context.options.scrooge_gen_quiet - else: - return self.context.config.getbool(self.config_section, 'verbose', default=False) - - @property - def strict(self): - return self.context.config.getbool(self.config_section, 'strict', default=False) - - -_COMPILERS = [ - CompilerConfig(name='scrooge', - config_section='scrooge-gen', - profile='scrooge-gen', - main='com.twitter.scrooge.Main', - calc_srcs=calculate_compile_sources, - langs=frozenset(['scala', 'java'])), - CompilerConfig(name='scrooge-legacy', - config_section='scrooge-legacy-gen', - profile='scrooge-legacy-gen', - main='com.twitter.scrooge.Main', - calc_srcs=calculate_compile_sources_HACK_FOR_SCROOGE_LEGACY, - langs=frozenset(['scala'])) -] - -_CONFIG_FOR_COMPILER = dict((compiler.name, compiler) for compiler in _COMPILERS) - -_TARGET_TYPE_FOR_LANG = dict(scala=ScalaLibrary, java=JavaLibrary) - - -class ScroogeGen(NailgunTask): - GenInfo = namedtuple('GenInfo', ['gen', 'deps']) - - class PartialCmd(namedtuple('PC', ['compiler', 'language', 'rpc_style', 'namespace_map'])): - @property - def outdir(self): - namespace_sig = None - if self.namespace_map: - sha = hashlib.sha1() - for ns_from, ns_to in sorted(self.namespace_map): - sha.update(ns_from) - sha.update(ns_to) - namespace_sig = sha.hexdigest() - output_style = '-'.join(filter(None, (self.language, self.rpc_style, namespace_sig))) - return os.path.join(self.compiler.outdir, output_style) - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("outdir"), dest="scrooge_gen_create_outdir", - help="Emit generated code in to this directory.") - 
option_group.add_option(mkflag("quiet"), dest="scrooge_gen_quiet", - action="callback", callback=mkflag.set_bool, default=None, - help="[%default] Suppress output, overrides verbose flag in pants.ini.") - - def __init__(self, context): - super(ScroogeGen, self).__init__(context) - self.compiler_for_name = dict((name, Compiler.fromConfig(context, config)) - for name, config in _CONFIG_FOR_COMPILER.items()) - - for name, compiler in self.compiler_for_name.items(): - bootstrap_tools = context.config.getlist(compiler.config_section, 'bootstrap-tools', - default=[':%s' % compiler.profile]) - self._jvm_tool_bootstrapper.register_jvm_tool(compiler.name, bootstrap_tools) - - def _tempname(self): - # don't assume the user's cwd is buildroot - pants_workdir = self.context.config.getdefault('pants_workdir') - tmp_dir = os.path.join(pants_workdir, 'tmp') - safe_mkdir(tmp_dir) - fd, path = tempfile.mkstemp(dir=tmp_dir, prefix='') - os.close(fd) - return path - - def execute(self, targets): - gentargets_by_dependee = self.context.dependents( - on_predicate=self.is_gentarget, - from_predicate=lambda t: not self.is_gentarget(t)) - - dependees_by_gentarget = defaultdict(set) - for dependee, tgts in gentargets_by_dependee.items(): - for gentarget in tgts: - dependees_by_gentarget[gentarget].add(dependee) - - partial_cmds = defaultdict(set) - gentargets = filter(self.is_gentarget, targets) - - for target in gentargets: - partial_cmd = self.PartialCmd( - compiler=self.compiler_for_name[target.compiler], - language=target.language, - rpc_style=target.rpc_style, - namespace_map=tuple(sorted(target.namespace_map.items()) if target.namespace_map else ())) - partial_cmds[partial_cmd].add(target) - - for partial_cmd, tgts in partial_cmds.items(): - gen_files_for_source = self.gen(partial_cmd, tgts) - - outdir = partial_cmd.outdir - langtarget_by_gentarget = {} - for target in tgts: - dependees = dependees_by_gentarget.get(target, []) - langtarget_by_gentarget[target] = self.createtarget(target, dependees, outdir, - gen_files_for_source) - - genmap = self.context.products.get(partial_cmd.language) - for gentarget, langtarget in langtarget_by_gentarget.items(): - genmap.add(gentarget, get_buildroot(), [langtarget]) - for dep in gentarget.internal_dependencies: - if self.is_gentarget(dep): - langtarget.update_dependencies([langtarget_by_gentarget[dep]]) - - def gen(self, partial_cmd, targets): - with self.invalidated(targets, invalidate_dependents=True) as invalidation_check: - invalid_targets = [] - for vt in invalidation_check.invalid_vts: - invalid_targets.extend(vt.targets) - - compiler = partial_cmd.compiler - import_paths, changed_srcs = compiler.calc_srcs(invalid_targets, self.is_gentarget) - outdir = partial_cmd.outdir - if changed_srcs: - args = [] - - for import_path in import_paths: - args.extend(['--import-path', import_path]) - - args.extend(['--language', partial_cmd.language]) - - for lhs, rhs in partial_cmd.namespace_map: - args.extend(['--namespace-map', '%s=%s' % (lhs, rhs)]) - - if partial_cmd.rpc_style == 'ostrich': - args.append('--finagle') - args.append('--ostrich') - elif partial_cmd.rpc_style == 'finagle': - args.append('--finagle') - - args.extend(['--dest', outdir]) - safe_mkdir(outdir) - - if not compiler.strict: - args.append('--disable-strict') - - if compiler.verbose: - args.append('--verbose') - - gen_file_map_path = os.path.relpath(self._tempname()) - args.extend(['--gen-file-map', gen_file_map_path]) - - args.extend(changed_srcs) - - classpath = 
self._jvm_tool_bootstrapper.get_jvm_tool_classpath(compiler.name) - returncode = self.runjava(classpath=classpath, - main=compiler.main, - jvm_options=compiler.jvm_args, - args=args, - workunit_name=compiler.name) - try: - if 0 == returncode: - gen_files_for_source = self.parse_gen_file_map(gen_file_map_path, outdir) - finally: - os.remove(gen_file_map_path) - - if 0 != returncode: - raise TaskError('java %s ... exited non-zero (%i)' % (compiler.main, returncode)) - self.write_gen_file_map(gen_files_for_source, invalid_targets, outdir) - - return self.gen_file_map(targets, outdir) - - def createtarget(self, gentarget, dependees, outdir, gen_files_for_source): - assert self.is_gentarget(gentarget) - - def create_target(files, deps, target_type): - return self.context.add_new_target(outdir, - target_type, - name=gentarget.id, - sources=files, - provides=gentarget.provides, - dependencies=deps, - excludes=gentarget.excludes) - - def create_geninfo(key): - compiler = self.compiler_for_name[gentarget.compiler] - gen_info = self.context.config.getdict(compiler.config_section, key, - default={'gen': key, - 'deps': {'service': [], 'structs': []}}) - gen = gen_info['gen'] - deps = dict() - for category, depspecs in gen_info['deps'].items(): - dependencies = OrderedSet() - deps[category] = dependencies - for depspec in depspecs: - dependencies.update(self.context.resolve(depspec)) - return self.GenInfo(gen, deps) - - return self._inject_target(gentarget, dependees, - create_geninfo(gentarget.language), - gen_files_for_source, - create_target) - - def _inject_target(self, target, dependees, geninfo, gen_files_for_source, create_target): - files = [] - has_service = False - for source in target.sources_relative_to_buildroot(): - services = calculate_services(source) - genfiles = gen_files_for_source[source] - has_service = has_service or services - files.extend(genfiles) - deps = OrderedSet(geninfo.deps['service' if has_service else 'structs']) - deps.update(target.dependencies) - target_type = _TARGET_TYPE_FOR_LANG[target.language] - tgt = create_target(files, deps, target_type) - tgt.derived_from = target - tgt.add_labels('codegen', 'synthetic') - for dependee in dependees: - if isinstance(dependee, InternalTarget): - dependee.update_dependencies((tgt,)) - else: - # TODO(John Sirois): rationalize targets with dependencies. 
- # JarLibrary or PythonTarget dependee on the thrift target - dependee.dependencies.add(tgt) - return tgt - - def parse_gen_file_map(self, gen_file_map_path, outdir): - d = defaultdict(set) - with open(gen_file_map_path, 'r') as deps: - for dep in deps: - src, cls = dep.strip().split('->') - src = os.path.relpath(src.strip()) - cls = os.path.relpath(cls.strip(), outdir) - d[src].add(cls) - return d - - def gen_file_map_path_for_target(self, target, outdir): - return os.path.join(outdir, 'gen-file-map-by-target', target.id) - - def gen_file_map_for_target(self, target, outdir): - gen_file_map = self.gen_file_map_path_for_target(target, outdir) - return self.parse_gen_file_map(gen_file_map, outdir) - - def gen_file_map(self, targets, outdir): - gen_file_map = defaultdict(set) - for target in targets: - target_gen_file_map = self.gen_file_map_for_target(target, outdir) - gen_file_map.update(target_gen_file_map) - return gen_file_map - - def write_gen_file_map_for_target(self, gen_file_map, target, outdir): - def calc_srcs(target): - _, srcs = calculate_compile_sources([target], self.is_gentarget) - return srcs - with safe_open(self.gen_file_map_path_for_target(target, outdir), 'w') as f: - for src in sorted(calc_srcs(target)): - clss = gen_file_map[src] - for cls in sorted(clss): - print('%s -> %s' % (src, os.path.join(outdir, cls)), file=f) - - def write_gen_file_map(self, gen_file_map, targets, outdir): - for target in targets: - self.write_gen_file_map_for_target(gen_file_map, target, outdir) - - def is_gentarget(self, target): - result = (isinstance(target, JavaThriftLibrary) - and target.compiler in self.compiler_for_name.keys()) - - if result and target.language not in self.compiler_for_name[target.compiler].langs: - raise TaskError("%s can not generate %s" % (target.compiler, target.language)) - return result - - @staticmethod - def _validate(targets): - ValidateCompilerConfig = namedtuple('ValidateCompilerConfig', ['language', 'rpc_style']) - - def compiler_config(tgt): - # Note compiler is not present in this signature. At this time - # Scrooge and the Apache thrift generators produce identical - # java sources, and the Apache generator does not produce scala - # sources. As there's no permutation allowing the creation of - # incompatible sources with the same language+rpc_style we omit - # the compiler from the signature at this time. 
- return ValidateCompilerConfig(language=tgt.language, rpc_style=tgt.rpc_style) - - mismatched_compiler_configs = defaultdict(set) - - for target in filter(lambda t: isinstance(t, JavaThriftLibrary), targets): - mycompilerconfig = compiler_config(target) - def collect(dep): - if mycompilerconfig != compiler_config(dep): - mismatched_compiler_configs[target].add(dep) - target.walk(collect, predicate=lambda t: isinstance(t, JavaThriftLibrary)) - - if mismatched_compiler_configs: - msg = ['Thrift dependency trees must be generated with a uniform compiler configuration.\n\n'] - for tgt in sorted(mismatched_compiler_configs.keys()): - msg.append('%s - %s\n' % (tgt, compiler_config(tgt))) - for dep in mismatched_compiler_configs[tgt]: - msg.append(' %s - %s\n' % (dep, compiler_config(dep))) - raise TaskError(''.join(msg)) - - -NAMESPACE_PARSER = re.compile(r'^\s*namespace\s+([^\s]+)\s+([^\s]+)\s*$') -TYPE_PARSER = re.compile(r'^\s*(const|enum|exception|service|struct|union)\s+([^\s{]+).*') - - -# TODO(John Sirois): consolidate thrift parsing to 1 pass instead of 2 -def calculate_services(source): - """Calculates the services generated for the given thrift IDL source. - Returns an interable of services - """ - - with open(source, 'r') as thrift: - namespaces = dict() - types = defaultdict(set) - for line in thrift: - match = NAMESPACE_PARSER.match(line) - if match: - lang = match.group(1) - namespace = match.group(2) - namespaces[lang] = namespace - else: - match = TYPE_PARSER.match(line) - if match: - typename = match.group(1) - name = match.group(2) - types[typename].add(name) - - return types['service'] diff --git a/src/python/twitter/pants/tasks/sorttargets.py b/src/python/twitter/pants/tasks/sorttargets.py deleted file mode 100644 index 66a30ccc5..000000000 --- a/src/python/twitter/pants/tasks/sorttargets.py +++ /dev/null @@ -1,67 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from collections import defaultdict - -from twitter.common.util import topological_sort - -from twitter.pants.base.target import Target -from twitter.pants.tasks.console_task import ConsoleTask - - -class SortTargets(ConsoleTask): - @staticmethod - def _is_target(item): - return isinstance(item, Target) - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(SortTargets, cls).setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag("reverse"), mkflag("reverse", negate=True), - dest="sort_targets_reverse", default=False, - action="callback", callback=mkflag.set_bool, - help="[%default] Sort least dependent to most.") - - def __init__(self, *args, **kwargs): - super(SortTargets, self).__init__(*args, **kwargs) - self._reverse = self.context.options.sort_targets_reverse - - def console_output(self, targets): - depmap = defaultdict(set) - - def map_deps(target): - # TODO(John Sirois): rationalize target hierarchies - this is the only 'safe' way to treat - # both python and jvm targets today. - if hasattr(target, 'dependencies'): - deps = depmap[str(target.address)] - for dep in target.dependencies: - for resolved in filter(self._is_target, dep.resolve()): - deps.add(str(resolved.address)) - - for root in self.context.target_roots: - root.walk(map_deps, self._is_target) - - tsorted = [] - for group in topological_sort(depmap): - tsorted.extend(group) - if self._reverse: - tsorted = reversed(tsorted) - - roots = set(str(root.address) for root in self.context.target_roots) - for address in tsorted: - if address in roots: - yield address diff --git a/src/python/twitter/pants/tasks/specs_run.py b/src/python/twitter/pants/tasks/specs_run.py deleted file mode 100644 index b22d5f493..000000000 --- a/src/python/twitter/pants/tasks/specs_run.py +++ /dev/null @@ -1,112 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.common.collections import OrderedSet - -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.binary_util import safe_args -from twitter.pants.java.util import execute_java -from .jvm_task import JvmTask - -from . 
import TaskError - - -class SpecsRun(JvmTask): - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag('skip'), mkflag('skip', negate=True), dest = 'specs_run_skip', - action='callback', callback=mkflag.set_bool, default=False, - help='[%default] Skip running specs') - - option_group.add_option(mkflag('debug'), mkflag('debug', negate=True), dest = 'specs_run_debug', - action='callback', callback=mkflag.set_bool, default=False, - help='[%default] Run specs with a debugger') - - option_group.add_option(mkflag('jvmargs'), dest='specs_run_jvm_options', action='append', - help='Runs specs in a jvm with these extra jvm options.') - - option_group.add_option(mkflag('test'), dest='specs_run_tests', action='append', - help='[%default] Force running of just these specs. Tests can be ' - 'specified either by fully qualified classname or ' - 'full file path.') - - option_group.add_option(mkflag('color'), mkflag('color', negate=True), - dest='specs_run_color', default=True, - action='callback', callback=mkflag.set_bool, - help='[%default] Emit test result with ANSI terminal color codes.') - - def __init__(self, context): - super(SpecsRun, self).__init__(context) - - self._specs_bootstrap_key = 'specs' - bootstrap_tools = context.config.getlist('specs-run', 'bootstrap-tools', - default=[':scala-specs-2.9.3']) - self._jvm_tool_bootstrapper.register_jvm_tool(self._specs_bootstrap_key, bootstrap_tools) - - self.confs = context.config.getlist('specs-run', 'confs', default=['default']) - - self._jvm_options = context.config.getlist('specs-run', 'jvm_args', default=[]) - if context.options.specs_run_jvm_options: - self._jvm_options.extend(context.options.specs_run_jvm_options) - if context.options.specs_run_debug: - self._jvm_options.extend(context.config.getlist('jvm', 'debug_args')) - - self.skip = context.options.specs_run_skip - self.color = context.options.specs_run_color - - self.workdir = context.config.get('specs-run', 'workdir') - - self.tests = context.options.specs_run_tests - - def execute(self, targets): - if not self.skip: - def run_tests(tests): - args = ['--color'] if self.color else [] - args.append('--specs=%s' % ','.join(tests)) - specs_runner_main = 'com.twitter.common.testing.ExplicitSpecsRunnerMain' - - bootstrapped_cp = self._jvm_tool_bootstrapper.get_jvm_tool_classpath( - self._specs_bootstrap_key) - classpath = self.classpath( - bootstrapped_cp, - confs=self.confs, - exclusives_classpath=self.get_base_classpath_for_target(targets[0])) - - result = execute_java( - classpath=classpath, - main=specs_runner_main, - jvm_options=self._jvm_options, - args=args, - workunit_factory=self.context.new_workunit, - workunit_name='specs', - workunit_labels=[WorkUnit.TEST] - ) - if result != 0: - raise TaskError('java %s ... 
exited non-zero (%i)' % (specs_runner_main, result)) - - if self.tests: - run_tests(self.tests) - else: - with safe_args(self.calculate_tests(targets)) as tests: - if tests: - run_tests(tests) - - def calculate_tests(self, targets): - tests = OrderedSet() - for target in targets: - if target.is_scala and target.is_test: - tests.update(target.sources_relative_to_buildroot()) - return tests diff --git a/src/python/twitter/pants/tasks/targets_help.py b/src/python/twitter/pants/tasks/targets_help.py deleted file mode 100644 index 3385e77c8..000000000 --- a/src/python/twitter/pants/tasks/targets_help.py +++ /dev/null @@ -1,211 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from __future__ import print_function - -import inspect -import textwrap - -from string import Template - -from twitter.pants.base.target import Target -from twitter.pants.targets.sources import SourceRoot -from twitter.pants.tasks.console_task import ConsoleTask - - -class TargetsHelp(ConsoleTask): - """Provides online help for installed targets. - - This task provides online help modes for installed targets. Without args, - all installed targets are listed with their one-line description. - An optional flag allows users to specify a target they want detailed - help about.""" - - INSTALLED_TARGETS_HEADER = '\n'.join([ - 'For details about a specific target, try: ./pants goal targets --targets-details=target_name', - 'Installed target types:\n', - ]) - - DETAILS_HEADER = Template('TARGET NAME\n\n $name -- $desc\n\nTARGET ARGUMENTS\n') - - # TODO(Travis Crawford): Eliminate this mapping once pants has been moved to Github. - # Since pants already aliases Target names, the ideal way of doing this would be: - # - # (a) Add a method to all Target objects that provides their alias, rather - # than renaming elsewhere. This way Target instances are self-contained. - # Of course, if BUILD files used a template system this would not be necessary. - # - # @classmethod - # def get_alias(cls): - # raise ValueError('subclasses must override alias name.') - # - # (b) Replace aliases with something like: - # https://cgit.twitter.biz/science/tree/src/python/twitter/pants/__init__.py#n88 - # - # to_alias = [AnnotationProcessor, ...] 
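Stepping back to the `SortTargets` task a few hunks above: it builds a `target -> dependencies` map and hands it to `twitter.common.util.topological_sort`, which yields groups of mutually independent nodes. A minimal Kahn-style stand-in under that assumption, with invented target addresses (each dependency must also appear as a key):

```python
from __future__ import print_function

def topological_sort(depmap):
  # depmap: node -> set of nodes it depends on.
  pending = dict((node, set(deps)) for node, deps in depmap.items())
  while pending:
    ready = set(node for node, deps in pending.items() if not deps)
    if not ready:
      raise ValueError('dependency cycle among: %s' % sorted(pending))
    yield ready
    pending = dict((node, deps - ready)
                   for node, deps in pending.items() if node not in ready)

depmap = {
  'src/java:app': set(['src/java:lib', 'src/java:util']),
  'src/java:lib': set(['src/java:util']),
  'src/java:util': set(),  # leaf: depends on nothing
}
for group in topological_sort(depmap):
  print(sorted(group))
# ['src/java:util'] then ['src/java:lib'] then ['src/java:app']
```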
- # for t in to_alias: - # vars()[t.get_alias()] = t - TARGET_TO_ALIAS = { - 'AnnotationProcessor': 'annotation_processor', - 'Artifact': 'artifact', - 'Benchmark': 'benchmark', - 'Bundle': 'bundle', - 'Credentials': 'credentials', - 'JarLibrary': 'dependencies', - 'PythonEgg': 'egg', - 'Exclude': 'exclude', - 'Pants': 'fancy_pants', - 'JarDependency': 'jar', - 'JavaLibrary': 'java_library', - 'JavaAntlrLibrary': 'java_antlr_library', - 'JavaProtobufLibrary': 'java_protobuf_library', - 'JavaTests': 'junit_tests', - 'JavaThriftLibrary': 'java_thrift_library', - 'JavaThriftstoreDMLLibrary': 'java_thriftstore_dml_library', - 'JvmBinary': 'jvm_binary', - 'JvmApp': 'jvm_app', - # For testing. When targets define their own alias (or we use a template - # system for BUILD files) this need to register targets goes away. - 'MyTarget': 'my_target', - 'OinkQuery': 'oink_query', - 'Page': 'page', - 'PythonArtifact': 'python_artifact', - 'PythonBinary': 'python_binary', - 'PythonLibrary': 'python_library', - 'PythonAntlrLibrary': 'python_antlr_library', - 'PythonRequirement': 'python_requirement', - 'PythonThriftLibrary': 'python_thrift_library', - 'PythonTests': 'python_tests', - 'PythonTestSuite': 'python_test_suite', - 'Repository': 'repo', - 'Resources': 'resources', - 'ScalaLibrary': 'scala_library', - 'ScalaTests': 'scala_specs', - 'ScalacPlugin': 'scalac_plugin', - 'SourceRoot': 'source_root', - 'ThriftJar': 'thrift_jar', - 'ThriftLibrary': 'thrift_library', - 'Wiki': 'wiki', - } - - ALIAS_TO_TARGET = {} - MAX_ALIAS_LEN = 0 - - for k, v in TARGET_TO_ALIAS.items(): - ALIAS_TO_TARGET[v] = k - MAX_ALIAS_LEN = max(MAX_ALIAS_LEN, len(v)) - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(TargetsHelp, cls).setup_parser(option_group, args, mkflag) - option_group.add_option(mkflag("details"), dest="goal_targets_details", default=None, - help='Display detailed information about the specific target type.') - - def console_output(self, targets): - """Display a list of installed target types, or details about a specific target type.""" - target_types = {} - for target_type in SourceRoot._ROOTS_BY_TYPE.keys(): - target_types[target_type.__name__] = target_type - - if self.context.options.goal_targets_details is None: - return self._get_installed_targets(target_types) - else: - return self._get_details(target_types[self.ALIAS_TO_TARGET[self.context.options.goal_targets_details]]) - - @staticmethod - def _get_arg_help(docstring): - """Given a docstring, return a map of arg to help string. - - Pants target constructor docstrings should document arguments as follows. - Note constructor docstrings only document arguments. All documentation about - the class itself belong in the class docstring. - - myarg: the description - anotherarg: this description is continued - on the next line""" - arg_help = {} - - if docstring is None: - return arg_help - - last = None - import re - for line in docstring.split('\n'): - if line == '': - continue - match = re.search('^\s*:param[\w ]* (\w+):\s(.*)$', line) - if match: - last = match.group(1) - arg_help[last] = match.group(2) - else: - arg_help[last] += ' %s' % line.strip() - return arg_help - - @staticmethod - def _get_installed_targets(target_types): - """List installed targets and their one-line description.""" - lines = [TargetsHelp.INSTALLED_TARGETS_HEADER] - for target_type in sorted(target_types.keys()): - if target_types[target_type].__doc__ is None: - desc = 'Description unavailable.' 
- else: - desc = target_types[target_type].__doc__.split('\n')[0] - lines.append(' %s: %s' % ( - TargetsHelp.TARGET_TO_ALIAS[target_type].rjust(TargetsHelp.MAX_ALIAS_LEN), desc)) - return lines - - @staticmethod - def _get_details(target): - """Get detailed help for the given target.""" - assert target is not None and issubclass(target, Target) - - arg_spec = inspect.getargspec(target.__init__) - arg_help = TargetsHelp._get_arg_help(target.__init__.__doc__) - - min_default_idx = 0 - if arg_spec.defaults is None: - min_default_idx = len(arg_spec.args) - elif len(arg_spec.args) > len(arg_spec.defaults): - min_default_idx = len(arg_spec.args) - len(arg_spec.defaults) - - lines = [TargetsHelp.DETAILS_HEADER.substitute( - name=TargetsHelp.TARGET_TO_ALIAS[target.__name__], desc=target.__doc__)] - - max_width = 0 - for arg in arg_spec.args: - max_width = max(max_width, len(arg)) - - wrapper = textwrap.TextWrapper(subsequent_indent=' '*(max_width+4)) - - for idx, val in enumerate(arg_spec.args): - has_default = False - default_val = None - - if idx >= min_default_idx: - has_default = True - default_val = arg_spec.defaults[idx-min_default_idx] - - if val == 'self': - continue - help_str = 'No help available for this argument.' - try: - help_str = arg_help[val] - except KeyError: - pass - if has_default: - help_str += ' (default: %s) ' % str(default_val) - lines.append(' %s: %s' % (val.rjust(max_width), '\n'.join(wrapper.wrap(help_str)))) - return lines diff --git a/src/python/twitter/pants/tasks/task.py b/src/python/twitter/pants/tasks/task.py deleted file mode 100644 index 610fafba8..000000000 --- a/src/python/twitter/pants/tasks/task.py +++ /dev/null @@ -1,429 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
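The `min_default_idx` arithmetic in `_get_details` relies on the `inspect.getargspec` invariant that defaults align with the tail of the argument list. In isolation, with an invented constructor signature (`getargspec` is the Python 2 era API; `inspect.signature` replaces it on modern Pythons):

```python
from __future__ import print_function

import inspect

def sample(self, name, sources=None, dependencies=()):  # invented signature
  pass

spec = inspect.getargspec(sample)
# Defaults pair with the *last* len(defaults) args.
min_default_idx = len(spec.args) - len(spec.defaults or ())
for idx, arg in enumerate(spec.args):
  if arg == 'self':
    continue
  if idx >= min_default_idx:
    print('%s (default: %r)' % (arg, spec.defaults[idx - min_default_idx]))
  else:
    print('%s (required)' % arg)
# name (required), sources (default: None), dependencies (default: ())
```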
-# ================================================================================================== - -from collections import defaultdict - -import itertools -import os -import shutil -import sys -import threading - -from contextlib import contextmanager - -from twitter.common.collections.orderedset import OrderedSet - -from twitter.pants.base.build_invalidator import BuildInvalidator, CacheKeyGenerator -from twitter.pants.base.config import Config -from twitter.pants.base.hash_utils import hash_file -from twitter.pants.base.worker_pool import Work -from twitter.pants.base.workunit import WorkUnit -from twitter.pants.cache.cache_setup import create_artifact_cache -from twitter.pants.cache.read_write_artifact_cache import ReadWriteArtifactCache -from twitter.pants.ivy.bootstrapper import Bootstrapper # XXX -from twitter.pants.java.executor import Executor # XXX -from twitter.pants.reporting.reporting_utils import items_to_report_element - -from .jvm_tool_bootstrapper import JvmToolBootstrapper # XXX -from .cache_manager import CacheManager, InvalidationCheck, VersionedTargetSet -from .ivy_utils import IvyUtils # XXX -from .task_error import TaskError - - -class Task(object): - # Protect writes to the global map of jar path -> symlinks to that jar. - symlink_map_lock = threading.Lock() - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - """Set up the cmd-line parser. - - Subclasses can add flags to the pants command line using the given option group. - Flag names should be created with mkflag([name]) to ensure flags are properly name-spaced - amongst other tasks. - """ - - def __init__(self, context): - self.context = context - self.dry_run = self.can_dry_run() and context.options.dry_run - self._pants_workdir = self.context.config.getdefault('pants_workdir') - self._cache_key_generator = CacheKeyGenerator( - context.config.getdefault('cache_key_gen_version', default=None)) - self._read_artifact_cache_spec = None - self._write_artifact_cache_spec = None - self._artifact_cache = None - self._artifact_cache_setup_lock = threading.Lock() - - default_invalidator_root = os.path.join(self.context.config.getdefault('pants_workdir'), - 'build_invalidator') - self._build_invalidator_dir = os.path.join( - context.config.get('tasks', 'build_invalidator', default=default_invalidator_root), - self.product_type()) - self._jvm_tool_bootstrapper = JvmToolBootstrapper(self.context.products) - - def register_jvm_tool(self, key, target_addrs): - self._jvm_tool_bootstrapper.register_jvm_tool(key, target_addrs) - - def tool_classpath(self, key, executor=None): - return self._jvm_tool_bootstrapper.get_jvm_tool_classpath(key, executor) - - def lazy_tool_classpath(self, key, executor=None): - return self._jvm_tool_bootstrapper.get_lazy_jvm_tool_classpath(key, executor) - - def setup_artifact_cache_from_config(self, config_section=None): - """Subclasses can call this in their __init__() to set up artifact caching for that task type. - - Uses standard config file keys to find the cache spec. - The cache is created lazily, as needed. - """ - section = config_section or Config.DEFAULT_SECTION - read_spec = self.context.config.getlist(section, 'read_artifact_caches', default=[]) - write_spec = self.context.config.getlist(section, 'write_artifact_caches', default=[]) - self.setup_artifact_cache(read_spec, write_spec) - - def setup_artifact_cache(self, read_spec, write_spec): - """Subclasses can call this in their __init__() to set up artifact caching for that task type. 
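Further down, `invalidated()` folds `hash_file(...)` digests of the paths from `invalidate_for_files()` into the cache key, so swapping out a prebuilt tool (e.g. a thrift compiler binary) invalidates targets even when no source changed. A sketch of that kind of content fingerprint; the real `hash_file` lives in `twitter.pants.base.hash_utils` and may differ in detail:

```python
import hashlib

def hash_file(path, chunk_size=65536):
  # Chunked sha1 over file contents; any change to the file changes the key.
  digest = hashlib.sha1()
  with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(chunk_size), b''):
      digest.update(chunk)
  return digest.hexdigest()
```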
- - See docstring for pants.cache.create_artifact_cache() for details on the spec format. - The cache is created lazily, as needed. - - """ - self._read_artifact_cache_spec = read_spec - self._write_artifact_cache_spec = write_spec - - def _create_artifact_cache(self, spec, action): - if len(spec) > 0: - pants_workdir = self.context.config.getdefault('pants_workdir') - my_name = self.__class__.__name__ - return create_artifact_cache(self.context.log, pants_workdir, spec, my_name, action) - else: - return None - - def get_artifact_cache(self): - with self._artifact_cache_setup_lock: - if (self._artifact_cache is None - and (self._read_artifact_cache_spec or self._write_artifact_cache_spec)): - self._artifact_cache = ReadWriteArtifactCache( - self._create_artifact_cache(self._read_artifact_cache_spec, 'will read from'), - self._create_artifact_cache(self._write_artifact_cache_spec, 'will write to')) - return self._artifact_cache - - def artifact_cache_reads_enabled(self): - return bool(self._read_artifact_cache_spec) and self.context.options.read_from_artifact_cache - - def artifact_cache_writes_enabled(self): - return bool(self._write_artifact_cache_spec) and self.context.options.write_to_artifact_cache - - def product_type(self): - """Set the product type for this task. - - By default, each task is considered as creating a unique product type. - Subclasses can override this to specify a shared product type, e.g., 'classes'. - - Tasks with the same product type can invalidate each other's targets, e.g., if a ScalaLibrary - depends on a JavaLibrary, a change to the JavaLibrary will invalidate the ScalaLibrary because - they both have the same product type. - """ - return self.__class__.__name__ - - def can_dry_run(self): - """Subclasses can override this to indicate that they respect the --dry-run flag. - - It's the subclass task's responsibility to do the right thing if this flag is set. - - Note that tasks such as codegen and ivy resolution cannot dry-run, because subsequent - cache key computation will fail on missing sources/external deps. - """ - return False - - def execute(self, targets): - """Executes this task against targets, which may be a subset of the current context targets.""" - raise TaskError('execute() not implemented') - - def invalidate_for(self): - """Provides extra objects that participate in invalidation. - - Subclasses can override and return an object that should be checked for changes when - managing target invalidation. If the pickled form of returned object changes - between runs all targets will be invalidated. - """ - return None - - def invalidate_for_files(self): - """Provides extra files that participate in invalidation. - - Subclasses can override and return a list of full paths to extra, non-source files that should - be checked for changes when managing target invalidation. This is useful for tracking - changes to pre-built build tools, e.g., the thrift compiler. - """ - return [] - - def invalidate(self): - """Invalidates all targets for this task.""" - BuildInvalidator(self._build_invalidator_dir).force_invalidate_all() - - @contextmanager - def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False, - partition_size_hint=sys.maxint, silent=False): - """Checks targets for invalidation, first checking the artifact cache. - Subclasses call this to figure out what to work on. - - targets: The targets to check for changes. - only_buildfiles: If True, then only the target's BUILD files are checked for changes, not - its sources. 
- invalidate_dependents: If True then any targets depending on changed targets are invalidated. - partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets - containing roughly this number of source files, if possible. Set to - sys.maxint for a single VersionedTargetSet. Set to 0 for one - VersionedTargetSet per target. It is up to the caller to do the right - thing with whatever partitioning it asks for. - - Yields an InvalidationCheck object reflecting the (partitioned) targets. - - If no exceptions are thrown by work in the block, the build cache is updated for the targets. - Note: the artifact cache is not updated. That must be done manually. - """ - extra_data = [self.invalidate_for()] - - for f in self.invalidate_for_files(): - extra_data.append(hash_file(f)) - - cache_manager = CacheManager(self._cache_key_generator, - self._build_invalidator_dir, - invalidate_dependents, - extra_data, - only_externaldeps=only_buildfiles) - - invalidation_check = cache_manager.check(targets, partition_size_hint) - - if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled(): - with self.context.new_workunit('cache'): - cached_vts, uncached_vts = \ - self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check)) - if cached_vts: - cached_targets = [vt.target for vt in cached_vts] - for t in cached_targets: - self.context.run_tracker.artifact_cache_stats.add_hit('default', t) - if not silent: - self._report_targets('Using cached artifacts for ', cached_targets, '.') - if uncached_vts: - uncached_targets = [vt.target for vt in uncached_vts] - for t in uncached_targets: - self.context.run_tracker.artifact_cache_stats.add_miss('default', t) - if not silent: - self._report_targets('No cached artifacts for ', uncached_targets, '.') - # Now that we've checked the cache, re-partition whatever is still invalid. - invalidation_check = \ - InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint) - - if not silent: - targets = [] - sources = [] - num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned) - for vt in invalidation_check.invalid_vts_partitioned: - targets.extend(vt.targets) - sources.extend(vt.cache_key.sources) - if len(targets): - msg_elements = ['Invalidated ', - items_to_report_element([t.address.reference() for t in targets], 'target')] - if len(sources) > 0: - msg_elements.append(' containing ') - msg_elements.append(items_to_report_element(sources, 'source file')) - if num_invalid_partitions > 1: - msg_elements.append(' in %d target partitions' % num_invalid_partitions) - msg_elements.append('.') - self.context.log.info(*msg_elements) - - # Yield the result, and then mark the targets as up to date. - yield invalidation_check - if not self.dry_run: - for vt in invalidation_check.invalid_vts: - vt.update() # In case the caller doesn't update. - - def check_artifact_cache_for(self, invalidation_check): - """Decides which VTS to check the artifact cache for. - - By default we check for each invalid target. Can be overridden, e.g., to - instead check only for a single artifact for the entire target set. - """ - return invalidation_check.invalid_vts - - def check_artifact_cache(self, vts): - """Checks the artifact cache for the specified list of VersionedTargetSets. - - Returns a pair (cached, uncached) of VersionedTargets that were - satisfied/unsatisfied from the cache. 
- """ - return self.do_check_artifact_cache(vts) - - def do_check_artifact_cache(self, vts, post_process_cached_vts=None): - """Checks the artifact cache for the specified list of VersionedTargetSets. - - Returns a pair (cached, uncached) of VersionedTargets that were - satisfied/unsatisfied from the cache. - """ - if not vts: - return [], [] - - cached_vts = [] - uncached_vts = OrderedSet(vts) - - with self.context.new_workunit(name='check', labels=[WorkUnit.MULTITOOL]) as parent: - res = self.context.submit_foreground_work_and_wait( - Work(lambda vt: bool(self.get_artifact_cache().use_cached_files(vt.cache_key)), - [(vt, ) for vt in vts], 'fetch'), workunit_parent=parent) - for vt, was_in_cache in zip(vts, res): - if was_in_cache: - cached_vts.append(vt) - uncached_vts.discard(vt) - # Note that while the input vts may represent multiple targets (for tasks that overrride - # check_artifact_cache_for), the ones we return must represent single targets. - def flatten(vts): - return list(itertools.chain.from_iterable([vt.versioned_targets for vt in vts])) - all_cached_vts, all_uncached_vts = flatten(cached_vts), flatten(uncached_vts) - if post_process_cached_vts: - post_process_cached_vts(all_cached_vts) - for vt in all_cached_vts: - vt.update() - return all_cached_vts, all_uncached_vts - - def update_artifact_cache(self, vts_artifactfiles_pairs): - """Write to the artifact cache, if we're configured to. - - vts_artifactfiles_pairs - a list of pairs (vts, artifactfiles) where - - vts is single VersionedTargetSet. - - artifactfiles is a list of absolute paths to artifacts for the VersionedTargetSet. - """ - update_artifact_cache_work = self.get_update_artifact_cache_work(vts_artifactfiles_pairs) - if update_artifact_cache_work: - self.context.submit_background_work_chain([update_artifact_cache_work], - parent_workunit_name='cache') - - def get_update_artifact_cache_work(self, vts_artifactfiles_pairs, cache=None): - """Create a Work instance to update the artifact cache, if we're configured to. - - vts_artifactfiles_pairs - a list of pairs (vts, artifactfiles) where - - vts is single VersionedTargetSet. - - artifactfiles is a list of paths to artifacts for the VersionedTargetSet. - """ - cache = cache or self.get_artifact_cache() - if cache: - if len(vts_artifactfiles_pairs) == 0: - return None - # Do some reporting. - targets = set() - for vts, _ in vts_artifactfiles_pairs: - targets.update(vts.targets) - self._report_targets('Caching artifacts for ', list(targets), '.') - # Cache the artifacts. - args_tuples = [] - for vts, artifactfiles in vts_artifactfiles_pairs: - if self.context.options.verify_artifact_cache: - pass # TODO: Verify that the artifact we just built is identical to the cached one? 
- args_tuples.append((vts.cache_key, artifactfiles)) - return Work(lambda *args: cache.insert(*args), args_tuples, 'insert') - else: - return None - - def _report_targets(self, prefix, targets, suffix): - self.context.log.info( - prefix, - items_to_report_element([t.address.reference() for t in targets], 'target'), - suffix) - - def ivy_resolve(self, targets, executor=None, symlink_ivyxml=False, silent=False, - workunit_name=None, workunit_labels=None): - - if executor and not isinstance(executor, Executor): - raise ValueError('The executor must be an Executor instance, given %s of type %s' - % (executor, type(executor))) - ivy = Bootstrapper.default_ivy(java_executor=executor, - bootstrap_workunit_factory=self.context.new_workunit) - - targets = set(targets) - - if not targets: - return [] - - work_dir = self.context.config.get('ivy-resolve', 'workdir') - ivy_utils = IvyUtils(config=self.context.config, - options=self.context.options, - log=self.context.log) - - with self.invalidated(targets, - only_buildfiles=True, - invalidate_dependents=True, - silent=silent) as invalidation_check: - global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts) - target_workdir = os.path.join(work_dir, global_vts.cache_key.hash) - target_classpath_file = os.path.join(target_workdir, 'classpath') - raw_target_classpath_file = target_classpath_file + '.raw' - raw_target_classpath_file_tmp = raw_target_classpath_file + '.tmp' - # A common dir for symlinks into the ivy2 cache. This ensures that paths to jars - # in artifact-cached analysis files are consistent across systems. - # Note that we have one global, well-known symlink dir, again so that paths are - # consistent across builds. - symlink_dir = os.path.join(work_dir, 'jars') - - # Note that it's possible for all targets to be valid but for no classpath file to exist at - # target_classpath_file, e.g., if we previously built a superset of targets. - if invalidation_check.invalid_vts or not os.path.exists(raw_target_classpath_file): - args = ['-cachepath', raw_target_classpath_file_tmp] - - def exec_ivy(): - ivy_utils.exec_ivy( - target_workdir=target_workdir, - targets=targets, - args=args, - ivy=ivy, - workunit_name='ivy', - workunit_factory=self.context.new_workunit, - symlink_ivyxml=symlink_ivyxml) - - if workunit_name: - with self.context.new_workunit(name=workunit_name, labels=workunit_labels or []): - exec_ivy() - else: - exec_ivy() - - if not os.path.exists(raw_target_classpath_file_tmp): - raise TaskError('Ivy failed to create classpath file at %s' - % raw_target_classpath_file_tmp) - shutil.move(raw_target_classpath_file_tmp, raw_target_classpath_file) - - if self.artifact_cache_writes_enabled(): - self.update_artifact_cache([(global_vts, [raw_target_classpath_file])]) - - # Make our actual classpath be symlinks, so that the paths are uniform across systems. - # Note that we must do this even if we read the raw_target_classpath_file from the artifact - # cache. If we cache the target_classpath_file we won't know how to create the symlinks. 
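Note the `.tmp` dance around `raw_target_classpath_file` in `ivy_resolve` below: Ivy writes to a temp sibling, and the result is only `shutil.move`d into place once the resolve succeeds, so a crashed run never leaves a truncated classpath file behind. The write-then-rename pattern in isolation (path and contents invented):

```python
import os
import shutil
import tempfile

def write_atomically(path, lines):
  fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
  try:
    with os.fdopen(fd, 'w') as f:
      f.write('\n'.join(lines))
    shutil.move(tmp, path)  # a rename when src and dst share a filesystem
  finally:
    if os.path.exists(tmp):  # only true on failure; move consumes tmp
      os.remove(tmp)

write_atomically('classpath.example', ['/cache/jars/a.jar', '/cache/jars/b.jar'])
```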
- symlink_map = IvyUtils.symlink_cachepath(self.context.ivy_home, raw_target_classpath_file, - symlink_dir, target_classpath_file) - with Task.symlink_map_lock: - all_symlinks_map = self.context.products.get_data('symlink_map') or defaultdict(list) - for path, symlink in symlink_map.items(): - all_symlinks_map[os.path.realpath(path)].append(symlink) - self.context.products.safe_create_data('symlink_map', lambda: all_symlinks_map) - - with IvyUtils.cachepath(target_classpath_file) as classpath: - stripped_classpath = [path.strip() for path in classpath] - return [path for path in stripped_classpath if ivy_utils.is_classpath_artifact(path)] - - def get_workdir(self, section="default", key="workdir", workdir=None): - return self.context.config.get(section, - key, - default=os.path.join(self._pants_workdir, - workdir or self.__class__.__name__.lower())) diff --git a/src/python/twitter/pants/tasks/task_error.py b/src/python/twitter/pants/tasks/task_error.py deleted file mode 100644 index 6f698cd11..000000000 --- a/src/python/twitter/pants/tasks/task_error.py +++ /dev/null @@ -1,17 +0,0 @@ -from twitter.pants.base.build_manual import manual - - -@manual.builddict() -class TaskError(Exception): - """ - Raised to indicate a task has failed. - - :param int exit_code: an optional exit code (1, by default) - """ - def __init__(self, *args, **kwargs): - self._exit_code = kwargs.pop('exit_code', 1) - super(TaskError, self).__init__(*args, **kwargs) - - @property - def exit_code(self): - return self._exit_code diff --git a/src/python/twitter/pants/tasks/templates/builddictionary/goals_reference.mustache b/src/python/twitter/pants/tasks/templates/builddictionary/goals_reference.mustache deleted file mode 100644 index dd9b8a740..000000000 --- a/src/python/twitter/pants/tasks/templates/builddictionary/goals_reference.mustache +++ /dev/null @@ -1,44 +0,0 @@ -*************** -Goals Reference -*************** - -This page documents ``goals``, actions that can be taken on build targets. -A goal identifies some high-level goal that you wish to accomplish, such as -compiling your code. - -A goal depends on other goals that must happen first; for example, Pants must -compile code before it can run tests. - -Each goal is implemented by one or more Tasks. For example, the -``JavaCompile`` task compiles java code, and its installed in the ``compile`` -goal. - - -**Goals and Tasks summary** - -{{#phases}} - * :ref:`gref_phase_{{phase.name}}` - {{phase.description}} -{{/phases}} - -{{#phases}} - -.. _gref_phase_{{phase.name}}: - -{{phase.name}} ------------------------------------------------------------------------------ - -{{#phase.description}} -{{phase.description}} -{{/phase.description}} - -**Installed tasks:** - -{{#goals}} -* Implemented by class ``{{name}}`` - -{{#doc}} - {{{doc}}} -{{/doc}} - -{{/goals}} -{{/phases}} diff --git a/src/python/twitter/pants/tasks/templates/builddictionary/page.mustache b/src/python/twitter/pants/tasks/templates/builddictionary/page.mustache deleted file mode 100644 index 815275f81..000000000 --- a/src/python/twitter/pants/tasks/templates/builddictionary/page.mustache +++ /dev/null @@ -1,53 +0,0 @@ -BUILD Dictionary -==================== - -A ``BUILD`` file defines one or more *build targets*. -A build target might create a jar or a deployable package, -run some tests or perform whatever actions you might associate -with the ant concept of a build target. This page describes the -commands and values available. Many of these define ``BUILD`` -targets; some are handy utilities. 
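The `TaskError` deleted just above threads an optional exit code through `**kwargs`, popping it before `Exception` sees it, so existing `raise TaskError('msg')` call sites keep working unchanged. A local copy with invented usage, for illustration only:

```python
from __future__ import print_function

class TaskError(Exception):
  def __init__(self, *args, **kwargs):
    self._exit_code = kwargs.pop('exit_code', 1)  # default matches the original
    super(TaskError, self).__init__(*args, **kwargs)

  @property
  def exit_code(self):
    return self._exit_code

try:
  raise TaskError('ivy resolve failed', exit_code=8)  # invented failure
except TaskError as e:
  print('%s (exit %d)' % (e, e.exit_code))  # ivy resolve failed (exit 8)
```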
- -{{#tocs}} -**{{t}}** | - {{#e}} -`{{.}}`_ | - {{/e}} - -{{/tocs}} - -{{#defns}} -.. _bdict_{{nom}}: - -{{nom}} -------------------------------------------------------------------------------- - -{{#argspec}} -.. py:function:: {{nom}}{{argspec}} -{{/argspec}} - -{{#classdoc}} -{{{classdoc}}} -{{/classdoc}} - -{{#msg_rst}} -{{{msg_rst}}} -{{/msg_rst}} - -{{#funcdoc}} -{{{funcdoc}}} -{{/funcdoc}} - -{{#showmethods}} -**Methods:** -{{#methods}} - - .. py:method:: {{nom}}{{argspec}} - -{{{funcdoc}}} -{{/methods}} -{{/showmethods}} - -{{/defns}} - - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/classpath-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/classpath-3.7.mustache deleted file mode 100644 index 369763165..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/classpath-3.7.mustache +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - {{#classpath.scala?}} - - {{/classpath.scala?}} - - - - {{#classpath.sourcepaths}} - - {{/classpath.sourcepaths}} - - {{#classpath.libs}} - - {{/classpath.libs}} - - - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/debug-launcher-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/debug-launcher-3.7.mustache deleted file mode 100644 index bb41efd4a..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/debug-launcher-3.7.mustache +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/factorypath-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/factorypath-3.7.mustache deleted file mode 100644 index 9003a4fcc..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/factorypath-3.7.mustache +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - {{#factorypath.jarpaths}} - - {{/factorypath.jarpaths}} - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/org.eclipse.jdt.core.prefs-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/org.eclipse.jdt.core.prefs-3.7.mustache deleted file mode 100644 index 14708601a..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/org.eclipse.jdt.core.prefs-3.7.mustache +++ /dev/null @@ -1,292 +0,0 @@ -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform={{project.java.jdk}} -org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve -org.eclipse.jdt.core.compiler.compliance={{project.java.language_level}} -org.eclipse.jdt.core.compiler.debug.lineNumber=generate -org.eclipse.jdt.core.compiler.debug.localVariable=generate -org.eclipse.jdt.core.compiler.debug.sourceFile=generate -org.eclipse.jdt.core.compiler.problem.assertIdentifier=error -org.eclipse.jdt.core.compiler.problem.enumIdentifier=error -org.eclipse.jdt.core.compiler.source={{project.java.jdk}} -org.eclipse.jdt.core.formatter.align_type_members_on_columns=false -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16 -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=16 -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16 -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16 -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16 -org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16 -org.eclipse.jdt.core.formatter.alignment_for_assignment=16 
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16 -org.eclipse.jdt.core.formatter.alignment_for_compact_if=16 -org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80 -org.eclipse.jdt.core.formatter.alignment_for_enum_constants=16 -org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16 -org.eclipse.jdt.core.formatter.alignment_for_method_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16 -org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_resources_in_try=80 -org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16 -org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16 -org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch=16 -org.eclipse.jdt.core.formatter.blank_lines_after_imports=1 -org.eclipse.jdt.core.formatter.blank_lines_after_package=1 -org.eclipse.jdt.core.formatter.blank_lines_before_field=0 -org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0 -org.eclipse.jdt.core.formatter.blank_lines_before_imports=1 -org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1 -org.eclipse.jdt.core.formatter.blank_lines_before_method=1 -org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1 -org.eclipse.jdt.core.formatter.blank_lines_before_package=0 -org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1 -org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1 -org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line -org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line -org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=true -org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false -org.eclipse.jdt.core.formatter.comment.format_block_comments=true -org.eclipse.jdt.core.formatter.comment.format_header=true -org.eclipse.jdt.core.formatter.comment.format_html=true -org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true -org.eclipse.jdt.core.formatter.comment.format_line_comments=true -org.eclipse.jdt.core.formatter.comment.format_source_code=true -org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true 
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=false -org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert -org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=do not insert -org.eclipse.jdt.core.formatter.comment.line_length=100 -org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true -org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true -org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false -org.eclipse.jdt.core.formatter.compact_else_if=true -org.eclipse.jdt.core.formatter.continuation_indentation=4 -org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=4 -org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off -org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on -org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false -org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true -org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true -org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true -org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true -org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true -org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true -org.eclipse.jdt.core.formatter.indent_empty_lines=false -org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true -org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true -org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true -org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true -org.eclipse.jdt.core.formatter.indentation.size=2 -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert -org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=insert -org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert 
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert -org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert -org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert -org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert -org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert -org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert -org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert -org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert -org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert -org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert -org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert -org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert -org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert -org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=do not insert 
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert -org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert -org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert -org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources=insert -org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert -org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert -org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert 
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert -org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert 
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try=insert -org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert -org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert -org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert -org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert -org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources=do not insert -org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert 
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert -org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert -org.eclipse.jdt.core.formatter.join_lines_in_comments=true -org.eclipse.jdt.core.formatter.join_wrapped_lines=false -org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false -org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false -org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false -org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false -org.eclipse.jdt.core.formatter.lineSplit=100 -org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false -org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false -org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0 -org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1 -org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true -org.eclipse.jdt.core.formatter.tabulation.char=space -org.eclipse.jdt.core.formatter.tabulation.size=2 -org.eclipse.jdt.core.formatter.use_on_off_tags=false -org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=true -org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true -org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch=true -org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/project-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/project-3.7.mustache deleted file mode 100644 index 37ecd600b..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/project-3.7.mustache +++ /dev/null @@ -1,75 +0,0 @@ - - - - - - - {{project.name}} - - - - - {{! For whatever reason in a mixed scala/java project, the only jvm builder must be the scala - builder circa eclipse 3.6}} - {{#project.scala?}} - - org.scala-ide.sdt.core.scalabuilder - - - - {{/project.scala?}} - {{^project.scala?}} - - org.eclipse.jdt.core.javabuilder - - - - {{/project.scala?}} - {{#project.python?}} - - org.python.pydev.PyDevBuilder - - - - {{/project.python?}} - - - {{#project.scala?}} - org.scala-ide.sdt.core.scalanature - {{/project.scala?}} - org.eclipse.jdt.core.javanature - {{#project.python?}} - org.python.pydev.pythonNature - {{/project.python?}} - - - {{#project.source_bases?}} - - {{#project.source_bases}} - - {{id}} - {{! TODO(John Sirois): What does 2 mean? 
- find out and document.}} - 2 - {{path}} - - {{/project.source_bases}} - - {{/project.source_bases?}} - - diff --git a/src/python/twitter/pants/tasks/templates/eclipse/pydevproject-3.7.mustache b/src/python/twitter/pants/tasks/templates/eclipse/pydevproject-3.7.mustache deleted file mode 100644 index 904955e16..000000000 --- a/src/python/twitter/pants/tasks/templates/eclipse/pydevproject-3.7.mustache +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - {{! TODO(John Sirois): support python project setups with interpreter of choice.}} - Default - python 2.6 - - {{#project.pythonpaths}} - {{#includes?}} - {{#includes}} - /{{project.name}}/{{base}}/{{.}} - {{/includes}} - {{/includes?}} - {{^includes?}} - /{{project.name}}/{{base}} - {{/includes?}} - {{/project.pythonpaths}} - - diff --git a/src/python/twitter/pants/tasks/templates/idea/module-12.mustache b/src/python/twitter/pants/tasks/templates/idea/module-12.mustache deleted file mode 100644 index 38940ba84..000000000 --- a/src/python/twitter/pants/tasks/templates/idea/module-12.mustache +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - {{#module.python?}} - - {{/module.python?}} - - {{#module.scala?}} - - - - - {{/module.scala?}} - - {{#module.bash?}} - - {{/module.bash?}} - - - - - - {{#module.content_roots}} - {{#sources}} - {{#package_prefix}} - - {{/package_prefix}} - {{^package_prefix}} - - {{/package_prefix}} - {{/sources}} - {{/module.content_roots}} - {{#module.content_roots}} - {{#exclude_paths}} - - {{/exclude_paths}} - {{/module.content_roots}} - - - - - - - - - - - - {{#module.scala?}} - - {{/module.scala?}} - - - - {{#module.internal_jars}} - - {{/module.internal_jars}} - - - - {{#module.internal_source_jars}} - - {{/module.internal_source_jars}} - - - - - - - {{#module.external_jars}} - - {{/module.external_jars}} - - - {{#module.external_javadoc_jars}} - - {{/module.external_javadoc_jars}} - - - {{#module.external_source_jars}} - - {{/module.external_source_jars}} - - - - - -{{#module.extra_components}} - {{{.}}} - -{{/module.extra_components}} - diff --git a/src/python/twitter/pants/tasks/templates/idea/project-12.mustache b/src/python/twitter/pants/tasks/templates/idea/project-12.mustache deleted file mode 100644 index f58af8e38..000000000 --- a/src/python/twitter/pants/tasks/templates/idea/project-12.mustache +++ /dev/null @@ -1,114 +0,0 @@ - - - - - - - - - - - - - - - {{#project.modules}} - - {{/project.modules}} - - - - - - - - - - - - - {{#project.scala?}} - - - - - - {{#project.scala.compiler_classpath}} - - {{/project.scala.compiler_classpath}} - - - - - - {{/project.scala?}} - - - - - - - - - -{{#project.extra_components}} - {{{.}}} - -{{/project.extra_components}} - diff --git a/src/python/twitter/pants/tasks/templates/ivy_resolve/ivy.mustache b/src/python/twitter/pants/tasks/templates/ivy_resolve/ivy.mustache deleted file mode 100644 index f8b6bcece..000000000 --- a/src/python/twitter/pants/tasks/templates/ivy_resolve/ivy.mustache +++ /dev/null @@ -1,97 +0,0 @@ - - - - - - - - - - {{#lib.configurations?}} - - {{#lib.configurations}} - - {{/lib.configurations}} - - {{/lib.configurations?}} - - {{#lib.publications?}} - - - - {{#lib.publications.sources}} - - {{/lib.publications.sources}} - {{#lib.publications.docs}} - - {{/lib.publications.docs}} - {{#lib.publications.changelog}} - - {{/lib.publications.changelog}} - - {{/lib.publications?}} - - {{#lib.dependencies?}} - - {{#lib.dependencies}} - - {{#configurations}} - - {{/configurations}} - {{#artifacts}} - - {{/artifacts}} - {{#excludes}} - {{#name}}{{/name}} - 
{{^name}}{{/name}} - {{/excludes}} - - {{/lib.dependencies}} - {{#lib.excludes}} - {{#name}}{{/name}} - {{^name}}{{/name}} - {{/lib.excludes}} - {{#lib.overrides?}} - {{#lib.overrides}} - - {{/lib.overrides}} - {{/lib.overrides?}} - - {{/lib.dependencies?}} - diff --git a/src/python/twitter/pants/tasks/templates/jar_publish/ivysettings.mustache b/src/python/twitter/pants/tasks/templates/jar_publish/ivysettings.mustache deleted file mode 100644 index 58543573e..000000000 --- a/src/python/twitter/pants/tasks/templates/jar_publish/ivysettings.mustache +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - - - - - - - - - - - - {{#publish_local}} - - - - - {{/publish_local}} - - - - {{#published}} - - {{/published}} - - diff --git a/src/python/twitter/pants/tasks/templates/jar_publish/pom.mustache b/src/python/twitter/pants/tasks/templates/jar_publish/pom.mustache deleted file mode 100644 index e0d1b3d3d..000000000 --- a/src/python/twitter/pants/tasks/templates/jar_publish/pom.mustache +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - 4.0.0 - {{artifact.org}} - {{artifact.name}} - jar - {{artifact.rev}} - - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - {{#artifact.dependencies?}} - - {{#artifact.dependencies}} - - {{org}} - {{name}} - {{rev}} - {{scope}} - {{#excludes?}} - - {{#excludes}} - - {{org}} - {{name}} - - {{/excludes}} - - {{/excludes?}} - - {{/artifact.dependencies}} - - {{/artifact.dependencies?}} - diff --git a/src/python/twitter/pants/tasks/thrift_gen.py b/src/python/twitter/pants/tasks/thrift_gen.py deleted file mode 100644 index 36d0281bc..000000000 --- a/src/python/twitter/pants/tasks/thrift_gen.py +++ /dev/null @@ -1,294 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import errno -import os -import re -import subprocess - -from collections import defaultdict, namedtuple - -from twitter.common import log -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_thrift_library import JavaThriftLibrary -from twitter.pants.targets.python_library import PythonLibrary -from twitter.pants.targets.python_thrift_library import PythonThriftLibrary -from twitter.pants.tasks import TaskError -from twitter.pants.thrift_util import calculate_compile_roots, select_thrift_binary - -from .code_gen import CodeGen - - -def _copytree(from_base, to_base): - def abort(error): - raise TaskError('Failed to copy from %s to %s: %s' % (from_base, to_base, error)) - - # TODO(John Sirois): Consider adding a unit test and lifting this to common/dirutils or similar - def safe_link(src, dst): - try: - os.link(src, dst) - except OSError as e: - if e.errno != errno.EEXIST: - raise e - - for dirpath, dirnames, filenames in os.walk(from_base, topdown=True, onerror=abort): - to_path = os.path.join(to_base, os.path.relpath(dirpath, from_base)) - for dirname in dirnames: - safe_mkdir(os.path.join(to_path, dirname)) - for filename in filenames: - safe_link(os.path.join(dirpath, filename), os.path.join(to_path, filename)) - - -class ThriftGen(CodeGen): - GenInfo = namedtuple('GenInfo', ['gen', 'deps']) - ThriftSession = namedtuple('ThriftSession', ['outdir', 'cmd', 'process']) - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - option_group.add_option(mkflag("outdir"), dest="thrift_gen_create_outdir", - help="Emit generated code into this directory.") - - option_group.add_option(mkflag("version"), dest="thrift_version", - help="Thrift compiler version.") - - option_group.add_option(mkflag("lang"), dest="thrift_gen_langs", default=[], - action="append", type="choice", choices=['python', 'java'], - help="Force generation of thrift code for these languages. 
Both " - "'python' and 'java' are supported") - - def __init__(self, context): - CodeGen.__init__(self, context) - - output_dir = ( - context.options.thrift_gen_create_outdir - or context.config.get('thrift-gen', 'workdir') - ) - self.combined_dir = os.path.join(output_dir, 'combined') - self.session_dir = os.path.join(output_dir, 'sessions') - - self.strict = context.config.getbool('thrift-gen', 'strict') - self.verbose = context.config.getbool('thrift-gen', 'verbose') - - def create_geninfo(key): - gen_info = context.config.getdict('thrift-gen', key) - gen = gen_info['gen'] - deps = {} - for category, depspecs in gen_info['deps'].items(): - dependencies = OrderedSet() - deps[category] = dependencies - for depspec in depspecs: - dependencies.update(context.resolve(depspec)) - return self.GenInfo(gen, deps) - - self.gen_java = create_geninfo('java') - self.gen_python = create_geninfo('python') - - self.gen_langs = set(context.options.thrift_gen_langs) - for lang in ('java', 'python'): - if self.context.products.isrequired(lang): - self.gen_langs.add(lang) - - self.thrift_binary = select_thrift_binary(context.config, - version=context.options.thrift_version) - - def invalidate_for(self): - return self.gen_langs - - def invalidate_for_files(self): - # TODO: This will prevent artifact caching across platforms. - # Find some cross-platform way to assert the thrift binary version. - return [self.thrift_binary] - - def is_gentarget(self, target): - return ((isinstance(target, JavaThriftLibrary) and target.compiler == 'thrift') - or isinstance(target, PythonThriftLibrary)) - - def is_forced(self, lang): - return lang in self.gen_langs - - def genlangs(self): - return dict(java=lambda t: t.is_jvm, python=lambda t: t.is_python) - - def genlang(self, lang, targets): - bases, sources = calculate_compile_roots(targets, self.is_gentarget) - - if lang == 'java': - gen = self.gen_java.gen - elif lang == 'python': - gen = self.gen_python.gen - else: - raise TaskError('Unrecognized thrift gen lang: %s' % lang) - - args = [ - self.thrift_binary, - '--gen', gen, - '-recurse', - ] - - if self.strict: - args.append('-strict') - if self.verbose: - args.append('-verbose') - for base in bases: - args.extend(('-I', base)) - - sessions = [] - for source in sources: - self.context.log.info('Generating thrift for %s\n' % source) - # Create a unique session dir for this thrift root. Sources may be full paths but we only - # need the path relative to the build root to ensure uniqueness. - # TODO(John Sirois): file paths should be normalized early on and uniformly, fix the need to - # relpath here at all. - relsource = os.path.relpath(source, get_buildroot()) - outdir = os.path.join(self.session_dir, '.'.join(relsource.split(os.path.sep))) - safe_mkdir(outdir) - - cmd = args[:] - cmd.extend(('-o', outdir)) - cmd.append(source) - log.debug('Executing: %s' % ' '.join(cmd)) - sessions.append(self.ThriftSession(outdir, cmd, subprocess.Popen(cmd))) - - result = 0 - for session in sessions: - if result != 0: - session.process.kill() - else: - result = session.process.wait() - if result != 0: - self.context.log.error('Failed: %s' % ' '.join(session.cmd)) - else: - _copytree(session.outdir, self.combined_dir) - if result != 0: - raise TaskError('%s ... 
exited non-zero (%i)' % (self.thrift_binary, result)) - - def createtarget(self, lang, gentarget, dependees): - if lang == 'java': - return self._create_java_target(gentarget, dependees) - elif lang == 'python': - return self._create_python_target(gentarget, dependees) - else: - raise TaskError('Unrecognized thrift gen lang: %s' % lang) - - def _create_java_target(self, target, dependees): - def create_target(files, deps): - return self.context.add_new_target(os.path.join(self.combined_dir, 'gen-java'), - JavaLibrary, - name=target.id, - sources=files, - provides=target.provides, - dependencies=deps, - excludes=target.excludes) - return self._inject_target(target, dependees, self.gen_java, 'java', create_target) - - def _create_python_target(self, target, dependees): - def create_target(files, deps): - return self.context.add_new_target(os.path.join(self.combined_dir, 'gen-py'), - PythonLibrary, - name=target.id, - sources=files, - dependencies=deps) - return self._inject_target(target, dependees, self.gen_python, 'py', create_target) - - def _inject_target(self, target, dependees, geninfo, namespace, create_target): - files = [] - has_service = False - for src in target.sources_relative_to_buildroot(): - services, genfiles = calculate_gen(src) - has_service = has_service or services - files.extend(genfiles.get(namespace, [])) - deps = geninfo.deps['service' if has_service else 'structs'] - tgt = create_target(files, deps) - tgt.id = target.id + '.thrift_gen' - for dependee in dependees: - if isinstance(dependee, InternalTarget): - dependee.update_dependencies((tgt,)) - else: - # TODO(John Sirois): rationalize targets with dependencies. - # JarLibrary or PythonTarget dependee on the thrift target - dependee.dependencies.add(tgt) - return tgt - - -NAMESPACE_PARSER = re.compile(r'^\s*namespace\s+([^\s]+)\s+([^\s]+)\s*$') -TYPE_PARSER = re.compile(r'^\s*(const|enum|exception|service|struct|union)\s+([^\s{]+).*') - - -# TODO(John Sirois): consolidate thrift parsing to 1 pass instead of 2 -def calculate_gen(source): - """Calculates the service types and files generated for the given thrift IDL source. - - Returns a tuple of (service types, generated files). 
- """ - - with open(source, 'r') as thrift: - lines = thrift.readlines() - namespaces = {} - types = defaultdict(set) - for line in lines: - match = NAMESPACE_PARSER.match(line) - if match: - lang = match.group(1) - namespace = match.group(2) - namespaces[lang] = namespace - else: - match = TYPE_PARSER.match(line) - if match: - typename = match.group(1) - name = match.group(2) - types[typename].add(name) - - genfiles = defaultdict(set) - - namespace = namespaces.get('py') - if namespace: - genfiles['py'].update(calculate_python_genfiles(namespace, types)) - - namespace = namespaces.get('java') - if namespace: - genfiles['java'].update(calculate_java_genfiles(namespace, types)) - - return types['service'], genfiles - - -def calculate_python_genfiles(namespace, types): - basepath = namespace.replace('.', '/') - def path(name): - return os.path.join(basepath, '%s.py' % name) - yield path('__init__') - if 'const' in types: - yield path('constants') - if set(['enum', 'exception', 'struct', 'union']) & set(types.keys()): - yield path('ttypes') - for service in types['service']: - yield path(service) - yield os.path.join(basepath, '%s-remote' % service) - - -def calculate_java_genfiles(namespace, types): - basepath = namespace.replace('.', '/') - def path(name): - return os.path.join(basepath, '%s.java' % name) - if 'const' in types: - yield path('Constants') - for typename in ['enum', 'exception', 'service', 'struct', 'union']: - for name in types[typename]: - yield path(name) diff --git a/src/python/twitter/pants/tasks/what_changed.py b/src/python/twitter/pants/tasks/what_changed.py deleted file mode 100644 index e2e034593..000000000 --- a/src/python/twitter/pants/tasks/what_changed.py +++ /dev/null @@ -1,164 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os -import sys - -from abc import abstractmethod -from collections import defaultdict - -from twitter.common.lang import AbstractClass, Compatibility - -from twitter.pants.base.build_environment import get_buildroot, get_scm -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base.target import Target -from twitter.pants.scm import Scm - -from .console_task import ConsoleTask -from .task_error import TaskError - - -class WhatChanged(ConsoleTask): - """Emits the targets that have been modified since a given commit.""" - - @classmethod - def setup_parser(cls, option_group, args, mkflag): - super(WhatChanged, cls).setup_parser(option_group, args, mkflag) - - option_group.add_option(mkflag('parent'), dest='what_changed_create_prefix', default='HEAD', - help='[%default] Identifies the parent tree-ish to calculate changes ' - 'against.') - - option_group.add_option(mkflag("files"), mkflag("files", negate=True), default=False, - action="callback", callback=mkflag.set_bool, - dest='what_changed_show_files', - help='[%default] Shows changed files instead of the targets that own ' - 'them.') - - def __init__(self, context, workspace, outstream=sys.stdout): - if not isinstance(workspace, Workspace): - raise ValueError('WhatChanged requires a Workspace, given %s' % workspace) - - super(WhatChanged, self).__init__(context, outstream) - - self._workspace = workspace - - self._parent = context.options.what_changed_create_prefix - self._show_files = context.options.what_changed_show_files - - self._filemap = defaultdict(set) - - def console_output(self, _): - touched_files = self._get_touched_files() - if self._show_files: - for path in touched_files: - yield path - else: - touched_targets = set() - for path in touched_files: - for touched_target in self._owning_targets(path): - if touched_target not in touched_targets: - touched_targets.add(touched_target) - yield str(touched_target.address) - - def _get_touched_files(self): - try: - return self._workspace.touched_files(self._parent) - except Workspace.WorkspaceError as e: - raise TaskError(e) - - def _owning_targets(self, path): - for build_file in self._candidate_owners(path): - is_build_file = (build_file.full_path == os.path.join(get_buildroot(), path)) - for address in Target.get_all_addresses(build_file): - target = Target.get(address) - - # A synthesized target can never own permanent files on disk - if target != target.derived_from: - # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a user - # vs. targets created by pants at runtime. - continue - - if target and (is_build_file or ((target.has_sources() or target.has_resources) - and self._owns(target, path))): - yield target - - def _candidate_owners(self, path): - build_file = BuildFile(get_buildroot(), relpath=os.path.dirname(path), must_exist=False) - if build_file.exists(): - yield build_file - for sibling in build_file.siblings(): - yield sibling - for ancestor in build_file.ancestors(): - yield ancestor - - def _owns(self, target, path): - if target not in self._filemap: - files = self._filemap[target] - files_owned_by_target = target.sources if target.has_sources() else [] - # TODO (tdesai): This case to handle resources in PythonTarget. - # Remove this when we normalize resources handling across python and jvm targets. 
- if target.has_resources: - for resource in target.resources: - if isinstance(resource, Compatibility.string): - files_owned_by_target.append(resource) - for owned_file in files_owned_by_target: - owned_path = os.path.join(target.target_base, owned_file) - files.add(owned_path) - return path in self._filemap[target] - - -class Workspace(AbstractClass): - """Tracks the state of the current workspace.""" - - class WorkspaceError(Exception): - """Indicates a problem reading the local workspace.""" - - @abstractmethod - def touched_files(self, parent): - """Returns the set of paths modified between the given parent commit and the current local - workspace state. - """ - - -class ScmWorkspace(Workspace): - """A workspace that uses an Scm to determine the touched files.""" - - def __init__(self, scm): - super(ScmWorkspace, self).__init__() - - self._scm = scm or get_scm() - - if self._scm is None: - raise self.WorkspaceError('Cannot figure out what changed without a configured ' - 'source-control system.') - - def touched_files(self, parent): - try: - return self._scm.changed_files(from_commit=parent, include_untracked=True) - except Scm.ScmException as e: - raise self.WorkspaceError("Problem detecting changed files.", e) - - -class ScmWhatChanged(WhatChanged): - def __init__(self, context, scm=None, outstream=sys.stdout): - """Creates a WhatChanged task that uses an Scm to determine changed files. - - context: The pants execution context. - scm: The scm to use, taken from the globally configured scm if None. - outstream: The stream to write changed files or targets to. - """ - super(ScmWhatChanged, self).__init__(context, ScmWorkspace(scm or get_scm()), outstream) diff --git a/src/python/twitter/pants/thrift_util.py b/src/python/twitter/pants/thrift_util.py deleted file mode 100644 index e8b2291b7..000000000 --- a/src/python/twitter/pants/thrift_util.py +++ /dev/null @@ -1,123 +0,0 @@ -import os -import re - -from twitter.pants.binary_util import select_binary - - -INCLUDE_PARSER = re.compile(r'^\s*include\s+"([^"]+)"\s*([\/\/|\#].*)*$') - - -def find_includes(basedirs, source, log=None): - """Finds all thrift files included by the given thrift source. - - :basedirs: A set of thrift source file base directories to look for includes in. - :source: The thrift source file to scan for includes. - :log: An optional logger. - """ - - all_basedirs = [os.path.dirname(source)] - all_basedirs.extend(basedirs) - - includes = set() - with open(source, 'r') as thrift: - for line in thrift.readlines(): - match = INCLUDE_PARSER.match(line) - if match: - capture = match.group(1) - added = False - for basedir in all_basedirs: - include = os.path.join(basedir, capture) - if os.path.exists(include): - if log: - log.debug('%s has include %s' % (source, include)) - includes.add(include) - added = True - if not added: - raise ValueError("%s included in %s not found in bases %s" - % (capture, source, all_basedirs)) - return includes - - -def find_root_thrifts(basedirs, sources, log=None): - """Finds the root thrift files in the graph formed by sources and their recursive includes. - - :basedirs: A set of thrift source file base directories to look for includes in. - :sources: Seed thrift files to examine. - :log: An optional logger.
- """ - - root_sources = set(sources) - for source in sources: - root_sources.difference_update(find_includes(basedirs, source, log=log)) - return root_sources - - -def calculate_compile_sources_HACK_FOR_SCROOGE_LEGACY(targets, is_thrift_target): - """Calculates the set of thrift source files that need to be compiled - as well as their associated import/include directories. - It does not exclude sources that are included in other sources. - - A tuple of (include dirs, thrift sources) is returned. - - :targets: The targets to examine. - :is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis. - """ - - dirs = set() - sources = set() - - def collect_sources(target): - for source in target.sources: - dirs.add(os.path.normpath(os.path.join(target.target_base, os.path.dirname(source)))) - sources.add(os.path.join(target.target_base, source)) - for target in targets: - target.walk(collect_sources, predicate=is_thrift_target) - - return dirs, sources - - -def calculate_compile_sources(targets, is_thrift_target): - """Calculates the set of thrift source files that need to be compiled. - It does not exclude sources that are included in other sources. - - A tuple of (include basedirs, thrift sources) is returned. - - :targets: The targets to examine. - :is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis. - """ - - basedirs = set() - sources = set() - def collect_sources(target): - basedirs.add(target.target_base) - sources.update(target.sources_relative_to_buildroot()) - for target in targets: - target.walk(collect_sources, predicate=is_thrift_target) - return basedirs, sources - - -def calculate_compile_roots(targets, is_thrift_target): - """Calculates the minimal set of thrift source files that need to be compiled. - - A tuple of (include basedirs, root thrift sources) is returned. - - :targets: The targets to examine. - :is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis. - """ - - basedirs, sources = calculate_compile_sources(targets, is_thrift_target) - sources = find_root_thrifts(basedirs, sources) - return basedirs, sources - - -def select_thrift_binary(config, version=None): - """Selects a thrift compiler binary matching the current os and architecture. - - By default uses the repo default thrift compiler version specified in the pants config. - - config: The pants config containing thrift thrift binary selection data. - version: An optional thrift compiler binary version override. - """ - thrift_supportdir = config.get('thrift-gen', 'supportdir') - thrift_version = version or config.get('thrift-gen', 'version') - return select_binary(thrift_supportdir, thrift_version, 'thrift', config) diff --git a/src/python/twitter/pants/version/__init__.py b/src/python/twitter/pants/version/__init__.py deleted file mode 100644 index c0c3aeba9..000000000 --- a/src/python/twitter/pants/version/__init__.py +++ /dev/null @@ -1 +0,0 @@ -VERSION = '0.0.11' diff --git a/tests/python/twitter/pants/__init__.py b/tests/python/twitter/pants/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/python/twitter/pants/base/__init__.py b/tests/python/twitter/pants/base/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/base/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. 
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/base/context_utils.py b/tests/python/twitter/pants/base/context_utils.py deleted file mode 100644 index 26514f623..000000000 --- a/tests/python/twitter/pants/base/context_utils.py +++ /dev/null @@ -1,86 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import io - -from twitter.common.collections import maybe_list -from twitter.common.dirutil import safe_mkdtemp -from twitter.common.lang import Compatibility - -from twitter.pants.base.config import Config -from twitter.pants.base.target import Target -from twitter.pants.goal import Context, RunTracker -from twitter.pants.reporting.report import Report - - -def create_options(options_hash=None): - """Creates an options object populated with no options at all by default. - - :param dict options_hash: An optional dict of option values. - """ - opts = options_hash or {} - if not isinstance(opts, dict): - raise ValueError('The given options_hash must be a dict, got: %s' % options_hash) - - class Options(object): - def __init__(self): - self.__dict__ = opts - return Options() - - -def create_config(sample_ini='', defaults=None): - """Creates a ``Config`` from the ``sample_ini`` file contents. - - :param string sample_ini: The contents of the ini file containing the config values. - :param dict defaults: An optional dict of global default ini values to seed. - """ - if not isinstance(sample_ini, Compatibility.string): - raise ValueError('The sample_ini supplied must be a string, given: %s' % sample_ini) - - parser = Config.create_parser(defaults) - with io.BytesIO(sample_ini) as ini: - parser.readfp(ini) - return Config(parser) - - -def create_run_tracker(info_dir=None): - """Creates a ``RunTracker`` and starts it. - - :param string info_dir: An optional directory for the run tracker to store state; defaults to a - new temp dir that will be cleaned up on interpreter exit. 
- """ - # TODO(John Sirois): Rework uses around a context manager for cleanup of the info_dir in a more - # disciplined manner - info_dir = info_dir or safe_mkdtemp() - run_tracker = RunTracker(info_dir) - report = Report() - run_tracker.start(report) - return run_tracker - - -def create_context(config='', options=None, target_roots=None, **kwargs): - """Creates a ``Context`` with no config values, options, or targets by default. - - :param config: Either a ``Context`` object or else a string representing the contents of the - pants.ini to parse the config from. - :param options: An optional dict of of option values. - :param target_roots: An optional list of target roots to seed the context target graph from. - :param ``**kwargs``: Any additional keyword arguments to pass through to the Context constructor. - """ - config = config if isinstance(config, Config) else create_config(config) - run_tracker = create_run_tracker() - target_roots = maybe_list(target_roots, Target) if target_roots else [] - return Context(config, create_options(options or {}), run_tracker, target_roots, **kwargs) diff --git a/tests/python/twitter/pants/base/test_abbreviate_target_ids.py b/tests/python/twitter/pants/base/test_abbreviate_target_ids.py deleted file mode 100644 index 3d6d04f90..000000000 --- a/tests/python/twitter/pants/base/test_abbreviate_target_ids.py +++ /dev/null @@ -1,54 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -from twitter.pants.base.abbreviate_target_ids import abbreviate_target_ids - -class AbbreviateTargetIdsTest(unittest.TestCase): - def _test(self, expected, *actual): - self.assertEqual(expected, abbreviate_target_ids(actual)) - - def test_empty(self): - self._test({}) - - def test_single(self): - self._test({'a': 'a'}, 'a') - self._test({'a.b.c': 'c'}, 'a.b.c') - - def test_simple(self): - self._test({'a': 'a', - 'b': 'b', - 'c': 'c'}, - 'a', 'b', 'c') - self._test({'x.a': 'a', - 'y.b': 'b', - 'z.c': 'c'}, - 'x.a', 'y.b', 'z.c') - - def test_complex(self): - self._test({'x.a': 'a', - 'x.b': 'b', - 'x.c': 'c'}, - 'x.a', 'x.b', 'x.c') - self._test({'x.a': 'x.a', - 'y.a': 'y.a', - 'z.b': 'b'}, - 'x.a', 'y.a', 'z.b') - self._test({'x.a': 'a', - 'x.y.a': 'y.a', - 'z.b': 'b'}, - 'x.a', 'x.y.a', 'z.b') diff --git a/tests/python/twitter/pants/base/test_address.py b/tests/python/twitter/pants/base/test_address.py deleted file mode 100644 index b65a1a68d..000000000 --- a/tests/python/twitter/pants/base/test_address.py +++ /dev/null @@ -1,80 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. 
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import pytest -import unittest - -from contextlib import contextmanager - -from twitter.common.contextutil import temporary_dir, pushd -from twitter.common.dirutil import touch - -from twitter.pants.base.address import Address -from twitter.pants.base.build_environment import set_buildroot - - -class AddressTest(unittest.TestCase): - @contextmanager - def workspace(self, *buildfiles): - with temporary_dir() as root_dir: - set_buildroot(root_dir) - with pushd(root_dir): - for buildfile in buildfiles: - touch(os.path.join(root_dir, buildfile)) - yield os.path.realpath(root_dir) - - def assertAddress(self, root_dir, path, name, address): - self.assertEqual(root_dir, address.buildfile.root_dir) - self.assertEqual(path, address.buildfile.relpath) - self.assertEqual(name, address.target_name) - - def test_full_forms(self): - with self.workspace('a/BUILD') as root_dir: - self.assertAddress(root_dir, 'a/BUILD', 'b', Address.parse(root_dir, 'a:b')) - self.assertAddress(root_dir, 'a/BUILD', 'b', Address.parse(root_dir, 'a/:b')) - self.assertAddress(root_dir, 'a/BUILD', 'b', Address.parse(root_dir, 'a/BUILD:b')) - self.assertAddress(root_dir, 'a/BUILD', 'b', Address.parse(root_dir, 'a/BUILD/:b')) - - def test_default_form(self): - with self.workspace('a/BUILD') as root_dir: - self.assertAddress(root_dir, 'a/BUILD', 'a', Address.parse(root_dir, 'a')) - self.assertAddress(root_dir, 'a/BUILD', 'a', Address.parse(root_dir, 'a/BUILD')) - self.assertAddress(root_dir, 'a/BUILD', 'a', Address.parse(root_dir, 'a/BUILD/')) - - def test_top_level(self): - with self.workspace('BUILD') as root_dir: - self.assertAddress(root_dir, 'BUILD', 'c', Address.parse(root_dir, ':c')) - self.assertAddress(root_dir, 'BUILD', 'c', Address.parse(root_dir, '.:c')) - self.assertAddress(root_dir, 'BUILD', 'c', Address.parse(root_dir, './:c')) - self.assertAddress(root_dir, 'BUILD', 'c', Address.parse(root_dir, './BUILD:c')) - self.assertAddress(root_dir, 'BUILD', 'c', Address.parse(root_dir, 'BUILD:c')) - - def test_parse_from_root_dir(self): - with self.workspace('a/b/c/BUILD') as root_dir: - self.assertAddress(root_dir, 'a/b/c/BUILD', 'c', - Address.parse(root_dir, 'a/b/c', is_relative=False)) - self.assertAddress(root_dir, 'a/b/c/BUILD', 'c', - Address.parse(root_dir, 'a/b/c', is_relative=True)) - - def test_parse_from_sub_dir(self): - with self.workspace('a/b/c/BUILD') as root_dir: - with pushd(os.path.join(root_dir, 'a')): - self.assertAddress(root_dir, 'a/b/c/BUILD', 'c', - Address.parse(root_dir, 'b/c', is_relative=True)) - - with pytest.raises(IOError): - Address.parse(root_dir, 'b/c', is_relative=False) diff --git a/tests/python/twitter/pants/base/test_build_file.py b/tests/python/twitter/pants/base/test_build_file.py deleted file mode 100644 
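The spec forms AddressTest exercises reduce to one normalization rule. A standalone sketch of just that rule; the real Address.parse additionally resolves BUILD files on disk, which is why these tests build a workspace first:

    import os

    def parse_spec(spec):
        # Normalize 'path', 'path:name', 'path/BUILD:name', and ':name' forms to a
        # (buildfile relpath, target name) pair, defaulting the target name to the
        # directory name when omitted.
        if ':' in spec:
            path, _, name = spec.partition(':')
        else:
            path, name = spec, ''
        path = path.rstrip('/')
        if os.path.basename(path) == 'BUILD':
            path = os.path.dirname(path)
        buildfile = 'BUILD' if path in ('', '.') else os.path.join(path, 'BUILD')
        return buildfile, name or os.path.basename(path)

    assert parse_spec('a:b') == ('a/BUILD', 'b')
    assert parse_spec('a/BUILD:b') == ('a/BUILD', 'b')
    assert parse_spec('a/BUILD/') == ('a/BUILD', 'a')
    assert parse_spec(':c') == ('BUILD', 'c')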
index 0d24fe6fb..000000000 --- a/tests/python/twitter/pants/base/test_build_file.py +++ /dev/null @@ -1,98 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -__author__ = 'John Sirois' - -from twitter.common.collections import OrderedSet -from twitter.common.dirutil import touch, safe_mkdir -from twitter.pants.base.build_file import BuildFile - -import os -import shutil -import tempfile -import unittest - -class BuildFileTest(unittest.TestCase): - - @classmethod - def makedirs(cls, path): - safe_mkdir(os.path.join(BuildFileTest.root_dir, path)) - - @classmethod - def touch(cls, path): - touch(os.path.join(BuildFileTest.root_dir, path)) - - @classmethod - def buildfile(cls, path): - return BuildFile(BuildFileTest.root_dir, path) - - @classmethod - def setUpClass(cls): - BuildFileTest.base_dir = tempfile.mkdtemp() - - # Seed a BUILD outside the build root that should not be detected - touch(os.path.join(BuildFileTest.base_dir, 'BUILD')) - - BuildFileTest.root_dir = os.path.join(BuildFileTest.base_dir, 'root') - - BuildFileTest.touch('grandparent/parent/BUILD') - BuildFileTest.touch('grandparent/parent/BUILD.twitter') - BuildFileTest.makedirs('grandparent/parent/BUILD.dir') - BuildFileTest.makedirs('grandparent/BUILD') - BuildFileTest.touch('BUILD') - BuildFileTest.touch('BUILD.twitter') - BuildFileTest.touch('grandparent/parent/child1/BUILD') - BuildFileTest.touch('grandparent/parent/child1/BUILD.twitter') - BuildFileTest.touch('grandparent/parent/child2/child3/BUILD') - BuildFileTest.makedirs('grandparent/parent/child2/BUILD') - BuildFileTest.makedirs('grandparent/parent/child4') - - @classmethod - def tearDownClass(cls): - shutil.rmtree(BuildFileTest.root_dir) - - def setUp(self): - self.buildfile = BuildFileTest.buildfile('grandparent/parent/BUILD') - - def testSiblings(self): - buildfile = BuildFileTest.buildfile('grandparent/parent/BUILD.twitter') - self.assertEquals(OrderedSet([buildfile]), OrderedSet(self.buildfile.siblings())) - self.assertEquals(OrderedSet([self.buildfile]), OrderedSet(buildfile.siblings())) - - buildfile = BuildFileTest.buildfile('grandparent/parent/child2/child3/BUILD') - self.assertEquals(OrderedSet(), OrderedSet(buildfile.siblings())) - - def testFamily(self): - self.assertEquals(OrderedSet([ - BuildFileTest.buildfile('grandparent/parent/BUILD'), - BuildFileTest.buildfile('grandparent/parent/BUILD.twitter'), - ]), self.buildfile.family()) - - buildfile = BuildFileTest.buildfile('grandparent/parent/child2/child3/BUILD') - self.assertEquals(OrderedSet([buildfile]), buildfile.family()) - - def testAncestors(self): - self.assertEquals(OrderedSet([ - BuildFileTest.buildfile('BUILD'), - 
BuildFileTest.buildfile('BUILD.twitter'), - ]), self.buildfile.ancestors()) - - def testDescendants(self): - self.assertEquals(OrderedSet([ - BuildFileTest.buildfile('grandparent/parent/child1/BUILD'), - BuildFileTest.buildfile('grandparent/parent/child1/BUILD.twitter'), - BuildFileTest.buildfile('grandparent/parent/child2/child3/BUILD'), - ]), self.buildfile.descendants()) diff --git a/tests/python/twitter/pants/base/test_build_invalidator.py b/tests/python/twitter/pants/base/test_build_invalidator.py deleted file mode 100644 index 8aab1d84c..000000000 --- a/tests/python/twitter/pants/base/test_build_invalidator.py +++ /dev/null @@ -1,67 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import hashlib -import tempfile - -from contextlib import contextmanager - -from twitter.common.contextutil import temporary_dir -from twitter.pants.base.build_invalidator import BuildInvalidator, CacheKeyGenerator - - -TEST_CONTENT = 'muppet' - - -def expected_hash(tf): - return hashlib.sha1(os.path.basename(tf.name) + TEST_CONTENT).hexdigest() - - -@contextmanager -def test_env(content=TEST_CONTENT): - with temporary_dir() as d: - with tempfile.NamedTemporaryFile() as f: - f.write(content) - f.flush() - yield f, CacheKeyGenerator(), BuildInvalidator(d) - - -def test_cache_key_hash(): - with test_env() as (f, keygen, cache): - key = keygen.key_for('test', [f.name]) - assert key.hash == expected_hash(f) - - -def test_needs_update_missing_key(): - with test_env() as (f, keygen, cache): - key = keygen.key_for('test', [f.name]) - assert cache.needs_update(key) - - -def test_needs_update_after_change(): - with test_env() as (f, keygen, cache): - key = keygen.key_for('test', [f.name]) - assert cache.needs_update(key) - cache.update(key) - assert not cache.needs_update(key) - f.truncate() - f.write('elmo') - f.flush() - key = keygen.key_for('test', [f.name]) - assert cache.needs_update(key) - cache.update(key) - assert not cache.needs_update(key) diff --git a/tests/python/twitter/pants/base/test_build_root.py b/tests/python/twitter/pants/base/test_build_root.py deleted file mode 100644 index 7a950377c..000000000 --- a/tests/python/twitter/pants/base/test_build_root.py +++ /dev/null @@ -1,72 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
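Together these tests pin down the invalidation contract: a cache key hashed from file names and contents, plus a needs_update/update pair around it. A minimal in-memory sketch consistent with that contract; the deleted BuildInvalidator persisted hashes under the supplied directory rather than in a dict:

    import hashlib
    import os
    from collections import namedtuple

    CacheKey = namedtuple('CacheKey', ['id', 'hash'])

    class CacheKeyGenerator(object):
        def key_for(self, key_id, sources):
            # Hash both basenames and contents so a rename or an edit invalidates.
            digest = hashlib.sha1()
            for source in sources:
                digest.update(os.path.basename(source).encode('utf8'))
                with open(source, 'rb') as f:
                    digest.update(f.read())
            return CacheKey(key_id, digest.hexdigest())

    class BuildInvalidator(object):
        def __init__(self):
            self._built = {}  # key id -> hash at the last successful build

        def needs_update(self, key):
            return self._built.get(key.id) != key.hash

        def update(self, key):
            self._built[key.id] = key.hash

Regenerating the key after an edit, as test_needs_update_after_change does, is what flips needs_update back to True.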
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -import unittest - -from twitter.common.contextutil import environment_as, pushd, temporary_dir -from twitter.common.dirutil import safe_mkdir, safe_mkdtemp, safe_rmtree, touch - -from twitter.pants.base.build_root import BuildRoot - - -class BuildRootTest(unittest.TestCase): - - def setUp(self): - self.original_root = BuildRoot().path - self.new_root = os.path.realpath(safe_mkdtemp()) - BuildRoot().reset() - - def tearDown(self): - BuildRoot().reset() - safe_rmtree(self.new_root) - - def test_via_env(self): - with environment_as(PANTS_BUILD_ROOT=self.new_root): - self.assertEqual(self.new_root, BuildRoot().path) - - def test_via_set(self): - BuildRoot().path = self.new_root - self.assertEqual(self.new_root, BuildRoot().path) - - def test_reset(self): - BuildRoot().path = self.new_root - BuildRoot().reset() - self.assertEqual(self.original_root, BuildRoot().path) - - def test_via_pantsini(self): - with temporary_dir() as root: - root = os.path.realpath(root) - touch(os.path.join(root, 'pants.ini')) - with pushd(root): - self.assertEqual(root, BuildRoot().path) - - BuildRoot().reset() - child = os.path.join(root, 'one', 'two') - safe_mkdir(child) - with pushd(child): - self.assertEqual(root, BuildRoot().path) - - def test_temporary(self): - with BuildRoot().temporary(self.new_root): - self.assertEqual(self.new_root, BuildRoot().path) - self.assertEqual(self.original_root, BuildRoot().path) - - def test_singleton(self): - self.assertEqual(BuildRoot().path, BuildRoot().path) - BuildRoot().path = self.new_root - self.assertEqual(BuildRoot().path, BuildRoot().path) diff --git a/tests/python/twitter/pants/base/test_double_dag.py b/tests/python/twitter/pants/base/test_double_dag.py deleted file mode 100644 index dd7505464..000000000 --- a/tests/python/twitter/pants/base/test_double_dag.py +++ /dev/null @@ -1,123 +0,0 @@ -from twitter.pants.base.double_dag import DoubleDag -from twitter.pants.reporting.report import Report -from twitter.pants.testutils import MockLogger, MockTarget -from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest - - -def make_dag(nodes): - return DoubleDag(nodes, lambda t: t.dependencies, MockLogger(Report.INFO)) - - -class DoubleDagTest(BaseMockTargetTest): - - def check_dag_node(self, dag, data, children, parents): - node = dag.lookup(data) - - self.assertEquals(node.data, data) - self.assertEquals(node.children, set(map(dag.lookup, children))) - self.assertEquals(node.parents, set(map(dag.lookup, parents))) - - def test_simple_dag(self): - a = MockTarget('a') - b = MockTarget('b', [a]) - c = MockTarget('c', [b]) - d = MockTarget('d', [c, a]) - e = MockTarget('e', [d]) - - def test_dag(dag): - self.assertEquals(dag._roots, set([dag.lookup(e)])) - self.assertEquals(dag.leaves, set([dag.lookup(a)])) - - self.check_dag_node(dag, e, [d], []) - self.check_dag_node(dag, d, [a, c], [e]) - self.check_dag_node(dag, c, [b], [d]) - self.check_dag_node(dag, b, [a], [c]) - self.check_dag_node(dag, a, [], [b, d]) - 
- test_dag(make_dag([e, d, c, b, a])) - test_dag(make_dag([a, b, c, d, e])) - test_dag(make_dag([a, b, e, d, c])) - test_dag(make_dag([d, a, c, e, b])) - - def test_binary_search_dag(self): - - rrr = MockTarget('rrr') - rrl = MockTarget('rrl') - rlr = MockTarget('rlr') - rll = MockTarget('rll') - lrr = MockTarget('lrr') - lrl = MockTarget('lrl') - llr = MockTarget('llr') - lll = MockTarget('lll') - - rr = MockTarget('rr', [rrr, rrl]) - rl = MockTarget('rl', [rlr, rll]) - lr = MockTarget('lr', [lrr, lrl]) - ll = MockTarget('ll', [llr, lll]) - - r = MockTarget('r', [rr, rl]) - l = MockTarget('l', [lr, ll]) - - root = MockTarget('root', [r, l]) - - def test_dag(dag): - - def t(n): - return dag.lookup(n) - - self.assertEquals(dag._roots, set([t(root)])) - self.assertEquals(dag.leaves, set(map(t, [rrr, rrl, rlr, rll, lrr, lrl, llr, lll]))) - - self.check_dag_node(dag, root, [r, l], []) - - self.check_dag_node(dag, r, [rl, rr], [root]) - self.check_dag_node(dag, l, [ll, lr], [root]) - - self.check_dag_node(dag, rr, [rrl, rrr], [r]) - self.check_dag_node(dag, rl, [rll, rlr], [r]) - self.check_dag_node(dag, lr, [lrl, lrr], [l]) - self.check_dag_node(dag, ll, [lll, llr], [l]) - - self.check_dag_node(dag, rrr, [], [rr]) - self.check_dag_node(dag, rrl, [], [rr]) - self.check_dag_node(dag, rlr, [], [rl]) - self.check_dag_node(dag, rll, [], [rl]) - self.check_dag_node(dag, lrr, [], [lr]) - self.check_dag_node(dag, lrl, [], [lr]) - self.check_dag_node(dag, llr, [], [ll]) - self.check_dag_node(dag, lll, [], [ll]) - - # Test in order - test_dag(make_dag([root, r, l, rr, rl, lr, ll, rrr, rrl, rlr, rll, lrr, lrl, llr, lll])) - - # Test a couple of randomly chosen orders - test_dag(make_dag([lrl, r, root, rl, rrr, rll, lr, lrr, ll, lll, l, rr, rrl, rlr, llr])) - test_dag(make_dag([ll, rrl, lrl, rl, rlr, lr, root, rrr, rll, r, llr, rr, lrr, l, lll])) - test_dag(make_dag([rr, rlr, rl, rrr, rrl, l, root, lr, lrr, llr, r, rll, lrl, ll, lll])) - test_dag(make_dag([l, lll, rrr, rll, ll, lrl, llr, rl, root, r, lr, rlr, rr, lrr, rrl])) - - def test_diamond_in_different_orders(self): - a = MockTarget('a') - b = MockTarget('b', [a]) - c = MockTarget('c', [a]) - d = MockTarget('d', [c, b]) - - def test_diamond_dag(dag): - self.assertEquals(dag._roots, set([dag.lookup(d)])) - self.assertEquals(dag.leaves, set([dag.lookup(a)])) - self.check_dag_node(dag, d, [b, c], []) - self.check_dag_node(dag, c, [a], [d]) - self.check_dag_node(dag, b, [a], [d]) - self.check_dag_node(dag, a, [], [b, c]) - - test_diamond_dag(make_dag([a, b, c, d])) - test_diamond_dag(make_dag([d, c, b, a])) - test_diamond_dag(make_dag([b, d, a, c])) - - def test_find_children_across_unused_target(self): - a = MockTarget('a') - b = MockTarget('b', [a]) - c = MockTarget('c', [b]) - d = MockTarget('d', [c, a]) - e = MockTarget('e', [d]) - diff --git a/tests/python/twitter/pants/base/test_generator.py b/tests/python/twitter/pants/base/test_generator.py deleted file mode 100644 index 184cef45e..000000000 --- a/tests/python/twitter/pants/base/test_generator.py +++ /dev/null @@ -1,45 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -from twitter.pants.base.generator import TemplateData - -class TemplateDataTest(unittest.TestCase): - def setUp(self): - self.data = TemplateData(foo = 'bar', baz = 42) - - def test_member_access(self): - try: - self.data.bip - self.fail("Access to undefined template data slots should raise") - except AttributeError: - # expected - pass - - def test_member_mutation(self): - try: - self.data.baz = 1 / 137 - self.fail("Mutation of a template data's slots should not be allowed") - except AttributeError: - # expected - pass - - def test_extend(self): - self.assertEqual(self.data.extend(jake = 0.3), TemplateData(baz = 42, foo = 'bar', jake = 0.3)) - - def test_equals(self): - self.assertEqual(self.data, TemplateData(baz = 42).extend(foo = 'bar')) diff --git a/tests/python/twitter/pants/base/test_hash_utils.py b/tests/python/twitter/pants/base/test_hash_utils.py deleted file mode 100644 index 8dc96ea56..000000000 --- a/tests/python/twitter/pants/base/test_hash_utils.py +++ /dev/null @@ -1,48 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
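TemplateDataTest describes an immutable attribute bag with a non-destructive extend, but the implementation sits outside this diff. A sketch consistent with the tested behavior, not necessarily the deleted class:

    class TemplateData(object):
        """Immutable attribute bag: reads hit slots, writes raise, extend copies."""

        def __init__(self, **kwargs):
            object.__setattr__(self, '_slots', dict(kwargs))

        def __getattr__(self, name):
            # Only called when normal lookup fails, i.e. for slot access.
            try:
                return self._slots[name]
            except KeyError:
                raise AttributeError(name)

        def __setattr__(self, name, value):
            raise AttributeError('TemplateData slots are immutable')

        def extend(self, **kwargs):
            # Non-destructive update: a new TemplateData with merged slots.
            merged = dict(self._slots)
            merged.update(kwargs)
            return TemplateData(**merged)

        def __eq__(self, other):
            return isinstance(other, TemplateData) and self._slots == other._slots

        def __ne__(self, other):
            return not self == other

    data = TemplateData(foo='bar', baz=42)
    assert data.extend(jake=0.3) == TemplateData(baz=42, foo='bar', jake=0.3)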
-# ================================================================================================== - -import mox - -from twitter.common.contextutil import temporary_file - -from twitter.pants.base.hash_utils import hash_all, hash_file - - -class TestHashUtils(mox.MoxTestBase): - - def setUp(self): - super(TestHashUtils, self).setUp() - self.digest = self.mox.CreateMockAnything() - - def test_hash_all(self): - self.digest.update('jake') - self.digest.update('jones') - self.digest.hexdigest().AndReturn('42') - self.mox.ReplayAll() - - self.assertEqual('42', hash_all(['jake', 'jones'], digest=self.digest)) - - def test_hash_file(self): - self.digest.update('jake jones') - self.digest.hexdigest().AndReturn('1137') - self.mox.ReplayAll() - - with temporary_file() as fd: - fd.write('jake jones') - fd.close() - - self.assertEqual('1137', hash_file(fd.name, digest=self.digest)) - diff --git a/tests/python/twitter/pants/base/test_parse_context.py b/tests/python/twitter/pants/base/test_parse_context.py deleted file mode 100644 index a01e83c51..000000000 --- a/tests/python/twitter/pants/base/test_parse_context.py +++ /dev/null @@ -1,124 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os -import pytest - -from textwrap import dedent - -from twitter.common.contextutil import temporary_dir -from twitter.common.dirutil import safe_mkdir - -from twitter.pants.base.address import Address -from twitter.pants.base.build_file import BuildFile -from twitter.pants.base_build_root_test import BaseBuildRootTest -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import Target - - -def create_buildfile(root_dir, relpath, name='BUILD', content=''): - path = os.path.join(root_dir, relpath) - safe_mkdir(path) - buildfile = os.path.join(path, name) - with open(buildfile, 'a') as f: - f.write(content) - return BuildFile(root_dir, relpath) - - -class ParseContextTest(BaseBuildRootTest): - def test_locate(self): - with pytest.raises(ParseContext.ContextError): - ParseContext.locate() - - with temporary_dir() as root_dir: - a_context = ParseContext(create_buildfile(root_dir, 'a')) - b_context = ParseContext(create_buildfile(root_dir, 'b')) - - def test_in_a(): - self.assertEquals(a_context, ParseContext.locate()) - return b_context.do_in_context(lambda: ParseContext.locate()) - - self.assertEquals(b_context, a_context.do_in_context(test_in_a)) - - def test_parse(self): - with temporary_dir() as root_dir: - buildfile = create_buildfile(root_dir, 'a', - content=dedent(""" - with open('%s/a/b', 'w') as b: - b.write('jack spratt') - """ % root_dir).strip() - ) - b_file = os.path.join(root_dir, 'a', 'b') - self.assertFalse(os.path.exists(b_file)) - ParseContext(buildfile).parse() - with open(b_file, 'r') as b: - self.assertEquals('jack spratt', b.read()) - - def test_on_context_exit(self): - with temporary_dir() as root_dir: - parse_context = ParseContext(create_buildfile(root_dir, 'a')) - with pytest.raises(parse_context.ContextError): - parse_context.on_context_exit(lambda: 37) - - with temporary_dir() as root_dir: - buildfile = create_buildfile(root_dir, 'a', - content=dedent(""" - import os - from twitter.pants.base.parse_context import ParseContext - def leave_a_trail(file, contents=''): - with open(file, 'w') as b: - b.write(contents) - b_file = os.path.join(os.path.dirname(__file__), 'b') - ParseContext.locate().on_context_exit(leave_a_trail, b_file, contents='42') - assert not os.path.exists(b_file), 'Expected context exit action to be delayed.' 
- """).strip() - ) - b_file = os.path.join(root_dir, 'a', 'b') - self.assertFalse(os.path.exists(b_file)) - ParseContext(buildfile).parse() - with open(b_file, 'r') as b: - self.assertEquals('42', b.read()) - - def test_sibling_references(self): - with temporary_dir() as root_dir: - buildfile = create_buildfile(root_dir, 'a', name='BUILD', - content=dedent(""" - dependencies(name='util', - dependencies=[ - jar(org='com.twitter', name='util', rev='0.0.1') - ] - ) - """).strip() - ) - sibling = create_buildfile(root_dir, 'a', name='BUILD.sibling', - content=dedent(""" - dependencies(name='util-ex', - dependencies=[ - pants(':util'), - jar(org='com.twitter', name='util-ex', rev='0.0.1') - ] - ) - """).strip() - ) - ParseContext(buildfile).parse() - - utilex = Target.get(Address.parse(root_dir, 'a:util-ex', is_relative=False)) - utilex_deps = set(utilex.resolve()) - - util = Target.get(Address.parse(root_dir, 'a:util', is_relative=False)) - util_deps = set(util.resolve()) - - self.assertEquals(util_deps, util_deps.intersection(utilex_deps)) diff --git a/tests/python/twitter/pants/base/test_revision.py b/tests/python/twitter/pants/base/test_revision.py deleted file mode 100644 index 7cfaeda9e..000000000 --- a/tests/python/twitter/pants/base/test_revision.py +++ /dev/null @@ -1,104 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
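The flow these parse-context tests exercise: constructing a ParseContext around a BuildFile and calling parse() executes the BUILD file with that context active, so BUILD code can call ParseContext.locate() and defer work via on_context_exit(). A minimal sketch, assuming the pre-migration API and that the BUILD execution environment behaves as the tests above show (the BUILD contents and 'b' marker file are illustrative):

import os

from twitter.common.contextutil import temporary_dir
from twitter.common.dirutil import safe_mkdir
from twitter.pants.base.build_file import BuildFile
from twitter.pants.base.parse_context import ParseContext

with temporary_dir() as root_dir:
  safe_mkdir(os.path.join(root_dir, 'a'))
  with open(os.path.join(root_dir, 'a', 'BUILD'), 'w') as f:
    f.write(
      "import os\n"
      "from twitter.pants.base.parse_context import ParseContext\n"
      "def trail(path):\n"
      "  open(path, 'w').write('42')\n"
      "marker = os.path.join(os.path.dirname(__file__), 'b')\n"
      "# Deferred until the BUILD file finishes parsing.\n"
      "ParseContext.locate().on_context_exit(trail, marker)\n")
  ParseContext(BuildFile(root_dir, 'a')).parse()
  assert open(os.path.join(root_dir, 'a', 'b')).read() == '42'
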
-# ================================================================================================== - -import pytest -import unittest - -from twitter.pants.base.revision import Revision - - -class RevisionTest(unittest.TestCase): - def assertComponents(self, revision, *expected): - self.assertEqual(list(expected), revision.components) - - -class SemverTest(RevisionTest): - def test_bad(self): - for bad_rev in ('a.b.c', '1.b.c', '1.2.c', '1.2.3;4', '1.2.3;4+5'): - with pytest.raises(Revision.BadRevision): - Revision.semver(bad_rev) - - def test_simple(self): - self.assertEqual(Revision.semver('1.2.3'), Revision.semver('1.2.3')) - self.assertComponents(Revision.semver('1.2.3'), 1, 2, 3, None, None) - - self.assertTrue(Revision.semver('1.2.3') > Revision.semver('1.2.2')) - self.assertTrue(Revision.semver('1.3.0') > Revision.semver('1.2.2')) - self.assertTrue(Revision.semver('1.3.10') > Revision.semver('1.3.2')) - self.assertTrue(Revision.semver('2.0.0') > Revision.semver('1.3.2')) - - def test_pre_release(self): - self.assertEqual(Revision.semver('1.2.3-pre1.release.1'), - Revision.semver('1.2.3-pre1.release.1')) - self.assertComponents(Revision.semver('1.2.3-pre1.release.1'), - 1, 2, 3, 'pre1', 'release', 1, None) - - self.assertTrue( - Revision.semver('1.2.3-pre1.release.1') < Revision.semver('1.2.3-pre2.release.1')) - self.assertTrue( - Revision.semver('1.2.3-pre1.release.2') < Revision.semver('1.2.3-pre1.release.10')) - - self.assertTrue(Revision.semver('1.2.3') < Revision.semver('1.2.3-pre2.release.1')) - - def test_build(self): - self.assertEqual(Revision.semver('1.2.3+pre1.release.1'), - Revision.semver('1.2.3+pre1.release.1')) - self.assertComponents(Revision.semver('1.2.3+pre1.release.1'), - 1, 2, 3, None, 'pre1', 'release', 1) - - self.assertTrue( - Revision.semver('1.2.3+pre1.release.1') < Revision.semver('1.2.3+pre2.release.1')) - self.assertTrue( - Revision.semver('1.2.3+pre1.release.2') < Revision.semver('1.2.3+pre1.release.10')) - - self.assertTrue(Revision.semver('1.2.3') < Revision.semver('1.2.3+pre2.release.1')) - self.assertTrue( - Revision.semver('1.2.3+pre1.release.2') < Revision.semver('1.2.3-pre1.release.2')) - - def test_pre_release_build(self): - self.assertEqual(Revision.semver('1.2.3-pre1.release.1+1'), - Revision.semver('1.2.3-pre1.release.1+1')) - self.assertComponents(Revision.semver('1.2.3-pre1.release.1+1'), - 1, 2, 3, 'pre1', 'release', 1, 1) - - self.assertTrue( - Revision.semver('1.2.3-pre1.release.1') < Revision.semver('1.2.3-pre2.release.1+1')) - self.assertTrue( - Revision.semver('1.2.3-pre1.release.2') > Revision.semver('1.2.3-pre1.release.1+1')) - - self.assertTrue(Revision.semver('1.2.3') < Revision.semver('1.2.3-pre2.release.2+1.foo')) - self.assertTrue( - Revision.semver('1.2.3-pre1.release.2+1') < Revision.semver('1.2.3-pre1.release.2+1.foo')) - self.assertTrue( - Revision.semver('1.2.3-pre1.release.2+1') < Revision.semver('1.2.3-pre1.release.2+2')) - - -class LenientTest(RevisionTest): - def test(self): - self.assertComponents(Revision.lenient('1.2.3'), 1, 2, 3) - self.assertComponents(Revision.lenient('1.2.3-SNAPSHOT-eabc'), 1, 2, 3, 'SNAPSHOT', 'eabc') - self.assertComponents(Revision.lenient('1.2.3-SNAPSHOT4'), 1, 2, 3, 'SNAPSHOT', 4) - - self.assertTrue(Revision.lenient('a') < Revision.lenient('b')) - self.assertTrue(Revision.lenient('1') < Revision.lenient('2')) - self.assertTrue(Revision.lenient('1') < Revision.lenient('a')) - - self.assertEqual(Revision.lenient('1.2.3'), Revision.lenient('1.2.3')) - 
self.assertTrue(Revision.lenient('1.2.3') < Revision.lenient('1.2.3-SNAPSHOT')) - self.assertTrue(Revision.lenient('1.2.3-SNAPSHOT') < Revision.lenient('1.2.3-SNAPSHOT-abc')) - self.assertTrue(Revision.lenient('1.2.3-SNAPSHOT-abc') < Revision.lenient('1.2.3-SNAPSHOT-bcd')) - self.assertTrue( - Revision.lenient('1.2.3-SNAPSHOT-abc6') < Revision.lenient('1.2.3-SNAPSHOT-abc10')) diff --git a/tests/python/twitter/pants/base/test_run_info.py b/tests/python/twitter/pants/base/test_run_info.py deleted file mode 100644 index 11926b773..000000000 --- a/tests/python/twitter/pants/base/test_run_info.py +++ /dev/null @@ -1,33 +0,0 @@ -import unittest2 as unittest - -from twitter.common.contextutil import temporary_file_path -from twitter.pants.base.run_info import RunInfo - - -class RunInfoTest(unittest.TestCase): - def test_run_info_read(self): - with temporary_file_path() as tmppath: - with open(tmppath, 'w') as tmpfile: - tmpfile.write('foo:bar\n baz :qux quux') - ri = RunInfo(tmppath) - self.assertEquals(ri.path(), tmppath) - - # Test get_info access. - self.assertEquals(ri.get_info('foo'), 'bar') - self.assertEquals(ri.get_info('baz'), 'qux quux') - self.assertIsNone(ri.get_info('nonexistent')) - - # Test dict-like access. - self.assertEquals(ri['foo'], 'bar') - self.assertEquals(ri['baz'], 'qux quux') - - def test_write_run_info(self): - with temporary_file_path() as tmppath: - ri = RunInfo(tmppath) - ri.add_info('key1', 'val1') - ri.add_infos(('key2', ' val2'), (' key3 ', 'val3 ')) - self.assertEquals({'key1': 'val1', 'key2': 'val2', 'key3': 'val3'}, ri.get_as_dict()) - - with open(tmppath, 'r') as tmpfile: - contents = tmpfile.read() - self.assertEquals('key1: val1\nkey2: val2\nkey3: val3\n', contents) diff --git a/tests/python/twitter/pants/base_build_root_test.py b/tests/python/twitter/pants/base_build_root_test.py deleted file mode 100644 index 9311eb371..000000000 --- a/tests/python/twitter/pants/base_build_root_test.py +++ /dev/null @@ -1,92 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
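Two of the helpers characterized above are simple to show end to end: Revision's component-wise version ordering and RunInfo's line-oriented key:value persistence. A sketch, assuming the pre-migration modules:

from twitter.common.contextutil import temporary_file_path
from twitter.pants.base.revision import Revision
from twitter.pants.base.run_info import RunInfo

# Strict semver parsing: major.minor.patch plus optional -pre and +build parts.
assert Revision.semver('1.2.3').components == [1, 2, 3, None, None]
assert Revision.semver('1.2.2') < Revision.semver('1.2.3')
assert Revision.semver('1.2.3') < Revision.semver('1.2.3-pre2.release.1')

# lenient() tokenizes looser strings, e.g. Maven-style snapshots.
assert Revision.lenient('1.2.3-SNAPSHOT4').components == [1, 2, 3, 'SNAPSHOT', 4]
assert Revision.lenient('1.2.3') < Revision.lenient('1.2.3-SNAPSHOT')

# RunInfo round trip: keys and values are whitespace-normalized and
# persisted as 'key: value' lines.
with temporary_file_path() as path:
  ri = RunInfo(path)
  ri.add_info('key1', 'val1')
  ri.add_infos(('key2', ' val2'), (' key3 ', 'val3 '))
  assert ri.get_as_dict() == {'key1': 'val1', 'key2': 'val2', 'key3': 'val3'}
  assert ri['key1'] == 'val1'
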
-# ================================================================================================== - -import os -import unittest - -from tempfile import mkdtemp - -from twitter.common.dirutil import safe_open, safe_rmtree, safe_mkdir - -from twitter.pants.base.build_root import BuildRoot -from twitter.pants.base.address import Address -from twitter.pants.base.target import Target -from twitter.pants.targets.sources import SourceRoot - - -class BaseBuildRootTest(unittest.TestCase): - """A baseclass useful for tests requiring a temporary buildroot.""" - - build_root = None - - @classmethod - def build_path(cls, relpath): - """Returns the canonical BUILD file path for the given relative build path.""" - if os.path.basename(relpath).startswith('BUILD'): - return relpath - else: - return os.path.join(relpath, 'BUILD') - - @classmethod - def create_dir(cls, relpath): - """Creates a directory under the buildroot. - - relpath: The relative path to the directory from the build root. - """ - safe_mkdir(os.path.join(cls.build_root, relpath)) - - @classmethod - def create_file(cls, relpath, contents='', mode='w'): - """Writes to a file under the buildroot. - - relpath: The relative path to the file from the build root. - contents: A string containing the contents of the file - '' by default.. - mode: The mode to write to the file in - over-write by default. - """ - with safe_open(os.path.join(cls.build_root, relpath), mode=mode) as fp: - fp.write(contents) - - @classmethod - def create_target(cls, relpath, target): - """Adds the given target specification to the BUILD file at relpath. - - relpath: The relative path to the BUILD file from the build root. - target: A string containing the target definition as it would appear in a BUILD file. - """ - cls.create_file(cls.build_path(relpath), target, mode='a') - - @classmethod - def setUpClass(cls): - cls.build_root = mkdtemp(suffix='_BUILD_ROOT') - BuildRoot().path = cls.build_root - cls.create_file('pants.ini') - Target._clear_all_addresses() - - @classmethod - def tearDownClass(cls): - BuildRoot().reset() - SourceRoot.reset() - safe_rmtree(cls.build_root) - - @classmethod - def target(cls, address): - """Resolves the given target address to a Target object. - - address: The BUILD target address to resolve. - - Returns the corresponding Target or else None if the address does not point to a defined Target. 
- """ - return Target.get(Address.parse(cls.build_root, address, is_relative=False)) diff --git a/tests/python/twitter/pants/cache/test_artifact_cache.py b/tests/python/twitter/pants/cache/test_artifact_cache.py deleted file mode 100644 index a19fbc5b9..000000000 --- a/tests/python/twitter/pants/cache/test_artifact_cache.py +++ /dev/null @@ -1,135 +0,0 @@ -import SimpleHTTPServer -import SocketServer -import os -from threading import Thread -import unittest - -from twitter.common.contextutil import pushd, temporary_dir, temporary_file -from twitter.common.dirutil import safe_mkdir -from twitter.pants.base.build_invalidator import CacheKey -from twitter.pants.cache.cache_setup import create_artifact_cache, select_best_url -from twitter.pants.cache.combined_artifact_cache import CombinedArtifactCache -from twitter.pants.cache.local_artifact_cache import LocalArtifactCache -from twitter.pants.cache.restful_artifact_cache import RESTfulArtifactCache -from twitter.pants.testutils import MockLogger - - -class MockPinger(object): - def __init__(self, hosts_to_times): - self._hosts_to_times = hosts_to_times - # Returns a fake ping time such that the last host is always the 'fastest'. - def pings(self, hosts): - return map(lambda host: (host, self._hosts_to_times.get(host, 9999)), hosts) - - -# A very trivial server that serves files under the cwd. -class SimpleRESTHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): - def __init__(self, request, client_address, server): - # The base class implements GET and HEAD. - SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, request, client_address, server) - - def do_HEAD(self): - return SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) - - def do_PUT(self): - path = self.translate_path(self.path) - content_length = int(self.headers.getheader('content-length')) - content = self.rfile.read(content_length) - safe_mkdir(os.path.dirname(path)) - with open(path, 'wb') as outfile: - outfile.write(content) - self.send_response(200) - self.end_headers() - - def do_DELETE(self): - path = self.translate_path(self.path) - if os.path.exists(path): - os.unlink(path) - self.send_response(200) - else: - self.send_error(404, 'File not found') - self.end_headers() - - -TEST_CONTENT1 = 'muppet' -TEST_CONTENT2 = 'kermit' - - -class TestArtifactCache(unittest.TestCase): - def test_select_best_url(self): - spec = 'http://host1|https://host2:666/path/to|http://host3/path/' - best = select_best_url(spec, MockPinger({'host1': 5, 'host2:666': 3, 'host3': 7}), MockLogger()) - self.assertEquals('https://host2:666/path/to', best) - - def test_cache_spec_parsing(self): - artifact_root = '/bogus/artifact/root' - - def check(expected_type, spec): - cache = create_artifact_cache(MockLogger(), artifact_root, spec, 'TestTask', 'testing') - self.assertTrue(isinstance(cache, expected_type)) - self.assertEquals(cache.artifact_root, artifact_root) - - with temporary_dir() as tmpdir: - cachedir = os.path.join(tmpdir, 'cachedir') # Must be a real path, so we can safe_mkdir it. 
- check(LocalArtifactCache, cachedir) - check(RESTfulArtifactCache, 'http://localhost/bar') - check(CombinedArtifactCache, [cachedir, 'http://localhost/bar']) - - - def test_local_cache(self): - with temporary_dir() as artifact_root: - with temporary_dir() as cache_root: - artifact_cache = LocalArtifactCache(None, artifact_root, cache_root) - self.do_test_artifact_cache(artifact_cache) - - - def test_restful_cache(self): - httpd = None - httpd_thread = None - try: - with temporary_dir() as cache_root: - with pushd(cache_root): # SimpleRESTHandler serves from the cwd. - httpd = SocketServer.TCPServer(('localhost', 0), SimpleRESTHandler) - port = httpd.server_address[1] - httpd_thread = Thread(target=httpd.serve_forever) - httpd_thread.start() - with temporary_dir() as artifact_root: - artifact_cache = RESTfulArtifactCache(MockLogger(), artifact_root, - 'http://localhost:%d' % port) - self.do_test_artifact_cache(artifact_cache) - finally: - if httpd: - httpd.shutdown() - if httpd_thread: - httpd_thread.join() - - - def do_test_artifact_cache(self, artifact_cache): - key = CacheKey('muppet_key', 'fake_hash', 42, []) - with temporary_file(artifact_cache.artifact_root) as f: - # Write the file. - f.write(TEST_CONTENT1) - path = f.name - f.close() - - # Cache it. - self.assertFalse(artifact_cache.has(key)) - self.assertFalse(bool(artifact_cache.use_cached_files(key))) - artifact_cache.insert(key, [path]) - self.assertTrue(artifact_cache.has(key)) - - # Stomp it. - with open(path, 'w') as outfile: - outfile.write(TEST_CONTENT2) - - # Recover it from the cache. - self.assertTrue(bool(artifact_cache.use_cached_files(key))) - - # Check that it was recovered correctly. - with open(path, 'r') as infile: - content = infile.read() - self.assertEquals(content, TEST_CONTENT1) - - # Delete it. - artifact_cache.delete(key) - self.assertFalse(artifact_cache.has(key)) diff --git a/tests/python/twitter/pants/commands/test_goal.py b/tests/python/twitter/pants/commands/test_goal.py deleted file mode 100644 index ce3fa74f6..000000000 --- a/tests/python/twitter/pants/commands/test_goal.py +++ /dev/null @@ -1,56 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
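The round trip above is the core artifact-cache contract: insert() files under a CacheKey, then use_cached_files() restores them even after the local copies change. A sketch against the on-disk implementation, assuming the pre-migration API (the first LocalArtifactCache argument is the logger, passed as None here as in the test; paths are illustrative):

import os

from twitter.common.contextutil import temporary_dir
from twitter.pants.base.build_invalidator import CacheKey
from twitter.pants.cache.local_artifact_cache import LocalArtifactCache

with temporary_dir() as artifact_root:
  with temporary_dir() as cache_root:
    cache = LocalArtifactCache(None, artifact_root, cache_root)
    key = CacheKey('example_key', 'fake_hash', 42, [])

    path = os.path.join(artifact_root, 'artifact.txt')
    with open(path, 'w') as f:
      f.write('muppet')

    cache.insert(key, [path])           # cache the artifact
    with open(path, 'w') as f:
      f.write('kermit')                 # stomp the local copy

    assert cache.use_cached_files(key)  # restores the cached bytes
    with open(path) as f:
      assert f.read() == 'muppet'
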
-# ================================================================================================== - -import unittest - -from twitter.pants.commands.goal import Goal, GoalError - -class GoalTest(unittest.TestCase): - def test_parse_args(self): - def assert_result(goals, specs, args): - g, s = Goal.parse_args(args) - self.assertEquals((goals, specs), (list(g), list(s))) - - assert_result(goals=[], specs=[], args=[]) - assert_result(goals=[], specs=[], args=['--']) - assert_result(goals=[], specs=[], args=['-v', '--help']) - - assert_result(goals=['compile'], specs=[], args=['compile', '--log']) - assert_result(goals=['compile', 'test'], specs=[], args=['compile', 'test']) - assert_result(goals=['compile', 'test'], specs=[], args=['compile', '-v', 'test']) - - assert_result(goals=[], specs=['resolve'], args=['--', 'resolve', '--ivy-open']) - assert_result(goals=['test'], specs=['resolve'], args=['test', '--', 'resolve', '--ivy-open']) - - try: - Goal.parse_args(['test', 'lib:all', '--', 'resolve']) - self.fail('Expected mixed specs and goals to the left of an explicit ' - 'multi-goal sep (--) to be rejected.') - except GoalError: - pass # expected - - try: - Goal.parse_args(['resolve', 'lib/all', 'test', '--']) - self.fail('Expected mixed specs and goals to the left of an explicit ' - 'multi-goal sep (--) to be rejected.') - except GoalError: - pass # expected - - assert_result(goals=['test'], specs=['lib:all'], args=['lib:all', '-v', 'test']) - assert_result(goals=['test'], specs=['lib/'], args=['-v', 'test', 'lib/']) - assert_result(goals=['test'], specs=['lib/io:sound'], args=['test', '-v', 'lib/io:sound']) - assert_result(goals=['test'], specs=['lib:all'], args=['-h', 'test', '-v', 'lib:all', '-x']) - diff --git a/tests/python/twitter/pants/commands/test_setup_py.py b/tests/python/twitter/pants/commands/test_setup_py.py deleted file mode 100644 index 3f1d95e2a..000000000 --- a/tests/python/twitter/pants/commands/test_setup_py.py +++ /dev/null @@ -1,291 +0,0 @@ -from contextlib import contextmanager -import os -import unittest - -from twitter.common.collections import OrderedSet -from twitter.common.contextutil import temporary_file, temporary_dir -from twitter.common.dirutil import safe_mkdir, touch -from twitter.common.dirutil.chroot import Chroot -from twitter.common.collections import OrderedSet -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import Target, TargetDefinitionException -from twitter.pants.commands.setup_py import SetupPy -from twitter.pants.targets.pants_target import Pants as pants -from twitter.pants.targets.python_artifact import PythonArtifact as setup_py -from twitter.pants.targets.python_binary import PythonBinary as python_binary -from twitter.pants.targets.python_library import PythonLibrary as python_library - -from mock import ( - MagicMock, - Mock, - call, -) -import pytest - - -def create_dependencies(depmap): - target_map = {} - with ParseContext.temp(): - for name, deps in depmap.items(): - target_map[name] = python_library( - name=name, - provides=setup_py(name=name, version='0.0.0'), - dependencies=[pants(':%s' % dep) for dep in deps] - ) - return target_map - - -class MockableSetupPyCommand(SetupPy): - def __init__(self, target): - self.target = target - - -class TestSetupPy(unittest.TestCase): - def tearDown(self): - Target._clear_all_addresses() - - def test_minified_dependencies_1(self): - # foo -> bar -> baz - dep_map = {'foo': ['bar'], 'bar': ['baz'], 'baz': []} - target_map = 
create_dependencies(dep_map) - assert SetupPy.minified_dependencies(target_map['foo']) == OrderedSet([target_map['bar']]) - assert SetupPy.minified_dependencies(target_map['bar']) == OrderedSet([target_map['baz']]) - assert SetupPy.minified_dependencies(target_map['baz']) == OrderedSet() - - @classmethod - @contextmanager - def run_execute(cls, target, recursive=False): - setup_py = MockableSetupPyCommand(target) - setup_py.options = MagicMock() - setup_py.options.recursive = recursive - setup_py.run_one = MagicMock() - setup_py.run_one.return_value = True - setup_py.execute() - yield setup_py - - def test_execution_minified_dependencies_1(self): - dep_map = {'foo': ['bar'], 'bar': ['baz'], 'baz': []} - target_map = create_dependencies(dep_map) - with self.run_execute(target_map['foo'], recursive=False) as setup_py: - setup_py.run_one.assert_called_with(target_map['foo']) - with self.run_execute(target_map['foo'], recursive=True) as setup_py: - setup_py.run_one.assert_has_calls([ - call(target_map['foo']), - call(target_map['bar']), - call(target_map['baz']) - ], any_order=True) - - def test_minified_dependencies_2(self): - # foo --> baz - # | ^ - # v | - # bar ----' - dep_map = {'foo': ['bar', 'baz'], 'bar': ['baz'], 'baz': []} - target_map = create_dependencies(dep_map) - assert SetupPy.minified_dependencies(target_map['foo']) == OrderedSet([target_map['bar']]) - assert SetupPy.minified_dependencies(target_map['bar']) == OrderedSet([target_map['baz']]) - assert SetupPy.minified_dependencies(target_map['baz']) == OrderedSet() - - def test_minified_dependencies_diamond(self): - # bar <-- foo --> baz - # | | - # `----> bak <----' - dep_map = {'foo': ['bar', 'baz'], 'bar': ['bak'], 'baz': ['bak'], 'bak': []} - target_map = create_dependencies(dep_map) - assert SetupPy.minified_dependencies(target_map['foo']) == OrderedSet( - [target_map['bar'], target_map['baz']]) - assert SetupPy.minified_dependencies(target_map['bar']) == OrderedSet([target_map['bak']]) - assert SetupPy.minified_dependencies(target_map['baz']) == OrderedSet([target_map['bak']]) - - def test_binary_target_injected_into_minified_dependencies(self): - with ParseContext.temp(): - foo = python_library( - name = 'foo', - provides = setup_py( - name = 'foo', - version = '0.0.0', - ).with_binaries( - foo_binary = pants(':foo_bin') - ) - ) - - foo_bin = python_binary( - name = 'foo_bin', - entry_point = 'foo.bin.foo', - dependencies = [ pants(':foo_bin_dep') ] - ) - - foo_bin_dep = python_library( - name = 'foo_bin_dep' - ) - - assert SetupPy.minified_dependencies(foo) == OrderedSet([foo_bin, foo_bin_dep]) - entry_points = dict(SetupPy.iter_entry_points(foo)) - assert entry_points == {'foo_binary': 'foo.bin.foo'} - - with self.run_execute(foo, recursive=False) as setup_py_command: - setup_py_command.run_one.assert_called_with(foo) - - with self.run_execute(foo, recursive=True) as setup_py_command: - setup_py_command.run_one.assert_called_with(foo) - - def test_binary_target_injected_into_minified_dependencies_with_provider(self): - with ParseContext.temp(): - bar = python_library( - name = 'bar', - provides = setup_py( - name = 'bar', - version = '0.0.0', - ).with_binaries( - bar_binary = pants(':bar_bin') - ) - ) - - bar_bin = python_binary( - name = 'bar_bin', - entry_point = 'bar.bin.bar', - dependencies = [ pants(':bar_bin_dep') ] - ) - - bar_bin_dep = python_library( - name = 'bar_bin_dep', - provides = setup_py( - name = 'bar_bin_dep', - version = '0.0.0', - ) - ) - - assert SetupPy.minified_dependencies(bar) == 
OrderedSet([bar_bin, bar_bin_dep]) - entry_points = dict(SetupPy.iter_entry_points(bar)) - assert entry_points == {'bar_binary': 'bar.bin.bar'} - - with self.run_execute(bar, recursive=False) as setup_py_command: - setup_py_command.run_one.assert_called_with(bar) - - with self.run_execute(bar, recursive=True) as setup_py_command: - setup_py_command.run_one.assert_has_calls([ - call(bar), - call(bar_bin_dep) - ], any_order=True) - - def test_binary_cycle(self): - with ParseContext.temp(): - foo = python_library( - name = 'foo', - provides = setup_py( - name = 'foo', - version = '0.0.0', - ).with_binaries( - foo_binary = pants(':foo_bin') - ) - ) - - foo_bin = python_binary( - name = 'foo_bin', - entry_point = 'foo.bin.foo', - dependencies = [ pants(':foo') ] - ) - - with pytest.raises(TargetDefinitionException): - SetupPy.minified_dependencies(foo) - - -def test_detect_namespace_packages(): - def has_ns(stmt): - with temporary_file() as fp: - fp.write(stmt) - fp.flush() - return SetupPy.declares_namespace_package(fp.name) - - assert not has_ns('') - assert not has_ns('add(1, 2); foo(__name__); self.shoot(__name__)') - assert not has_ns('declare_namespace(bonk)') - assert has_ns('__import__("pkg_resources").declare_namespace(__name__)') - assert has_ns('import pkg_resources; pkg_resources.declare_namespace(__name__)') - assert has_ns('from pkg_resources import declare_namespace; declare_namespace(__name__)') - - -@contextmanager -def yield_chroot(packages, namespace_packages, resources): - def to_path(package): - return package.replace('.', os.path.sep) - - with temporary_dir() as td: - def write(package, name, content): - package_path = os.path.join(td, SetupPy.SOURCE_ROOT, to_path(package)) - safe_mkdir(os.path.dirname(os.path.join(package_path, name))) - with open(os.path.join(package_path, name), 'w') as fp: - fp.write(content) - for package in packages: - write(package, '__init__.py', '') - for package in namespace_packages: - write(package, '__init__.py', '__import__("pkg_resources").declare_namespace(__name__)') - for package, resource_list in resources.items(): - for resource in resource_list: - write(package, resource, 'asdfasdf') - - chroot_mock = Mock(spec=Chroot) - chroot_mock.path.return_value = td - yield chroot_mock - - -def test_find_packages(): - def assert_single_chroot(packages, namespace_packages, resources): - with yield_chroot(packages, namespace_packages, resources) as chroot: - p, n_p, r = SetupPy.find_packages(chroot) - assert p == set(packages + namespace_packages) - assert n_p == set(namespace_packages) - assert r == dict((k, set(v)) for (k, v) in resources.items()) - - # assert both packages and namespace packages work - assert_single_chroot(['twitter'], [], {}) - assert_single_chroot(['twitter'], ['twitter'], {}) - - # assert resources work - assert_single_chroot(['twitter'], [], {'twitter': ['blork.dat']}) - - resources = { - 'twitter': [ - 'README.rst', - os.path.join('pants', 'templates', 'ivy.mk'), - os.path.join('pants', 'templates', 'maven.mk'), - ] - } - assert_single_chroot(['twitter'], [], resources) - - # assert that nearest-submodule is honored - with yield_chroot(['twitter', 'twitter.pants'], [], resources) as chroot: - _, _, r = SetupPy.find_packages(chroot) - assert r == { - 'twitter': set(['README.rst']), - 'twitter.pants': set([ - os.path.join('templates', 'ivy.mk'), - os.path.join('templates', 'maven.mk'), - ]) - } - - # assert that nearest submodule splits on module prefixes - with yield_chroot( - ['twitter', 'twitter.util'], - [], - 
{'twitter.utilization': ['README.rst']}) as chroot: - - _, _, r = SetupPy.find_packages(chroot) - assert r == {'twitter': set(['utilization/README.rst'])} - - -def test_nearest_subpackage(): - # degenerate - assert SetupPy.nearest_subpackage('twitter', []) == 'twitter' - assert SetupPy.nearest_subpackage('twitter', ['twitter']) == 'twitter' - assert SetupPy.nearest_subpackage('twitter', ['foursquare']) == 'twitter' - - # common prefix - assert 'twitter' == SetupPy.nearest_subpackage('twitter.util', ['twitter']) - assert 'twitter.util' == SetupPy.nearest_subpackage( - 'twitter.util', ['twitter', 'twitter.util']) - assert 'twitter.util' == SetupPy.nearest_subpackage( - 'twitter.util.topo', ['twitter', 'twitter.util']) - assert 'twitter' == SetupPy.nearest_subpackage( - 'twitter.utilization', ['twitter', 'twitter.util']) - diff --git a/tests/python/twitter/pants/engine/__init__.py b/tests/python/twitter/pants/engine/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/engine/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/engine/base_engine_test.py b/tests/python/twitter/pants/engine/base_engine_test.py deleted file mode 100644 index c882afac3..000000000 --- a/tests/python/twitter/pants/engine/base_engine_test.py +++ /dev/null @@ -1,55 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
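Two SetupPy helpers characterized above are pure enough to demonstrate directly: namespace-package detection, which scans a module for a pkg_resources.declare_namespace(__name__) call, and nearest_subpackage, which attaches resources to the closest owning package by whole module-path components. A sketch, assuming the pre-migration twitter.pants.commands.setup_py module:

from twitter.common.contextutil import temporary_file
from twitter.pants.commands.setup_py import SetupPy

with temporary_file() as fp:
  fp.write('__import__("pkg_resources").declare_namespace(__name__)')
  fp.flush()
  assert SetupPy.declares_namespace_package(fp.name)

# Matching is on whole dotted components, not raw string prefixes:
assert SetupPy.nearest_subpackage(
    'twitter.util.topo', ['twitter', 'twitter.util']) == 'twitter.util'
assert SetupPy.nearest_subpackage(
    'twitter.utilization', ['twitter', 'twitter.util']) == 'twitter'
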
-# ================================================================================================== - -import unittest - -from twitter.pants.goal import Goal, Phase - - -class EngineTestBase(unittest.TestCase): - - @classmethod - def _namespace(cls, identifier): - return '__%s.%s__%s__' % (cls.__module__, cls.__name__, identifier) - - @classmethod - def as_phase(cls, phase_name): - """Returns a ``Phase`` object of the given name""" - return Phase(cls._namespace(phase_name)) - - @classmethod - def as_phases(cls, *phase_names): - """Converts the given phase names to a list of ``Phase`` objects.""" - return map(cls.as_phase, phase_names) - - @classmethod - def installed_goal(cls, name, action=None, group=None, dependencies=None, phase=None): - """Creates and installs a goal with the given name. - - :param string name: The goal name. - :param action: The goal's action. - :param group: The goal's group if it belongs to one. - :param list dependencies: The list of phase names the goal depends on, if any. - :param string phase: The name of the phase to install the goal in if different from the goal - name. - :returns The installed ``Goal`` object. - """ - goal = Goal(cls._namespace(name), - action=action or (lambda: None), - group=group, - dependencies=map(cls._namespace, dependencies or [])) - goal.install(cls._namespace(phase) if phase is not None else None) - return goal diff --git a/tests/python/twitter/pants/engine/test_engine.py b/tests/python/twitter/pants/engine/test_engine.py deleted file mode 100644 index 71e87ce37..000000000 --- a/tests/python/twitter/pants/engine/test_engine.py +++ /dev/null @@ -1,140 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
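The installed_goal() helper above is a thin wrapper over the underlying registration API: construct a Goal with an action and optional phase-name dependencies, then install() it, optionally into a differently named phase. A sketch, assuming the pre-migration twitter.pants.goal API (names here are illustrative and unnamespaced, unlike the test's):

from twitter.pants.goal import Goal, Phase

# install(None) installs the goal into a phase named after the goal.
Goal('resolve', action=lambda: None).install(None)

# An explicit 'compile' phase, depending on the 'resolve' phase running first.
Goal('javac', action=lambda: None, dependencies=['resolve']).install('compile')

compile_phase = Phase('compile')  # phases are addressable by name
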
-# ================================================================================================== - -import mox - -from twitter.pants.engine.engine import Engine, Timer -from twitter.pants.tasks import TaskError - -from ..base.context_utils import create_context -from .base_engine_test import EngineTestBase - - -class TimerTest(mox.MoxTestBase, EngineTestBase): - def setUp(self): - super(TimerTest, self).setUp() - self.ticker = self.mox.CreateMockAnything() - - def test_begin(self): - self.ticker().AndReturn(0) # start timer - self.ticker().AndReturn(11) # start timed goal_succeed #1 - self.ticker().AndReturn(13) # finish timed goal_succeed #1 - self.ticker().AndReturn(17) # start timed goal_succeed #2 - self.ticker().AndReturn(23) # finish timed goal_succeed #2 - self.ticker().AndReturn(29) # start timed goal_fail #1 - self.ticker().AndReturn(42) # finish timed goal_fail #1 - self.ticker().AndReturn(42) # start timed goal_muddle #1 - self.ticker().AndReturn(42) # finish timed goal_muddle #1 - self.ticker().AndReturn(42) # finish timer - self.mox.ReplayAll() - - goal_succeed = self.installed_goal('succeed', phase='first') - goal_fail = self.installed_goal('fail', phase='first') - goal_muddle = self.installed_goal('muddle', phase='second') - - with Timer.begin(self.ticker) as timer: - with timer.timed(goal_succeed): - pass - with timer.timed(goal_succeed): - pass - with timer.timed(goal_fail): - pass - with timer.timed(goal_muddle): - pass - - self.assertEqual(42, timer.elapsed) - - first_timings = timer.timings.pop(self.as_phase('first')) - second_timings = timer.timings.pop(self.as_phase('second')) - self.assertEqual(0, len(timer.timings)) - - goal_succeed_timings = first_timings.pop(goal_succeed) - goal_fail_timings = first_timings.pop(goal_fail) - self.assertEqual(0, len(first_timings)) - self.assertEqual([2, 6], goal_succeed_timings) - self.assertEqual([13], goal_fail_timings) - - goal_muddle_timings = second_timings.pop(goal_muddle) - self.assertEqual(0, len(second_timings)) - self.assertEqual([0], goal_muddle_timings) - - -class ExecutionOrderTest(EngineTestBase): - def test_execution_order(self): - self.installed_goal('invalidate') - self.installed_goal('clean-all', dependencies=['invalidate']) - - self.installed_goal('resolve') - self.installed_goal('javac', dependencies=['resolve'], phase='compile') - self.installed_goal('scalac', dependencies=['resolve'], phase='compile') - self.installed_goal('junit', dependencies=['compile'], phase='test') - - self.assertEqual(self.as_phases('invalidate', 'clean-all', 'resolve', 'compile', 'test'), - list(Engine.execution_order(self.as_phases('clean-all', 'test')))) - - self.assertEqual(self.as_phases('resolve', 'compile', 'test', 'invalidate', 'clean-all'), - list(Engine.execution_order(self.as_phases('test', 'clean-all')))) - - -class EngineTest(EngineTestBase): - class RecordingEngine(Engine): - def __init__(self, action=None): - super(EngineTest.RecordingEngine, self).__init__(print_timing=False) - self._action = action - self._attempts = [] - - @property - def attempts(self): - return self._attempts - - def attempt(self, timer, context, phases): - self._attempts.append((timer, context, phases)) - if self._action: - self._action() - - def setUp(self): - self.context = create_context() - - def assert_attempt(self, engine, *phase_names): - self.assertEqual(1, len(engine.attempts)) - - timer, context, phases = engine.attempts[0] - self.assertTrue(timer.elapsed >= 0, 'Expected timer to be finished.') - self.assertEqual(self.context, 
context) - self.assertEqual(self.as_phases(*phase_names), phases) - - def test_execute_success(self): - engine = self.RecordingEngine() - result = engine.execute(self.context, self.as_phases('one', 'two')) - self.assertEqual(0, result) - self.assert_attempt(engine, 'one', 'two') - - def _throw(self, error): - def throw(): - raise error - return throw - - def test_execute_raise(self): - engine = self.RecordingEngine(action=self._throw(TaskError())) - result = engine.execute(self.context, self.as_phases('three')) - self.assertEqual(1, result) - self.assert_attempt(engine, 'three') - - def test_execute_code(self): - engine = self.RecordingEngine(action=self._throw(TaskError(exit_code=42))) - result = engine.execute(self.context, self.as_phases('four', 'five', 'six')) - self.assertEqual(42, result) - self.assert_attempt(engine, 'four', 'five', 'six') diff --git a/tests/python/twitter/pants/engine/test_group_engine.py b/tests/python/twitter/pants/engine/test_group_engine.py deleted file mode 100644 index 746273bb6..000000000 --- a/tests/python/twitter/pants/engine/test_group_engine.py +++ /dev/null @@ -1,250 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
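From the tick expectations above: Timer.begin(ticker) takes a zero-argument clock, timer.timed(goal) brackets each goal invocation with a start and finish tick, and timer.timings accumulates per-phase, per-goal duration lists. A sketch with time.time as the ticker instead of a mock, assuming the pre-migration engine module (the goal is illustrative):

import time

from twitter.pants.engine.engine import Timer
from twitter.pants.goal import Goal

goal = Goal('demo', action=lambda: None)
goal.install(None)          # timings are grouped by the goal's phase

with Timer.begin(time.time) as timer:
  with timer.timed(goal):
    time.sleep(0.01)        # work attributed to the 'demo' goal

print(timer.elapsed)        # total seconds between begin and exit
print(timer.timings)        # {Phase: {Goal: [per-invocation durations]}}
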
-# ================================================================================================== - -import pytest -import unittest - -from textwrap import dedent - -from twitter.pants.engine.group_engine import GroupEngine, GroupIterator, GroupMember -from twitter.pants.goal import Goal, Group -from twitter.pants.tasks import Task -from twitter.pants.tasks.check_exclusives import ExclusivesMapping - -from ..base.context_utils import create_context -from ..base_build_root_test import BaseBuildRootTest -from .base_engine_test import EngineTestBase - - -class GroupMemberTest(unittest.TestCase): - def test_from_goal_valid(self): - def predicate(tgt): - return tgt == 42 - - goal = Goal('fred', action=lambda: None, group=Group('heathers', predicate)) - self.assertEqual(GroupMember('heathers', 'fred', predicate), GroupMember.from_goal(goal)) - - def test_from_goal_invalid(self): - with pytest.raises(ValueError): - GroupMember.from_goal(Goal('fred', action=lambda: None)) - - -class JvmTargetTest(BaseBuildRootTest): - @classmethod - def java_library(cls, path, name, deps=None): - cls._library(path, 'java_library', name, deps) - - @classmethod - def python_library(cls, path, name, deps=None): - cls._library(path, 'python_library', name, deps) - - @classmethod - def scala_library(cls, path, name, deps=None): - cls._library(path, 'scala_library', name, deps) - - @classmethod - def _library(cls, path, target_type, name, deps=None): - cls.create_target(path, dedent(''' - %(target_type)s(name='%(name)s', - dependencies=[%(deps)s], - sources=[], - ) - ''' % dict(target_type=target_type, - name=name, - deps=','.join('pants("%s")' % d for d in (deps or []))))) - - @classmethod - def targets(cls, *addresses): - return map(cls.target, addresses) - - -class GroupIteratorTestBase(JvmTargetTest): - def setUp(self): - super(GroupIteratorTestBase, self).setUp() - - self.red = GroupMember('colors', 'red', lambda tgt: 'red' in tgt.name) - self.green = GroupMember('colors', 'green', lambda tgt: 'green' in tgt.name) - self.blue = GroupMember('colors', 'blue', lambda tgt: 'blue' in tgt.name) - - def iterate(self, *addresses): - return list(GroupIterator(self.targets(*addresses), [self.red, self.green, self.blue])) - - -class GroupIteratorSingleTest(GroupIteratorTestBase): - def test(self): - self.java_library('root', 'colorless', deps=[]) - self.java_library('root', 'a_red', deps=['root:colorless']) - self.java_library('root', 'b_red', deps=['root:a_red']) - self.java_library('root', 'c_red', deps=['root:a_red', 'root:colorless']) - self.java_library('root', 'd_red', deps=['root:b_red', 'root:c_red']) - - chunks = self.iterate('root:d_red') - self.assertEqual(1, len(chunks)) - - group_member, targets = chunks[0] - self.assertEqual(self.red, group_member) - self.assertEqual(set(self.targets('root:d_red', 'root:b_red', 'root:c_red', 'root:a_red')), - set(targets)) - - -class GroupIteratorMultipleTest(GroupIteratorTestBase): - def test(self): - self.java_library('root', 'colorless', deps=[]) - self.java_library('root', 'a_red', deps=['root:colorless']) - self.java_library('root', 'a_blue', deps=['root:a_red']) - self.java_library('root', 'a_green', deps=['root:a_blue', 'root:colorless']) - self.java_library('root', 'b_red', deps=['root:a_blue']) - self.java_library('root', 'c_red', deps=['root:b_red']) - - chunks = self.iterate('root:c_red', 'root:a_green') - self.assertEqual(4, len(chunks)) - - group_member, targets = chunks[0] - self.assertEqual(self.red, group_member) - 
self.assertEqual(set(self.targets('root:a_red')), set(targets)) - - group_member, targets = chunks[1] - self.assertEqual(self.blue, group_member) - self.assertEqual(set(self.targets('root:a_blue')), set(targets)) - - group_member, targets = chunks[2] - self.assertEqual(self.green, group_member) - self.assertEqual(set(self.targets('root:a_green')), set(targets)) - - group_member, targets = chunks[3] - self.assertEqual(self.red, group_member) - self.assertEqual(set(self.targets('root:b_red', 'root:c_red')), set(targets)) - - -class GroupIteratorTargetsTest(GroupIteratorTestBase): - """Test that GroupIterator raises an exception when given non-internal targets.""" - - def test_internal_targets(self): - self.java_library('root', 'colorless', deps=[]) - self.iterate('root:colorless') - - def test_non_internal_targets(self): - self.python_library('root2', 'colorless', deps=[]) - with pytest.raises(ValueError): - self.iterate('root2:colorless') - - -class GroupEngineTest(EngineTestBase, JvmTargetTest): - @classmethod - def setUpClass(cls): - super(GroupEngineTest, cls).setUpClass() - - cls.java_library('src/java', 'a') - cls.scala_library('src/scala', 'b', deps=['src/java:a']) - cls.java_library('src/java', 'c', deps=['src/scala:b']) - cls.scala_library('src/scala', 'd', deps=['src/java:c']) - cls.java_library('src/java', 'e', deps=['src/scala:d']) - cls.python_library('src/python', 'f') - - def setUp(self): - super(GroupEngineTest, self).setUp() - - self.context = create_context(options=dict(explain=False), - target_roots=self.targets('src/java:e', 'src/python:f')) - self.assertTrue(self.context.is_unlocked()) - - # TODO(John Sirois): disentangle GroupEngine from relying upon the CheckExclusives task being - # run. It should either arrange this directly or else the requirement should be in a different - # layer. 
- exclusives_mapping = ExclusivesMapping(self.context) - exclusives_mapping._populate_target_maps(self.context.targets()) - self.context.products.safe_create_data('exclusives_groups', lambda: exclusives_mapping) - - self.engine = GroupEngine(print_timing=False) - self.recorded_actions = [] - - def tearDown(self): - self.assertTrue(self.context.is_unlocked()) - - def construct_action(self, tag): - return 'construct', tag, self.context - - def execute_action(self, tag, targets=None): - return 'execute', tag, (targets or self.context.targets()) - - def record(self, tag): - class RecordingTask(Task): - def __init__(me, context): - super(RecordingTask, me).__init__(context) - self.recorded_actions.append(self.construct_action(tag)) - - def execute(me, targets): - self.recorded_actions.append(self.execute_action(tag, targets=targets)) - - return RecordingTask - - def install_goal(self, name, group=None, dependencies=None, phase=None): - return self.installed_goal(name, - action=self.record(name), - group=group, - dependencies=dependencies, - phase=phase) - - def test_no_groups(self): - self.install_goal('resolve') - self.install_goal('javac', dependencies=['resolve'], phase='compile') - self.install_goal('checkstyle', phase='compile') - self.install_goal('resources') - self.install_goal('test', dependencies=['compile', 'resources']) - - result = self.engine.execute(self.context, self.as_phases('test')) - self.assertEqual(0, result) - - expected = [self.construct_action('test'), - self.construct_action('resources'), - self.construct_action('checkstyle'), - self.construct_action('javac'), - self.construct_action('resolve'), - self.execute_action('resolve'), - self.execute_action('javac'), - self.execute_action('checkstyle'), - self.execute_action('resources'), - self.execute_action('test')] - self.assertEqual(expected, self.recorded_actions) - - def test_groups(self): - self.install_goal('resolve') - self.install_goal('javac', - group=Group('jvm', lambda t: t.is_java), - dependencies=['resolve'], - phase='compile') - self.install_goal('scalac', - group=Group('jvm', lambda t: t.is_scala), - dependencies=['resolve'], - phase='compile') - self.install_goal('checkstyle', phase='compile') - - result = self.engine.execute(self.context, self.as_phases('compile')) - self.assertEqual(0, result) - - expected = [self.construct_action('checkstyle'), - self.construct_action('scalac'), - self.construct_action('javac'), - self.construct_action('resolve'), - self.execute_action('resolve'), - self.execute_action('javac', targets=self.targets('src/java:a')), - self.execute_action('scalac', targets=self.targets('src/scala:b')), - self.execute_action('javac', targets=self.targets('src/java:c')), - self.execute_action('scalac', targets=self.targets('src/scala:d')), - self.execute_action('javac', targets=self.targets('src/java:e')), - self.execute_action('checkstyle')] - self.assertEqual(expected, self.recorded_actions) - diff --git a/tests/python/twitter/pants/fs/__init__.py b/tests/python/twitter/pants/fs/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/fs/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
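The chunking contract pinned down above: GroupIterator walks internal targets in dependency order and yields maximal runs claimed by the same GroupMember, so javac/scalac style groups alternate only at dependency boundaries. GroupMember itself can be derived from a grouped Goal; a minimal sketch, assuming the pre-migration API:

from twitter.pants.engine.group_engine import GroupMember
from twitter.pants.goal import Goal, Group

is_red = lambda tgt: 'red' in tgt.name

goal = Goal('red', action=lambda: None, group=Group('colors', is_red))
member = GroupMember.from_goal(goal)
assert member == GroupMember('colors', 'red', is_red)

# Goals without a group cannot act as group members:
try:
  GroupMember.from_goal(Goal('plain', action=lambda: None))
except ValueError:
  pass  # expected
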
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/fs/test_archive.py b/tests/python/twitter/pants/fs/test_archive.py deleted file mode 100644 index 06812dc95..000000000 --- a/tests/python/twitter/pants/fs/test_archive.py +++ /dev/null @@ -1,60 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import unittest - -from twitter.common.contextutil import temporary_dir -from twitter.common.dirutil import safe_mkdir, touch - -from twitter.pants.fs.archive import TAR, TGZ, TBZ2, ZIP - -class ArchiveTest(unittest.TestCase): - def round_trip(self, archiver, empty_dirs): - def listtree(root): - listing = set() - for path, dirs, files in os.walk(root): - relpath = os.path.normpath(os.path.relpath(path, root)) - if empty_dirs: - listing.update(os.path.normpath(os.path.join(relpath, d)) for d in dirs) - listing.update(os.path.normpath(os.path.join(relpath, f)) for f in files) - return listing - - def test_round_trip(prefix=None): - with temporary_dir() as fromdir: - safe_mkdir(os.path.join(fromdir, 'a/b/c')) - touch(os.path.join(fromdir, 'a/b/d/e.txt')) - with temporary_dir() as archivedir: - archive = archiver.create(fromdir, archivedir, 'archive', prefix=prefix) - with temporary_dir() as todir: - archiver.extract(archive, todir) - fromlisting = listtree(fromdir) - if prefix: - fromlisting = set(os.path.join(prefix, x) for x in fromlisting) - if empty_dirs: - fromlisting.add(prefix) - self.assertEqual(fromlisting, listtree(todir)) - - test_round_trip() - test_round_trip(prefix='jake') - - def test_tar(self): - self.round_trip(TAR, empty_dirs=True) - self.round_trip(TGZ, empty_dirs=True) - self.round_trip(TBZ2, empty_dirs=True) - - def test_zip(self): - self.round_trip(ZIP, empty_dirs=False) diff --git a/tests/python/twitter/pants/fs/test_safe_filename.py b/tests/python/twitter/pants/fs/test_safe_filename.py deleted file mode 100644 index 3776db842..000000000 --- a/tests/python/twitter/pants/fs/test_safe_filename.py +++ /dev/null @@ -1,49 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. 
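The archive test above round-trips a directory tree through each archiver; the TAR family preserves empty directories while ZIP drops them. A sketch of the create/extract API it exercises, assuming the pre-migration twitter.pants.fs.archive module:

import os

from twitter.common.contextutil import temporary_dir
from twitter.common.dirutil import safe_mkdir, touch
from twitter.pants.fs.archive import TGZ

with temporary_dir() as fromdir:
  safe_mkdir(os.path.join(fromdir, 'a/b'))
  touch(os.path.join(fromdir, 'a/b/hello.txt'))
  with temporary_dir() as archivedir:
    # create() returns the path of the written archive.
    archive = TGZ.create(fromdir, archivedir, 'example')
    with temporary_dir() as todir:
      TGZ.extract(archive, todir)
      assert os.path.isfile(os.path.join(todir, 'a/b/hello.txt'))
# ZIP round-trips the same way but, per the test above, without empty dirs.
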
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import pytest -import unittest - -from twitter.pants.fs.fs import safe_filename - - -class SafeFilenameTest(unittest.TestCase): - class FixedDigest(object): - def __init__(self, size): - self._size = size - - def update(self, value): - pass - - def hexdigest(self): - return self._size * '*' - - def test_bad_name(self): - with pytest.raises(ValueError): - safe_filename(os.path.join('more', 'than', 'a', 'name.game')) - - def test_noop(self): - self.assertEqual('jack.jill', safe_filename('jack', '.jill', max_length=9)) - self.assertEqual('jack.jill', safe_filename('jack', '.jill', max_length=100)) - - def test_shorten(self): - self.assertEqual('**.jill', - safe_filename('jack', '.jill', digest=self.FixedDigest(2), max_length=8)) - - def test_shorten_fail(self): - with pytest.raises(ValueError): - safe_filename('jack', '.beanstalk', digest=self.FixedDigest(3), max_length=12) diff --git a/tests/python/twitter/pants/java/__init__.py b/tests/python/twitter/pants/java/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/java/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/java/distribution/__init__.py b/tests/python/twitter/pants/java/distribution/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/java/distribution/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
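Per the tests above, safe_filename passes a name plus extension through untouched when it fits max_length, collapses the base name to digest.hexdigest() plus the extension when it does not, rejects names containing path separators, and raises if even the digested form cannot fit. A sketch, assuming the pre-migration twitter.pants.fs.fs module (sha1 is an assumption; the tests use a stub digest):

import hashlib

from twitter.pants.fs.fs import safe_filename

# Within max_length the name passes through untouched.
assert safe_filename('jack', '.jill', max_length=9) == 'jack.jill'

# Over max_length the base collapses to the digest's hexdigest plus '.jill'.
print(safe_filename('a' * 200, '.jill', digest=hashlib.sha1(), max_length=50))
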
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/java/distribution/test_distribution.py b/tests/python/twitter/pants/java/distribution/test_distribution.py deleted file mode 100644 index bde251d80..000000000 --- a/tests/python/twitter/pants/java/distribution/test_distribution.py +++ /dev/null @@ -1,184 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from collections import namedtuple -from contextlib import contextmanager - -import os -import subprocess -import textwrap -import unittest - -import pytest - -from twitter.common.collections import maybe_list -from twitter.common.contextutil import environment_as, temporary_dir -from twitter.common.dirutil import chmod_plus_x, safe_open, touch - -from twitter.pants.base.revision import Revision -from twitter.pants.java.distribution import Distribution - - -class MockDistributionTest(unittest.TestCase): - EXE = namedtuple('Exe', ['name', 'contents']) - - @classmethod - def exe(cls, name, version=None): - contents = None if not version else textwrap.dedent(''' - #!/bin/sh - if [ $# -ne 3 ]; then - # Sanity check a classpath switch with a value plus the classname for main - echo "Expected 3 arguments, got $#: $@" >&2 - exit 1 - fi - echo "java.version=%s" - ''' % version).strip() - return cls.EXE(name, contents=contents) - - @contextmanager - def distribution(self, files=None, executables=None): - with temporary_dir() as jdk: - for f in maybe_list(files or ()): - touch(os.path.join(jdk, f)) - for exe in maybe_list(executables or (), expected_type=self.EXE): - path = os.path.join(jdk, exe.name) - with safe_open(path, 'w') as fp: - fp.write(exe.contents or '') - chmod_plus_x(path) - yield jdk - - def test_validate_basic(self): - with pytest.raises(Distribution.Error): - with self.distribution() as jdk: - Distribution(bin_path=jdk).validate() - - with pytest.raises(Distribution.Error): - with self.distribution(files='java') as jdk: - Distribution(bin_path=jdk).validate() - - with self.distribution(executables=self.exe('java')) as jdk: - Distribution(bin_path=jdk).validate() - - def test_validate_jdk(self): - with pytest.raises(Distribution.Error): - with 
self.distribution(executables=self.exe('java')) as jdk: - Distribution(bin_path=jdk, jdk=True).validate() - - with self.distribution(executables=[self.exe('java'), self.exe('javac')]) as jdk: - Distribution(bin_path=jdk, jdk=True).validate() - - def test_validate_version(self): - with pytest.raises(Distribution.Error): - with self.distribution(executables=self.exe('java', '1.7.0_25')) as jdk: - Distribution(bin_path=jdk, minimum_version='1.7.0_45').validate() - - with self.distribution(executables=self.exe('java', '1.7.0_25')) as jdk: - Distribution(bin_path=jdk, minimum_version='1.7.0_25').validate() - Distribution(bin_path=jdk, minimum_version=Revision.semver('1.6.0')).validate() - - def test_validated_binary(self): - with pytest.raises(Distribution.Error): - with self.distribution(files='jar', executables=self.exe('java')) as jdk: - Distribution(bin_path=jdk).binary('jar') - - with self.distribution(executables=[self.exe('java'), self.exe('jar')]) as jdk: - Distribution(bin_path=jdk).binary('jar') - - def test_locate(self): - @contextmanager - def env(**kwargs): - environment = dict(JDK_HOME=None, JAVA_HOME=None, PATH=None) - environment.update(**kwargs) - with environment_as(**environment): - yield - - with pytest.raises(Distribution.Error): - with env(): - Distribution.locate() - - with pytest.raises(Distribution.Error): - with self.distribution(files='java') as jdk: - with env(PATH=jdk): - Distribution.locate() - - with pytest.raises(Distribution.Error): - with self.distribution(executables=self.exe('java')) as jdk: - with env(PATH=jdk): - Distribution.locate(jdk=True) - - with pytest.raises(Distribution.Error): - with self.distribution(executables=self.exe('java', '1.6.0')) as jdk: - with env(PATH=jdk): - Distribution.locate(minimum_version='1.7.0') - - with pytest.raises(Distribution.Error): - with self.distribution(executables=self.exe('java')) as jdk: - with env(JDK_HOME=jdk): - Distribution.locate() - - with pytest.raises(Distribution.Error): - with self.distribution(executables=self.exe('java')) as jdk: - with env(JAVA_HOME=jdk): - Distribution.locate() - - with self.distribution(executables=self.exe('java')) as jdk: - with env(PATH=jdk): - Distribution.locate() - - with self.distribution(executables=[self.exe('java'), self.exe('javac')]) as jdk: - with env(PATH=jdk): - Distribution.locate(jdk=True) - - with self.distribution(executables=self.exe('java', '1.7.0')) as jdk: - with env(PATH=jdk): - Distribution.locate(minimum_version='1.6.0') - - with self.distribution(executables=self.exe('bin/java')) as jdk: - with env(JDK_HOME=jdk): - Distribution.locate() - - with self.distribution(executables=self.exe('bin/java')) as jdk: - with env(JAVA_HOME=jdk): - Distribution.locate() - - -def exe_path(name): - process = subprocess.Popen(['which', name], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, _ = process.communicate() - if process.returncode != 0: - return None - path = stdout.strip() - return path if os.path.exists(path) and os.access(path, os.X_OK) else None - - -class LiveDistributionTest(unittest.TestCase): - JAVA = exe_path('java') - JAVAC = exe_path('javac') - - @pytest.mark.skipif('not LiveDistributionTest.JAVA', reason='No java executable on the PATH.') - def test_validate_live(self): - with pytest.raises(Distribution.Error): - Distribution(bin_path=os.path.dirname(self.JAVA), minimum_version='999.9.9').validate() - - Distribution(bin_path=os.path.dirname(self.JAVA)).validate() - Distribution(bin_path=os.path.dirname(self.JAVA), 
minimum_version='1.3.1').validate() - Distribution.locate(jdk=False) - - @pytest.mark.skipif('not LiveDistributionTest.JAVAC', reason='No javac executable on the PATH.') - def test_validate_live_jdk(self): - Distribution(bin_path=os.path.dirname(self.JAVAC), jdk=True).validate() - Distribution(bin_path=os.path.dirname(self.JAVAC), jdk=True).binary('javap') - Distribution.locate(jdk=True) diff --git a/tests/python/twitter/pants/java/jar/__init__.py b/tests/python/twitter/pants/java/jar/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/java/jar/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/java/jar/test_open_jar.py b/tests/python/twitter/pants/java/jar/test_open_jar.py deleted file mode 100644 index 94f321045..000000000 --- a/tests/python/twitter/pants/java/jar/test_open_jar.py +++ /dev/null @@ -1,99 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
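Read together, the test_locate expectations above pin down a search order for a usable JVM: an executable found directly on the PATH wins, then a bin/ directory under JDK_HOME, then one under JAVA_HOME, and jdk=True additionally demands a javac next to java. A minimal sketch of that resolution order (locate_java_home is a hypothetical stand-in for Distribution.locate; the minimum_version filtering is omitted):

    import os

    def locate_java_home(jdk=False):
        # Hypothetical helper; assumes only what the tests above assert:
        # PATH entries are searched first, then bin/ under JDK_HOME and JAVA_HOME.
        def candidate_bin_dirs():
            for entry in (os.environ.get('PATH') or '').split(os.pathsep):
                if entry:
                    yield entry
            for home_var in ('JDK_HOME', 'JAVA_HOME'):
                home = os.environ.get(home_var)
                if home:
                    yield os.path.join(home, 'bin')

        required = ('java', 'javac') if jdk else ('java',)
        for bin_dir in candidate_bin_dirs():
            if all(os.access(os.path.join(bin_dir, exe), os.X_OK) for exe in required):
                return bin_dir
        raise RuntimeError('no usable java distribution found')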
-# ================================================================================================== - -import errno -import os -import pytest -import sys -import unittest - -from contextlib import contextmanager - -from twitter.common.contextutil import temporary_dir, temporary_file -from twitter.common.dirutil import safe_mkdir -from twitter.common.lang import Compatibility - -from twitter.pants.java.jar import open_jar - - -class OpenJarTest(unittest.TestCase): - - @contextmanager - def jarfile(self): - with temporary_file() as fd: - fd.close() - yield fd.name - - def test_mkdirs(self): - def assert_mkdirs(path, *entries): - with self.jarfile() as jarfile: - with open_jar(jarfile, 'w') as jar: - jar.mkdirs(path) - with open_jar(jarfile) as jar: - self.assertEquals(list(entries), jar.namelist()) - - if Compatibility.PY2 and sys.version_info[1] <= 6: - # Empty zip files in python 2.6 or lower cannot be read normally. - # Although BadZipFile should be raised, Apple's python 2.6.1 is sloppy and lets an IOError - # bubble, so we check for that case explicitly. - from zipfile import BadZipfile - with pytest.raises(Exception) as raised_info: - assert_mkdirs('') - raised = raised_info.value - self.assertTrue(isinstance(raised, (BadZipfile, IOError))) - if isinstance(raised, IOError): - self.assertEqual(errno.EINVAL, raised.errno) - else: - assert_mkdirs('') - - assert_mkdirs('a', 'a/') - assert_mkdirs('a/b/c', 'a/', 'a/b/', 'a/b/c/') - - def test_write_dir(self): - with temporary_dir() as chroot: - dir = os.path.join(chroot, 'a/b/c') - safe_mkdir(dir) - with self.jarfile() as jarfile: - with open_jar(jarfile, 'w') as jar: - jar.write(dir, 'd/e') - with open_jar(jarfile) as jar: - self.assertEquals(['d/', 'd/e/'], jar.namelist()) - - def test_write_file(self): - with temporary_dir() as chroot: - dir = os.path.join(chroot, 'a/b/c') - safe_mkdir(dir) - data_file = os.path.join(dir, 'd.txt') - with open(data_file, 'w') as fd: - fd.write('e') - with self.jarfile() as jarfile: - with open_jar(jarfile, 'w') as jar: - jar.write(data_file, 'f/g/h') - with open_jar(jarfile) as jar: - self.assertEquals(['f/', 'f/g/', 'f/g/h'], jar.namelist()) - self.assertEquals('e', jar.read('f/g/h')) - - def test_writestr(self): - def assert_writestr(path, contents, *entries): - with self.jarfile() as jarfile: - with open_jar(jarfile, 'w') as jar: - jar.writestr(path, contents) - with open_jar(jarfile) as jar: - self.assertEquals(list(entries), jar.namelist()) - self.assertEquals(contents, jar.read(path)) - - assert_writestr('a.txt', 'b', 'a.txt') - assert_writestr('a/b/c.txt', 'd', 'a/', 'a/b/', 'a/b/c.txt') diff --git a/tests/python/twitter/pants/net/__init__.py b/tests/python/twitter/pants/net/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/net/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/net/http/__init__.py b/tests/python/twitter/pants/net/http/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/net/http/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== diff --git a/tests/python/twitter/pants/net/http/test_fetcher.py b/tests/python/twitter/pants/net/http/test_fetcher.py deleted file mode 100644 index e84e80836..000000000 --- a/tests/python/twitter/pants/net/http/test_fetcher.py +++ /dev/null @@ -1,215 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
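The mkdirs contract that test_open_jar.py above pins down amounts to emitting one empty, slash-terminated zip entry per ancestor directory. A short sketch against the standard zipfile module (jar_mkdirs is a hypothetical helper; the assumption, suggested by the namelist() assertions, is that open_jar wraps zipfile.ZipFile):

    import io
    import zipfile

    def jar_mkdirs(jar, path):
        # Writes 'a/', 'a/b/', 'a/b/c/' for path 'a/b/c', matching the
        # namelist() expectations in test_mkdirs above.
        parts = [p for p in path.split('/') if p]
        for i in range(1, len(parts) + 1):
            jar.writestr('/'.join(parts[:i]) + '/', b'')

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as jar:
        jar_mkdirs(jar, 'a/b/c')
    with zipfile.ZipFile(buf) as jar:
        assert jar.namelist() == ['a/', 'a/b/', 'a/b/c/']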
-# ================================================================================================== - -from contextlib import closing - -import os - -import mox -import pytest -import requests - -from twitter.common.contextutil import temporary_file -from twitter.common.lang import Compatibility -from twitter.common.quantity import Amount, Data, Time - -from twitter.pants.net.http.fetcher import Fetcher - - -class FetcherTest(mox.MoxTestBase): - def setUp(self): - super(FetcherTest, self).setUp() - - self.requests = self.mox.CreateMockAnything() - self.response = self.mox.CreateMock(requests.Response) - self.fetcher = Fetcher(requests_api=self.requests) - self.listener = self.mox.CreateMock(Fetcher.Listener) - - def expect_get(self, url, chunk_size_bytes, timeout_secs, listener=True): - self.requests.get(url, stream=True, timeout=timeout_secs).AndReturn(self.response) - self.response.status_code = 200 - self.response.headers = {'content-length': '11'} - if listener: - self.listener.status(200, content_length=11) - - chunks = ['0123456789', 'a'] - self.response.iter_content(chunk_size=chunk_size_bytes).AndReturn(chunks) - return chunks - - def test_get(self): - for chunk in self.expect_get('http://bar', chunk_size_bytes=1024, timeout_secs=60): - self.listener.recv_chunk(chunk) - self.listener.finished() - self.response.close() - - self.mox.ReplayAll() - - self.fetcher.fetch('http://bar', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, Time.MINUTES)) - - def test_checksum_listener(self): - digest = self.mox.CreateMockAnything() - for chunk in self.expect_get('http://baz', chunk_size_bytes=1, timeout_secs=37): - self.listener.recv_chunk(chunk) - digest.update(chunk) - - self.listener.finished() - digest.hexdigest().AndReturn('42') - - self.response.close() - - self.mox.ReplayAll() - - checksum_listener = Fetcher.ChecksumListener(digest=digest) - self.fetcher.fetch('http://baz', - checksum_listener.wrap(self.listener), - chunk_size=Amount(1, Data.BYTES), - timeout=Amount(37, Time.SECONDS)) - self.assertEqual('42', checksum_listener.checksum) - - def test_download_listener(self): - downloaded = '' - for chunk in self.expect_get('http://foo', chunk_size_bytes=1048576, timeout_secs=3600): - self.listener.recv_chunk(chunk) - downloaded += chunk - - self.listener.finished() - self.response.close() - - self.mox.ReplayAll() - - with closing(Compatibility.StringIO()) as fp: - self.fetcher.fetch('http://foo', - Fetcher.DownloadListener(fp).wrap(self.listener), - chunk_size=Amount(1, Data.MB), - timeout=Amount(1, Time.HOURS)) - self.assertEqual(downloaded, fp.getvalue()) - - def test_size_mismatch(self): - self.requests.get('http://foo', stream=True, timeout=60).AndReturn(self.response) - self.response.status_code = 200 - self.response.headers = {'content-length': '11'} - self.listener.status(200, content_length=11) - - self.response.iter_content(chunk_size=1024).AndReturn(['a', 'b']) - self.listener.recv_chunk('a') - self.listener.recv_chunk('b') - - self.response.close() - - self.mox.ReplayAll() - - with pytest.raises(self.fetcher.Error): - self.fetcher.fetch('http://foo', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, Time.MINUTES)) - - def test_get_error_transient(self): - self.requests.get('http://foo', stream=True, timeout=60).AndRaise(requests.ConnectionError) - - self.mox.ReplayAll() - - with pytest.raises(self.fetcher.TransientError): - self.fetcher.fetch('http://foo', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, 
Time.MINUTES)) - - def test_get_error_permanent(self): - self.requests.get('http://foo', stream=True, timeout=60).AndRaise(requests.TooManyRedirects) - - self.mox.ReplayAll() - - with pytest.raises(self.fetcher.PermanentError) as e: - self.fetcher.fetch('http://foo', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, Time.MINUTES)) - self.assertTrue(e.value.response_code is None) - - def test_http_error(self): - self.requests.get('http://foo', stream=True, timeout=60).AndReturn(self.response) - self.response.status_code = 404 - self.listener.status(404) - - self.response.close() - - self.mox.ReplayAll() - - with pytest.raises(self.fetcher.PermanentError) as e: - self.fetcher.fetch('http://foo', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, Time.MINUTES)) - self.assertEqual(404, e.value.response_code) - - def test_iter_content_error(self): - self.requests.get('http://foo', stream=True, timeout=60).AndReturn(self.response) - self.response.status_code = 200 - self.response.headers = {} - self.listener.status(200, content_length=None) - - self.response.iter_content(chunk_size=1024).AndRaise(requests.Timeout) - self.response.close() - - self.mox.ReplayAll() - - with pytest.raises(self.fetcher.TransientError): - self.fetcher.fetch('http://foo', - self.listener, - chunk_size=Amount(1, Data.KB), - timeout=Amount(1, Time.MINUTES)) - - def expect_download(self, path_or_fd=None): - downloaded = '' - for chunk in self.expect_get('http://1', chunk_size_bytes=13, timeout_secs=13, listener=False): - downloaded += chunk - self.response.close() - - self.mox.ReplayAll() - - path = self.fetcher.download('http://1', - path_or_fd=path_or_fd, - chunk_size=Amount(13, Data.BYTES), - timeout=Amount(13, Time.SECONDS)) - return downloaded, path - - def test_download(self): - downloaded, path = self.expect_download() - try: - with open(path) as fp: - self.assertEqual(downloaded, fp.read()) - finally: - os.unlink(path) - - def test_download_fd(self): - with temporary_file() as fd: - downloaded, path = self.expect_download(path_or_fd=fd) - self.assertEqual(path, fd.name) - fd.close() - with open(path) as fp: - self.assertEqual(downloaded, fp.read()) - - def test_download_path(self): - with temporary_file() as fd: - fd.close() - downloaded, path = self.expect_download(path_or_fd=fd.name) - self.assertEqual(path, fd.name) - with open(path) as fp: - self.assertEqual(downloaded, fp.read()) diff --git a/tests/python/twitter/pants/process/__init__.py b/tests/python/twitter/pants/process/__init__.py deleted file mode 100644 index 3da6fdb8e..000000000 --- a/tests/python/twitter/pants/process/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
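The mox expectations above all describe one listener handshake: status() is reported once with the response code and parsed content-length, recv_chunk() fires per streamed chunk, finished() runs only on success, the response is closed on every path, and a byte count that disagrees with content-length is an error. A condensed sketch of that control flow (an illustration of the contract only, not the twitter.pants Fetcher implementation):

    import requests

    def fetch(url, listener, chunk_size=1024, timeout=60):
        response = requests.get(url, stream=True, timeout=timeout)
        try:
            if response.status_code != 200:
                listener.status(response.status_code)
                raise IOError('GET %s returned %d' % (url, response.status_code))
            size = response.headers.get('content-length')
            size = int(size) if size is not None else None
            listener.status(response.status_code, content_length=size)
            received = 0
            for chunk in response.iter_content(chunk_size=chunk_size):
                received += len(chunk)
                listener.recv_chunk(chunk)
            if size is not None and received != size:
                raise IOError('expected %d bytes, received %d' % (size, received))
            listener.finished()
        finally:
            response.close()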
-# ================================================================================================== diff --git a/tests/python/twitter/pants/process/test_xargs.py b/tests/python/twitter/pants/process/test_xargs.py deleted file mode 100644 index 563d86af5..000000000 --- a/tests/python/twitter/pants/process/test_xargs.py +++ /dev/null @@ -1,98 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import errno -import os - -import mox -import pytest - -from twitter.pants.process.xargs import Xargs - - -class XargsTest(mox.MoxTestBase): - def setUp(self): - super(XargsTest, self).setUp() - self.call = self.mox.CreateMockAnything() - self.xargs = Xargs(self.call) - - def test_execute_nosplit_success(self): - self.call(['one', 'two', 'three', 'four']).AndReturn(0) - self.mox.ReplayAll() - - self.assertEqual(0, self.xargs.execute(['one', 'two', 'three', 'four'])) - - def test_execute_nosplit_raise(self): - exception = Exception() - - self.call(['one', 'two', 'three', 'four']).AndRaise(exception) - self.mox.ReplayAll() - - with pytest.raises(Exception) as raised: - self.xargs.execute(['one', 'two', 'three', 'four']) - self.assertTrue(exception is raised.value) - - def test_execute_nosplit_fail(self): - self.call(['one', 'two', 'three', 'four']).AndReturn(42) - self.mox.ReplayAll() - - self.assertEqual(42, self.xargs.execute(['one', 'two', 'three', 'four'])) - - TOO_BIG = OSError(errno.E2BIG, os.strerror(errno.E2BIG)) - - def test_execute_split(self): - self.call(['one', 'two', 'three', 'four']).AndRaise(self.TOO_BIG) - self.call(['one', 'two']).AndReturn(0) - self.call(['three', 'four']).AndReturn(0) - self.mox.ReplayAll() - - self.assertEqual(0, self.xargs.execute(['one', 'two', 'three', 'four'])) - - def test_execute_uneven(self): - self.call(['one', 'two', 'three']).AndRaise(self.TOO_BIG) - # TODO(John Sirois): We really don't care if the 1st call gets 1 argument or 2, we just - # care that all arguments get passed just once via exactly 2 rounds of call - consider making - # this test less brittle to changes in the chunking logic. 
- self.call(['one']).AndReturn(0) - self.call(['two', 'three']).AndReturn(0) - self.mox.ReplayAll() - - self.assertEqual(0, self.xargs.execute(['one', 'two', 'three'])) - - def test_execute_split_multirecurse(self): - self.call(['one', 'two', 'three', 'four']).AndRaise(self.TOO_BIG) - self.call(['one', 'two']).AndRaise(self.TOO_BIG) - self.call(['one']).AndReturn(0) - self.call(['two']).AndReturn(0) - self.call(['three', 'four']).AndReturn(0) - self.mox.ReplayAll() - - self.assertEqual(0, self.xargs.execute(['one', 'two', 'three', 'four'])) - - def test_execute_split_fail_fast(self): - self.call(['one', 'two', 'three', 'four']).AndRaise(self.TOO_BIG) - self.call(['one', 'two']).AndReturn(42) - self.mox.ReplayAll() - - self.assertEqual(42, self.xargs.execute(['one', 'two', 'three', 'four'])) - - def test_execute_split_fail_slow(self): - self.call(['one', 'two', 'three', 'four']).AndRaise(self.TOO_BIG) - self.call(['one', 'two']).AndReturn(0) - self.call(['three', 'four']).AndReturn(42) - self.mox.ReplayAll() - - self.assertEqual(42, self.xargs.execute(['one', 'two', 'three', 'four'])) diff --git a/tests/python/twitter/pants/python/test_antlr_builder.py b/tests/python/twitter/pants/python/test_antlr_builder.py deleted file mode 100644 index 798946c35..000000000 --- a/tests/python/twitter/pants/python/test_antlr_builder.py +++ /dev/null @@ -1,51 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -import antlr3 -import antlr3.tree - -from twitter.common.python.test.ExprLexer import ExprLexer -from twitter.common.python.test.ExprParser import ExprParser -from twitter.common.python.test.Eval import Eval - -# We import this gratuitously, just to test that namespace packages work correctly in the -# generated ANTLR code. This module shares a namespace prefix with the generated -# ANTLR code, and so will be masked by it if namespace packages are broken. -from twitter.common.python.test2.csvLexer import csvLexer - -class AntlrBuilderTest(unittest.TestCase): - def test_generated_parser(self): - """The 'test' here is the very fact that we can successfully import the generated antlr code. - However there's no harm in also exercising it. 
This code is modified from the canonical example - at http://www.antlr.org/wiki/display/ANTLR3/Example .""" - char_stream = antlr3.ANTLRStringStream('4 + 5\n') - lexer = ExprLexer(char_stream) - tokens = antlr3.CommonTokenStream(lexer) - parser = ExprParser(tokens) - r = parser.prog() - - # this is the root of the AST - root = r.tree - - nodes = antlr3.tree.CommonTreeNodeStream(root) - nodes.setTokenStream(tokens) - eval = Eval(nodes) - eval.prog() - -if __name__ == '__main__': - unittest.main() diff --git a/tests/python/twitter/pants/python/test_resolver.py b/tests/python/twitter/pants/python/test_resolver.py deleted file mode 100644 index ef5f45c92..000000000 --- a/tests/python/twitter/pants/python/test_resolver.py +++ /dev/null @@ -1,43 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -from twitter.common.contextutil import temporary_file -from twitter.common.python.platforms import Platform - -from twitter.pants.base.config import Config -from twitter.pants.python.resolver import get_platforms - - -class ResolverTest(unittest.TestCase): - def setUp(self): - with temporary_file() as ini: - ini.write( -''' -[python-setup] -platforms: [ - 'current', - 'linux-x86_64'] -''') - ini.close() - self.config = Config.load(configpath=ini.name) - - def test_get_current_platform(self): - expected_platforms = [Platform.current(), 'linux-x86_64'] - self.assertEqual(set(expected_platforms), - set(get_platforms(self.config.getlist('python-setup', 'platforms')))) - diff --git a/tests/python/twitter/pants/python/test_thrift_builder.py b/tests/python/twitter/pants/python/test_thrift_builder.py deleted file mode 100644 index 60b13dc5c..000000000 --- a/tests/python/twitter/pants/python/test_thrift_builder.py +++ /dev/null @@ -1,117 +0,0 @@ -#================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
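test_xargs.py above fixes the splitting strategy precisely: the command is first attempted with the full argument list, and an OSError with errno.E2BIG causes the list to be halved, each half retried in order, recursing as needed and failing fast on a non-zero exit. A compact sketch consistent with those mock call sequences (xargs_execute is a hypothetical stand-in for Xargs.execute):

    import errno

    def xargs_execute(call, args):
        try:
            return call(args)
        except OSError as e:
            if e.errno != errno.E2BIG or len(args) <= 1:
                raise
        half = len(args) // 2
        result = xargs_execute(call, args[:half])
        if result != 0:
            return result  # fail fast, as in test_execute_split_fail_fast
        return xargs_execute(call, args[half:])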
-# ==================================================================================================
-
-import os
-
-from textwrap import dedent
-
-from twitter.pants.base_build_root_test import BaseBuildRootTest
-from twitter.pants.base.context_utils import create_config
-from twitter.pants.python.thrift_builder import PythonThriftBuilder
-from twitter.pants.targets.python_thrift_library import PythonThriftLibrary
-from twitter.pants.targets.sources import SourceRoot
-
-from mock import call, MagicMock, mock_open, patch
-
-
-sample_ini_test = """
-[DEFAULT]
-pants_workdir: %(buildroot)s
-thrift_workdir: %(pants_workdir)s/thrift
-"""
-
-
-class TestPythonThriftBuilder(BaseBuildRootTest):
-
-  @classmethod
-  def setUpClass(self):
-    super(TestPythonThriftBuilder, self).setUpClass()
-    SourceRoot.register(os.path.realpath(os.path.join(self.build_root, 'test_thrift_replacement')),
-                        PythonThriftLibrary)
-    self.create_target('test_thrift_replacement', dedent('''
-      python_thrift_library(name='one',
-        sources=['thrift/keyword.thrift'],
-        dependencies=None
-      )
-    '''))
-
-  def test_keyword_replacement(self):
-    m = mock_open(read_data='')
-    with patch('__builtin__.open', m, create=True):
-      with patch('shutil.copyfile'):
-        builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
-                                      root_dir=self.build_root,
-                                      config=create_config(sample_ini=sample_ini_test))
-
-        builder._modify_thrift = MagicMock()
-        builder._run_thrift = MagicMock()
-        builder.run_thrifts()
-
-        builder._modify_thrift.assert_called_once_with(os.path.realpath('%s/thrift/py-thrift/%s'
-                                                                        % (self.build_root,
-                                                                           'thrift/keyword.thrift')))
-
-  def test_keyword_replaced(self):
-    thrift_contents = dedent('''
-      namespace py gen.twitter.tweetypie.tweet
-      struct UrlEntity {
-        1: i16 from
-      }
-    ''')
-    expected_replaced_contents = dedent('''
-      namespace py gen.twitter.tweetypie.tweet
-      struct UrlEntity {
-        1: i16 from_
-      }
-    ''')
-    builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
-                                  root_dir=self.build_root,
-                                  config=create_config(sample_ini=sample_ini_test))
-    m = mock_open(read_data=thrift_contents)
-    with patch('__builtin__.open', m, create=True):
-      builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
-                                    root_dir=self.build_root,
-                                    config=create_config(sample_ini=sample_ini_test))
-      builder._modify_thrift('thrift_dummmy.thrift')
-      expected_open_call_list = [call('thrift_dummmy.thrift'), call('thrift_dummmy.thrift', 'w')]
-      self.assertEqual(expected_open_call_list, m.call_args_list)
-      mock_file_handle = m()
-      mock_file_handle.write.assert_called_once_with(expected_replaced_contents)
-
-  def test_non_keyword_file(self):
-    thrift_contents = dedent('''
-      namespace py gen.twitter.tweetypie.tweet
-      struct UrlEntity {
-        1: i16 no_keyword
-        2: i16 from_
-        3: i16 _fromdsd
-        4: i16 FROM
-        5: i16 fromsuffix
-      }
-    ''')
-    builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
-                                  root_dir=self.build_root,
-                                  config=create_config(sample_ini=sample_ini_test))
-    m = mock_open(read_data=thrift_contents)
-    with patch('__builtin__.open', m, create=True):
-      builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
-                                    root_dir=self.build_root,
-                                    config=create_config(sample_ini=sample_ini_test))
-      builder._modify_thrift('thrift_dummmy.thrift')
-      expected_open_call_list = [call('thrift_dummmy.thrift'), call('thrift_dummmy.thrift', 'w')]
-      self.assertEqual(expected_open_call_list, m.call_args_list)
-      mock_file_handle = m()
-      mock_file_handle.write.assert_called_once_with(thrift_contents)
diff --git a/tests/python/twitter/pants/python/test_thrift_namespace_packages.py b/tests/python/twitter/pants/python/test_thrift_namespace_packages.py
deleted file mode 100644
index 7320eec7e..000000000
--- a/tests/python/twitter/pants/python/test_thrift_namespace_packages.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#  http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.birds.duck.ttypes import Duck
-from twitter.birds.goose.ttypes import Goose
-
-class ThriftNamespacePackagesTest(unittest.TestCase):
-  def test_thrift_namespaces(self):
-    """The 'test' here is the very fact that we can successfully import the generated thrift code
-    with a shared package prefix (twitter.birds) from two different eggs.
-    However there's no harm in also exercising the thrift objects, just to be sure we can."""
-    myDuck = Duck()
-    myDuck.quack = 'QUACKQUACKQUACK'
-    myGoose = Goose()
-    myGoose.laysGoldenEggs = True
diff --git a/tests/python/twitter/pants/reporting/__init__.py b/tests/python/twitter/pants/reporting/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/python/twitter/pants/reporting/test_linkify.py b/tests/python/twitter/pants/reporting/test_linkify.py
deleted file mode 100644
index 77e7b00b5..000000000
--- a/tests/python/twitter/pants/reporting/test_linkify.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import os
-import shutil
-import tempfile
-import unittest
-
-from twitter.pants.reporting.linkify import linkify
-
-
-def ensure_file_exists(path):
-  os.makedirs(os.path.dirname(path))
-  open(path, 'a').close()
-
-class RunInfoTest(unittest.TestCase):
-  def setUp(self):
-    self._buildroot = tempfile.mkdtemp(prefix='test_html_reporter')
-
-  def tearDown(self):
-    if os.path.exists(self._buildroot):
-      shutil.rmtree(self._buildroot, ignore_errors=True)
-
-  def _do_test_linkify(self, expected_link, url):
-    s = 'foo %s bar' % url
-    expected = 'foo <a target="_blank" href="%s">%s</a> bar' % (expected_link, url)
-    linkified = linkify(self._buildroot, s)
-    self.assertEqual(expected, linkified)
-
-  def test_linkify_absolute_paths(self):
-    relpath = 'underscore_and.dot/and-dash/baz'
-    path = os.path.join(self._buildroot, relpath)
-    ensure_file_exists(path)
-    self._do_test_linkify('/browse/%s' % relpath, path)
-
-  def test_linkify_relative_paths(self):
-    relpath = 'underscore_and.dot/and-dash/baz'
-    path = os.path.join(self._buildroot, relpath)
-    ensure_file_exists(path)
-    self._do_test_linkify('/browse/%s' % relpath, relpath)
-
-  def test_linkify_http(self):
-    url = 'http://foobar.com/baz/qux'
-    self._do_test_linkify(url, url)
-
-    url = 'http://localhost:666/baz/qux'
-
self._do_test_linkify(url, url) - - def test_linkify_https(self): - url = 'https://foobar.com/baz/qux' - self._do_test_linkify(url, url) - - def test_linkify_target(self): - ensure_file_exists(os.path.join(self._buildroot, 'foo/bar/BUILD')) - self._do_test_linkify('/browse/foo/bar/BUILD', 'foo/bar') - self._do_test_linkify('/browse/foo/bar/BUILD', 'foo/bar:target') - diff --git a/tests/python/twitter/pants/scm/__init__.py b/tests/python/twitter/pants/scm/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/python/twitter/pants/scm/test_git.py b/tests/python/twitter/pants/scm/test_git.py deleted file mode 100644 index df8ff1cb6..000000000 --- a/tests/python/twitter/pants/scm/test_git.py +++ /dev/null @@ -1,177 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import subprocess -import re -import unittest - -from itertools import izip_longest - -import pytest - -from twitter.common.contextutil import environment_as, pushd, temporary_dir -from twitter.common.dirutil import safe_open, safe_mkdtemp, safe_rmtree, touch -from twitter.pants.scm import Scm -from twitter.pants.scm.git import Git - - -class Version(object): - def __init__(self, text): - self._components = map(int, text.split('.')) - - def __cmp__(self, other): - for ours, theirs in izip_longest(self._components, other._components, fillvalue=0): - difference = cmp(ours, theirs) - if difference != 0: - return difference - return 0 - - -class VersionTest(unittest.TestCase): - def test_equal(self): - self.assertEqual(Version('1'), Version('1.0.0.0')) - self.assertEqual(Version('1.0'), Version('1.0.0.0')) - self.assertEqual(Version('1.0.0'), Version('1.0.0.0')) - self.assertEqual(Version('1.0.0.0'), Version('1.0.0.0')) - - def test_less(self): - self.assertTrue(Version('1.6') < Version('2')) - self.assertTrue(Version('1.6') < Version('1.6.1')) - self.assertTrue(Version('1.6') < Version('1.10')) - - def test_greater(self): - self.assertTrue(Version('1.6.22') > Version('1')) - self.assertTrue(Version('1.6.22') > Version('1.6')) - self.assertTrue(Version('1.6.22') > Version('1.6.2')) - self.assertTrue(Version('1.6.22') > Version('1.6.21')) - self.assertTrue(Version('1.6.22') > Version('1.6.21.3')) - - -def git_version(): - process = subprocess.Popen(['git', '--version'], stdout=subprocess.PIPE) - (stdout, stderr) = process.communicate() - assert process.returncode == 0, "Failed to determine git version." 
- matches = re.search('(.*)\s(\d+.*\d+)\s(.*)', stdout) - return Version(matches.group(2)) - - -@pytest.mark.skipif("git_version() < Version('1.7.10')") -class GitTest(unittest.TestCase): - @staticmethod - def init_repo(remote_name, remote): - subprocess.check_call(['git', 'init']) - subprocess.check_call(['git', 'config', 'user.email', 'you@example.com']) - subprocess.check_call(['git', 'config', 'user.name', 'Your Name']) - subprocess.check_call(['git', 'remote', 'add', remote_name, remote]) - - @classmethod - def setUpClass(cls): - cls.origin = safe_mkdtemp() - with pushd(cls.origin): - subprocess.check_call(['git', 'init', '--bare']) - - cls.gitdir = safe_mkdtemp() - cls.worktree = safe_mkdtemp() - - cls.readme_file = os.path.join(cls.worktree, 'README') - - with environment_as(GIT_DIR=cls.gitdir, GIT_WORK_TREE=cls.worktree): - cls.init_repo('depot', cls.origin) - - touch(cls.readme_file) - subprocess.check_call(['git', 'add', 'README']) - subprocess.check_call(['git', 'commit', '-am', 'initial commit.']) - subprocess.check_call(['git', 'tag', 'first']) - subprocess.check_call(['git', 'push', '--tags', 'depot', 'master']) - subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master']) - - with safe_open(cls.readme_file, 'w') as readme: - readme.write('Hello World.') - subprocess.check_call(['git', 'commit', '-am', 'Update README.']) - - cls.clone2 = safe_mkdtemp() - with pushd(cls.clone2): - cls.init_repo('origin', cls.origin) - subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master']) - - with safe_open(os.path.realpath('README'), 'a') as readme: - readme.write('--') - subprocess.check_call(['git', 'commit', '-am', 'Update README 2.']) - subprocess.check_call(['git', 'push', '--tags', 'origin', 'master']) - - cls.git = Git(gitdir=cls.gitdir, worktree=cls.worktree) - - @classmethod - def tearDownClass(cls): - safe_rmtree(cls.origin) - safe_rmtree(cls.gitdir) - safe_rmtree(cls.worktree) - safe_rmtree(cls.clone2) - - def test(self): - self.assertEqual(set(), self.git.changed_files()) - self.assertEqual(set(['README']), self.git.changed_files(from_commit='HEAD^')) - - tip_sha = self.git.commit_id - self.assertTrue(tip_sha) - - self.assertTrue(tip_sha in self.git.changelog()) - - self.assertTrue(self.git.tag_name.startswith('first-'), msg='un-annotated tags should be found') - self.assertEqual('master', self.git.branch_name) - - def edit_readme(): - with open(self.readme_file, 'a') as readme: - readme.write('More data.') - - edit_readme() - with open(os.path.join(self.worktree, 'INSTALL'), 'w') as untracked: - untracked.write('make install') - self.assertEqual(set(['README']), self.git.changed_files()) - self.assertEqual(set(['README', 'INSTALL']), self.git.changed_files(include_untracked=True)) - - try: - # These changes should be rejected because our branch point from origin is 1 commit behind - # the changes pushed there in clone 2. 
- self.git.commit('API Changes.') - except Scm.RemoteException: - with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree): - subprocess.check_call(['git', 'reset', '--hard', 'depot/master']) - self.git.refresh() - edit_readme() - - self.git.commit('''API '"' " Changes.''') - self.git.tag('second', message='''Tagged ' " Changes''') - - with temporary_dir() as clone: - with pushd(clone): - self.init_repo('origin', self.origin) - subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master']) - - with open(os.path.realpath('README')) as readme: - self.assertEqual('--More data.', readme.read()) - - git = Git() - - # Check that we can pick up committed and uncommitted changes. - with safe_open(os.path.realpath('CHANGES'), 'w') as changes: - changes.write('none') - subprocess.check_call(['git', 'add', 'CHANGES']) - self.assertEqual(set(['README', 'CHANGES']), git.changed_files(from_commit='first')) - - self.assertEqual('master', git.branch_name) - self.assertEqual('second', git.tag_name, msg='annotated tags should be found') diff --git a/tests/python/twitter/pants/targets/test_artifact.py b/tests/python/twitter/pants/targets/test_artifact.py deleted file mode 100644 index 26ce5bf68..000000000 --- a/tests/python/twitter/pants/targets/test_artifact.py +++ /dev/null @@ -1,37 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.targets.artifact import Artifact -from twitter.pants.targets.repository import Repository - - -class ArtifactTest(unittest.TestCase): - - def test_validation(self): - with ParseContext.temp(): - repo = Repository(name="myRepo", url="myUrl", push_db="myPushDb") - Artifact(org="testOrg", name="testName", repo=repo, description="Test") - self.assertRaises(ValueError, Artifact, - org=1, name="testName", repo=repo, description="Test") - self.assertRaises(ValueError, Artifact, - org="testOrg", name=1, repo=repo, description="Test") - self.assertRaises(ValueError, Artifact, - org="testOrg", name="testName", repo=1, description="Test") - self.assertRaises(ValueError, Artifact, - org="testOrg", name="testName", repo=repo, description=1) diff --git a/tests/python/twitter/pants/targets/test_bundle.py b/tests/python/twitter/pants/targets/test_bundle.py deleted file mode 100644 index ece07b4a6..000000000 --- a/tests/python/twitter/pants/targets/test_bundle.py +++ /dev/null @@ -1,88 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. 
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import unittest - -from textwrap import dedent - -from twitter.pants.base_build_root_test import BaseBuildRootTest -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.targets.jvm_binary import Bundle - -class BundleTest(BaseBuildRootTest): - - def test_bundle_filemap_dest_bypath(self): - self.create_dir('src/java/org/archimedes/buoyancy/config') - self.create_file('src/java/org/archimedes/buoyancy/config/densities.xml') - self.create_target('src/java/org/archimedes/buoyancy/BUILD', dedent(''' - jvm_app(name='buoyancy', - binary=jvm_binary(name='unused'), - bundles=bundle().add('config/densities.xml')) - ''')) - app = self.target('src/java/org/archimedes/buoyancy') - # after one big refactor, ../../../../../ snuck into this path: - self.assertEquals(app.bundles[0].filemap.values()[0], - 'config/densities.xml') - - def test_bundle_filemap_dest_byglobs(self): - self.create_dir('src/java/org/archimedes/tub/config') - self.create_file('src/java/org/archimedes/tub/config/one.xml') - self.create_file('src/java/org/archimedes/tub/config/two.xml') - self.create_target('src/java/org/archimedes/tub/BUILD', dedent(''' - jvm_app(name='tub', - binary=jvm_binary(name='unused'), - bundles=bundle().add(globs('config/*.xml'))) - ''')) - app = self.target('src/java/org/archimedes/tub') - for k in app.bundles[0].filemap.keys(): - if k.endswith('archimedes/tub/config/one.xml'): - onexml_key = k - self.assertEquals(app.bundles[0].filemap[onexml_key], - 'config/one.xml') - - def test_bundle_filemap_dest_relative(self): - self.create_dir('src/java/org/archimedes/crown/gold/config') - self.create_file('src/java/org/archimedes/crown/gold/config/five.xml') - self.create_target('src/java/org/archimedes/crown/BUILD', dedent(''' - jvm_app(name='crown', - binary=jvm_binary(name='unused'), - bundles=bundle(relative_to='gold').add('gold/config/five.xml')) - ''')) - app = self.target('src/java/org/archimedes/crown') - for k in app.bundles[0].filemap.keys(): - if k.endswith('archimedes/crown/gold/config/five.xml'): - fivexml_key = k - self.assertEquals(app.bundles[0].filemap.values()[0], - 'config/five.xml') - - def test_bundle_add_add(self): - self.create_dir('src/java/org/archimedes/volume/config/stone') - self.create_file('src/java/org/archimedes/volume/config/stone/dense.xml') - self.create_dir('src/java/org/archimedes/volume/config') - self.create_file('src/java/org/archimedes/volume/config/metal/dense.xml') - self.create_target('src/java/org/archimedes/volume/BUILD', dedent(''' - jvm_app(name='volume', - binary=jvm_binary(name='unused'), - bundles=bundle(relative_to='config') - .add('config/stone/dense.xml') - .add('config/metal/dense.xml')) - ''')) - app = self.target('src/java/org/archimedes/volume') - for k in 
app.bundles[0].filemap.keys(): - if k.endswith('archimedes/volume/config/stone/dense.xml'): - stonexml_key = k - self.assertEquals(app.bundles[0].filemap[stonexml_key], - 'stone/dense.xml') diff --git a/tests/python/twitter/pants/targets/test_exclusive.py b/tests/python/twitter/pants/targets/test_exclusive.py deleted file mode 100644 index 302ccdd9a..000000000 --- a/tests/python/twitter/pants/targets/test_exclusive.py +++ /dev/null @@ -1,44 +0,0 @@ -from twitter.pants.testutils import MockTarget -from twitter.pants.base.config import Config -from twitter.pants.goal import Context -from twitter.pants.tasks.check_exclusives import CheckExclusives -from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest - - -class ExclusivesTargetTest(BaseMockTargetTest): - """Test exclusives propagation in the dependency graph""" - - @classmethod - def setUpClass(cls): - cls.config = Config.load() - - def setupTargets(self): - a = MockTarget('a', exclusives={'a': '1', 'b': '1'}) - b = MockTarget('b', exclusives={'a': '1'}) - c = MockTarget('c', exclusives = {'a': '2'}) - d = MockTarget('d', dependencies=[a, b]) - e = MockTarget('e', dependencies=[a, c], exclusives={'c': '1'}) - return a, b, c, d, e - - def testPropagation(self): - a, b, c, d, e = self.setupTargets() - d_excl = d.get_all_exclusives() - self.assertEquals(d_excl['a'], set(['1'])) - e_excl = e.get_all_exclusives() - self.assertEquals(e_excl['a'], set(['1', '2'])) - - def testPartitioning(self): - # Target e has conflicts; in this test, we want to check that partitioning - # of valid targets works to prevent conflicts in chunks, so we only use a-d. - a, b, c, d, _ = self.setupTargets() - context = Context(ExclusivesTargetTest.config, options={}, run_tracker=None, target_roots=[a, b, c, d]) - context.products.require_data('exclusives_groups') - check_exclusives_task = CheckExclusives(context, signal_error=True) - check_exclusives_task.execute([a, b, c, d]) - egroups = context.products.get_data('exclusives_groups') - self.assertEquals(egroups.get_targets_for_group_key("a=1"), set([a, b, d])) - self.assertEquals(egroups.get_targets_for_group_key("a=2"), set([c])) - - - - diff --git a/tests/python/twitter/pants/targets/test_internal.py b/tests/python/twitter/pants/targets/test_internal.py deleted file mode 100644 index eae6ce586..000000000 --- a/tests/python/twitter/pants/targets/test_internal.py +++ /dev/null @@ -1,74 +0,0 @@ -# ================================================================================================== -# Copyright 2011 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
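The propagation that testPropagation above depends on can be stated simply: a target's effective exclusives are the per-key union of its own tags with those of everything it depends on, so a key carrying more than one tag (target e's 'a') marks a conflict. A small model of that merge (illustrative only; MockTarget and Context wire this up differently):

    def merged_exclusives(own, dependency_exclusives):
        # own: dict of key -> tag; dependency_exclusives: list of such dicts.
        merged = {}
        for exclusives in [own] + list(dependency_exclusives):
            for key, tag in exclusives.items():
                merged.setdefault(key, set()).add(tag)
        return merged

    # d = MockTarget('d', dependencies=[a, b]) from setupTargets above:
    d = merged_exclusives({}, [{'a': '1', 'b': '1'}, {'a': '1'}])
    assert d['a'] == {'1'}
    # e depends on a and c, whose 'a' tags disagree:
    e = merged_exclusives({'c': '1'}, [{'a': '1', 'b': '1'}, {'a': '2'}])
    assert e['a'] == {'1', '2'}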
-# ================================================================================================== - -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.base.target import Target, TargetDefinitionException -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.testutils import MockTarget -from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest - - -class InternalTargetTest(BaseMockTargetTest): - - def test_validation(self): - with ParseContext.temp('InternalTargetTest/test_validation'): - InternalTarget(name="valid", dependencies=None) - self.assertRaises(TargetDefinitionException, InternalTarget, - name=1, dependencies=None) - - InternalTarget(name="valid2", dependencies=Target(name='mybird')) - self.assertRaises(TargetDefinitionException, InternalTarget, - name='valid3', dependencies=1) - - def test_detect_cycle_direct(self): - a = MockTarget('a') - - # no cycles yet - InternalTarget.sort_targets([a]) - a.update_dependencies([a]) - try: - InternalTarget.sort_targets([a]) - self.fail("Expected a cycle to be detected") - except InternalTarget.CycleException: - # expected - pass - - def test_detect_cycle_indirect(self): - c = MockTarget('c') - b = MockTarget('b', [c]) - a = MockTarget('a', [c, b]) - - # no cycles yet - InternalTarget.sort_targets([a]) - - c.update_dependencies([a]) - try: - InternalTarget.sort_targets([a]) - self.fail("Expected a cycle to be detected") - except InternalTarget.CycleException: - # expected - pass - - def testSort(self): - a = MockTarget('a', []) - b = MockTarget('b', [a]) - c = MockTarget('c', [b]) - d = MockTarget('d', [c, a]) - e = MockTarget('e', [d]) - - self.assertEquals(InternalTarget.sort_targets([a,b,c,d,e]), [e,d,c,b,a]) - self.assertEquals(InternalTarget.sort_targets([b,d,a,e,c]), [e,d,c,b,a]) - self.assertEquals(InternalTarget.sort_targets([e,d,c,b,a]), [e,d,c,b,a]) diff --git a/tests/python/twitter/pants/targets/test_jar_library.py b/tests/python/twitter/pants/targets/test_jar_library.py deleted file mode 100644 index ddbf62557..000000000 --- a/tests/python/twitter/pants/targets/test_jar_library.py +++ /dev/null @@ -1,31 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
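test_internal.py above pins down two properties of InternalTarget.sort_targets: dependents come before their dependencies in the result, and any dependency cycle raises CycleException. A depth-first sketch that satisfies both, assuming only the name/dependencies shape MockTarget exposes (ValueError standing in for CycleException):

    def sort_targets(roots):
        ordered, visiting, done = [], set(), set()

        def visit(target):
            if target in done:
                return
            if target in visiting:
                raise ValueError('dependency cycle detected at %s' % target.name)
            visiting.add(target)
            for dep in target.dependencies:
                visit(dep)
            visiting.discard(target)
            done.add(target)
            ordered.append(target)

        for root in roots:
            visit(root)
        # Dependencies were appended first, so reverse to put dependents first.
        return list(reversed(ordered))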
diff --git a/tests/python/twitter/pants/targets/test_jar_library.py b/tests/python/twitter/pants/targets/test_jar_library.py
deleted file mode 100644
index ddbf62557..000000000
--- a/tests/python/twitter/pants/targets/test_jar_library.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import Target, TargetDefinitionException
-from twitter.pants.targets.jar_library import JarLibrary
-
-
-class JarLibraryTest(unittest.TestCase):
-
-  def test_validation(self):
-    with ParseContext.temp('JarLibraryTest/test_validation'):
-      target = Target(name='mybird')
-      JarLibrary(name="test", dependencies=target)
-      self.assertRaises(TargetDefinitionException, JarLibrary,
-                        name="test1", dependencies=None)
diff --git a/tests/python/twitter/pants/targets/test_pants_target.py b/tests/python/twitter/pants/targets/test_pants_target.py
deleted file mode 100644
index 481ad8f69..000000000
--- a/tests/python/twitter/pants/targets/test_pants_target.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import TargetDefinitionException
-from twitter.pants.targets.pants_target import Pants
-
-
-class PantsTargetTest(unittest.TestCase):
-
-  def test_validation(self):
-    basedir = 'PantsTargetTest/test_validation'
-    with ParseContext.temp(basedir):
-      self.assertRaises(TargetDefinitionException, Pants, spec='fake')
-      self.assertRaises(TargetDefinitionException, Pants, spec='%s:fake' % basedir)
diff --git a/tests/python/twitter/pants/targets/test_python_binary.py b/tests/python/twitter/pants/targets/test_python_binary.py
deleted file mode 100644
index eb0b63007..000000000
--- a/tests/python/twitter/pants/targets/test_python_binary.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import pytest
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import Target, TargetDefinitionException
-from twitter.pants.base_build_root_test import BaseBuildRootTest
-from twitter.pants.targets.python_binary import PythonBinary
-
-
-class TestPythonBinary(BaseBuildRootTest):
-  def tearDown(self):
-    Target._clear_all_addresses()
-
-  def test_python_binary_must_have_some_entry_point(self):
-    with ParseContext.temp('src'):
-      with pytest.raises(TargetDefinitionException):
-        PythonBinary(name='binary')
-
-  def test_python_binary_with_entry_point_no_source(self):
-    with ParseContext.temp('src'):
-      assert PythonBinary(name='binary', entry_point='blork').entry_point == 'blork'
-
-  def test_python_binary_with_source_no_entry_point(self):
-    with ParseContext.temp('src'):
-      assert PythonBinary(name='binary1', source='blork.py').entry_point == 'blork'
-      assert PythonBinary(name='binary2', source='bin/blork.py').entry_point == 'bin.blork'
-
-  def test_python_binary_with_entry_point_and_source(self):
-    with ParseContext.temp('src'):
-      assert 'blork' == PythonBinary(
-          name='binary1', entry_point='blork', source='blork.py').entry_point
-      assert 'blork:main' == PythonBinary(
-          name='binary2', entry_point='blork:main', source='blork.py').entry_point
-      assert 'bin.blork:main' == PythonBinary(
-          name='binary3', entry_point='bin.blork:main', source='bin/blork.py').entry_point
-
-  def test_python_binary_with_entry_point_and_source_mismatch(self):
-    with ParseContext.temp('src'):
-      with pytest.raises(TargetDefinitionException):
-        PythonBinary(name='binary1', entry_point='blork', source='hork.py')
-      with pytest.raises(TargetDefinitionException):
-        PythonBinary(name='binary2', entry_point='blork:main', source='hork.py')
-      with pytest.raises(TargetDefinitionException):
-        PythonBinary(name='binary3', entry_point='bin.blork', source='blork.py')
-      with pytest.raises(TargetDefinitionException):
-        PythonBinary(name='binary4', entry_point='bin.blork', source='bin.py')
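The deleted PythonBinary tests encode an inference rule: an entry point can be derived from the source path (strip `.py`, turn path separators into dots), an explicit `entry_point` keeps any `:function` suffix, and when both are given the module parts must agree. A hedged sketch of that rule, inferred from the assertions rather than taken from the pants source:

```python
import os

def infer_entry_point(source=None, entry_point=None):
  """Sketch of the rule the tests above exercise (an assumption, not the
  PythonBinary implementation)."""
  if source is None and entry_point is None:
    raise ValueError('at least one of source or entry_point is required')
  source_module = None
  if source is not None:
    base, ext = os.path.splitext(source)
    if ext != '.py':
      raise ValueError('source must be a .py file: %s' % source)
    source_module = base.replace(os.path.sep, '.')  # 'bin/blork.py' -> 'bin.blork'
  if entry_point is None:
    return source_module
  module = entry_point.split(':', 1)[0]
  if source_module is not None and module != source_module:
    raise ValueError('entry_point %s does not match source %s' % (entry_point, source))
  return entry_point
```

Under this reading, `infer_entry_point(source='bin/blork.py')` yields `'bin.blork'`, while `entry_point='bin.blork'` paired with `source='bin.py'` is rejected, matching the mismatch cases above.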
diff --git a/tests/python/twitter/pants/targets/test_python_target.py b/tests/python/twitter/pants/targets/test_python_target.py
deleted file mode 100644
index a08719aba..000000000
--- a/tests/python/twitter/pants/targets/test_python_target.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# ==================================================================================================
-# Copyright 2014 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-import unittest
-
-from textwrap import dedent
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import TargetDefinitionException
-from twitter.pants.base_build_root_test import BaseBuildRootTest
-from twitter.pants.targets.artifact import Artifact
-from twitter.pants.targets.python_target import PythonTarget
-from twitter.pants.targets.python_artifact import PythonArtifact
-from twitter.pants.targets.repository import Repository
-from twitter.pants.targets.sources import SourceRoot
-
-
-class PythonTargetTest(BaseBuildRootTest):
-
-  @classmethod
-  def setUpClass(self):
-    super(PythonTargetTest, self).setUpClass()
-    SourceRoot.register(os.path.realpath(os.path.join(self.build_root, 'test_python_target')),
-                        PythonTarget)
-
-    self.create_target('test_thrift_replacement', dedent('''
-      python_thrift_library(name='one',
-        sources=['thrift/keyword.thrift'],
-        dependencies=None
-      )
-    '''))
-
-  def test_validation(self):
-    with ParseContext.temp('PythonTargetTest/test_validation'):
-
-      # Adding a JVM Artifact as a provides on a PythonTarget doesn't make a lot of sense. This test
-      # sets up that very scenario, and verifies that pants throws a TargetDefinitionException.
-      self.assertRaises(TargetDefinitionException, PythonTarget, name="one", sources=[],
-                        provides=Artifact(org='com.twitter', name='one-jar',
-                                          repo=Repository(name='internal', url=None, push_db=None, exclusives=None)))
-
-      name = "test-with-PythonArtifact"
-      pa = PythonArtifact(name='foo', version='1.0', description='foo')
-
-      # This test verifies that adding a 'setup_py' provides to a PythonTarget is okay.
-      self.assertEquals(PythonTarget(name=name, provides=pa, sources=[]).name, name)
-      name = "test-with-none"
-
-      # This test verifies that having no provides is okay.
-      self.assertEquals(PythonTarget(name=name, provides=None, sources=[]).name, name)
diff --git a/tests/python/twitter/pants/targets/test_target.py b/tests/python/twitter/pants/targets/test_target.py
deleted file mode 100644
index 8cc704449..000000000
--- a/tests/python/twitter/pants/targets/test_target.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import Target, TargetDefinitionException
-
-
-class TargetTest(unittest.TestCase):
-
-  def test_validation(self):
-    with ParseContext.temp('TargetTest/test_validation'):
-      self.assertRaises(TargetDefinitionException, Target, name=None)
-      name = "test"
-      self.assertEquals(Target(name=name).name, name)
diff --git a/tests/python/twitter/pants/targets/test_util.py b/tests/python/twitter/pants/targets/test_util.py
deleted file mode 100644
index 0c12bab89..000000000
--- a/tests/python/twitter/pants/targets/test_util.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Foursquare Labs, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-__author__ = 'Ryan Williams'
-
-import unittest
-
-class MockPantsTarget(object):
-  def __init__(self, spec):
-    self.foo = spec
-
-  def __eq__(self, other):
-    if not isinstance(other, MockPantsTarget):
-      return False
-    return self.foo == other.foo
-
-  def __repr__(self):
-    return "MockPantsTarget(%s)" % str(self.foo)
-
-
-from twitter.pants.targets.util import resolve
-
-class ResolveTest(unittest.TestCase):
-
-  def testString(self):
-    self.assertEquals(resolve("asdf", clazz=MockPantsTarget).foo, "asdf")
-
-  def testUnicodeString(self):
-    self.assertEquals(resolve(u"asdf", clazz=MockPantsTarget).foo, u"asdf")
-
-  def testNone(self):
-    self.assertEquals(resolve(None, clazz=MockPantsTarget), None)
-
-  def testPantsTarget(self):
-    self.assertEquals(resolve(MockPantsTarget("asdf"), clazz=MockPantsTarget).foo, "asdf")
-
-  def testMixedList(self):
-    self.assertEquals(
-      resolve([MockPantsTarget("1"), "2", MockPantsTarget("3"), "4", "5"], clazz=MockPantsTarget),
-      [MockPantsTarget("1"),
-       MockPantsTarget("2"),
-       MockPantsTarget("3"),
-       MockPantsTarget("4"),
-       MockPantsTarget("5")])
-
-  def testNonTarget(self):
-    self.assertEquals(
-      resolve([MockPantsTarget(1), [4, 'asdf'], "qwer"], clazz=MockPantsTarget),
-      [MockPantsTarget(1), [4, MockPantsTarget('asdf')], MockPantsTarget('qwer')])
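The ResolveTest cases above define `resolve` by example: strings are coerced to target instances, lists are coerced element-by-element (recursively), and everything else passes through. A minimal sketch consistent with those assertions, under the assumption that this mirrors the deleted `twitter.pants.targets.util.resolve`:

```python
def resolve(arg, clazz):
  """Recursively coerce strings to clazz instances; a sketch of the behavior
  ResolveTest checks, not the pants source."""
  if isinstance(arg, (str, unicode)):  # Python 2, matching the code above
    return clazz(arg)
  if isinstance(arg, list):
    return [resolve(item, clazz) for item in arg]
  return arg  # None, existing targets, ints, etc. pass through untouched
```

Note how `testNonTarget` is satisfied: the nested list is rebuilt with `'asdf'` coerced but the integer `4` left alone.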
diff --git a/tests/python/twitter/pants/tasks/test_base.py b/tests/python/twitter/pants/tasks/test_base.py
deleted file mode 100644
index 7da4024d9..000000000
--- a/tests/python/twitter/pants/tasks/test_base.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import pytest
-
-from contextlib import closing
-from optparse import OptionGroup, OptionParser
-from StringIO import StringIO
-
-from twitter.common.collections import maybe_list
-
-from twitter.pants.base.context_utils import create_context, create_config, create_run_tracker
-from twitter.pants.base.target import Target
-from twitter.pants.base_build_root_test import BaseBuildRootTest
-from twitter.pants.commands.goal import SpecParser
-from twitter.pants.goal import Mkflag, Context
-from twitter.pants.tasks import Task
-from twitter.pants.tasks.console_task import ConsoleTask
-
-
-def prepare_task(task_type, config=None, args=None, targets=None, **kwargs):
-  """Prepares a Task for execution.
-
-  task_type: The class of the Task to create.
-  config: An optional string representing the contents of a pants.ini config.
-  args: optional list of command line flags; these should be prefixed with '--test-'.
-  targets: optional list of Target objects passed on the command line.
-  **kwargs: Any additional args the Task subclass constructor takes beyond the required context.
-
-  Returns a new Task ready to execute.
-  """
-
-  assert issubclass(task_type, Task), 'task_type must be a Task subclass, got %s' % task_type
-
-  config = create_config(config or '')
-
-  parser = OptionParser()
-  option_group = OptionGroup(parser, 'test')
-  mkflag = Mkflag('test')
-  task_type.setup_parser(option_group, args, mkflag)
-  options, _ = parser.parse_args(args or [])
-
-  run_tracker = create_run_tracker()
-
-  context = Context(config, options, run_tracker, targets or [])
-  return task_type(context, **kwargs)
-
-
-class TaskTest(BaseBuildRootTest):
-  """A base class useful for testing Tasks."""
-
-  @classmethod
-  def targets(cls, spec):
-    """Resolves a target spec to one or more Target objects.
-
-    spec: Either a BUILD target address or else a target glob using the siblings ':' or
-          descendants '::' suffixes.
-
-    Returns the set of all Targets found.
-    """
-    return set(target for target, _ in SpecParser(cls.build_root).parse(spec) if target)
-
-  def assertDeps(self, target, expected_deps=None):
-    """Check that actual and expected dependencies of the given target match.
-
-    :param target: :class:`twitter.pants.base.target.Target` to check
-      dependencies of.
-    :param expected_deps: :class:`twitter.pants.base.target.Target` or list of
-      ``Target`` instances that are expected dependencies of ``target``.
-    """
-    expected_deps_list = maybe_list(expected_deps or [], expected_type=Target)
-    self.assertEquals(set(expected_deps_list), set(target.dependencies))
-
-
-class ConsoleTaskTest(TaskTest):
-  """A base class useful for testing ConsoleTasks."""
-
-  @classmethod
-  def setUpClass(cls):
-    super(ConsoleTaskTest, cls).setUpClass()
-
-    task_type = cls.task_type()
-    assert issubclass(task_type, ConsoleTask), \
-      'task_type() must return a ConsoleTask subclass, got %s' % task_type
-
-  @classmethod
-  def task_type(cls):
-    """Subclasses must return the type of the ConsoleTask subclass under test."""
-    raise NotImplementedError()
-
-  def execute_task(self, config=None, args=None, targets=None, extra_targets=None):
-    """Creates a new task and executes it with the given config, command line args and targets.
-
-    config: an optional string representing the contents of a pants.ini config.
-    args: optional list of command line flags; these should be prefixed with '--test-'.
-    targets: optional list of Target objects passed on the command line.
-    extra_targets: optional list of extra targets in the context in addition to those passed on
-      the command line.
-
-    Returns the text output of the task.
-    """
-    with closing(StringIO()) as output:
-      task = prepare_task(self.task_type(), config=config, args=args, targets=targets,
-                          outstream=output)
-      task.execute(list(targets or ()) + list(extra_targets or ()))
-      return output.getvalue()
-
-  def execute_console_task(self, config=None, args=None, targets=None, extra_targets=None,
-                           **kwargs):
-    """Creates a new task and executes it with the given config, command line args and targets.
-
-    config: an optional string representing the contents of a pants.ini config.
-    args: optional list of command line flags; these should be prefixed with '--test-'.
-    targets: optional list of Target objects passed on the command line.
-    extra_targets: optional list of extra targets in the context in addition to those passed on
-      the command line.
-    **kwargs: additional kwargs are passed to the task constructor.
-
-    Returns the list of items returned from invoking the console task's console_output method.
-    """
-    task = prepare_task(self.task_type(), config=config, args=args, targets=targets, **kwargs)
-    return list(task.console_output(list(targets or ()) + list(extra_targets or ())))
-
-  def assert_entries(self, sep, *output, **kwargs):
-    """Verifies the expected output text is flushed by the console task under test.
-
-    NB: order of entries is not tested, just presence.
-
-    sep: the expected output separator.
-    *output: the output entries expected between the separators.
-    **kwargs: additional kwargs are passed to the task constructor, except for config, args,
-      targets and extra_targets which are passed to execute_task.
-    """
-    # We expect each output line to be suffixed with the separator, so for sep=',' and
-    # output=[1, 2, 3] we expect '1,2,3,' - splitting this by the separator we should get
-    # ['1', '2', '3', ''] - always an extra empty string if the separator is properly always
-    # a suffix and not applied just between entries.
-    self.assertEqual(sorted(list(output) + ['']), sorted((self.execute_task(**kwargs)).split(sep)))
-
-  def assert_console_output(self, *output, **kwargs):
-    """Verifies the expected output entries are emitted by the console task under test.
-
-    NB: order of entries is not tested, just presence.
-
-    *output: the expected output entries.
-    **kwargs: additional kwargs are passed to the task constructor, except for config, args,
-      targets and extra_targets which are passed to execute_console_task.
-    """
-    self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs)))
-
-  def assert_console_raises(self, exception, **kwargs):
-    """Verifies the expected exception is raised by the console task under test.
-
-    **kwargs: additional kwargs are passed to the task constructor, except for config, args,
-      targets and extra_targets which are passed to execute_console_task.
-    """
-    with pytest.raises(exception):
-      self.execute_console_task(**kwargs)
diff --git a/tests/python/twitter/pants/tasks/test_binary_create.py b/tests/python/twitter/pants/tasks/test_binary_create.py
deleted file mode 100644
index 3f965dfa1..000000000
--- a/tests/python/twitter/pants/tasks/test_binary_create.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base.context_utils import create_context
-from twitter.pants.tasks.binary_create import BinaryCreate
-
-
-sample_ini_test_1 = """
-[DEFAULT]
-pants_distdir = /tmp/dist
-"""
-
-
-class BinaryCreateTest(unittest.TestCase):
-
-  def test_binary_create_init(self):
-    options = {'jvm_binary_create_outdir': None,
-               'binary_create_compressed': None,
-               'binary_create_zip64': None,
-               'jvm_binary_create_deployjar': None}
-    binary_create = BinaryCreate(create_context(config=sample_ini_test_1, options=options))
-    self.assertEquals(binary_create.outdir, '/tmp/dist')
-
diff --git a/tests/python/twitter/pants/tasks/test_builddict.py b/tests/python/twitter/pants/tasks/test_builddict.py
deleted file mode 100644
index c2cc047ea..000000000
--- a/tests/python/twitter/pants/tasks/test_builddict.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from contextlib import closing
-from StringIO import StringIO
-
-from twitter.pants.tasks.builddictionary import assemble, BuildBuildDictionary
-from twitter.pants.tasks.test_base import prepare_task, TaskTest
-
-
-OUTDIR = "/tmp/dist"
-
-sample_ini_test_1 = """
-[DEFAULT]
-outdir: %s
-""" % OUTDIR
-
-
-class BaseBuildBuildDictionaryTest(TaskTest):
-
-  def execute_task(self, config=sample_ini_test_1):
-    with closing(StringIO()) as output:
-      task = prepare_task(BuildBuildDictionary, config=config)
-      task.execute(())
-      return output.getvalue()
-
-
-class BuildBuildDictionaryTestEmpty(BaseBuildBuildDictionaryTest):
-
-  def test_builddict_empty(self):
-    """Execution should be silent."""
-    # We don't care _that_ much that execution be silent. Nice if at least
-    # one test executes the task and doesn't explode, though.
-    self.assertEqual('', self.execute_task())
-
-
-class ExtractedContentSanityTests(BaseBuildBuildDictionaryTest):
-  def test_usual_syms(self):
-    usual_syms = assemble()
-    usual_names = usual_syms.keys()
-    self.assertTrue(len(usual_names) > 20, "Strangely few symbols found")
-    for expected in ['jvm_binary', 'python_binary']:
-      self.assertTrue(expected in usual_names, "Didn't find %s" % expected)
-    for unexpected in ['__builtins__', 'Target']:
-      self.assertTrue(unexpected not in usual_names, "Found %s" % unexpected)
diff --git a/tests/python/twitter/pants/tasks/test_bundle_create.py b/tests/python/twitter/pants/tasks/test_bundle_create.py
deleted file mode 100644
index 247929644..000000000
--- a/tests/python/twitter/pants/tasks/test_bundle_create.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base.context_utils import create_context
-from twitter.pants.tasks.bundle_create import BundleCreate
-
-
-sample_ini_test_1 = """
-[DEFAULT]
-pants_distdir = /tmp/dist
-"""
-
-
-class BundleCreateTest(unittest.TestCase):
-
-  def test_bundle_create_init(self):
-    options = {
-      'jvm_binary_create_outdir': None,
-      'binary_create_compressed': None,
-      'binary_create_zip64': None,
-      'jvm_binary_create_deployjar': None,
-      'bundle_create_prefix': None,
-      'bundle_create_archive': None
-    }
-    bundle_create = BundleCreate(create_context(config=sample_ini_test_1, options=options))
-    self.assertEquals(bundle_create.outdir, '/tmp/dist')
-
diff --git a/tests/python/twitter/pants/tasks/test_cache_manager.py b/tests/python/twitter/pants/tasks/test_cache_manager.py
deleted file mode 100644
index de06b776f..000000000
--- a/tests/python/twitter/pants/tasks/test_cache_manager.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import shutil
-import tempfile
-
-from twitter.pants.base.build_invalidator import CacheKey, CacheKeyGenerator
-from twitter.pants.tasks.cache_manager import CacheManager, InvalidationCheck, VersionedTarget
-from twitter.pants.testutils import MockTarget
-from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest
-
-
-class AppendingCacheKeyGenerator(CacheKeyGenerator):
-  """Generates cache keys for versions of target sets."""
-
-  @staticmethod
-  def combine_cache_keys(cache_keys):
-    if len(cache_keys) == 1:
-      return cache_keys[0]
-    else:
-      sorted_cache_keys = sorted(cache_keys)  # For commutativity.
-      combined_id = ','.join([cache_key.id for cache_key in sorted_cache_keys])
-      combined_hash = ','.join([cache_key.hash for cache_key in sorted_cache_keys])
-      combined_num_sources = reduce(lambda x, y: x + y,
-                                    [cache_key.num_sources for cache_key in sorted_cache_keys], 0)
-      return CacheKey(combined_id, combined_hash, combined_num_sources, [])
-
-  def key_for_target(self, target, sources=None, fingerprint_extra=None):
-    return CacheKey(target.id, target.id, target.num_sources, [])
-
-  def key_for(self, tid, sources):
-    return CacheKey(tid, tid, len(sources), [])
-
-
-def print_vt(vt):
-  print('%d (%s) %s: [ %s ]' % (len(vt.targets), vt.cache_key, vt.valid,
-                                ', '.join(['%s(%s)' % (v.id, v.cache_key) for v in vt.versioned_targets])))
-
-
-class CacheManagerTest(BaseMockTargetTest):
-  class TestCacheManager(CacheManager):
-    def __init__(self, tmpdir):
-      CacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True, None, False)
-
-  def setUp(self):
-    self._dir = tempfile.mkdtemp()
-    self.cache_manager = CacheManagerTest.TestCacheManager(self._dir)
-
-  def tearDown(self):
-    shutil.rmtree(self._dir, ignore_errors=True)
-
-  def make_vts(self, target):
-    return VersionedTarget(self.cache_manager, target, target.id)
-
-  def test_partition(self):
-    a = MockTarget('a', [], 1)
-    b = MockTarget('b', [a], 1)
-    c = MockTarget('c', [b], 1)
-    d = MockTarget('d', [c, a], 1)
-    e = MockTarget('e', [d], 1)
-
-    targets = [a, b, c, d, e]
-
-    all_vts = self.cache_manager._sort_and_validate_targets(targets)
-
-    for vt in all_vts:
-      print_vt(vt)
-    print('')
-
-    invalid_vts = filter(lambda vt: not vt.valid, all_vts)
-    self.assertEquals(5, len(invalid_vts))
-
-    self.assertEquals(5, len(all_vts))
-
-    vts_targets = [vt.targets[0] for vt in all_vts]
-    self.assertEquals(set(targets), set(vts_targets))
-
-    ic = InvalidationCheck(all_vts, [], 3)
-    partitioned = ic.all_vts_partitioned
-
-    for vt in partitioned:
-      print_vt(vt)
-
-    # NOTE(ryan): several correct partitionings are possible, but in all cases 4 1-source targets
-    # will be added to the first partition before it exceeds the limit of 3, and the final target
-    # will be in a partition by itself.
-    self.assertEquals(2, len(partitioned))
-    self.assertEquals(4, len(partitioned[0].targets))
-    self.assertEquals(1, len(partitioned[1].targets))
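The NOTE in `test_partition` describes a greedy, size-capped split over an already-ordered list of versioned targets: keep appending until the running source count exceeds the hint, then start a new partition. A sketch of just that behavior (an inference from the test, not the CacheManager source; `num_sources` is the assumed per-target size attribute):

```python
def partition(versioned_targets, size_hint):
  """Greedy partitioning: close a partition only after its size exceeds size_hint."""
  partitions, current, current_size = [], [], 0
  for vt in versioned_targets:
    current.append(vt)
    current_size += vt.num_sources
    if current_size > size_hint:  # cap exceeded: seal this partition
      partitions.append(current)
      current, current_size = [], 0
  if current:
    partitions.append(current)
  return partitions
```

With five 1-source targets and a hint of 3, this yields partitions of 4 and 1, matching the assertions above: the fourth target is the one that pushes the count past the limit.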
diff --git a/tests/python/twitter/pants/tasks/test_check_exclusives.py b/tests/python/twitter/pants/tasks/test_check_exclusives.py
deleted file mode 100644
index eaa83ee5b..000000000
--- a/tests/python/twitter/pants/tasks/test_check_exclusives.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from twitter.pants.base.config import Config
-from twitter.pants.goal import Context
-from twitter.pants.testutils import MockTarget
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.check_exclusives import CheckExclusives
-from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest
-
-
-class CheckExclusivesTest(BaseMockTargetTest):
-  """Test of the CheckExclusives task."""
-
-  @classmethod
-  def setUpClass(cls):
-    cls.config = Config.load()
-
-  def test_check_exclusives(self):
-    a = MockTarget('a', exclusives={'a': '1', 'b': '1'})
-    b = MockTarget('b', exclusives={'a': '1'})
-    c = MockTarget('c', exclusives={'a': '2'})
-    d = MockTarget('d', dependencies=[a, b])
-    e = MockTarget('e', dependencies=[a, c], exclusives={'c': '1'})
-
-    context = Context(CheckExclusivesTest.config, options={}, run_tracker=None, target_roots=[d, e])
-    check_exclusives_task = CheckExclusives(context, signal_error=True)
-    try:
-      check_exclusives_task.execute([d, e])
-      self.fail("Expected a conflicting exclusives exception to be thrown.")
-    except TaskError:
-      pass
-
-  def test_classpath_compatibility(self):
-    # test the compatibility checks for different exclusive groups.
-    a = MockTarget('a', exclusives={'a': '1', 'b': '1'})
-    b = MockTarget('b', exclusives={'a': '1', 'b': ''})
-    c = MockTarget('c', exclusives={'a': '2', 'b': '2'})
-    d = MockTarget('d')
-
-    context = Context(CheckExclusivesTest.config, options={}, run_tracker=None,
-                      target_roots=[a, b, c, d])
-    context.products.require_data('exclusives_groups')
-    check_exclusives_task = CheckExclusives(context, signal_error=True)
-    check_exclusives_task.execute([a, b, c, d])
-    egroups = context.products.get_data('exclusives_groups')
-    # Expected compatibility:
-    # a is compatible with nothing but itself.
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[a], egroups.target_to_key[a]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[a], egroups.target_to_key[b]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[a], egroups.target_to_key[d]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[a], egroups.target_to_key[c]))
-
-    # b is compatible with itself and a.
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[b], egroups.target_to_key[a]))
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[b], egroups.target_to_key[b]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[b], egroups.target_to_key[c]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[b], egroups.target_to_key[d]))
-
-    # c is compatible with nothing but itself.
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[c], egroups.target_to_key[c]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[c], egroups.target_to_key[a]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[c], egroups.target_to_key[b]))
-    self.assertFalse(egroups._is_compatible(egroups.target_to_key[c], egroups.target_to_key[d]))
-
-    # d is compatible with everything.
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[d], egroups.target_to_key[a]))
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[d], egroups.target_to_key[b]))
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[d], egroups.target_to_key[c]))
-    self.assertTrue(egroups._is_compatible(egroups.target_to_key[d], egroups.target_to_key[d]))
-
-  def test_classpath_updates(self):
-    # Check that exclusive groups classpaths accumulate properly.
-    a = MockTarget('a', exclusives={'a': '1', 'b': '1'})
-    b = MockTarget('b', exclusives={'a': '1', 'b': ''})
-    c = MockTarget('c', exclusives={'a': '2', 'b': '2'})
-    d = MockTarget('d')
-
-    context = Context(CheckExclusivesTest.config, options={}, run_tracker=None,
-                      target_roots=[a, b, c, d])
-    context.products.require_data('exclusives_groups')
-    check_exclusives_task = CheckExclusives(context, signal_error=True)
-    check_exclusives_task.execute([a, b, c, d])
-    egroups = context.products.get_data('exclusives_groups')
-
-    egroups.set_base_classpath_for_group("a=1,b=1", ["a1", "b1"])
-    egroups.set_base_classpath_for_group("a=1,b=", ["a1"])
-    egroups.set_base_classpath_for_group("a=2,b=2", ["a2", "b2"])
-    egroups.set_base_classpath_for_group("a=,b=", ["none"])
-    egroups.update_compatible_classpaths(None, ["update_without_group"])
-    egroups.update_compatible_classpaths("a=,b=", ["update_all"])
-    egroups.update_compatible_classpaths("a=1,b=", ["update_a1bn"])
-    egroups.update_compatible_classpaths("a=2,b=2", ["update_only_a2b2"])
-    self.assertEquals(egroups.get_classpath_for_group("a=2,b=2"),
-                      ["update_only_a2b2", "update_all", "update_without_group", "a2", "b2"])
-    self.assertEquals(egroups.get_classpath_for_group("a=1,b=1"),
-                      ["update_a1bn", "update_all", "update_without_group", "a1", "b1"])
-    self.assertEquals(egroups.get_classpath_for_group("a=1,b="),
-                      ["update_a1bn", "update_all", "update_without_group", "a1"])
-    self.assertEquals(egroups.get_classpath_for_group("a=,b="),
-                      ["update_all", "update_without_group", "none"])
-
-    # make sure repeated additions of the same thing are idempotent.
-    egroups.update_compatible_classpaths("a=1,b=1", ["a1", "b1", "xxx"])
-    self.assertEquals(egroups.get_classpath_for_group("a=1,b=1"),
-                      ["xxx", "update_a1bn", "update_all", "update_without_group", "a1", "b1"])
-
-
-
-
-
-
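The compatibility matrix these deleted tests assert suggests a simple rule: a group may receive another group's classpath updates if every exclusives value it sets is either unset (`''`) or identical to the other group's value. That is one plausible reading inferred from the assertions, not the ExclusivesGroups source; here groups are modeled as plain dicts rather than the string keys (`"a=1,b=1"`) the real code uses:

```python
def is_compatible(updatee, updater):
  """True iff `updatee` can absorb `updater`'s classpath (assumed rule).

  updatee/updater: dicts mapping exclusives key -> value, '' meaning unset.
  """
  for key, value in updatee.items():
    if value != '' and value != updater.get(key, ''):
      return False
  return True
```

Checking it against the matrix: `{'a': '1', 'b': ''}` accepts `{'a': '1', 'b': '1'}` (the empty `b` is a wildcard), the fully empty group accepts everything, and `{'a': '1', 'b': '1'}` rejects the empty group because its concrete values find no match.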
diff --git a/tests/python/twitter/pants/tasks/test_check_published_deps.py b/tests/python/twitter/pants/tasks/test_check_published_deps.py
deleted file mode 100644
index f0cb80e0a..000000000
--- a/tests/python/twitter/pants/tasks/test_check_published_deps.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from textwrap import dedent
-
-from twitter.pants.tasks.check_published_deps import CheckPublishedDeps
-from twitter.pants.tasks.test_base import ConsoleTaskTest
-
-
-class CheckPublishedDepsTest(ConsoleTaskTest):
-
-  @classmethod
-  def task_type(cls):
-    return CheckPublishedDeps
-
-  @classmethod
-  def setUpClass(cls):
-    super(CheckPublishedDepsTest, cls).setUpClass()
-
-    cls.create_file('repo/pushdb/publish.properties', dedent('''
-      revision.major.org.name%lib1=2
-      revision.minor.org.name%lib1=0
-      revision.patch.org.name%lib1=0
-      revision.sha.org.name%lib1=12345
-      revision.major.org.name%lib2=2
-      revision.minor.org.name%lib2=0
-      revision.patch.org.name%lib2=0
-      revision.sha.org.name%lib2=12345
-    '''))
-    cls.create_target('repo/BUILD', dedent('''
-      import os
-      repo(name='repo',
-           url='http://www.www.com',
-           push_db=os.path.join(os.path.dirname(__file__), 'pushdb', 'publish.properties'))
-    '''))
-
-    cls.create_target('provider/BUILD', dedent('''
-      java_library(name='lib1',
-        provides=artifact(
-          org='org.name',
-          name='lib1',
-          repo=pants('repo')),
-        sources=[])
-      java_library(name='lib2',
-        provides=artifact(
-          org='org.name',
-          name='lib2',
-          repo=pants('repo')),
-        sources=[])
-    '''))
-    cls.create_target('outdated/BUILD', dedent('''
-      jar_library(name='outdated',
-        dependencies=[jar(org='org.name', name='lib1', rev='1.0.0')]
-      )
-    '''))
-    cls.create_target('uptodate/BUILD', dedent('''
-      jar_library(name='uptodate',
-        dependencies=[jar(org='org.name', name='lib2', rev='2.0.0')]
-      )
-    '''))
-    cls.create_target('both/BUILD', dedent('''
-      dependencies(name='both',
-        dependencies=[
-          pants('outdated'),
-          pants('uptodate'),
-        ]
-      )
-    '''))
-
-  def test_all_up_to_date(self):
-    self.assert_console_output(
-      targets=[self.target('uptodate')]
-    )
-
-  def test_print_up_to_date_and_outdated(self):
-    self.assert_console_output(
-      'outdated org.name#lib1 1.0.0 latest 2.0.0',
-      'up-to-date org.name#lib2 2.0.0',
-      targets=[self.target('both')],
-      args=['--test-print-uptodate']
-    )
-
-  def test_outdated(self):
-    self.assert_console_output(
-      'outdated org.name#lib1 1.0.0 latest 2.0.0',
-      targets=[self.target('outdated')]
-    )
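The deleted test above compares the `rev` pinned in a BUILD `jar(...)` against the latest version recorded in the repo's pushdb properties. A rough sketch of that comparison, with everything assumed from the test fixtures (the `org%name` property key scheme, the major/minor/patch triple, and the `dep.org/name/rev` shape are inferences, not the CheckPublishedDeps source):

```python
def outdated_deps(jar_deps, pushdb):
  """Yields report lines for deps pinned behind the latest published version.

  pushdb: dict mapping 'org%name' -> (major, minor, patch), as parsed from
          publish.properties (assumed structure).
  """
  for dep in jar_deps:
    latest = pushdb.get('%s%%%s' % (dep.org, dep.name))
    if latest is None:
      continue  # never published through this repo; nothing to compare against
    latest_rev = '%d.%d.%d' % latest
    if dep.rev != latest_rev:
      yield 'outdated %s#%s %s latest %s' % (dep.org, dep.name, dep.rev, latest_rev)
```

This reproduces the asserted line format, e.g. `outdated org.name#lib1 1.0.0 latest 2.0.0`.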
diff --git a/tests/python/twitter/pants/tasks/test_config.py b/tests/python/twitter/pants/tasks/test_config.py
deleted file mode 100644
index dd4a6b16b..000000000
--- a/tests/python/twitter/pants/tasks/test_config.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-__author__ = 'jsirois'
-
-import unittest
-
-from twitter.common.contextutil import temporary_file
-from twitter.pants.base.config import Config
-
-class ConfigTest(unittest.TestCase):
-
-  def setUp(self):
-    with temporary_file() as ini:
-      ini.write(
-'''
-[DEFAULT]
-answer: 42
-scale: 1.2
-path: /a/b/%(answer)s
-embed: %(path)s::foo
-disclaimer:
-  Let it be known
-  that.
-
-[a]
-fast: True
-list: [1, 2, 3, %(answer)s]
-
-[b]
-preempt: False
-dict: {
-    'a': 1,
-    'b': %(answer)s,
-    'c': ['%(answer)s', %(answer)s]
-  }
-''')
-      ini.close()
-      self.config = Config.load(configpath=ini.name)
-
-  def test_getstring(self):
-    self.assertEquals('/a/b/42', self.config.get('a', 'path'))
-    self.assertEquals('/a/b/42::foo', self.config.get('a', 'embed'))
-    self.assertEquals(
-      '''
-Let it be known
-that.''',
-      self.config.get('b', 'disclaimer'))
-
-    self.checkDefaults(self.config.get, '')
-    self.checkDefaults(self.config.get, '42')
-
-  def test_getint(self):
-    self.assertEquals(42, self.config.getint('a', 'answer'))
-    self.checkDefaults(self.config.get, 42)
-
-  def test_getfloat(self):
-    self.assertEquals(1.2, self.config.getfloat('b', 'scale'))
-    self.checkDefaults(self.config.get, 42.0)
-
-  def test_getbool(self):
-    self.assertTrue(self.config.getbool('a', 'fast'))
-    self.assertFalse(self.config.getbool('b', 'preempt'))
-    self.checkDefaults(self.config.get, True)
-
-  def test_getlist(self):
-    self.assertEquals([1, 2, 3, 42], self.config.getlist('a', 'list'))
-    self.checkDefaults(self.config.get, [])
-    self.checkDefaults(self.config.get, [42])
-
-  def test_getmap(self):
-    self.assertEquals(dict(a=1, b=42, c=['42', 42]), self.config.getdict('b', 'dict'))
-    self.checkDefaults(self.config.get, {})
-    self.checkDefaults(self.config.get, dict(a=42))
-
-  def checkDefaults(self, accessor, default):
-    self.assertEquals(None, accessor('c', 'fast'))
-    self.assertEquals(None, accessor('c', 'preempt', None))
-    self.assertEquals(default, accessor('c', 'jake', default=default))
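The `%(answer)s` and `%(path)s` references in the deleted ConfigTest come from stock Python 2 ConfigParser interpolation: values in `[DEFAULT]` are visible from every section and interpolate recursively. A self-contained demonstration of just that mechanism (plain ConfigParser, not the pants Config wrapper):

```python
from ConfigParser import SafeConfigParser  # Python 2, matching the code above
from StringIO import StringIO

ini = StringIO('\n'.join([
  '[DEFAULT]',
  'answer: 42',
  'path: /a/b/%(answer)s',
  '',
  '[a]',
  'embed: %(path)s::foo',
]))
parser = SafeConfigParser()
parser.readfp(ini)
# DEFAULT values interpolate transitively into section lookups.
assert parser.get('a', 'path') == '/a/b/42'
assert parser.get('a', 'embed') == '/a/b/42::foo'
```

The pants `Config` class layers typed accessors (`getint`, `getlist`, `getdict`, defaults) on top of this, which is what the rest of the test exercises.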
diff --git a/tests/python/twitter/pants/tasks/test_console_task.py b/tests/python/twitter/pants/tasks/test_console_task.py
deleted file mode 100644
index 3278ef703..000000000
--- a/tests/python/twitter/pants/tasks/test_console_task.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import os
-import sys
-import threading
-import unittest
-
-import pytest
-
-from Queue import Empty, Queue
-
-from twitter.pants.tasks.test_base import prepare_task
-from twitter.pants.tasks.console_task import ConsoleTask
-
-
-class ConsoleTaskTest(unittest.TestCase):
-  class Infinite(ConsoleTask):
-    def __init__(self, context, outstream=sys.stdout):
-      super(ConsoleTaskTest.Infinite, self).__init__(context, outstream)
-      self.halt = threading.Event()
-
-    def console_output(self, _):
-      while not self.halt.isSet():
-        yield 'jake'
-
-    def stop(self):
-      self.halt.set()
-
-  def test_sigpipe(self):
-    r, w = os.pipe()
-    task = prepare_task(self.Infinite, outstream=os.fdopen(w, 'w'))
-
-    raised = Queue(maxsize=1)
-
-    def execute():
-      try:
-        task.execute([])
-      except IOError as e:
-        raised.put(e)
-
-    execution = threading.Thread(target=execute, name='ConsoleTaskTest_sigpipe')
-    execution.setDaemon(True)
-    execution.start()
-    try:
-      data = os.read(r, 5)
-      self.assertEqual('jake\n', data)
-      os.close(r)
-    finally:
-      task.stop()
-      execution.join()
-
-    with pytest.raises(Empty):
-      e = raised.get_nowait()
-
-      # Instead of taking the generic pytest.raises message, provide a more detailed failure
-      # message that shows exactly what untrapped error was on the queue.
-      self.fail('task raised %s' % e)
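The failure mode `test_sigpipe` guards against is easy to reproduce in isolation: CPython sets SIGPIPE to ignored at startup, so writing to a pipe after its read end has closed surfaces as an `IOError` with `errno.EPIPE` rather than killing the process. A stripped-down sketch of that plumbing (stdlib only, Python 2 semantics as in the file above):

```python
import errno
import os

# Writing into a pipe whose reader has gone away raises IOError(EPIPE)
# because Python ignores SIGPIPE by default.
r, w = os.pipe()
writer = os.fdopen(w, 'w')
os.close(r)  # the reader disappears
try:
  while True:
    writer.write('jake\n')
    writer.flush()  # the flush forces the doomed write through the buffer
except IOError as e:
  assert e.errno == errno.EPIPE
```

The deleted test asserts the stronger property that `ConsoleTask.execute` swallows this particular IOError instead of letting it escape to the caller.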
diff --git a/tests/python/twitter/pants/tasks/test_context.py b/tests/python/twitter/pants/tasks/test_context.py
deleted file mode 100644
index d01f7c350..000000000
--- a/tests/python/twitter/pants/tasks/test_context.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# ==================================================================================================
-# Copyright 2011 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base.config import Config
-from twitter.pants.goal import Context
-from twitter.pants.testutils import MockTarget
-from twitter.pants.testutils.base_mock_target_test import BaseMockTargetTest
-
-
-class ContextTest(BaseMockTargetTest):
-  @classmethod
-  def setUpClass(cls):
-    cls.config = Config.load()
-
-  @classmethod
-  def create_context(cls, **kwargs):
-    return Context(cls.config, run_tracker=None, **kwargs)
-
-  def test_dependents_empty(self):
-    context = self.create_context(options={}, target_roots=[])
-    dependees = context.dependents()
-    self.assertEquals(0, len(dependees))
-
-  def test_dependents_direct(self):
-    a = MockTarget('a')
-    b = MockTarget('b', [a])
-    c = MockTarget('c', [b])
-    d = MockTarget('d', [c, a])
-    e = MockTarget('e', [d])
-    context = self.create_context(options={}, target_roots=[a, b, c, d, e])
-    dependees = context.dependents(lambda t: t in set([e, c]))
-    self.assertEquals(set([c]), dependees.pop(d))
-    self.assertEquals(0, len(dependees))
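The shape `test_dependents_direct` pins down: `Context.dependents(predicate)` maps each root target to the subset of its direct dependencies matching the predicate, omitting targets with no matches (hence only `d -> {c}` survives in the graph above, since `e`'s sole dependency `d` does not match). A minimal sketch consistent with those assertions, not the Context source:

```python
def dependents(targets, on_predicate):
  """Maps each target to its direct dependencies satisfying on_predicate;
  targets with no matching dependencies are left out entirely (assumed
  behavior, reconstructed from the test)."""
  result = {}
  for target in targets:
    hits = set(dep for dep in target.dependencies if on_predicate(dep))
    if hits:
      result[target] = hits
  return result
```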
diff --git a/tests/python/twitter/pants/tasks/test_dependees.py b/tests/python/twitter/pants/tasks/test_dependees.py
deleted file mode 100644
index eb2af5f47..000000000
--- a/tests/python/twitter/pants/tasks/test_dependees.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from textwrap import dedent
-
-from twitter.pants.base.build_environment import get_buildroot
-from twitter.pants.targets.python_tests import PythonTests, PythonTestSuite
-from twitter.pants.targets.sources import SourceRoot
-from twitter.pants.tasks import TaskError
-from twitter.pants.tasks.dependees import ReverseDepmap
-from twitter.pants.tasks.test_base import ConsoleTaskTest
-
-import mox
-
-
-class BaseReverseDepmapTest(ConsoleTaskTest):
-  @classmethod
-  def task_type(cls):
-    return ReverseDepmap
-
-
-class ReverseDepmapEmptyTest(BaseReverseDepmapTest):
-  def test(self):
-    self.assert_console_output(targets=[])
-
-
-class ReverseDepmapTest(BaseReverseDepmapTest, mox.MoxTestBase):
-  @classmethod
-  def setUpClass(cls):
-    super(ReverseDepmapTest, cls).setUpClass()
-
-    def create_target(path, name, alias=False, deps=()):
-      cls.create_target(path, dedent('''
-        %(type)s(name='%(name)s',
-          dependencies=[%(deps)s]
-        )
-        ''' % dict(
-          type='dependencies' if alias else 'python_library',
-          name=name,
-          deps=','.join("pants('%s')" % dep for dep in list(deps)))
-      ))
-
-    create_target('common/a', 'a', deps=['common/d'])
-    create_target('common/b', 'b')
-    create_target('common/c', 'c')
-    create_target('common/d', 'd')
-    create_target('tests/d', 'd', deps=['common/d'])
-    create_target('overlaps', 'one', deps=['common/a', 'common/b'])
-    create_target('overlaps', 'two', deps=['common/a', 'common/c'])
-    create_target('overlaps', 'three', deps=['common/a', 'overlaps:one'])
-    create_target('overlaps', 'four', alias=True, deps=['common/b'])
-    create_target('overlaps', 'five', deps=['overlaps:four'])
-
-    cls.create_target('resources/a', dedent('''
-      resources(
-        name='a_resources',
-        sources=['a.resource']
-      )
-    '''))
-
-    cls.create_target('src/java/a', dedent('''
-      java_library(
-        name='a_java',
-        resources=[pants('resources/a:a_resources')]
-      )
-    '''))
-
-    # Compile idl tests
-    cls.create_target('src/thrift/example', dedent('''
-      java_thrift_library(
-        name='mybird',
-        compiler='scrooge',
-        language='scala',
-        sources=['1.thrift']
-      )
-    '''))
-
-    cls.create_target('src/thrift/example', dedent('''
-      jar_library(
-        name='compiled_scala',
-        dependencies=[
-          pants(':mybird')
-        ]
-      )
-    '''))
-
-    cls.create_target('src/thrift/example', dedent('''
-      scala_library(
-        name='compiled_scala_user',
-        dependencies=[
-          pants(':compiled_scala')
-        ],
-        sources=['1.scala'],
-      )
-    '''))
-
-    create_target('src/thrift/dependent', 'my-example', deps=['src/thrift/example:mybird'])
-
-    # External dependency tests
-    cls.create_target('src/java/example', dedent('''
-      java_library(
-        name='mybird',
-        dependencies=[
-          jar(org='com', name='twitter')
-        ],
-        sources=['1.java'],
-      )
-    '''))
-
-    cls.create_target('src/java/example', dedent('''
-      java_library(
-        name='example2',
-        dependencies=[
-          pants(':mybird')
-        ],
-        sources=['2.java']
-      )
-    '''))
-
-  def test_roots(self):
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      targets=[self.target('common/c')],
-      extra_targets=[self.target('common/a')]
-    )
-
-  def test_normal(self):
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      targets=[self.target('common/c')]
-    )
-
-  def test_closed(self):
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      'common/c/BUILD:c',
-      args=['--test-closed'],
-      targets=[self.target('common/c')]
-    )
-
-  def test_transitive(self):
-    self.assert_console_output(
-      'overlaps/BUILD:one',
-      'overlaps/BUILD:three',
-      'overlaps/BUILD:four',
-      'overlaps/BUILD:five',
-      args=['--test-transitive'],
-      targets=[self.target('common/b')]
-    )
-
-  def test_nodups_dependees(self):
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      'overlaps/BUILD:three',
-      targets=[
-        self.target('common/a'),
-        self.target('overlaps:one')
-      ],
-    )
-
-  def test_nodups_roots(self):
-    targets = [self.target('common/c')] * 2
-    self.assertEqual(2, len(targets))
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      'common/c/BUILD:c',
-      args=['--test-closed'],
-      targets=targets
-    )
-
-  def test_aliasing(self):
-    self.assert_console_output(
-      'overlaps/BUILD:five',
-      targets=[self.target('overlaps:four')]
-    )
-
-  def test_dependees_type(self):
-    self._set_up_mocks(PythonTests, ["%s/tests" % get_buildroot()])
-    self.assert_console_output(
-      'tests/d/BUILD:d',
-      args=['--test-type=python_tests'],
-      targets=[self.target('common/d')]
-    )
-
-  def test_empty_dependees_type(self):
-    self._set_up_mocks(PythonTestSuite, [])
-    self.assert_console_raises(
-      TaskError,
-      args=['--test-type=python_test_suite'],
-      targets=[self.target('common/d')]
-    )
-
-  def test_compile_idls(self):
-    self.assert_console_output(
-      'src/thrift/dependent/BUILD:my-example',
-      'src/thrift/example/BUILD:compiled_scala',
-      'src/thrift/example/BUILD:compiled_scala_user',
-      targets=[
-        self.target('src/thrift/example:mybird'),
-      ],
-    )
-
-  def test_external_dependency(self):
-    self.assert_console_output(
-      'src/java/example/BUILD:example2',
-      targets=[self.target('src/java/example/BUILD:mybird')]
-    )
-
-  def test_resources_dependees(self):
-    self.assert_console_output(
-      'src/java/a/BUILD:a_java',
-      targets=[self.target('resources/a:a_resources')]
-    )
-
-  def _set_up_mocks(self, class_type, src_roots):
-    self.mox.StubOutWithMock(SourceRoot, 'roots')
-    SourceRoot.roots(class_type).AndReturn(src_roots)
-    self.mox.ReplayAll()
diff --git a/tests/python/twitter/pants/tasks/test_dependencies.py b/tests/python/twitter/pants/tasks/test_dependencies.py
deleted file mode 100644
index 1de358539..000000000
--- a/tests/python/twitter/pants/tasks/test_dependencies.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from textwrap import dedent
-
-from twitter.pants.tasks.dependencies import Dependencies
-from twitter.pants.tasks.task_error import TaskError
-from twitter.pants.tasks.test_base import ConsoleTaskTest
-
-import pytest
-
-
-# some helper methods to be able to set up the state in a cleaner way
-def pants(path):
-  return "pants('%s')" % path
-
-def jar(org, name, rev):
-  return "jar('%s', '%s', '%s')" % (org, name, rev)
-
-def python_requirement(name):
-  return "python_requirement('%s')" % name
-
-
-class BaseDependenciesTest(ConsoleTaskTest):
-  @classmethod
-  def task_type(cls):
-    return Dependencies
-
-  @classmethod
-  def define_target(cls, path, name, ttype='python_library', deps=()):
-    cls.create_target(path, dedent('''
-      %(type)s(name='%(name)s',
-        dependencies=[%(deps)s]
-      )
-      ''' % dict(
-        type=ttype,
-        name=name,
-        deps=','.join(deps))
-    ))
-
-  @classmethod
-  def scala_library(cls, path, name, deps=()):
-    cls.create_target(path, dedent('''
-      scala_library(name='%(name)s',
-        dependencies=[%(deps)s],
-        sources=[],
-      )
-      ''' % dict(
-        name=name,
-        deps=','.join(deps))
-    ))
-
-
-class DependenciesEmptyTest(BaseDependenciesTest):
-  def test_no_targets(self):
-    self.assert_console_output(targets=[])
-
-class NonPythonDependenciesTest(BaseDependenciesTest):
-  @classmethod
-  def setUpClass(cls):
-    super(NonPythonDependenciesTest, cls).setUpClass()
-
-    cls.scala_library('dependencies', 'third')
-    cls.scala_library('dependencies', 'first',
-                      deps=[pants('dependencies:third')])
-
-    cls.scala_library('dependencies', 'second',
-                      deps=[
-                        jar('org.apache', 'apache-jar', '12.12.2012')])
-
-    cls.scala_library('project', 'project',
-                      deps=[
-                        pants('dependencies:first'),
-                        pants('dependencies:second')])
-
-  def test_without_dependencies(self):
-    self.assert_console_output(
-      'dependencies/BUILD:third',
-      targets=[self.target('dependencies:third')]
-    )
-
-  def test_all_dependencies(self):
-    self.assert_console_output(
-      'dependencies/BUILD:third',
-      'dependencies/BUILD:first',
-      'dependencies/BUILD:second',
-      'project/BUILD:project',
-      'org.apache:apache-jar:12.12.2012',
-      targets=[self.target('project:project')]
-    )
-
-  def test_internal_dependencies(self):
-    self.assert_console_output(
-      'dependencies/BUILD:third',
-      'dependencies/BUILD:first',
-      'dependencies/BUILD:second',
-      'project/BUILD:project',
-      args=['--test-internal-only'],
-      targets=[self.target('project:project')]
-    )
-
-  def test_external_dependencies(self):
-    self.assert_console_output(
-      'org.apache:apache-jar:12.12.2012',
-      args=['--test-external-only'],
-      targets=[self.target('project:project')]
-    )
-
-
-class PythonDependenciesTests(BaseDependenciesTest):
-  @classmethod
-  def setUpClass(cls):
-    super(PythonDependenciesTests, cls).setUpClass()
-
-    cls.define_target('dependencies', 'python_leaf')
-
-    cls.define_target('dependencies', 'python_inner',
-                      deps=[
-                        pants('dependencies:python_leaf')
-                      ])
-
-    cls.define_target('dependencies', 'python_inner_with_external',
                      deps=[
-                        python_requirement("antlr_python_runtime==3.1.3")
-                      ])
-
-    cls.define_target('dependencies', 'python_root',
-                      deps=[
-                        pants('dependencies:python_inner'),
-                        pants('dependencies:python_inner_with_external')
-                      ])
-
-  def test_normal(self):
-    self.assert_console_output(
-      'antlr-python-runtime==3.1.3',
-      'dependencies/BUILD:python_inner',
-      'dependencies/BUILD:python_inner_with_external',
-      'dependencies/BUILD:python_leaf',
-      'dependencies/BUILD:python_root',
-      targets=[self.target('dependencies:python_root')]
-    )
-
-  def test_internal_dependencies(self):
-    with pytest.raises(TaskError):
-      self.assert_console_output(
-        args=['--test-internal-only'],
-        targets=[self.target('dependencies:python_root')]
-      )
-
-  def test_external_dependencies(self):
-    with pytest.raises(TaskError):
-      self.assert_console_output(
-        args=['--test-external-only'],
-        targets=[self.target('dependencies:python_root')]
-      )
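The Dependencies task tested above walks the transitive graph and reports two kinds of nodes: internal BUILD targets (as `path/BUILD:name`) and external coordinates (jar or requirement strings). A rough sketch of that split; `dependencies`, `is_internal`, `address` and `coordinate` are assumed shapes for illustration, not pants APIs:

```python
def walk_dependencies(root):
  """Returns (internal_addresses, external_coordinates) for root's closure."""
  internal, external, seen = [], [], set()

  def visit(target):
    if id(target) in seen:
      return
    seen.add(id(target))
    if target.is_internal:
      internal.append(target.address)        # e.g. 'dependencies/BUILD:first'
      for dep in target.dependencies:
        visit(dep)
    else:
      external.append(target.coordinate)     # e.g. 'org.apache:apache-jar:12.12.2012'

  visit(root)
  return internal, external
```

The `--test-internal-only` / `--test-external-only` flags then just select which of the two lists is printed, and the Python-target error cases suggest those filters were unsupported for Python graphs.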
"bundle().add('common.f')") - cls.create_dir('common/i') - cls.create_file('common/i/common.g') - create_jvm_app('common/i', 'i', 'jvm_app', 'common/g:g', "bundle().add('common.g')") - create_target('overlaps', 'one', 'jvm_binary', deps=['common/h', 'common/i']) - cls.create_target('overlaps', dedent(''' - scala_library(name='two', - dependencies=[pants('overlaps:one')], - sources=[], - ) - ''')) - cls.create_target('resources/a', dedent(''' - resources( - name='a_resources', - sources=['a.resource'] - ) - ''')) - - cls.create_target('src/java/a', dedent(''' - java_library( - name='a_java', - resources=[pants('resources/a:a_resources')] - ) - ''')) - - def test_empty(self): - self.assert_console_raises( - TaskError, - targets=[self.target('common/a')] - ) - - def test_jar_library(self): - self.assert_console_raises( - TaskError, - targets=[self.target('common/b')], - ) - - def test_scala_library(self): - self.assert_console_output( - 'internal-common.c.c', - targets=[self.target('common/c')] - ) - - def test_python_library(self): - self.assert_console_raises( - TaskError, - targets=[self.target('common/d')] - ) - - def test_python_binary(self): - self.assert_console_raises( - TaskError, - targets=[self.target('common/e')] - ) - - def test_jvm_binary1(self): - self.assert_console_output( - 'internal-common.f.f', - targets=[self.target('common/f')] - ) - - def test_jvm_binary2(self): - self.assert_console_output( - 'internal-common.g.g', - ' internal-common.f.f', - targets=[self.target('common/g')] - ) - - def test_jvm_app1(self): - self.assert_console_output( - 'internal-common.h.h', - ' internal-common.f.f', - targets=[self.target('common/h')] - ) - - def test_jvm_app2(self): - self.assert_console_output( - 'internal-common.i.i', - ' internal-common.g.g', - ' internal-common.f.f', - targets=[self.target('common/i')] - ) - - def test_overlaps_one(self): - self.assert_console_output( - 'internal-overlaps.one', - ' internal-common.h.h', - ' internal-common.f.f', - ' internal-common.i.i', - ' internal-common.g.g', - ' *internal-common.f.f', - targets=[self.target('overlaps:one')] - ) - - def test_overlaps_two(self): - self.assert_console_output( - 'internal-overlaps.two', - ' internal-overlaps.one', - ' internal-common.h.h', - ' internal-common.f.f', - ' internal-common.i.i', - ' internal-common.g.g', - ' *internal-common.f.f', - targets=[self.target('overlaps:two')] - ) - - def test_overlaps_two_minimal(self): - self.assert_console_output( - 'internal-overlaps.two', - ' internal-overlaps.one', - ' internal-common.h.h', - ' internal-common.f.f', - ' internal-common.i.i', - ' internal-common.g.g', - targets=[self.target('overlaps:two')], - args=['--test-minimal'] - ) - - def test_multi(self): - self.assert_console_output( - 'internal-common.g.g', - ' internal-common.f.f', - 'internal-common.h.h', - ' internal-common.f.f', - 'internal-common.i.i', - ' internal-common.g.g', - ' internal-common.f.f', - targets=[self.target('common/g'), self.target('common/h'), self.target('common/i')] - ) - - def test_resources(self): - self.assert_console_output( - 'internal-src.java.a.a_java', - ' internal-resources.a.a_resources', - targets=[self.target('src/java/a:a_java')] - ) diff --git a/tests/python/twitter/pants/tasks/test_detect_duplicates.py b/tests/python/twitter/pants/tasks/test_detect_duplicates.py deleted file mode 100644 index f0e8b41e0..000000000 --- a/tests/python/twitter/pants/tasks/test_detect_duplicates.py +++ /dev/null @@ -1,86 +0,0 @@ -# 
================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -import pytest -import tempfile - -from contextlib import closing, contextmanager -from zipfile import ZipFile - -from twitter.common.dirutil import safe_rmtree, touch - -from twitter.pants.base.context_utils import create_context -from twitter.pants.tasks.detect_duplicates import DuplicateDetector -from twitter.pants.tasks.task_error import TaskError -from twitter.pants.tasks.test_base import TaskTest - - -class DuplicateDetectorTest(TaskTest): - def setUp(self): - self.base_dir = tempfile.mkdtemp() - - def generate_path(name): - return os.path.join(self.base_dir, name) - - test_class_path = generate_path('com/twitter/Test.class') - duplicate_class_path = generate_path('com/twitter/commons/Duplicate.class') - unique_class_path = generate_path('org/apache/Unique.class') - - touch(test_class_path) - touch(duplicate_class_path) - touch(unique_class_path) - - def generate_jar(path, *class_name): - with closing(ZipFile(generate_path(path), 'w')) as zipfile: - for clazz in class_name: - zipfile.write(clazz) - return zipfile.filename - - @contextmanager - def jars(): - test_jar = generate_jar('test.jar', test_class_path, duplicate_class_path) - jar_with_duplicates = generate_jar('dups.jar', duplicate_class_path, unique_class_path) - jar_without_duplicates = generate_jar('no_dups.jar', unique_class_path) - - jars = [] - jars.append(test_jar) - jars.append(jar_with_duplicates) - jars.append(jar_without_duplicates) - yield jars - - with jars() as jars: - self.path_with_duplicates = [jars[0], jars[1]] - self.path_without_duplicates = [jars[0], jars[2]] - - def tearDown(self): - safe_rmtree(self.base_dir) - - def test_duplicate_found(self): - options = {'fail_fast': False} - task = DuplicateDetector(create_context(options=options)) - self.assertTrue(task._is_conflicts(self.path_with_duplicates, None)) - - def test_duplicate_not_found(self): - options = {'fail_fast': False} - task = DuplicateDetector(create_context(options=options)) - self.assertFalse(task._is_conflicts(self.path_without_duplicates, None)) - - def test_fail_fast_error_raised(self): - options = {'fail_fast': True} - task = DuplicateDetector(create_context(options=options)) - with pytest.raises(TaskError): - task._is_conflicts(self.path_with_duplicates, None) diff --git a/tests/python/twitter/pants/tasks/test_filemap.py b/tests/python/twitter/pants/tasks/test_filemap.py deleted file mode 100644 index f6e5db7c5..000000000 --- a/tests/python/twitter/pants/tasks/test_filemap.py +++ /dev/null @@ -1,69 +0,0 @@ -# ================================================================================================== -# Copyright 2013 
Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os -from textwrap import dedent - -from twitter.pants.tasks.filemap import Filemap -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class FilemapTest(ConsoleTaskTest): - @classmethod - def task_type(cls): - return Filemap - - @classmethod - def setUpClass(cls): - super(FilemapTest, cls).setUpClass() - - def create_target(path, name, *files): - for f in files: - cls.create_file(os.path.join(path, f), '') - - cls.create_target(path, dedent(''' - python_library(name='%s', - sources=[%s] - ) - ''' % (name, ','.join(repr(f) for f in files)))) - - cls.create_target('common', 'source_root.here(python_library)') - create_target('common/a', 'a', 'one.py') - create_target('common/b', 'b', 'two.py', 'three.py') - create_target('common/c', 'c', 'four.py') - - def test_all(self): - self.assert_console_output( - 'common/a/one.py common/a/BUILD:a', - 'common/b/two.py common/b/BUILD:b', - 'common/b/three.py common/b/BUILD:b', - 'common/c/four.py common/c/BUILD:c', - ) - - def test_one(self): - self.assert_console_output( - 'common/b/two.py common/b/BUILD:b', - 'common/b/three.py common/b/BUILD:b', - targets=[self.target('common/b')] - ) - - def test_dup(self): - self.assert_console_output( - 'common/a/one.py common/a/BUILD:a', - 'common/c/four.py common/c/BUILD:c', - targets=[self.target('common/a'), self.target('common/c'), self.target('common/a')] - ) - diff --git a/tests/python/twitter/pants/tasks/test_filter.py b/tests/python/twitter/pants/tasks/test_filter.py deleted file mode 100644 index d3b0cfdc6..000000000 --- a/tests/python/twitter/pants/tasks/test_filter.py +++ /dev/null @@ -1,209 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from textwrap import dedent - -from twitter.pants.tasks.filter import Filter -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class BaseFilterTest(ConsoleTaskTest): - @classmethod - def task_type(cls): - return Filter - - -class FilterEmptyTargetsTest(BaseFilterTest): - def test_no_filters(self): - self.assert_console_output() - - def test_type(self): - self.assert_console_output(args=['--test-type=page']) - self.assert_console_output(args=['--test-type=-java_library']) - - def test_regex(self): - self.assert_console_output(args=['--test-regex=^common']) - self.assert_console_output(args=['--test-regex=-^common']) - - -class FilterTest(BaseFilterTest): - @classmethod - def setUpClass(cls): - super(FilterTest, cls).setUpClass() - - requirement_injected = set() - - def create_target(path, name, *deps): - if path not in requirement_injected: - cls.create_target(path, "python_requirement('foo')") - requirement_injected.add(path) - all_deps = ["pants('%s')" % dep for dep in deps] + ["pants(':foo')"] - cls.create_target(path, dedent(''' - python_library(name='%s', - dependencies=[%s] - ) - ''' % (name, ','.join(all_deps)))) - - create_target('common/a', 'a') - create_target('common/b', 'b') - create_target('common/c', 'c') - create_target('overlaps', 'one', 'common/a', 'common/b') - create_target('overlaps', 'two', 'common/a', 'common/c') - create_target('overlaps', 'three', 'common/a', 'overlaps:one') - - def test_roots(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - targets=self.targets('common/::'), - extra_targets=self.targets('overlaps/::') - ) - - def test_nodups(self): - targets = [self.target('common/b')] * 2 - self.assertEqual(2, len(targets)) - self.assert_console_output( - 'common/b/BUILD:b', - targets=targets - ) - - def test_no_filters(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - 'overlaps/BUILD:one', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - 'overlaps/BUILD:foo', - targets=self.targets('::') - ) - - def test_filter_type(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'common/b/BUILD:b', - 'common/c/BUILD:c', - 'overlaps/BUILD:one', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - args=['--test-type=python_library'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'common/a/BUILD:foo', - 'common/b/BUILD:foo', - 'common/c/BUILD:foo', - 'overlaps/BUILD:foo', - args=['--test-type=-python_library'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'common/a/BUILD:a', - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - 'overlaps/BUILD:one', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - 'overlaps/BUILD:foo', - args=['--test-type=PythonRequirement,twitter.pants.targets.python_library.PythonLibrary'], - targets=self.targets('::') - ) - - def test_filter_target(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'overlaps/BUILD:foo', - args=['--test-target=common/a,overlaps/:foo'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - 
'overlaps/BUILD:two', - 'overlaps/BUILD:three', - args=['--test-target=-common/a/BUILD:a,overlaps:one,overlaps:foo'], - targets=self.targets('::') - ) - - def test_filter_ancestor(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'overlaps/BUILD:one', - 'overlaps/BUILD:foo', - args=['--test-ancestor=overlaps:one,overlaps:foo'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - args=['--test-ancestor=-overlaps:one,overlaps:foo'], - targets=self.targets('::') - ) - - def test_filter_regex(self): - self.assert_console_output( - 'common/a/BUILD:a', - 'common/a/BUILD:foo', - 'common/b/BUILD:b', - 'common/b/BUILD:foo', - 'common/c/BUILD:c', - 'common/c/BUILD:foo', - args=['--test-regex=^common'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'common/a/BUILD:foo', - 'common/b/BUILD:foo', - 'common/c/BUILD:foo', - 'overlaps/BUILD:one', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - 'overlaps/BUILD:foo', - args=['--test-regex=+foo,^overlaps'], - targets=self.targets('::') - ) - - self.assert_console_output( - 'overlaps/BUILD:one', - 'overlaps/BUILD:two', - 'overlaps/BUILD:three', - args=['--test-regex=-^common,foo$'], - targets=self.targets('::') - ) diff --git a/tests/python/twitter/pants/tasks/test_jar_create.py b/tests/python/twitter/pants/tasks/test_jar_create.py deleted file mode 100644 index fb6392c55..000000000 --- a/tests/python/twitter/pants/tasks/test_jar_create.py +++ /dev/null @@ -1,260 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os -import tempfile - -from collections import defaultdict -from contextlib import contextmanager, closing -from textwrap import dedent - -from twitter.common.contextutil import temporary_dir -from twitter.common.dirutil import safe_open, safe_rmtree - -from twitter.pants.base.context_utils import create_context -from twitter.pants.base_build_root_test import BaseBuildRootTest -from twitter.pants.goal.products import MultipleRootedProducts -from twitter.pants.java.jar import open_jar -from twitter.pants.targets.java_library import JavaLibrary -from twitter.pants.targets.java_thrift_library import JavaThriftLibrary -from twitter.pants.targets.resources import Resources -from twitter.pants.targets.scala_library import ScalaLibrary -from twitter.pants.targets.sources import SourceRoot -from twitter.pants.tasks.jar_create import is_jvm_library, JarCreate - - -class JarCreateTestBase(BaseBuildRootTest): - @staticmethod - def create_options(**kwargs): - options = dict(jar_create_outdir=None, - jar_create_transitive=None, - jar_create_compressed=None, - jar_create_classes=None, - jar_create_sources=None, - jar_create_idl=None, - jar_create_javadoc=None) - options.update(**kwargs) - return options - - @classmethod - def create_files(cls, path, files): - for f in files: - cls.create_file(os.path.join(path, f), contents=f) - - @classmethod - def library(cls, path, target_type, name, sources): - cls.create_files(path, sources) - - cls.create_target(path, dedent(''' - %(target_type)s(name='%(name)s', - sources=[%(sources)s], - ) - ''' % dict(target_type=target_type, name=name, sources=repr(sources or [])))) - - return cls.target('%s:%s' % (path, name)) - - @classmethod - def resources(cls, path, name, *sources): - return cls.library(path, 'resources', name, sources) - - -class JarCreateMiscTest(JarCreateTestBase): - def test_jar_create_init(self): - ini = dedent(""" - [DEFAULT] - pants_workdir: /tmp/pants.d - pants_supportdir: /tmp/build-support - """).strip() - - jar_create = JarCreate(create_context(config=ini, options=self.create_options())) - self.assertEquals(jar_create._output_dir, '/tmp/pants.d/jars') - self.assertEquals(jar_create.confs, ['default']) - - def test_resources_with_scala_java_files(self): - for ftype in ('java', 'scala'): - target = self.resources(os.path.join('project', ftype), - 'target_%s' % ftype, - 'hello.%s' % ftype) - self.assertFalse(is_jvm_library(target)) - - -class JarCreateExecuteTest(JarCreateTestBase): - @classmethod - def library_with_resources(cls, path, target_type, name, sources, resources=None): - cls.create_files(path, sources) - - cls.create_target(path, dedent(''' - %(target_type)s(name='%(name)s', - sources=[%(sources)s], - %(resources)s - ) - ''' % dict(target_type=target_type, - name=name, - sources=repr(sources or []), - resources=('resources=pants("%s")' % resources if resources else '')))) - - return cls.target('%s:%s' % (path, name)) - - @classmethod - def java_library(cls, path, name, sources, resources=None): - return cls.library_with_resources(path, 'java_library', name, sources, resources=resources) - - @classmethod - def scala_library(cls, path, name, sources, resources=None): - return cls.library_with_resources(path, 'scala_library', name, sources, resources=resources) - - @classmethod - def java_thrift_library(cls, path, name, *sources): - return cls.library(path, 'java_thrift_library', name, sources) - - @classmethod - def 
setUpClass(cls): - super(JarCreateExecuteTest, cls).setUpClass() - - def get_source_root_fs_path(path): - return os.path.realpath(os.path.join(cls.build_root, path)) - - SourceRoot.register(get_source_root_fs_path('src/resources'), Resources) - SourceRoot.register(get_source_root_fs_path('src/java'), JavaLibrary) - SourceRoot.register(get_source_root_fs_path('src/scala'), ScalaLibrary) - SourceRoot.register(get_source_root_fs_path('src/thrift'), JavaThriftLibrary) - - cls.res = cls.resources('src/resources/com/twitter', 'spam', 'r.txt') - cls.jl = cls.java_library('src/java/com/twitter', 'foo', ['a.java'], - resources='src/resources/com/twitter:spam') - cls.sl = cls.scala_library('src/scala/com/twitter', 'bar', ['c.scala']) - cls.jtl = cls.java_thrift_library('src/thrift/com/twitter', 'baz', 'd.thrift') - - def setUp(self): - super(JarCreateExecuteTest, self).setUp() - self.jar_outdir = tempfile.mkdtemp() - - def tearDown(self): - super(JarCreateExecuteTest, self).tearDown() - safe_rmtree(self.jar_outdir) - - def context(self, config='', **options): - opts = dict(jar_create_outdir=self.jar_outdir) - opts.update(**options) - return create_context(config=config, options=self.create_options(**opts), - target_roots=[self.jl, self.sl, self.jtl]) - - @contextmanager - def add_products(self, context, product_type, target, *products): - product_mapping = context.products.get(product_type) - with temporary_dir() as outdir: - def create_product(product): - with safe_open(os.path.join(outdir, product), mode='w') as fp: - fp.write(product) - return product - product_mapping.add(target, outdir, map(create_product, products)) - yield temporary_dir - - @contextmanager - def add_data(self, context, data_type, target, *products): - make_products = lambda: defaultdict(MultipleRootedProducts) - data_by_target = context.products.get_data(data_type, make_products) - with temporary_dir() as outdir: - def create_product(product): - abspath = os.path.join(outdir, product) - with safe_open(abspath, mode='w') as fp: - fp.write(product) - return abspath - data_by_target[target].add_abs_paths(outdir, map(create_product, products)) - yield temporary_dir - - def assert_jar_contents(self, context, product_type, target, *contents): - jar_mapping = context.products.get(product_type).get(target) - self.assertEqual(1, len(jar_mapping)) - for basedir, jars in jar_mapping.items(): - self.assertEqual(1, len(jars)) - with open_jar(os.path.join(basedir, jars[0])) as jar: - self.assertEqual(list(contents), jar.namelist()) - for content in contents: - if not content.endswith('/'): - with closing(jar.open(content)) as fp: - self.assertEqual(os.path.basename(content), fp.read()) - - def assert_classfile_jar_contents(self, context, empty=False): - with self.add_data(context, 'classes_by_target', self.jl, 'a.class', 'b.class'): - with self.add_data(context, 'classes_by_target', self.sl, 'c.class'): - with self.add_data(context, 'resources_by_target', self.res, 'r.txt.transformed'): - JarCreate(context).execute(context.targets()) - if empty: - self.assertTrue(context.products.get('jars').empty()) - else: - self.assert_jar_contents(context, 'jars', self.jl, - 'a.class', 'b.class', 'r.txt.transformed') - self.assert_jar_contents(context, 'jars', self.sl, 'c.class') - - def test_classfile_jar_required(self): - context = self.context() - context.products.require('jars') - self.assert_classfile_jar_contents(context) - - def test_classfile_jar_flagged(self): - self.assert_classfile_jar_contents(self.context(jar_create_classes=True)) - - def 
test_classfile_jar_not_required(self): - self.assert_classfile_jar_contents(self.context(), empty=True) - - def assert_source_jar_contents(self, context, empty=False): - JarCreate(context).execute(context.targets()) - - if empty: - self.assertTrue(context.products.get('source_jars').empty()) - else: - self.assert_jar_contents(context, 'source_jars', self.jl, - 'com/', 'com/twitter/', 'com/twitter/a.java', 'com/twitter/r.txt') - self.assert_jar_contents(context, 'source_jars', self.sl, - 'com/', 'com/twitter/', 'com/twitter/c.scala') - - def test_source_jar_required(self): - context = self.context() - context.products.require('source_jars') - self.assert_source_jar_contents(context) - - def test_source_jar_flagged(self): - self.assert_source_jar_contents(self.context(jar_create_sources=True)) - - def test_source_jar_not_required(self): - self.assert_source_jar_contents(self.context(), empty=True) - - def assert_javadoc_jar_contents(self, context, empty=False, **kwargs): - with self.add_products(context, 'javadoc', self.jl, 'a.html', 'b.html'): - with self.add_products(context, 'scaladoc', self.sl, 'c.html'): - JarCreate(context, **kwargs).execute(context.targets()) - - if empty: - self.assertTrue(context.products.get('javadoc_jars').empty()) - else: - self.assert_jar_contents(context, 'javadoc_jars', self.jl, 'a.html', 'b.html') - self.assert_jar_contents(context, 'javadoc_jars', self.sl, 'c.html') - - def test_javadoc_jar_required(self): - context = self.context() - context.products.require('javadoc_jars') - self.assert_javadoc_jar_contents(context) - - def test_javadoc_jar_flagged(self): - self.assert_javadoc_jar_contents(self.context(jar_create_javadoc=True)) - - def test_javadoc_jar_constructor_required(self): - self.assert_javadoc_jar_contents(self.context(), jar_javadoc=True) - - def test_javadoc_jar_not_required(self): - self.assert_javadoc_jar_contents(self.context(), empty=True, jar_javadoc=False) - diff --git a/tests/python/twitter/pants/tasks/test_jar_library_with_empty_dependencies.py b/tests/python/twitter/pants/tasks/test_jar_library_with_empty_dependencies.py deleted file mode 100644 index 701743d50..000000000 --- a/tests/python/twitter/pants/tasks/test_jar_library_with_empty_dependencies.py +++ /dev/null @@ -1,34 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ==================================================================================================
-
-import pytest
-import unittest
-
-from twitter.pants.base.parse_context import ParseContext
-from twitter.pants.base.target import TargetDefinitionException
-from twitter.pants.targets.jar_library import JarLibrary
-
-
-class JarLibraryWithEmptyDependenciesTest(unittest.TestCase):
-
-  def test_empty_dependencies(self):
-    with ParseContext.temp():
-      JarLibrary("test-jar-library-with-empty-dependencies", [])
-
-  def test_no_dependencies(self):
-    with pytest.raises(TargetDefinitionException):
-      with ParseContext.temp():
-        JarLibrary("test-jar-library-with-empty-dependencies", None)
diff --git a/tests/python/twitter/pants/tasks/test_jar_library_with_overrides.py b/tests/python/twitter/pants/tasks/test_jar_library_with_overrides.py
deleted file mode 100644
index 00695ef6a..000000000
--- a/tests/python/twitter/pants/tasks/test_jar_library_with_overrides.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# ==================================================================================================
-# Copyright 2012 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-import unittest
-
-from twitter.pants.base import ParseContext
-from twitter.pants.targets.exclude import Exclude
-from twitter.pants.targets.jar_library import JarLibrary
-from twitter.pants.targets.jar_dependency import JarDependency
-from twitter.pants.targets.pants_target import Pants
-
-
-class JarLibraryWithOverridesTest(unittest.TestCase):
-
-  def test_jar_dependency(self):
-    with ParseContext.temp():
-      org, name = "org", "name"
-      # The dependency version to override.
-      nay = JarDependency(org, name, "0.0.1")
-      yea = JarDependency(org, name, "0.0.8")
-      # Define targets that depend on different versions of the 'org:name' jar.
-      JarLibrary("c", [nay])
-      JarLibrary("b", [yea])
-      # Then depend on those targets transitively, and override to the correct version.
-      lib = JarLibrary(
-        "a",
-        dependencies=[Pants(":c")],
-        overrides=[":b"])
-
-      # Confirm that resolving includes the correct version...
-      resolved = set(lib.resolve())
-      self.assertTrue(yea in resolved)
-      # ...and attaches an exclude directly to the overridden JarDependency.
-      self.assertTrue(Exclude(org, name) in nay.excludes)
diff --git a/tests/python/twitter/pants/tasks/test_list_goals.py b/tests/python/twitter/pants/tasks/test_list_goals.py
deleted file mode 100644
index 28589ee7e..000000000
--- a/tests/python/twitter/pants/tasks/test_list_goals.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# ==================================================================================================
-# Copyright 2014 Twitter, Inc.
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -from twitter.pants.goal import Goal -from twitter.pants.goal.phase import Phase -from twitter.pants.tasks import Task -from twitter.pants.tasks.list_goals import ListGoals - -from . import ConsoleTaskTest - - -class ListGoalsTest(ConsoleTaskTest): - _INSTALLED_HEADER = 'Installed goals:' - _UNDOCUMENTED_HEADER = 'Undocumented goals:' - _LIST_GOALS_NAME = 'goals' - _LIST_GOALS_DESC = 'List all documented goals.' - _LLAMA_NAME = 'llama' - _LLAMA_DESC = 'With such handsome fiber, no wonder everyone loves Llamas.' - _ALPACA_NAME = 'alpaca' - - @classmethod - def task_type(cls): - return ListGoals - - class LlamaTask(Task): - pass - - class AlpacaTask(Task): - pass - - def test_list_goals(self): - Phase.clear() - self.assert_console_output(self._INSTALLED_HEADER) - - Goal(name=self._LIST_GOALS_NAME, action=ListGoals)\ - .install().with_description(self._LIST_GOALS_DESC) - self.assert_console_output( - self._INSTALLED_HEADER, - ' %s: %s' % (self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), - ) - - Goal(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ - .install().with_description(self._LLAMA_DESC) - self.assert_console_output( - self._INSTALLED_HEADER, - ' %s: %s' % (self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), - ' %s: %s' % (self._LLAMA_NAME, self._LLAMA_DESC), - ) - - Goal(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ - .install() - self.assert_console_output( - self._INSTALLED_HEADER, - ' %s: %s' % (self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), - ' %s: %s' % (self._LLAMA_NAME, self._LLAMA_DESC), - ) - - def test_list_goals_all(self): - Phase.clear() - - Goal(name=self._LIST_GOALS_NAME, action=ListGoals)\ - .install().with_description(self._LIST_GOALS_DESC) - Goal(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ - .install().with_description(self._LLAMA_DESC) - Goal(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ - .install() - - self.assert_console_output( - self._INSTALLED_HEADER, - ' %s: %s' % (self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), - ' %s: %s' % (self._LLAMA_NAME, self._LLAMA_DESC), - '', - self._UNDOCUMENTED_HEADER, - ' %s' % self._ALPACA_NAME, - args=['--test-all'], - ) - - def test_list_goals_graph(self): - Phase.clear() - - Goal(name=self._LIST_GOALS_NAME, action=ListGoals)\ - .install().with_description(self._LIST_GOALS_DESC) - Goal(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ - .install().with_description(self._LLAMA_DESC) - Goal(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ - .install() - - self.assert_console_output( - 'digraph G {\n rankdir=LR;\n graph [compound=true];', - ' subgraph cluster_goals {\n node [style=filled];\n color = blue;\n label = 
"goals";', - ' goals_goals [label="goals"];', - ' }', - ' subgraph cluster_llama {\n node [style=filled];\n color = blue;\n label = "llama";', - ' llama_llama [label="llama"];', - ' }', - ' subgraph cluster_alpaca {\n node [style=filled];\n color = blue;\n label = "alpaca";', - ' alpaca_alpaca [label="alpaca"];', - ' }', - ' alpaca_alpaca -> llama_llama [ltail=cluster_alpaca lhead=cluster_llama];', - '}', - args=['--test-graph'], - ) diff --git a/tests/python/twitter/pants/tasks/test_listtargets.py b/tests/python/twitter/pants/tasks/test_listtargets.py deleted file mode 100644 index aa92fb475..000000000 --- a/tests/python/twitter/pants/tasks/test_listtargets.py +++ /dev/null @@ -1,194 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -import os - -from textwrap import dedent - -from twitter.pants.base.target import Target -from twitter.pants.tasks.listtargets import ListTargets -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class BaseListTargetsTest(ConsoleTaskTest): - @classmethod - def task_type(cls): - return ListTargets - - -class ListTargetsTestEmpty(BaseListTargetsTest): - def test_list_all_empty(self): - self.assertEqual('', self.execute_task()) - self.assertEqual('', self.execute_task(args=['--test-sep=###'])) - self.assertEqual([], self.execute_console_task()) - - -class ListTargetsTest(BaseListTargetsTest): - @classmethod - def setUpClass(cls): - super(ListTargetsTest, cls).setUpClass() - - # Setup a BUILD tree for various list tests - - repo_target = dedent(''' - repo( - name='public', - url='http://maven.twttr.com', - push_db='/tmp/publish.properties' - ) - ''').strip() - cls.create_target('repos', repo_target) - - class Lib(object): - def __init__(self, name, provides=False): - self.name = name - self.provides = dedent(''' - artifact( - org='com.twitter', - name='%s', - repo=pants('repos:public') - ) - ''' % name).strip() if provides else 'None' - - def create_library(path, *libs): - libs = libs or [Lib(os.path.basename(os.path.dirname(cls.build_path(path))))] - for lib in libs: - target = "java_library(name='%s', provides=%s, sources=[])\n" % (lib.name, lib.provides) - cls.create_target(path, target) - - create_library('a') - create_library('a/b', Lib('b', provides=True)) - create_library('a/b/c', Lib('c'), Lib('c2', provides=True), Lib('c3')) - create_library('a/b/d') - create_library('a/b/e', Lib('e1')) - cls.create_target('f', dedent(''' - dependencies( - name='alias', - dependencies=[ - pants('a/b/c/BUILD:c3'), - pants('a/b/d/BUILD:d') - ] - ).with_description(""" - Exercises alias resolution. - Further description. 
- """) - ''')) - - def test_list_path(self): - self.assert_console_output('a/b/BUILD:b', targets=[self.target('a/b')]) - - def test_list_siblings(self): - self.assert_console_output('a/b/BUILD:b', targets=self.targets('a/b:')) - self.assert_console_output('a/b/c/BUILD:c', 'a/b/c/BUILD:c2', 'a/b/c/BUILD:c3', - targets=self.targets('a/b/c/:')) - - def test_list_descendants(self): - self.assert_console_output('a/b/c/BUILD:c', 'a/b/c/BUILD:c2', 'a/b/c/BUILD:c3', - targets=self.targets('a/b/c/::')) - - self.assert_console_output( - 'a/b/BUILD:b', - 'a/b/c/BUILD:c', - 'a/b/c/BUILD:c2', - 'a/b/c/BUILD:c3', - 'a/b/d/BUILD:d', - 'a/b/e/BUILD:e1', - targets=self.targets('a/b::')) - - def test_list_all(self): - self.assert_entries('\n', - 'repos/BUILD:public', - 'a/BUILD:a', - 'a/b/BUILD:b', - 'a/b/c/BUILD:c', - 'a/b/c/BUILD:c2', - 'a/b/c/BUILD:c3', - 'a/b/d/BUILD:d', - 'a/b/e/BUILD:e1', - 'f/BUILD:alias') - - self.assert_entries(', ', - 'repos/BUILD:public', - 'a/BUILD:a', - 'a/b/BUILD:b', - 'a/b/c/BUILD:c', - 'a/b/c/BUILD:c2', - 'a/b/c/BUILD:c3', - 'a/b/d/BUILD:d', - 'a/b/e/BUILD:e1', - 'f/BUILD:alias', - args=['--test-sep=, ']) - - self.assert_console_output( - 'repos/BUILD:public', - 'a/BUILD:a', - 'a/b/BUILD:b', - 'a/b/c/BUILD:c', - 'a/b/c/BUILD:c2', - 'a/b/c/BUILD:c3', - 'a/b/d/BUILD:d', - 'a/b/e/BUILD:e1', - 'f/BUILD:alias') - - def test_list_provides(self): - self.assert_console_output( - 'a/b/BUILD:b com.twitter#b', - 'a/b/c/BUILD:c2 com.twitter#c2', - args=['--test-provides']) - - def test_list_provides_customcols(self): - self.assert_console_output( - '/tmp/publish.properties a/b/BUILD:b http://maven.twttr.com public com.twitter#b', - '/tmp/publish.properties a/b/c/BUILD:c2 http://maven.twttr.com public com.twitter#c2', - args=[ - '--test-provides', - '--test-provides-columns=repo_db,address,repo_url,repo_name,artifact_id' - ]) - - def test_list_dedups(self): - def expand(spec): - for target in self.targets(spec): - for tgt in target.resolve(): - if isinstance(tgt, Target) and tgt.is_concrete: - yield tgt - - targets = [] - targets.extend(expand('a/b/d/::')) - targets.extend(expand('f::')) - - self.assertEquals(3, len(targets), "Expected a duplicate of a/b/d/BUILD:d") - self.assert_console_output( - 'a/b/c/BUILD:c3', - 'a/b/d/BUILD:d', - targets=targets - ) - - def test_list_documented(self): - self.assert_console_output( - # Confirm empty listing - args=['--test-documented'], - targets=[self.target('a/b')] - ) - - self.assert_console_output( - dedent(''' - f/BUILD:alias - Exercises alias resolution. - Further description. - ''').strip(), - args=['--test-documented'] - ) - diff --git a/tests/python/twitter/pants/tasks/test_minimal_cover.py b/tests/python/twitter/pants/tasks/test_minimal_cover.py deleted file mode 100644 index 73eea534d..000000000 --- a/tests/python/twitter/pants/tasks/test_minimal_cover.py +++ /dev/null @@ -1,124 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. 
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from textwrap import dedent
-
-from twitter.pants.tasks.minimal_cover import MinimalCover
-from twitter.pants.tasks.test_base import ConsoleTaskTest
-
-
-class BaseMinimalCoverTest(ConsoleTaskTest):
-  @classmethod
-  def task_type(cls):
-    return MinimalCover
-
-
-class MinimalCoverEmptyTest(BaseMinimalCoverTest):
-  def test(self):
-    self.assert_console_output(targets=[])
-
-
-class MinimalCoverTest(BaseMinimalCoverTest):
-
-  @classmethod
-  def setUpClass(cls):
-    super(MinimalCoverTest, cls).setUpClass()
-
-    def create_target(path, name, *deps):
-      all_deps = ["pants('%s')" % dep for dep in list(deps)]
-      cls.create_target(path, dedent('''
-        python_library(name='%s',
-          dependencies=[%s]
-        )
-        ''' % (name, ','.join(all_deps))))
-
-    create_target('common/a', 'a')
-    create_target('common/b', 'b')
-    create_target('common/c', 'c')
-    create_target('overlaps', 'one', 'common/a', 'common/b')
-    create_target('overlaps', 'two', 'common/a', 'common/c')
-    create_target('overlaps', 'three', 'common/a', 'overlaps:one')
-
-  def test_roots(self):
-    self.assert_console_output(
-      'common/a/BUILD:a',
-      targets=[self.target('common/a')],
-      extra_targets=[self.target('common/b')]
-    )
-
-  def test_nodups(self):
-    targets = [self.target('common/a')] * 2
-    self.assertEqual(2, len(targets))
-    self.assert_console_output(
-      'common/a/BUILD:a',
-      targets=targets
-    )
-
-  def test_disjoint(self):
-    self.assert_console_output(
-      'common/a/BUILD:a',
-      'common/b/BUILD:b',
-      'common/c/BUILD:c',
-      targets=[
-        self.target('common/a'),
-        self.target('common/b'),
-        self.target('common/c'),
-      ]
-    )
-
-  def test_identical(self):
-    self.assert_console_output(
-      'common/a/BUILD:a',
-      targets=[
-        self.target('common/a'),
-        self.target('common/a'),
-        self.target('common/a'),
-      ]
-    )
-
-  def test_intersection(self):
-    self.assert_console_output(
-      'overlaps/BUILD:one',
-      'overlaps/BUILD:two',
-      targets=[
-        self.target('overlaps:one'),
-        self.target('overlaps:two')
-      ]
-    )
-
-    self.assert_console_output(
-      'overlaps/BUILD:one',
-      'common/c/BUILD:c',
-      targets=[
-        self.target('common/a'),
-        self.target('common/b'),
-        self.target('common/c'),
-        self.target('overlaps:one'),
-      ]
-    )
-
-    self.assert_console_output(
-      'overlaps/BUILD:two',
-      'overlaps/BUILD:three',
-      targets=[
-        self.target('common/a'),
-        self.target('common/b'),
-        self.target('common/c'),
-        self.target('overlaps:one'),
-        self.target('overlaps:two'),
-        self.target('overlaps:three'),
-      ]
-    )
diff --git a/tests/python/twitter/pants/tasks/test_protobuf_gen.py b/tests/python/twitter/pants/tasks/test_protobuf_gen.py
deleted file mode 100644
index cce5af27c..000000000
--- a/tests/python/twitter/pants/tasks/test_protobuf_gen.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ================================================================================================== - -# TODO(John Sirois): Test against protoc itself - -import unittest - -from twitter.common.contextutil import temporary_file - -from twitter.pants.tasks.protobuf_gen import calculate_genfiles - - -class ProtobufGenCalculateGenfilesTestBase(unittest.TestCase): - def assert_files(self, lang, rel_path, contents, *expected_files): - with temporary_file() as fp: - fp.write(contents) - fp.close() - self.assertEqual(set(expected_files), calculate_genfiles(fp.name, rel_path)[lang]) - - -class ProtobufGenCalculateJavaTest(ProtobufGenCalculateGenfilesTestBase): - - def assert_java_files(self, rel_path, contents, *expected_files): - self.assert_files('java', rel_path, contents, *expected_files) - - def test_plain(self): - self.assert_java_files( - 'snake_case.proto', - 'package com.twitter.ads.revenue_tables;', - 'com/twitter/ads/revenue_tables/SnakeCase.java') - - self.assert_java_files( - 'a/b/jake.proto', - 'package com.twitter.ads.revenue_tables;', - 'com/twitter/ads/revenue_tables/Jake.java') - - def test_custom_package(self): - self.assert_java_files( - 'fred.proto', - ''' - package com.twitter.ads.revenue_tables; - option java_package = "com.example.foo.bar"; - ''', - 'com/example/foo/bar/Fred.java') - - self.assert_java_files( - 'bam_bam.proto', - 'option java_package = "com.example.baz.bip";', - 'com/example/baz/bip/BamBam.java') - - self.assert_java_files( - 'bam_bam.proto', - 'option java_package="com.example.baz.bip" ;', - 'com/example/baz/bip/BamBam.java') - - def test_custom_outer(self): - self.assert_java_files( - 'jack_spratt.proto', - ''' - package com.twitter.lean; - option java_outer_classname = "To"; - ''', - 'com/twitter/lean/To.java') - - def test_multiple_files(self): - self.assert_java_files( - 'jack_spratt.proto', - ''' - package com.twitter.lean; - option java_multiple_files = false; - ''', - 'com/twitter/lean/JackSpratt.java') - - self.assert_java_files( - 'jack_spratt.proto', - ''' - package com.twitter.lean; - option java_multiple_files = true; - - enum Jake { FOO=1; } - message joe_bob { - } - ''', - 'com/twitter/lean/JackSpratt.java', - 'com/twitter/lean/Jake.java', - 'com/twitter/lean/joe_bob.java', - 'com/twitter/lean/joe_bobOrBuilder.java') diff --git a/tests/python/twitter/pants/tasks/test_roots.py b/tests/python/twitter/pants/tasks/test_roots.py deleted file mode 100644 index 515ff4938..000000000 --- a/tests/python/twitter/pants/tasks/test_roots.py +++ /dev/null @@ -1,56 +0,0 @@ -import os - -from contextlib import contextmanager - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.target import Target -from twitter.pants.targets.sources import SourceRoot -from twitter.pants.tasks.roots import ListRoots -from twitter.pants.tasks.test_base import ConsoleTaskTest 
- - -@contextmanager -def register_sourceroot(): - try: - yield SourceRoot.register - except (ValueError, IndexError) as e: - print("SourceRoot Registration Failed.") - raise e - finally: - SourceRoot.reset() - - -class ListRootsTest(ConsoleTaskTest): - - class TypeA(Target): - pass - - class TypeB(Target): - pass - - @classmethod - def task_type(cls): - return ListRoots - - def test_roots_without_register(self): - try: - self.assert_console_output() - except AssertionError: - self.fail("./pants goal roots failed without any registered SourceRoot.") - - def test_no_source_root(self): - with register_sourceroot() as sourceroot: - sourceroot(os.path.join(get_buildroot(), "fakeroot")) - self.assert_console_output('fakeroot: *') - - def test_single_source_root(self): - with register_sourceroot() as sourceroot: - sourceroot(os.path.join(get_buildroot(), "fakeroot"), ListRootsTest.TypeA, - ListRootsTest.TypeB) - self.assert_console_output("fakeroot: TypeA,TypeB") - - def test_multiple_source_root(self): - with register_sourceroot() as sourceroot: - sourceroot(os.path.join(get_buildroot(), "fakerootA"), ListRootsTest.TypeA) - sourceroot(os.path.join(get_buildroot(), "fakerootB"), ListRootsTest.TypeB) - self.assert_console_output('fakerootA: TypeA', 'fakerootB: TypeB') diff --git a/tests/python/twitter/pants/tasks/test_scrooge_gen.py b/tests/python/twitter/pants/tasks/test_scrooge_gen.py deleted file mode 100644 index 895877930..000000000 --- a/tests/python/twitter/pants/tasks/test_scrooge_gen.py +++ /dev/null @@ -1,55 +0,0 @@ -# ================================================================================================== -# Copyright 2014 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import pytest - -from textwrap import dedent - -from twitter.pants.base_build_root_test import BaseBuildRootTest -from twitter.pants.tasks import TaskError -from twitter.pants.tasks.scrooge_gen import ScroogeGen - - -class ScroogeGenTest(BaseBuildRootTest): - - def test_validate(self): - self.create_target('test_validate', dedent(''' - java_thrift_library(name='one', - sources=None, - dependencies=None, - ) - ''')) - - self.create_target('test_validate', dedent(''' - java_thrift_library(name='two', - sources=None, - dependencies=[pants(':one')], - ) - ''')) - - self.create_target('test_validate', dedent(''' - java_thrift_library(name='three', - sources=None, - dependencies=[pants(':one')], - rpc_style='finagle', - ) - ''')) - - ScroogeGen._validate([self.target('test_validate:one')]) - ScroogeGen._validate([self.target('test_validate:two')]) - - with pytest.raises(TaskError): - ScroogeGen._validate([self.target('test_validate:three')]) diff --git a/tests/python/twitter/pants/tasks/test_sorttargets.py b/tests/python/twitter/pants/tasks/test_sorttargets.py deleted file mode 100644 index 53afc2348..000000000 --- a/tests/python/twitter/pants/tasks/test_sorttargets.py +++ /dev/null @@ -1,60 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from textwrap import dedent - -from twitter.pants.tasks.sorttargets import SortTargets -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class BaseSortTargetsTest(ConsoleTaskTest): - @classmethod - def task_type(cls): - return SortTargets - - -class SortTargetsEmptyTest(BaseSortTargetsTest): - def test(self): - self.assert_console_output(targets=[]) - - -class SortTargetsTest(BaseSortTargetsTest): - - @classmethod - def setUpClass(cls): - super(SortTargetsTest, cls).setUpClass() - - def create_target(path, name, *deps): - all_deps = ["pants('%s')" % dep for dep in list(deps)] - cls.create_target(path, dedent(''' - python_library(name='%s', - dependencies=[%s] - ) - ''' % (name, ','.join(all_deps)))) - - create_target('common/a', 'a') - create_target('common/b', 'b', 'common/a') - create_target('common/c', 'c', 'common/a', 'common/b') - - def test_sort(self): - targets = [self.target('common/a'), self.target('common/c'), self.target('common/b')] - self.assertEqual(['common/a/BUILD:a', 'common/b/BUILD:b', 'common/c/BUILD:c'], - list(self.execute_console_task(targets=targets))) - - def test_sort_reverse(self): - targets = [self.target('common/c'), self.target('common/a'), self.target('common/b')] - self.assertEqual(['common/c/BUILD:c', 'common/b/BUILD:b', 'common/a/BUILD:a'], - list(self.execute_console_task(targets=targets, args=['--test-reverse']))) diff --git a/tests/python/twitter/pants/tasks/test_targets_help.py b/tests/python/twitter/pants/tasks/test_targets_help.py deleted file mode 100644 index e4a3f42c2..000000000 --- a/tests/python/twitter/pants/tasks/test_targets_help.py +++ /dev/null @@ -1,58 +0,0 @@ -# ================================================================================================== -# Copyright 2013 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -import os.path - -from twitter.pants.base.build_environment import get_buildroot -from twitter.pants.base.target import Target -from twitter.pants.targets.sources import SourceRoot -from twitter.pants.tasks.targets_help import TargetsHelp -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class TargetsHelpTest(ConsoleTaskTest): - - @classmethod - def task_type(cls): - return TargetsHelp - - @classmethod - def setUpClass(cls): - super(TargetsHelpTest, cls).setUpClass() - SourceRoot.register(os.path.join(get_buildroot(), 'fakeroot'), TargetsHelpTest.MyTarget) - - def test_list_installed_targets(self): - self.assert_console_output( - TargetsHelp.INSTALLED_TARGETS_HEADER, - ' %s: %s' % ('my_target'.rjust(TargetsHelp.MAX_ALIAS_LEN), - TargetsHelpTest.MyTarget.__doc__.split('\n')[0])) - - def test_get_details(self): - self.assert_console_output( - TargetsHelp.DETAILS_HEADER.substitute( - name='my_target', desc=TargetsHelpTest.MyTarget.__doc__), - ' name: The name of this target.', - ' foo: Another argument. (default: None)', - args=['--test-details=my_target']) - - class MyTarget(Target): - """One-line description of the target.""" - def __init__(self, name, foo=None): - """ - :param name: The name of this target. - :param string foo: Another argument. - """ - Target.__init__(self, name) diff --git a/tests/python/twitter/pants/tasks/test_what_changed.py b/tests/python/twitter/pants/tasks/test_what_changed.py deleted file mode 100644 index 311534477..000000000 --- a/tests/python/twitter/pants/tasks/test_what_changed.py +++ /dev/null @@ -1,180 +0,0 @@ -# ================================================================================================== -# Copyright 2012 Twitter, Inc. -# -------------------------------------------------------------------------------------------------- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this work except in compliance with the License. -# You may obtain a copy of the License in the LICENSE file, or at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ================================================================================================== - -from textwrap import dedent - -from twitter.pants.base.target import TargetDefinitionException -from twitter.pants.tasks.what_changed import WhatChanged, Workspace -from twitter.pants.tasks.test_base import ConsoleTaskTest - - -class BaseWhatChangedTest(ConsoleTaskTest): - @classmethod - def task_type(cls): - return WhatChanged - - def workspace(self, files=None, parent=None): - class MockWorkspace(Workspace): - @staticmethod - def touched_files(p): - self.assertEqual(parent or 'HEAD', p) - return files or [] - return MockWorkspace() - - -class WhatChangedTestBasic(BaseWhatChangedTest): - def test_nochanges(self): - self.assert_console_output(workspace=self.workspace()) - - def test_parent(self): - self.assert_console_output(args=['--test-parent=42'], workspace=self.workspace(parent='42')) - - def test_files(self): - self.assert_console_output( - 'a/b/c', - 'd', - 'e/f', - args=['--test-files'], - workspace=self.workspace(files=['a/b/c', 'd', 'e/f']) - ) - - -class WhatChangedTest(BaseWhatChangedTest): - @classmethod - def setUpClass(cls): - super(WhatChangedTest, cls).setUpClass() - - cls.create_target('root', dedent(''' - source_root('src/py', python_library) - source_root('resources/a1', resources) - ''')) - - cls.create_target('root/src/py/a', dedent(''' - python_library( - name='alpha', - sources=['b/c', 'd'], - resources=['test.resources'] - ) - - jar_library( - name='beta', - dependencies=[ - jar(org='gamma', name='ray', rev='1.137.bruce_banner') - ] - ) - ''')) - - cls.create_target('root/src/py/1', dedent(''' - python_library( - name='numeric', - sources=['2'] - ) - ''')) - - cls.create_target('root/src/thrift', dedent(''' - java_thrift_library( - name='thrift', - sources=['a.thrift'] - ) - - python_thrift_library( - name='py-thrift', - sources=['a.thrift'] - ) - ''')) - - cls.create_target('root/resources/a', dedent(''' - resources( - name='a_resources', - sources=['a.resources'] - ) - ''')) - - cls.create_target('root/src/java/a', dedent(''' - java_library( - name='a_java', - sources=['a.java'], - resources=[pants('root/resources/a:a_resources')] - ) - ''')) - - cls.create_target('root/3rdparty/BUILD.twitter', dedent(''' - jar_library( - name='dummy', - dependencies=[ - jar(org='foo', name='ray', rev='1.45') - ]) - ''')) - - cls.create_target('root/3rdparty/BUILD', dedent(''' - jar_library( - name='dummy1', - dependencies=[ - jar(org='foo1', name='ray', rev='1.45') - ]) - ''')) - - def test_owned(self): - self.assert_console_output( - 'root/src/py/a/BUILD:alpha', - 'root/src/py/1/BUILD:numeric', - workspace=self.workspace(files=['root/src/py/a/b/c', 'root/src/py/a/d', 'root/src/py/1/2']) - ) - - def test_multiply_owned(self): - self.assert_console_output( - 'root/src/thrift/BUILD:thrift', - 'root/src/thrift/BUILD:py-thrift', - workspace=self.workspace(files=['root/src/thrift/a.thrift']) - ) - - def test_build(self): - self.assert_console_output( - 'root/src/py/a/BUILD:alpha', - 'root/src/py/a/BUILD:beta', - workspace=self.workspace(files=['root/src/py/a/BUILD']) - ) - - def test_resource_changed(self): - self.assert_console_output( - 'root/src/py/a/BUILD:alpha', - workspace=self.workspace(files=['root/src/py/a/test.resources']) - ) - - def test_resource_changed_for_java_lib(self): - self.assert_console_output( - 'root/resources/a/BUILD:a_resources', - workspace=self.workspace(files=['root/resources/a/a.resources']) - ) - - def test_build_sibling(self): - 
-    self.assert_console_output(
-      'root/3rdparty/BUILD.twitter:dummy',
-      workspace=self.workspace(files=['root/3rdparty/BUILD.twitter'])
-    )
-
-  def test_resource_type_error(self):
-    self.create_target('root/resources/a1', dedent('''
-      java_library(
-        name='a1',
-        sources=['a1.test'],
-        resources=[1]
-      )
-    '''))
-    self.assert_console_raises(
-      TargetDefinitionException,
-      workspace=self.workspace(files=['root/resources/a1/a1.test'])
-    )
diff --git a/tests/python/twitter/pants/test_maven_layout.py b/tests/python/twitter/pants/test_maven_layout.py
deleted file mode 100644
index 5db144d46..000000000
--- a/tests/python/twitter/pants/test_maven_layout.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# ==================================================================================================
-# Copyright 2013 Twitter, Inc.
-# --------------------------------------------------------------------------------------------------
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this work except in compliance with the License.
-# You may obtain a copy of the License in the LICENSE file, or at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================================
-
-from twitter.pants.base_build_root_test import BaseBuildRootTest
-
-
-class MavenLayoutTest(BaseBuildRootTest):
-  @classmethod
-  def setUpClass(cls):
-    super(MavenLayoutTest, cls).setUpClass()
-
-    cls.create_target('projectB/src/main/scala', 'scala_library(name="test", sources=[])')
-    cls.create_file('projectB/BUILD', 'maven_layout()')
-
-    cls.create_target('projectA/subproject/src/main/java', 'java_library(name="test", sources=[])')
-    cls.create_file('BUILD', 'maven_layout("projectA/subproject")')
-
-  def test_layout_here(self):
-    self.assertEqual('projectB/src/main/scala',
-                     self.target('projectB/src/main/scala:test').target_base)
-
-  def test_subproject_layout(self):
-    self.assertEqual('projectA/subproject/src/main/java',
-                     self.target('projectA/subproject/src/main/java:test').target_base)
diff --git a/tests/python/twitter/pants/test_thrift_util.py b/tests/python/twitter/pants/test_thrift_util.py
deleted file mode 100644
index 97695dca4..000000000
--- a/tests/python/twitter/pants/test_thrift_util.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import os
-
-from twitter.common.contextutil import temporary_dir
-from twitter.common.dirutil import safe_open
-from twitter.common.lang import Compatibility
-
-if Compatibility.PY3:
-  import unittest
-else:
-  import unittest2 as unittest
-
-from twitter.pants.thrift_util import find_includes, find_root_thrifts
-
-
-class ThriftUtilTest(unittest.TestCase):
-  def write(self, path, contents):
-    with safe_open(path, 'w') as fp:
-      fp.write(contents)
-    return path
-
-  def test_find_includes(self):
-    with temporary_dir() as dir:
-      a = os.path.join(dir, 'a')
-      b = os.path.join(dir, 'b')
-
-      main = self.write(os.path.join(a, 'main.thrift'), '''
-        include "sub/a_included.thrift" //Todo comment
-        include "b_included.thrift"
-        include "c_included.thrift" #gibberish
-        include "d_included.thrift" some random
-      ''')
-
-      a_included = self.write(os.path.join(a, 'sub', 'a_included.thrift'), '# noop')
-      b_included = self.write(os.path.join(b, 'b_included.thrift'), '# noop')
-      c_included = self.write(os.path.join(b, 'c_included.thrift'), '# noop')
-
-      self.assertEquals(set([a_included, b_included, c_included]),
-                        find_includes(basedirs=set([a, b]), source=main))
-
-  def test_find_includes_exception(self):
-    with temporary_dir() as dir:
-      a = os.path.join(dir, 'a')
-
-      main = self.write(os.path.join(a, 'main.thrift'), '''
-        include "sub/a_included.thrift # Todo"
-        include "b_included.thrift"
-      ''')
-      self.write(os.path.join(a, 'sub', 'a_included.thrift'), '# noop')
-      self.assertRaises(ValueError, find_includes, basedirs=set([a]), source=main)
-
-  def test_find_root_thrifts(self):
-    with temporary_dir() as dir:
-      root_1 = self.write(os.path.join(dir, 'root_1.thrift'), '# noop')
-      root_2 = self.write(os.path.join(dir, 'root_2.thrift'), '# noop')
-      self.assertEquals(set([root_1, root_2]),
-                        find_root_thrifts(basedirs=[], sources=[root_1, root_2]))
-
-    with temporary_dir() as dir:
-      root_1 = self.write(os.path.join(dir, 'root_1.thrift'), 'include "mid_1.thrift"')
-      self.write(os.path.join(dir, 'mid_1.thrift'), 'include "leaf_1.thrift"')
-      self.write(os.path.join(dir, 'leaf_1.thrift'), '# noop')
-      root_2 = self.write(os.path.join(dir, 'root_2.thrift'), 'include "root_1.thrift"')
-      self.assertEquals(set([root_2]), find_root_thrifts(basedirs=[], sources=[root_1, root_2]))
diff --git a/tests/python/twitter/pants/testutils/__init__.py b/tests/python/twitter/pants/testutils/__init__.py
deleted file mode 100644
index ec66df2f4..000000000
--- a/tests/python/twitter/pants/testutils/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-
-from mock_logger import MockLogger
-from mock_target import MockTarget
-
-__all__ = ['MockLogger', 'MockTarget']
diff --git a/tests/python/twitter/pants/testutils/base_mock_target_test.py b/tests/python/twitter/pants/testutils/base_mock_target_test.py
deleted file mode 100644
index 218046c90..000000000
--- a/tests/python/twitter/pants/testutils/base_mock_target_test.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import unittest
-
-from twitter.pants.base.target import Target
-
-
-class BaseMockTargetTest(unittest.TestCase):
-  """A base class useful for tests using ``MockTarget``s."""
-
-  def setUp(self):
-    Target._clear_all_addresses()
diff --git a/tests/python/twitter/pants/testutils/mock_logger.py b/tests/python/twitter/pants/testutils/mock_logger.py
deleted file mode 100644
index 63e6e2848..000000000
--- a/tests/python/twitter/pants/testutils/mock_logger.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import sys
-
-from twitter.pants.reporting.report import Report
-
-
-class MockLogger(object):
-  """A standalone logger that writes to stderr.
-
-  Useful for testing without requiring the full RunTracker reporting framework.
- """ - def __init__(self, level=Report.INFO): - self._level = level - - def _maybe_log(self, level, *msg_elements): - if level <= self._level: - sys.stderr.write(''.join(msg_elements)) - - def debug(self, *msg_elements): self._maybe_log(Report.DEBUG, *msg_elements) - def info(self, *msg_elements): self._maybe_log(Report.INFO, *msg_elements) - def warn(self, *msg_elements): self._maybe_log(Report.WARN, *msg_elements) - def error(self, *msg_elements): self._maybe_log(Report.ERROR, *msg_elements) - def fatal(self, *msg_elements): self._maybe_log(Report.FATAL, *msg_elements) diff --git a/tests/python/twitter/pants/testutils/mock_target.py b/tests/python/twitter/pants/testutils/mock_target.py deleted file mode 100644 index fa7fb51eb..000000000 --- a/tests/python/twitter/pants/testutils/mock_target.py +++ /dev/null @@ -1,28 +0,0 @@ -__author__ = 'Ryan Williams' - -from collections import defaultdict -from twitter.pants.base.parse_context import ParseContext -from twitter.pants.targets.internal import InternalTarget -from twitter.pants.targets.with_sources import TargetWithSources - - -class MockTarget(InternalTarget, TargetWithSources): - def __init__(self, name, dependencies=None, num_sources=0, exclusives=None): - with ParseContext.temp(): - InternalTarget.__init__(self, name, dependencies, exclusives=exclusives) - TargetWithSources.__init__(self, name, exclusives=exclusives) - self.num_sources = num_sources - self.declared_exclusives = defaultdict(set) - if exclusives is not None: - for k in exclusives: - self.declared_exclusives[k] = set([exclusives[k]]) - self.exclusives = None - - def resolve(self): - yield self - - def walk(self, work, predicate=None): - work(self) - for dep in self.dependencies: - dep.walk(work) -