Permalink
Browse files

Roll back 23 commits that weren't supposed to end up in master.

  • Loading branch information...
1 parent fd0ba3a commit bbc624c61f3b4defdf793bc2aef442473c863bb4 Benjy committed Jan 18, 2013
Showing with 119 additions and 1,057 deletions.
  1. +0 −1 .gitignore
  2. +0 −2 pants
  3. +0 −2 pants.ini
  4. +0 −1 src/python/twitter/pants/base/__init__.py
  5. +0 −86 src/python/twitter/pants/base/abbreviate_target_ids.py
  6. +2 −2 src/python/twitter/pants/base/artifact_cache.py
  7. +0 −137 src/python/twitter/pants/base/double_dag.py
  8. +2 −8 src/python/twitter/pants/commands/goal.py
  9. +0 −3 src/python/twitter/pants/goal/__init__.py
  10. +5 −6 src/python/twitter/pants/goal/context.py
  11. +6 −9 src/python/twitter/pants/goal/group.py
  12. +5 −5 src/python/twitter/pants/goal/phase.py
  13. +1 −1 src/python/twitter/pants/python/README.md
  14. +1 −3 src/python/twitter/pants/python/__init__.py
  15. +0 −21 src/python/twitter/pants/python/naive_parallelizer.py
  16. +0 −243 src/python/twitter/pants/python/parallel_compile_manager.py
  17. +2 −2 src/python/twitter/pants/targets/jar_dependency.py
  18. +10 −16 src/python/twitter/pants/tasks/__init__.py
  19. +2 −5 src/python/twitter/pants/tasks/binary_utils.py
  20. +21 −122 src/python/twitter/pants/tasks/cache_manager.py
  21. +2 −2 src/python/twitter/pants/tasks/code_gen.py
  22. +1 −1 src/python/twitter/pants/tasks/ivy_resolve.py
  23. +3 −5 src/python/twitter/pants/tasks/java_compile.py
  24. +1 −1 src/python/twitter/pants/tasks/markdown_to_html.py
  25. +5 −25 src/python/twitter/pants/tasks/nailgun_task.py
  26. +14 −68 src/python/twitter/pants/tasks/scala_compile.py
  27. +1 −1 src/python/twitter/pants/tasks/thriftstore_dml_gen.py
  28. 0 tests/python/__init__.py
  29. 0 tests/python/twitter/__init__.py
  30. 0 tests/python/twitter/pants/__init__.py
  31. +1 −1 tests/python/twitter/pants/ant/test-ide.py
  32. +0 −1 tests/python/twitter/pants/base/BUILD
  33. +1 −1 tests/python/twitter/pants/base/test-generator.py
  34. +0 −126 tests/python/twitter/pants/base/test_double_dag.py
  35. +2 −4 tests/python/twitter/pants/targets/BUILD
  36. +10 −16 tests/python/twitter/pants/targets/test-internal.py
  37. +1 −4 tests/python/twitter/pants/tasks/BUILD
  38. +0 −85 tests/python/twitter/pants/tasks/test_cache_manager.py
  39. +20 −9 tests/python/twitter/pants/tasks/test_context.py
  40. +0 −8 tests/python/twitter/pants/testutils/BUILD
  41. +0 −2 tests/python/twitter/pants/testutils/__init__.py
  42. +0 −22 tests/python/twitter/pants/testutils/mock_target.py
View
@@ -17,7 +17,6 @@ bin/
build/
codegen/classes/
htmlcov/
-lib/
dist
out
pants.pex
View
2 pants
@@ -28,7 +28,6 @@ PYTHON_DAEMON_EGG=$MY_DIR/3rdparty/python/python_daemon-1.6-py2.6.egg
function run_pants_bare() {
source $VIRTUAL_PYTHON/bin/activate
PYTHONPATH=$MAKO_EGG:$MARKDOWN_EGG:$PYGMENTS_EGG:$PYSTACHE_EGG:$PYTHON_DAEMON_EGG:$MY_DIR/src/python python $PANTS_EXE "$@"
- ret=$?
deactivate
}
@@ -64,7 +63,6 @@ if [ ! -z "${PANTS_DEV}" ]; then
# ./pants.pex py src/python/twitter/pants "$@"
echo "*** running pants in dev mode from $PANTS_EXE ***"
run_pants_bare "$@"
- exit $ret
else
${MY_DIR}/pants.pex "$@"
fi
View
@@ -147,8 +147,6 @@ scaladeps: ['3rdparty:scala-library']
partition_size_hint: 1000000000
-num_parallel_compiles: 4
-
nailgun_dir: %(pants_workdir)s/ng/%(compile-profile)s
main: com.typesafe.zinc.Main
args: [
@@ -17,7 +17,6 @@
from .address import Address
from .build_file import BuildFile
from .config import Config
-from .double_dag import DoubleDag
from .fileset import Fileset
from .parse_context import ContextError, ParseContext
from .target import Target, TargetDefinitionException
@@ -1,86 +0,0 @@
-__author__ = 'Ryan Williams'
-
-def abbreviate_target_ids(arr):
- """This method takes a list of strings (e.g. target IDs) and maps them to shortened versions of themselves. The
- original strings should consist of '.'-delimited segments, and the abbreviated versions are subsequences of these
- segments such that each string's subsequence is unique from others in @arr.
-
- For example:
-
- input: [
- 'com.twitter.pants.a.b',
- 'com.twitter.pants.a.c',
- 'com.twitter.pants.d'
- ]
-
- might return: {
- 'com.twitter.pants.a.b': 'b',
- 'com.twitter.pants.a.c': 'c',
- 'com.twitter.pants.d': 'd'
- }
-
- This can be useful for debugging purposes, removing a lot of boilerplate from printed lists of target IDs.
- """
- split_keys = [tuple(a.split('.')) for a in arr]
-
- split_keys_by_subseq = {}
-
- def subseq_map(arr, subseq_fn = None, result_cmp_fn = None):
- def subseq_map_rec(remaining_arr, subseq, indent = ''):
- if not remaining_arr:
- if subseq_fn:
- subseq_fn(arr, subseq)
- return subseq
-
- next_segment = remaining_arr.pop()
- next_subseq = tuple([next_segment] + list(subseq))
-
- skip_value = subseq_map_rec(remaining_arr, subseq, indent + '\t')
-
- add_value = subseq_map_rec(remaining_arr, next_subseq, indent + '\t')
-
- remaining_arr.append(next_segment)
-
- if result_cmp_fn:
- if not subseq:
- # Empty subsequence should always lose.
- return add_value
- if result_cmp_fn(skip_value, add_value):
- return skip_value
- return add_value
-
- return None
-
- val = subseq_map_rec(list(arr), tuple())
- return val
-
-
- def add_subseq(arr, subseq):
- if subseq not in split_keys_by_subseq:
- split_keys_by_subseq[subseq] = set()
- if split_key not in split_keys_by_subseq[subseq]:
- split_keys_by_subseq[subseq].add(arr)
-
- for split_key in split_keys:
- subseq_map(split_key, add_subseq)
-
-
- def return_min_subseqs(subseq1, subseq2):
- collisions1 = split_keys_by_subseq[subseq1]
- collisions2 = split_keys_by_subseq[subseq2]
- if (len(collisions1) < len(collisions2) or
- (len(collisions1) == len(collisions2) and
- len(subseq1) <= len(subseq2))):
- return True
- return False
-
- min_subseq_by_key = {}
-
- for split_key in split_keys:
- min_subseq = subseq_map(split_key, result_cmp_fn=return_min_subseqs)
- if not min_subseq:
- raise Exception("No min subseq found for %s: %s" % (str(split_key), str(min_subseq)))
- min_subseq_by_key['.'.join([str(segment) for segment in split_key])] = '.'.join(min_subseq)
-
- return min_subseq_by_key
-
@@ -33,7 +33,7 @@ def create_artifact_cache(context, artifact_root, spec):
as a path or URL prefix to a cache root. If it's a list of strings, it returns an appropriate
combined cache.
"""
- if not spec:
+ if len(spec) == 0:
raise Exception, 'Empty artifact cache spec'
if isinstance(spec, basestring):
if spec.startswith('/'):
@@ -282,7 +282,7 @@ def _url_string(self, path):
class CombinedArtifactCache(ArtifactCache):
"""An artifact cache that delegates to a list of other caches."""
def __init__(self, artifact_caches):
- if not artifact_caches:
+ if len(artifact_caches) == 0:
raise Exception, 'Must provide at least one underlying artifact cache'
context = artifact_caches[0].context
artifact_root = artifact_caches[0].artifact_root
@@ -1,137 +0,0 @@
-__author__ = 'Ryan Williams'
-
-from abbreviate_target_ids import abbreviate_target_ids
-
-# This file contains the implementation for a doubly-linked DAG data structure that is useful for dependency analysis.
-
-class DoubleDagNode(object):
- def __init__(self, data):
- self.data = data
- self.parents = set()
- self.children = set()
-
- def __repr__(self):
- return "Node(%s)" % self.data.id
-
-
-class DoubleDag(object):
- """This implementation of a doubly-linked DAG builds itself from a list of objects (of theoretically unknown type)
- and a function for generating each object's "children". It wraps each object in a "node" structure and exposes the
- following:
-
- - list of all nodes in the DAG (.nodes)
- - lookup dag node from original object (.lookup)
- - set of leaf nodes (.leaves)
- - a method (remove_nodes) that removes nodes and updates the set of leaves appropriately
- - the inverse method (restore_nodes)
-
- These are useful for computing the order in which to compile what groups of targets.
- """
- def __init__(self, objects, child_fn, logger):
- self._child_fn = child_fn
- self._logger = logger
-
- self.nodes = [ DoubleDagNode(object) for object in objects ]
-
- node_ids = [ node.data.id for node in self.nodes ]
- abbreviated_id_map = abbreviate_target_ids(node_ids)
- for node in self.nodes:
- node.short_id = abbreviated_id_map[node.data.id]
- node.data.short_id = abbreviated_id_map[node.data.id]
-
- self._nodes_by_data_map = {}
- for node in self.nodes:
- self._nodes_by_data_map[node.data] = node
-
- self._roots = set([])
- self.leaves = set([])
-
- self._logger.debug("%d nodes:" % len(self.nodes))
- for node in self.nodes:
- self._logger.debug(node.data.id,)
- self._logger.debug('')
-
- self._init_parent_and_child_relationships()
-
- self._find_roots_and_leaves()
-
- self._logger.debug("%d roots:" % len(self._roots))
- for root in self._roots:
- self._logger.debug(root.data.id)
- self._logger.debug('')
-
- self._logger.debug("%d leaves:" % len(self.leaves))
- for leaf in self.leaves:
- self._logger.debug(leaf.data.id)
- self._logger.debug('')
-
-
- def print_tree(self, use_short_ids=True):
- """This method prints out a python dictionary representing this DAG in a format suitable for eval'ing and useful
- for debugging."""
- def short_id(node):
- return node.short_id
- def id(node):
- return node.data.id
-
- node_fn = short_id if use_short_ids else id
- self._logger.debug("deps = {")
- for node in self.nodes:
- self._logger.debug(
- """ "%s": {"num": %d, "children": [%s]},""" % (
- node_fn(node),
- node.data.num_sources,
- ','.join(['"%s"' % node_fn(child) for child in node.children]))
- )
- self._logger.debug('}')
- self._logger.debug('')
-
- def lookup(self, data):
- if data in self._nodes_by_data_map:
- return self._nodes_by_data_map[data]
- return None
-
- def _init_parent_and_child_relationships(self):
- def find_children(original_node, data):
- for child_data in self._child_fn(data):
- if child_data in self._nodes_by_data_map:
- child_node = self._nodes_by_data_map[child_data]
- original_node.children.add(child_node)
- child_node.parents.add(original_node)
- else:
- raise Exception(
- "DAG child_fn shouldn't yield data objects not in tree:\n %s. child of: %s. original data: %s" % (
- str(child_data),
- str(data),
- str(original_node.data)))
-
- for node in self.nodes:
- find_children(node, node.data)
-
-
- def _find_roots_and_leaves(self):
- for node in self.nodes:
- if not node.parents:
- self._roots.add(node)
- if not node.children:
- self.leaves.add(node)
-
-
- def remove_nodes(self, nodes):
- """Removes the given nodes, updates self.leaves accordingly, and returns any nodes that have become leaves as a
- result of this removal."""
- new_leaves = set()
- for node in nodes:
- if node not in self.nodes:
- raise Exception("Attempting to remove invalid node: %s" % node.data.id)
- for parent_node in node.parents:
- if parent_node in nodes:
- continue
- parent_node.children.remove(node)
- if not parent_node.children:
- new_leaves.add(parent_node)
-
- # Do these outside in case 'nodes' is in fact self.leaves, so that we don't change the set we're iterating over.
- self.leaves -= nodes
- self.leaves.update(new_leaves)
- return new_leaves
@@ -402,13 +402,7 @@ def run(self, lock):
for dir in self.options.target_directory:
self.add_target_directory(dir)
- context = Context(
- self.config,
- self.options,
- self.targets,
- lock=lock,
- log=logger,
- timer=self.timer if self.options.time else None)
+ context = Context(self.config, self.options, self.targets, lock=lock, log=logger)
unknown = []
for phase in self.phases:
@@ -423,7 +417,7 @@ def run(self, lock):
if logger:
logger.debug('Operating on targets: %s', self.targets)
- ret = Phase.attempt(context, self.phases)
+ ret = Phase.attempt(context, self.phases, timer=self.timer if self.options.time else None)
if self.options.time:
print('Timing report')
print('=============')
@@ -73,9 +73,6 @@ def execute(self, targets):
self._task = FuncTask
- def __repr__(self):
- return "Goal(%s-%s; %s)" % (self.name, self.group, ','.join([str(d) for d in self.dependencies]))
-
@property
def task_type(self):
return self._task
@@ -43,15 +43,14 @@ def debug(self, msg): pass
def info(self, msg): pass
def warn(self, msg): pass
- def __init__(self, config, options, target_roots, lock=Lock.unlocked(), log=None, timer=None):
+ def __init__(self, config, options, target_roots, lock=Lock.unlocked(), log=None):
self._config = config
self._options = options
self._lock = lock
self._log = log or Context.Log()
self._state = {}
self._products = Products()
self._buildroot = get_buildroot()
- self.timer = timer
self.replace_targets(target_roots)
@@ -126,10 +125,10 @@ def replace_targets(self, target_roots):
self._target_roots = target_roots
self._targets = OrderedSet()
for target in target_roots:
- self._add_target(target)
+ self.add_target(target)
self.id = Target.identify(self._targets)
- def _add_target(self, target):
+ def add_target(self, target):
"""Adds a target and its transitive dependencies to the run context.
The target is not added to the target roots.
@@ -150,7 +149,7 @@ def add_new_target(self, target_base, target_type, *args, **kwargs):
else:
derived_from = None
target = self._create_new_target(target_base, target_type, *args, **kwargs)
- self._add_target(target)
+ self.add_target(target)
if derived_from:
target.derived_from = derived_from
return target
@@ -175,7 +174,7 @@ def targets(self, predicate=None):
"""
return filter(predicate, self._targets)
- def dependents(self, on_predicate=None, from_predicate=None):
+ def dependants(self, on_predicate=None, from_predicate=None):
"""Returns a map from targets that satisfy the from_predicate to targets they depend on that
satisfy the on_predicate.
"""
Oops, something went wrong.

0 comments on commit bbc624c

Please sign in to comment.