Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

move the declarative task stuff out of the python backend testing #7279

Merged
@@ -23,14 +23,14 @@
class TestBuildLocalDistsWithCtypesNativeSources(BuildLocalPythonDistributionsTestBase):

@classproperty
def _run_before_task_types(cls):
def run_before_task_types(cls):
return [
CCompile,
CppCompile,
LinkSharedLibraries,
] + super(TestBuildLocalDistsWithCtypesNativeSources, cls)._run_before_task_types
] + super(TestBuildLocalDistsWithCtypesNativeSources, cls).run_before_task_types

_dist_specs = OrderedDict([
dist_specs = OrderedDict([

('src/python/plat_specific_c_dist:ctypes_c_library', {
'key': 'ctypes_c_library',
@@ -111,7 +111,7 @@ def test_ctypes_c_dist(self):
self.assertEqual(['platform_specific_ctypes_c_dist==0.0.0+{}'.format(snapshot_version)],
[str(x.requirement) for x in synthetic_target.requirements.value])
local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(
local_wheel = self.retrieve_single_product_at_target_base(
local_wheel_products, platform_specific_dist)
self.assertTrue(check_wheel_platform_matches_host(local_wheel))

@@ -123,6 +123,6 @@ def test_ctypes_cpp_dist(self):
[str(x.requirement) for x in synthetic_target.requirements.value])

local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(
local_wheel = self.retrieve_single_product_at_target_base(
local_wheel_products, platform_specific_dist)
self.assertTrue(check_wheel_platform_matches_host(local_wheel))
@@ -19,7 +19,7 @@

class TestBuildLocalDistsNativeSources(BuildLocalPythonDistributionsTestBase):

_dist_specs = OrderedDict([
dist_specs = OrderedDict([

('src/python/dist:universal_dist', {
'key': 'universal',
@@ -4,7 +4,6 @@

from __future__ import absolute_import, division, print_function, unicode_literals

import os
import re
from builtins import next, str

@@ -15,101 +14,34 @@
BuildLocalPythonDistributions
from pants.backend.python.tasks.resolve_requirements import ResolveRequirements
from pants.backend.python.tasks.select_interpreter import SelectInterpreter
from pants.build_graph.address import Address
from pants.util.collections import assert_single_element
from pants.util.memo import memoized_method
from pants.util.meta import classproperty
from pants_test.backend.python.tasks.python_task_test_base import (PythonTaskTestBase,
name_and_platform)
from pants_test.engine.scheduler_test_base import SchedulerTestBase
from pants_test.task_test_base import DeclarativeTaskTestMixin


class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, SchedulerTestBase):
class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestMixin):

@classmethod
def task_type(cls):
return BuildLocalPythonDistributions

@classproperty
def _dist_specs(cls):
"""
This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special
keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the
specified relative paths). The rest of the keys are fed into `self.make_target()`. An
`OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if
they have dependencies on each other).
"""
raise NotImplementedError('_dist_specs must be implemented!')

@classproperty
def _run_before_task_types(cls):
"""
By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets,
we want to compile and link them as well to get the resulting dist to build, so we add those
task types here and execute them beforehand.
"""
def run_before_task_types(cls):
return [SelectInterpreter]

@classproperty
def _run_after_task_types(cls):
"""Tasks to run after local dists are built, similar to `_run_before_task_types`."""
def run_after_task_types(cls):
return [ResolveRequirements]

@memoized_method
def _synthesize_task_types(self, task_types=()):
return [
self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__))
# TODO: make @memoized_method convert lists to tuples for hashing!
for tsk in task_types
]
@classmethod
def rules(cls):
return super(BuildLocalPythonDistributionsTestBase, cls).rules() + native_backend_rules()

def setUp(self):
super(BuildLocalPythonDistributionsTestBase, self).setUp()

self.target_dict = {}

# Create a target from each specification and insert it into `self.target_dict`.
for target_spec, target_kwargs in self._dist_specs.items():
unprocessed_kwargs = target_kwargs.copy()

target_base = Address.parse(target_spec).spec_path

# Populate the target's owned files from the specification.
filemap = unprocessed_kwargs.pop('filemap', {})
for rel_path, content in filemap.items():
buildroot_path = os.path.join(target_base, rel_path)
self.create_file(buildroot_path, content)

# Ensure any dependencies exist in the target dict (`_dist_specs` must then be an
# OrderedDict).
# The 'key' is used to access the target in `self.target_dict`.
key = unprocessed_kwargs.pop('key')
dep_targets = []
for dep_spec in unprocessed_kwargs.pop('dependencies', []):
existing_tgt_key = self._dist_specs[dep_spec]['key']
dep_targets.append(self.target_dict[existing_tgt_key])

# Register the generated target.
generated_target = self.make_target(
spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
self.target_dict[key] = generated_target

def _all_specified_targets(self):
return list(self.target_dict.values())

def _scheduling_context(self, **kwargs):
scheduler = self.mk_scheduler(rules=native_backend_rules())
return self.context(scheduler=scheduler, **kwargs)

def _retrieve_single_product_at_target_base(self, product_mapping, target):
product = product_mapping.get(target)
base_dirs = list(product.keys())
self.assertEqual(1, len(base_dirs))
single_base_dir = base_dirs[0]
all_products = product[single_base_dir]
self.assertEqual(1, len(all_products))
single_product = all_products[0]
return single_product
self.populate_target_dict()

def _get_dist_snapshot_version(self, task, python_dist_target):
"""Get the target's fingerprint, and guess the resulting version string of the built dist.
@@ -132,45 +64,16 @@ def _get_dist_snapshot_version(self, task, python_dist_target):
# --tag-build option.
return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower())

def _create_task(self, task_type, context):
return task_type(context, self.test_workdir)

def _create_distribution_synthetic_target(self, python_dist_target, extra_targets=[]):
run_before_synthesized_task_types = self._synthesize_task_types(tuple(self._run_before_task_types))
python_create_distributions_task_type = self._testing_task_type
run_after_synthesized_task_types = self._synthesize_task_types(tuple(self._run_after_task_types))
all_synthesized_task_types = run_before_synthesized_task_types + [
python_create_distributions_task_type,
] + run_after_synthesized_task_types

context = self._scheduling_context(
target_roots=([python_dist_target] + extra_targets),
for_task_types=all_synthesized_task_types,
context, _, python_create_distributions_task_instance, _ = self.invoke_tasks(
target_roots=[python_dist_target] + extra_targets,
for_subsystems=[PythonRepos, LibcDev],
# TODO(#6848): we should be testing all of these with both of our toolchains.
options={
'native-build-step': {
'toolchain_variant': 'llvm',
},
})
self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets()))

run_before_task_instances = [
self._create_task(task_type, context)
for task_type in run_before_synthesized_task_types
]
python_create_distributions_task_instance = self._create_task(
python_create_distributions_task_type, context)
run_after_task_instances = [
self._create_task(task_type, context)
for task_type in run_after_synthesized_task_types
]
all_task_instances = run_before_task_instances + [
python_create_distributions_task_instance
] + run_after_task_instances

for tsk in all_task_instances:
tsk.execute()

synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets())
self.assertEqual(1, len(synthetic_tgts))
@@ -192,7 +95,7 @@ def _assert_dist_and_wheel_identity(self, expected_name, expected_version, expec
str(resulting_dist_req.requirement))

local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(local_wheel_products, dist_target)
local_wheel = self.retrieve_single_product_at_target_base(local_wheel_products, dist_target)
dist, version, platform = name_and_platform(local_wheel)
self.assertEquals(dist, expected_name)
self.assertEquals(version, expected_snapshot_version)
@@ -6,15 +6,19 @@

import glob
import os
from builtins import object
from contextlib import closing, contextmanager
from io import BytesIO

from future.utils import PY2

from pants.build_graph.address import Address
from pants.goal.goal import Goal
from pants.ivy.bootstrapper import Bootstrapper
from pants.task.console_task import ConsoleTask
from pants.util.contextutil import temporary_dir
from pants.util.memo import memoized_method
from pants.util.meta import classproperty
from pants.util.process_handler import subprocess
from pants_test.test_base import TestBase

@@ -299,3 +303,116 @@ def assert_console_raises(self, exception, **kwargs):
"""
with self.assertRaises(exception):
self.execute_console_task(**kwargs)


class DeclarativeTaskTestMixin(object):
"""Experimental mixin combining target descriptions with a file path dict.
This class should be mixed in to subclasses of `TaskTestBase`!
NB: `self.populate_target_dict()` should be called in the `setUp()` method to use the target specs
specified in `dist_specs`!
This mixin also allows specifying tasks to be run before or after the task_type() is executed when
calling `self.invoke_tasks()`.
"""

@classproperty
def dist_specs(cls):
  """The target specs to materialize via `self.populate_target_dict()`, as a nested dict.

  Two keys are treated specially: 'key' (the name the created target is registered under in
  `self.target_dict`) and 'filemap' (a mapping of relative paths to file contents to create on
  disk). All remaining keys are forwarded to `self.make_target()`. Use an `OrderedDict` of
  2-tuples when targets must be created in a specific order (e.g. when they depend on each
  other).
  """
  raise NotImplementedError('dist_specs must be implemented!')

@classproperty
def run_before_task_types(cls):

This comment has been minimized.

Copy link
@stuhood

stuhood Mar 2, 2019

Member

The run_before and run_after tasks look very important for correct usage of this class, but it's not clear to me what they would do (without reading the implementation of this class).

This comment has been minimized.

Copy link
@cosmicexplorer

cosmicexplorer Mar 3, 2019

Author Contributor

Added lots more documentation (and removed the previous docstrings)!

"""
By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets,
we want to compile and link them as well to get the resulting dist to build, so we add those
task types here and execute them beforehand.
"""
return []

@classproperty
def run_after_task_types(cls):
  """Task types to execute after the task under test, analogous to `run_before_task_types`."""
  return []

def populate_target_dict(self):

This comment has been minimized.

Copy link
@stuhood

stuhood Mar 2, 2019

Member

This helper method doesn't seem like it is specific to this class. You could probably pass in the dist_specs as a map, and return the created targets (which you could then pass to invoke_tasks). That would allow the method to move up to TestBase, and make it useful in more places.

The rest of the Task manipulation stuff could probably stay here.

This comment has been minimized.

Copy link
@cosmicexplorer

cosmicexplorer Mar 3, 2019

Author Contributor

Done!

self.target_dict = {}

# Create a target from each specification and insert it into `self.target_dict`.
for target_spec, target_kwargs in self.dist_specs.items():
unprocessed_kwargs = target_kwargs.copy()

target_base = Address.parse(target_spec).spec_path

# Populate the target's owned files from the specification.
filemap = unprocessed_kwargs.pop('filemap', {})
for rel_path, content in filemap.items():
buildroot_path = os.path.join(target_base, rel_path)
self.create_file(buildroot_path, content)

# Ensure any dependencies exist in the target dict (`dist_specs` must then be an
# OrderedDict).
# The 'key' is used to access the target in `self.target_dict`.
key = unprocessed_kwargs.pop('key')
dep_targets = []
for dep_spec in unprocessed_kwargs.pop('dependencies', []):
existing_tgt_key = self.dist_specs[dep_spec]['key']
dep_targets.append(self.target_dict[existing_tgt_key])

# Register the generated target.
generated_target = self.make_target(
spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
self.target_dict[key] = generated_target

@memoized_method
def _synthesize_task_types(self, task_types=()):
  """Create uniquely-named test subtypes for each of the given task types."""
  synthesized = []
  # TODO: make @memoized_method convert lists to tuples for hashing!
  for task_type in task_types:
    subtype = self.synthesize_task_subtype(task_type, '__tmp_{}'.format(task_type.__name__))
    synthesized.append(subtype)
  return synthesized

def _all_specified_targets(self):
return list(self.target_dict.values())

def _create_task(self, task_type, context):
"""Helper method to instantiate tasks besides self._testing_task_type in the test workdir."""
return task_type(context, self.test_workdir)

def invoke_tasks(self, **context_kwargs):
  """Create and execute the before / under-test / after task pipeline, in order.

  Synthesizes subtypes for `run_before_task_types` and `run_after_task_types`, builds a context
  registering all of them plus the task type under test, instantiates each task in the test
  workdir, and calls `.execute()` on every instance in sequence. All keyword arguments are
  forwarded to `self.context()`.

  :returns: a 4-tuple of (context, before-task instances, task-under-test instance,
            after-task instances).
  """
  before_types = self._synthesize_task_types(tuple(self.run_before_task_types))
  after_types = self._synthesize_task_types(tuple(self.run_after_task_types))

  context = self.context(
    for_task_types=(before_types + [self._testing_task_type] + after_types),
    **context_kwargs)
  # Every target declared in `dist_specs` (and nothing else) should be in the build graph.
  self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets()))

  before_tasks = [self._create_task(task_type, context) for task_type in before_types]
  task_under_test = self._create_task(self._testing_task_type, context)
  after_tasks = [self._create_task(task_type, context) for task_type in after_types]

  for task in (before_tasks + [task_under_test] + after_tasks):
    task.execute()

  return (context, before_tasks, task_under_test, after_tasks)
@@ -35,6 +35,7 @@
from pants.source.source_root import SourceRootConfig
from pants.subsystem.subsystem import Subsystem
from pants.task.goal_options_mixin import GoalOptionsMixin
from pants.util.collections import assert_single_element
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import (recursive_dirname, relative_symlink, safe_file_dump, safe_mkdir,
safe_mkdtemp, safe_open, safe_rmtree)
@@ -674,3 +675,9 @@ def captured_logging(self, level=None):
finally:
root_logger.setLevel(old_level)
root_logger.removeHandler(handler)

def retrieve_single_product_at_target_base(self, product_mapping, target):
  """Return the single product registered for `target` in `product_mapping`.

  Asserts (via `assert_single_element`) that exactly one base dir is mapped for the target, and
  that exactly one product is registered under that base dir.
  """
  target_products = product_mapping.get(target)
  only_base_dir = assert_single_element(list(target_products.keys()))
  return assert_single_element(target_products[only_base_dir])
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.