move the declarative task stuff out of the python backend testing #7279

Merged
@@ -23,14 +23,14 @@
class TestBuildLocalDistsWithCtypesNativeSources(BuildLocalPythonDistributionsTestBase):

  @classproperty
-  def _run_before_task_types(cls):
+  def run_before_task_types(cls):
    return [
      CCompile,
      CppCompile,
      LinkSharedLibraries,
-    ] + super(TestBuildLocalDistsWithCtypesNativeSources, cls)._run_before_task_types
+    ] + super(TestBuildLocalDistsWithCtypesNativeSources, cls).run_before_task_types

-  _dist_specs = OrderedDict([
+  dist_specs = OrderedDict([

    ('src/python/plat_specific_c_dist:ctypes_c_library', {
      'key': 'ctypes_c_library',
@@ -19,7 +19,7 @@

class TestBuildLocalDistsNativeSources(BuildLocalPythonDistributionsTestBase):

-  _dist_specs = OrderedDict([
+  dist_specs = OrderedDict([

    ('src/python/dist:universal_dist', {
      'key': 'universal',
@@ -4,7 +4,6 @@

from __future__ import absolute_import, division, print_function, unicode_literals

-import os
import re
from builtins import next, str

@@ -15,101 +14,34 @@
  BuildLocalPythonDistributions
from pants.backend.python.tasks.resolve_requirements import ResolveRequirements
from pants.backend.python.tasks.select_interpreter import SelectInterpreter
-from pants.build_graph.address import Address
from pants.util.collections import assert_single_element
-from pants.util.memo import memoized_method
from pants.util.meta import classproperty
from pants_test.backend.python.tasks.python_task_test_base import (PythonTaskTestBase,
                                                                    name_and_platform)
-from pants_test.engine.scheduler_test_base import SchedulerTestBase
+from pants_test.engine.scheduler_test_base import DeclarativeTaskTestBase


-class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, SchedulerTestBase):
+class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestBase):

  @classmethod
  def task_type(cls):
    return BuildLocalPythonDistributions

-  @classproperty
-  def _dist_specs(cls):
-    """
-    This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special
-    keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the
-    specified relative paths). The rest of the keys are fed into `self.make_target()`. An
-    `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if
-    they have dependencies on each other).
-    """
-    raise NotImplementedError('_dist_specs must be implemented!')
-
  @classproperty
-  def _run_before_task_types(cls):
-    """
-    By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets,
-    we want to compile and link them as well to get the resulting dist to build, so we add those
-    task types here and execute them beforehand.
-    """
+  def run_before_task_types(cls):
    return [SelectInterpreter]

  @classproperty
-  def _run_after_task_types(cls):
-    """Tasks to run after local dists are built, similar to `_run_before_task_types`."""
+  def run_after_task_types(cls):
    return [ResolveRequirements]

-  @memoized_method
-  def _synthesize_task_types(self, task_types=()):
-    return [
-      self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__))
-      # TODO: make @memoized_method convert lists to tuples for hashing!
-      for tsk in task_types
-    ]
+  @classmethod
+  def rules(cls):
+    return super(BuildLocalPythonDistributionsTestBase, cls).rules() + native_backend_rules()

  def setUp(self):
    super(BuildLocalPythonDistributionsTestBase, self).setUp()
-
-    self.target_dict = {}
-
-    # Create a target from each specification and insert it into `self.target_dict`.
-    for target_spec, target_kwargs in self._dist_specs.items():
-      unprocessed_kwargs = target_kwargs.copy()
-
-      target_base = Address.parse(target_spec).spec_path
-
-      # Populate the target's owned files from the specification.
-      filemap = unprocessed_kwargs.pop('filemap', {})
-      for rel_path, content in filemap.items():
-        buildroot_path = os.path.join(target_base, rel_path)
-        self.create_file(buildroot_path, content)
-
-      # Ensure any dependencies exist in the target dict (`_dist_specs` must then be an
-      # OrderedDict).
-      # The 'key' is used to access the target in `self.target_dict`.
-      key = unprocessed_kwargs.pop('key')
-      dep_targets = []
-      for dep_spec in unprocessed_kwargs.pop('dependencies', []):
-        existing_tgt_key = self._dist_specs[dep_spec]['key']
-        dep_targets.append(self.target_dict[existing_tgt_key])
-
-      # Register the generated target.
-      generated_target = self.make_target(
-        spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
-      self.target_dict[key] = generated_target
-
-  def _all_specified_targets(self):
-    return list(self.target_dict.values())
-
-  def _scheduling_context(self, **kwargs):
-    scheduler = self.mk_scheduler(rules=native_backend_rules())
-    return self.context(scheduler=scheduler, **kwargs)
-
-  def _retrieve_single_product_at_target_base(self, product_mapping, target):
-    product = product_mapping.get(target)
-    base_dirs = list(product.keys())
-    self.assertEqual(1, len(base_dirs))
-    single_base_dir = base_dirs[0]
-    all_products = product[single_base_dir]
-    self.assertEqual(1, len(all_products))
-    single_product = all_products[0]
-    return single_product
+    self.populate_target_dict()

  def _get_dist_snapshot_version(self, task, python_dist_target):
    """Get the target's fingerprint, and guess the resulting version string of the built dist.
@@ -132,45 +64,16 @@ def _get_dist_snapshot_version(self, task, python_dist_target):
    # --tag-build option.
    return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower())
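(Editor's note, not part of the diff: the substitution above just lowercases the fingerprint and replaces every non-alphanumeric character with a dot when guessing the built dist's version string. A minimal illustration, with a made-up fingerprint value:)

  import re

  versioned_target_fingerprint = '0123ABCD_feed-beef'
  print(re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower()))
  # -> '0123abcd.feed.beef'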

-  def _create_task(self, task_type, context):
-    return task_type(context, self.test_workdir)
-
  def _create_distribution_synthetic_target(self, python_dist_target, extra_targets=[]):
-    run_before_synthesized_task_types = self._synthesize_task_types(tuple(self._run_before_task_types))
-    python_create_distributions_task_type = self._testing_task_type
-    run_after_synthesized_task_types = self._synthesize_task_types(tuple(self._run_after_task_types))
-    all_synthesized_task_types = run_before_synthesized_task_types + [
-      python_create_distributions_task_type,
-    ] + run_after_synthesized_task_types
-
-    context = self._scheduling_context(
-      target_roots=([python_dist_target] + extra_targets),
-      for_task_types=all_synthesized_task_types,
+    context, _, python_create_distributions_task_instance, _ = self.invoke_tasks(
+      target_roots=[python_dist_target] + extra_targets,
      for_subsystems=[PythonRepos, LibcDev],
      # TODO(#6848): we should be testing all of these with both of our toolchains.
      options={
        'native-build-step': {
          'toolchain_variant': 'llvm',
        },
      })
-    self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets()))
-
-    run_before_task_instances = [
-      self._create_task(task_type, context)
-      for task_type in run_before_synthesized_task_types
-    ]
-    python_create_distributions_task_instance = self._create_task(
-      python_create_distributions_task_type, context)
-    run_after_task_instances = [
-      self._create_task(task_type, context)
-      for task_type in run_after_synthesized_task_types
-    ]
-    all_task_instances = run_before_task_instances + [
-      python_create_distributions_task_instance
-    ] + run_after_task_instances
-
-    for tsk in all_task_instances:
-      tsk.execute()

    synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets())
    self.assertEqual(1, len(synthetic_tgts))
@@ -9,11 +9,14 @@
from builtins import object

from pants.base.file_system_project_tree import FileSystemProjectTree
+from pants.build_graph.address import Address
from pants.engine.nodes import Throw
from pants.engine.scheduler import Scheduler
from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS
from pants.util.contextutil import temporary_file_path
from pants.util.dirutil import safe_mkdtemp, safe_rmtree
+from pants.util.memo import memoized_method
+from pants.util.meta import classproperty
from pants_test.engine.util import init_native


@@ -97,3 +100,120 @@ def execute_raising_throw(self, scheduler, product, subject):
    self.assertTrue(type(resulting_value) is Throw)

    raise resulting_value.exc
+
+
+class DeclarativeTaskTestBase(SchedulerTestBase):
This conversation was marked as resolved by cosmicexplorer

stuhood (Member) commented on Feb 23, 2019:
It should at this point be possible to extend TestBase (and/or TaskTestBase) to get a fully useful, legacy-configured Scheduler via self.scheduler... SchedulerTestBase should be primarily useful for tests that intend to manually construct their own scheduler.

cosmicexplorer (Author) replied on Mar 1, 2019:
Removed all the scheduler stuff and moved to task_test_base.py!

"""???/experimental blah, makes things declarative, whatever"""
This conversation was marked as resolved by cosmicexplorer

In reply to a minimized comment from stuhood, cosmicexplorer (Author) wrote on Mar 1, 2019:
Expanded!


+  @classproperty
+  def dist_specs(cls):
+    """
+    This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special
+    keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the
+    specified relative paths). The rest of the keys are fed into `self.make_target()`. An
+    `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if
+    they have dependencies on each other).
+    """
+    raise NotImplementedError('dist_specs must be implemented!')
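(Editor's sketch, not part of the diff: based on the docstring above and the truncated ctypes example earlier in this diff, a subclass's dist_specs might look roughly like the following. The target path, target type, sources, and file contents are hypothetical.)

  dist_specs = OrderedDict([
    ('src/python/example_dist:example_dist', {
      # 'key' indexes the created target in self.target_dict.
      'key': 'example',
      # 'filemap' creates these files on disk, relative to the target's spec path.
      'filemap': {
        'setup.py': "from setuptools import setup; setup(name='example', version='0.0.1')",
        'example.py': 'print("hello")',
      },
      # Everything else is passed through to self.make_target(); the target type and
      # sources shown here are assumptions for the sketch.
      'target_type': PythonDistribution,
      'sources': ['setup.py', 'example.py'],
    }),
  ])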

+  @classproperty
+  def run_before_task_types(cls):
+    """
+    By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets,
+    we want to compile and link them as well to get the resulting dist to build, so we add those
+    task types here and execute them beforehand.
+    """
+    return []
+
+  @classproperty
+  def run_after_task_types(cls):
+    """Tasks to run after local dists are built, similar to `run_before_task_types`."""
+    return []
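(Editor's sketch, not part of the diff: a subclass, mixed into a TaskTestBase subclass as in the python backend tests above, would typically extend these lists rather than replace them, just as the ctypes test at the top of this diff does. MyDistsTest, SomeCompileTask, and SomeResolveTask are placeholder names.)

  class MyDistsTest(PythonTaskTestBase, DeclarativeTaskTestBase):

    @classproperty
    def run_before_task_types(cls):
      # e.g. compile/link task types that must execute before the task under test.
      return [SomeCompileTask] + super(MyDistsTest, cls).run_before_task_types

    @classproperty
    def run_after_task_types(cls):
      # e.g. a resolve task to run once the local dists are built.
      return [SomeResolveTask] + super(MyDistsTest, cls).run_after_task_types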

+  def populate_target_dict(self):
+    self.target_dict = {}
+
+    # Create a target from each specification and insert it into `self.target_dict`.
+    for target_spec, target_kwargs in self.dist_specs.items():
+      unprocessed_kwargs = target_kwargs.copy()
+
+      target_base = Address.parse(target_spec).spec_path
+
+      # Populate the target's owned files from the specification.
+      filemap = unprocessed_kwargs.pop('filemap', {})
+      for rel_path, content in filemap.items():
+        buildroot_path = os.path.join(target_base, rel_path)
+        self.create_file(buildroot_path, content)
This conversation was marked as resolved by cosmicexplorer

stuhood (Member) commented on Feb 23, 2019:
I think that this means that this needs to be mixed into something that extends TestBase anyway... so probably best to use the scheduler from TestBase.

cosmicexplorer (Author) replied on Mar 1, 2019:
Was able to remove all scheduler nonsense! Noted that it needs to be mixed into something subclassing TaskTestBase!


+      # Ensure any dependencies exist in the target dict (`dist_specs` must then be an
+      # OrderedDict).
+      # The 'key' is used to access the target in `self.target_dict`.
+      key = unprocessed_kwargs.pop('key')
+      dep_targets = []
+      for dep_spec in unprocessed_kwargs.pop('dependencies', []):
+        existing_tgt_key = self.dist_specs[dep_spec]['key']
+        dep_targets.append(self.target_dict[existing_tgt_key])
+
+      # Register the generated target.
+      generated_target = self.make_target(
+        spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
+      self.target_dict[key] = generated_target

+  @memoized_method
+  def _synthesize_task_types(self, task_types=()):
+    return [
+      self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__))
+      # TODO: make @memoized_method convert lists to tuples for hashing!
+      for tsk in task_types
+    ]
+
+  def _all_specified_targets(self):
+    return list(self.target_dict.values())
+
+  def _scheduling_context(self, **kwargs):
+    scheduler = self.mk_scheduler(rules=self.rules())
+    return self.context(scheduler=scheduler, **kwargs)
+
+  def _retrieve_single_product_at_target_base(self, product_mapping, target):
+    product = product_mapping.get(target)
+    base_dirs = list(product.keys())
+    self.assertEqual(1, len(base_dirs))
+    single_base_dir = base_dirs[0]
+    all_products = product[single_base_dir]
+    self.assertEqual(1, len(all_products))
+    single_product = all_products[0]
+    return single_product

+  def _create_task(self, task_type, context):

stuhood (Member) commented on Feb 23, 2019:
TaskTestBase should handle this I think.

cosmicexplorer (Author) replied on Mar 1, 2019:
Added a docstring -- this is used specifically to hydrate the run_before_task_types and run_after_task_types in self.invoke_tasks().

+    return task_type(context, self.test_workdir)
+
+  def invoke_tasks(self, **context_kwargs):
+    run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types))
+    run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types))
+    all_synthesized_task_types = run_before_synthesized_task_types + [
+      self._testing_task_type,
+    ] + run_after_synthesized_task_types
+
+    context = self._scheduling_context(
+      for_task_types=all_synthesized_task_types,
+      **context_kwargs)
+    self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets()))
+
+    run_before_task_instances = [
+      self._create_task(task_type, context)
+      for task_type in run_before_synthesized_task_types
+    ]
+    current_task_instance = self._create_task(
+      self._testing_task_type, context)
+    run_after_task_instances = [
+      self._create_task(task_type, context)
+      for task_type in run_after_synthesized_task_types
+    ]
+    all_task_instances = run_before_task_instances + [
+      current_task_instance
+    ] + run_after_task_instances
+
+    for tsk in all_task_instances:
+      tsk.execute()
+
+    return (context, run_before_task_instances, current_task_instance, run_after_task_instances)
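(Editor's sketch, not part of the diff: roughly how a test that mixes this class into a TaskTestBase subclass might drive invoke_tasks, mirroring _create_distribution_synthetic_target earlier in the diff. The test name, target key, and option values are illustrative.)

  def test_builds_one_synthetic_target(self):
    example_target = self.target_dict['example']  # created by populate_target_dict() in setUp
    context, _, task_under_test, _ = self.invoke_tasks(
      target_roots=[example_target],
      options={'native-build-step': {'toolchain_variant': 'llvm'}})
    # Any targets injected by the task under test show up in the build graph
    # beyond the ones declared in dist_specs.
    synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets())
    self.assertEqual(1, len(synthetic_tgts))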