Skip to content

Commit

Permalink
Update ansible-test handling of delegation paths. (ansible#69056)
Browse files Browse the repository at this point in the history
  • Loading branch information
mattclay authored and bcoca committed Apr 22, 2020
1 parent b7bdc8d commit 83576aa
Show file tree
Hide file tree
Showing 11 changed files with 51 additions and 52 deletions.
2 changes: 0 additions & 2 deletions .gitignore
Expand Up @@ -105,5 +105,3 @@ test/units/.coverage.*
/SYMLINK_CACHE.json
changelogs/.plugin-cache.yaml
.ansible-test-timeout.json
# ansible-test temporary metadata file for use with delegation
/metadata-*.json
2 changes: 2 additions & 0 deletions changelogs/fragments/ansible-test-delegation-paths.yml
@@ -0,0 +1,2 @@
minor_changes:
- ansible-test now places the ansible source and collections content in separate directories when using the ``--docker`` or ``--remote`` options.
7 changes: 1 addition & 6 deletions test/lib/ansible_test/_internal/cloud/__init__.py
Expand Up @@ -197,12 +197,7 @@ def __init__(self, args):
def config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
"""Add the config file to the payload file list."""
if self._get_cloud_config(self._CONFIG_PATH, ''):
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''

pair = (self.config_path, os.path.join(working_path, os.path.relpath(self.config_path, data_context().content.root)))
pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))

if pair not in files:
display.info('Including %s config: %s -> %s' % (self.platform, pair[0], pair[1]), verbosity=3)
Expand Down
7 changes: 1 addition & 6 deletions test/lib/ansible_test/_internal/config.py
Expand Up @@ -209,13 +209,8 @@ def metadata_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
"""Add the metadata file to the payload file list."""
config = self

if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''

if self.metadata_path:
files.append((os.path.abspath(config.metadata_path), os.path.join(working_path, config.metadata_path)))
files.append((os.path.abspath(config.metadata_path), config.metadata_path))

data_context().register_payload_callback(metadata_callback)

Expand Down
9 changes: 2 additions & 7 deletions test/lib/ansible_test/_internal/core_ci.py
Expand Up @@ -617,13 +617,8 @@ def ssh_key_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
Add the SSH keys to the payload file list.
They are either outside the source tree or in the cache dir which is ignored by default.
"""
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''

files.append((key, os.path.join(working_path, os.path.relpath(key_dst, data_context().content.root))))
files.append((pub, os.path.join(working_path, os.path.relpath(pub_dst, data_context().content.root))))
files.append((key, os.path.relpath(key_dst, data_context().content.root)))
files.append((pub, os.path.relpath(pub_dst, data_context().content.root)))

data_context().register_payload_callback(ssh_key_callback)

Expand Down
28 changes: 15 additions & 13 deletions test/lib/ansible_test/_internal/delegation.py
Expand Up @@ -117,8 +117,10 @@ def delegate(args, exclude, require, integration_targets):
:rtype: bool
"""
if isinstance(args, TestConfig):
with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=data_context().content.root) as metadata_fd:
args.metadata_path = os.path.basename(metadata_fd.name)
make_dirs(ResultType.TMP.path)

with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
args.metadata.to_file(args.metadata_path)

try:
Expand Down Expand Up @@ -244,16 +246,17 @@ def delegate_docker(args, exclude, require, integration_targets):

python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)

install_root = '/root/ansible'
pwd = '/root'
ansible_root = os.path.join(pwd, 'ansible')

if data_context().content.collection:
content_root = os.path.join(install_root, data_context().content.collection.directory)
content_root = os.path.join(pwd, data_context().content.collection.directory)
else:
content_root = install_root
content_root = ansible_root

remote_results_root = os.path.join(content_root, data_context().content.results_path)

cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)

if isinstance(args, TestConfig):
if args.coverage and not args.coverage_label:
Expand Down Expand Up @@ -321,9 +324,8 @@ def delegate_docker(args, exclude, require, integration_targets):
# write temporary files to /root since /tmp isn't ready immediately on container start
docker_put(args, test_id, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'), '/root/docker.sh')
docker_exec(args, test_id, ['/bin/bash', '/root/docker.sh'])
docker_put(args, test_id, local_source_fd.name, '/root/ansible.tgz')
docker_exec(args, test_id, ['mkdir', '/root/ansible'])
docker_exec(args, test_id, ['tar', 'oxzf', '/root/ansible.tgz', '-C', '/root/ansible'])
docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])

# docker images are only expected to have a single python version available
if isinstance(args, UnitsConfig) and not args.python:
Expand Down Expand Up @@ -440,14 +442,14 @@ def delegate_remote(args, exclude, require, integration_targets):

python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)

install_root = os.path.join(pwd, 'ansible')
ansible_root = os.path.join(pwd, 'ansible')

if data_context().content.collection:
content_root = os.path.join(install_root, data_context().content.collection.directory)
content_root = os.path.join(pwd, data_context().content.collection.directory)
else:
content_root = install_root
content_root = ansible_root

cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)

if httptester_id:
cmd += ['--inject-httptester']
Expand Down
7 changes: 1 addition & 6 deletions test/lib/ansible_test/_internal/executor.py
Expand Up @@ -972,12 +972,7 @@ def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) ->
Add the integration config vars file to the payload file list.
This will preserve the file during delegation even if the file is ignored by source control.
"""
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''

files.append((vars_file_src, os.path.join(working_path, data_context().content.integration_vars_path)))
files.append((vars_file_src, data_context().content.integration_vars_path))

data_context().register_payload_callback(integration_config_callback)

Expand Down
7 changes: 1 addition & 6 deletions test/lib/ansible_test/_internal/integration/__init__.py
Expand Up @@ -170,12 +170,7 @@ def inventory_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
Add the inventory file to the payload file list.
This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
"""
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''

inventory_path = os.path.join(working_path, get_inventory_relative_path(args))
inventory_path = get_inventory_relative_path(args)
inventory_tuple = inventory_path_src, inventory_path

if os.path.isfile(inventory_path_src) and inventory_tuple not in files:
Expand Down
2 changes: 1 addition & 1 deletion test/lib/ansible_test/_internal/manage_ci.py
Expand Up @@ -277,7 +277,7 @@ def upload_source(self):
# being different and -z not being recognized. This pattern works
# with both versions of tar.
self.ssh(
'rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && gunzip --stdout %s | tar oxf - && rm %s' %
'rm -rf ~/ansible ~/ansible_collections && cd ~/ && gunzip --stdout %s | tar oxf - && rm %s' %
(remote_source_path, remote_source_path)
)

Expand Down
31 changes: 27 additions & 4 deletions test/lib/ansible_test/_internal/payload.py
Expand Up @@ -75,20 +75,43 @@ def make_executable(tar_info): # type: (tarfile.TarInfo) -> t.Optional[tarfile.
files = [f for f in files if
is_subdir(f[1], 'bin/') or
is_subdir(f[1], 'lib/ansible/') or
(is_subdir(f[1], 'test/lib/ansible_test/') and not is_subdir(f[1], 'test/lib/ansible_test/tests/'))]
is_subdir(f[1], 'test/lib/ansible_test/')]

if not isinstance(args, (ShellConfig, IntegrationConfig)):
# exclude built-in ansible modules when they are not needed
files = [f for f in files if not is_subdir(f[1], 'lib/ansible/modules/') or f[1] == 'lib/ansible/modules/__init__.py']

collection_layouts = data_context().create_collection_layouts()

content_files = []
extra_files = []

for layout in collection_layouts:
# include files from each collection in the same collection root as the content being tested
files.extend((os.path.join(layout.root, path), os.path.join(layout.collection.directory, path)) for path in layout.all_files())
if layout == data_context().content:
# include files from the current collection (layout.collection.directory will be added later)
content_files.extend((os.path.join(layout.root, path), path) for path in data_context().content.all_files())
else:
# include files from each collection in the same collection root as the content being tested
extra_files.extend((os.path.join(layout.root, path), os.path.join(layout.collection.directory, path)) for path in layout.all_files())
else:
# when testing ansible itself the ansible source is the content
content_files = files
# there are no extra files when testing ansible itself
extra_files = []

for callback in data_context().payload_callbacks:
callback(files)
# execute callbacks only on the content paths
# this is done before placing them in the appropriate subdirectory (see below)
callback(content_files)

# place ansible source files under the 'ansible' directory on the delegated host
files = [(src, os.path.join('ansible', dst)) for src, dst in files]

if data_context().content.collection:
# place collection files under the 'ansible_collections/{namespace}/{collection}' directory on the delegated host
files.extend((src, os.path.join(data_context().content.collection.directory, dst)) for src, dst in content_files)
# extra files already have the correct destination path
files.extend(extra_files)

# maintain predictable file order
files = sorted(set(files))
Expand Down
1 change: 0 additions & 1 deletion test/sanity/code-smell/package-data.py
Expand Up @@ -27,7 +27,6 @@ def assemble_files_to_ship(complete_file_list):
'hacking/shippable/*',
'hacking/tests/*',
'hacking/ticket_stubs/*',
'metadata-*.json', # ansible-test with --docker produces this tmp file.
'test/sanity/code-smell/botmeta.*',
'test/utils/*',
'test/utils/*/*',
Expand Down

0 comments on commit 83576aa

Please sign in to comment.