195 changes: 153 additions & 42 deletions reframe/frontend/cli.py
@@ -111,6 +111,124 @@ def generate_report_filename(filepatt):
return filepatt.format(sessionid=new_id)


def filter_checks(checks_found, options, rt, loader, ci_tag=None):
Contributor:
This will lead to immense merge conflicts when #1594 is merged. I would suggest reverting it to its original location.


# Filter checks by name
checks_matched = checks_found
if options.exclude_names:
for name in options.exclude_names:
checks_matched = filter(filters.have_not_name(name),
checks_matched)

if options.names:
checks_matched = filter(filters.have_name('|'.join(options.names)),
checks_matched)

# Filter checks by tags
for tag in options.tags:
checks_matched = filter(filters.have_tag(tag), checks_matched)

# Filter checks by ci tags
if ci_tag:
checks_matched = filter(filters.have_tag(ci_tag), checks_matched)

# Filter checks by prgenv
if not options.skip_prgenv_check:
for prgenv in options.prgenv:
checks_matched = filter(filters.have_prgenv(prgenv),
checks_matched)

# Filter checks by system
if not options.skip_system_check:
checks_matched = filter(
filters.have_partition(rt.system.partitions), checks_matched)

if options.gpu_only:
checks_matched = filter(filters.have_gpu_only(), checks_matched)
elif options.cpu_only:
checks_matched = filter(filters.have_cpu_only(), checks_matched)

# Determine the allowed programming environments
allowed_environs = {e.name
for env_patt in options.prgenv
for p in rt.system.partitions
for e in p.environs if re.match(env_patt, e.name)}

# Materialize the lazy filters and also return the allowed environments,
# since the caller needs them to generate the test cases
checks_matched = list(checks_matched)

return checks_matched, allowed_environs

# TODO: place this function in a proper module
# TODO: define a mechanism for propagating command-line options; currently
# we pass site_config, but that is clearly a bad design.  See below for the
# options that need to be propagated or made available to each CI
# invocation.  An alternative design would be to export environment
# variables during the trigger phase of ReFrame that would influence the
# tests individually (see the sketch after this function).
def generate_ci_pipeline_file(ci_pipeline_file, ci_tags, checks_found, options, rt, loader, site_config):
Contributor:
All the CI integration functionality must go into a separate module.

Contributor:
And the interface seems a bit hacky, too.

Contributor:
The reason is that this function mixes everything: the selection of tests and the generation of the pipeline. These must be separated and, especially, the selection of the tests must be viewed in light of the work done in #1594.

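For illustration only, a rough sketch of the separation suggested above, with the CI functionality living in its own module (the module path `reframe/frontend/ci.py` and the function names are hypothetical, not part of this PR):

```python
# Hypothetical reframe/frontend/ci.py -- a sketch, not the PR's code
import yaml


def select_checks(checks, predicate):
    # Selection step: keep only the checks satisfying `predicate`,
    # e.g. filters.have_tag(tag) from the frontend check filters.
    return [c for c in checks if predicate(c)]


def emit_pipeline(fp, pipeline_spec):
    # Generation step: dump an already-built pipeline spec to YAML.
    yaml.safe_dump(pipeline_spec, fp, indent=2)
```

The CLI would then only wire the two steps together, which also keeps the test selection reusable once #1594 lands.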
import reframe.utility as util
import yaml

stages = {
'stages': [f'rfm-stage-{m}' for m in ci_tags]
}

with open(ci_pipeline_file, 'w') as pipeline_file:
for entry in yaml.safe_dump(stages, indent=2).split('\n'):
pipeline_file.write(f'{entry}\n')

for tag in ci_tags:
pipeline = {}
checks_matched, _ = filter_checks(checks_found, options, rt, loader, ci_tag=tag)

# TODO there are some options that need to be revisited
# - about the test folder: reframe's -c option
# - about the configuration file to use: reframe's -C option
# - about the artifacts: artifacts: {paths: [jobs_scratch_dir], when: always}

# TODO there are some missing options:
Contributor:
It's not clear to me how you want to treat the different flags. Can you explain?

# - about other potentially important options:
# + --keep-stage-files
# + --ignore-check-conflicts
# + -p
# + --gpu-only
# + --cpu-only
# + -A
# + -P
# + --reservation
# + --skip-performance-check
# + --nodelist
# + --exclude-nodes
# + --skip-system-check
# + --skip-prgenv-check
# + --mode
# + --max-retries
# + -M
# + -m
# + --module-mappings
# + --failure-stats
# + --performance-report
for test in checks_matched:
Contributor:
Everything must happen at the test case level. Now the test cases are generated immediately and everything in the frontend operates on them.

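As a hedged sketch of what operating at the test case level might look like here (assuming test cases expose their underlying check as `case.check`, as the frontend's generated test cases do; the grouping helper itself is illustrative, not existing ReFrame API):

```python
from collections import defaultdict


def group_testcases_by_ci_tag(testcases, ci_tags):
    # Group already-generated test cases by CI tag, so that the pipeline
    # generation operates on test cases rather than on raw checks.
    groups = defaultdict(list)
    for case in testcases:
        for tag in ci_tags:
            if tag in case.check.tags:
                groups[tag].append(case)

    return groups
```

The per-tag job entries below would then be built from `groups[tag]` instead of from `checks_matched`.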
load_path = ' -c '.join(site_config.get('general/0/check_search_path'))
recurse = '-R' if site_config.get('general/0/check_search_recursive') else ''
ignore_conflicts = '--ignore-check-conflicts' if site_config.get('general/0/ignore_check_conflicts') else ''
pipeline[f'{test.name}'] = {
'stage' : f'rfm-stage-{tag}',
'script' : [
f'{reframe.INSTALL_PREFIX}/bin/reframe -C {site_config.filename} -c {load_path} {recurse} {ignore_conflicts} --prefix rfm_tests_stage_dir -n {test.name} -r'
],
'artifacts' : {
'paths' : 'rfm_tests_stage_dir'
}
}

with open(ci_pipeline_file, 'a') as pipeline_file:
for entry in yaml.safe_dump(pipeline, indent=2).split('\n'):
pipeline_file.write(f'{entry}\n')
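
As a sketch of the alternative mentioned in the TODO above this function: because the frontend options are also exposed as `RFM_*` environment variables, a generated job could carry them in a GitLab CI `variables:` block instead of hard-coding every flag on the script line. The variable names below are assumptions based on ReFrame's usual option-to-environment mapping, not something this PR defines:

```python
import reframe


def ci_job_with_variables(test_name, tag, config_file):
    # Sketch only: let the child ReFrame invocation pick up its options
    # from the job environment rather than from command-line flags.
    return {
        'stage': f'rfm-stage-{tag}',
        'script': [f'{reframe.INSTALL_PREFIX}/bin/reframe -n {test_name} -r'],
        'variables': {
            'RFM_CONFIG_FILE': config_file,       # assumed variable name
            'RFM_PREFIX': 'rfm_tests_stage_dir',  # assumed variable name
        },
        'artifacts': {'paths': ['rfm_tests_stage_dir']}
    }
```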


def main():
# Setup command line options
argparser = argparse.ArgumentParser()
@@ -299,6 +417,19 @@ def main():
'--disable-hook', action='append', metavar='NAME', dest='hooks',
default=[], help='Disable a pipeline hook for this run'
)
run_options.add_argument(
'--ci-generate-pipeline', action='store', metavar='FILE',
help="Store the generated CI pipeline in the YAML FILE",
envvar='RFM_CI_PIPELINE_FILE',
configvar='general/ci_pipeline_file'
)
run_options.add_argument(
'--ci-tags', action='append', metavar='TAG', default=[],
help="Generate a CI pipeline stage for each of the given TAGs",
envvar='RFM_CI_TAGS',
configvar='general/ci_tags'
)

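Since both options also declare configuration variables, they could presumably be set from the site configuration as well; a minimal sketch, assuming the usual `site_configuration` layout of a ReFrame settings file (the values shown are placeholders):

```python
# Settings-file fragment; the key names follow the configvar strings above.
site_configuration = {
    'general': [
        {
            'ci_pipeline_file': '.gitlab-ci.yml',
            'ci_tags': ['build', 'run'],
        }
    ],
}
```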
env_options.add_argument(
'-M', '--map-module', action='append', metavar='MAPPING',
dest='module_mappings', default=[],
@@ -578,72 +709,52 @@ def print_infoline(param, value):
print_infoline('stage directory', repr(session_info['prefix_stage']))
print_infoline('output directory', repr(session_info['prefix_output']))
printer.info('')

ci_pipeline_file = site_config.get('general/0/ci_pipeline_file')
ci_tags = site_config.get('general/0/ci_tags')

# The two options must be given either together or not at all
if (ci_pipeline_file or ci_tags) and not (ci_pipeline_file and ci_tags):
printer.error("options `--ci-generate-pipeline' and `--ci-tags' "
"must be used together")
sys.exit(1)

try:
# Locate and load checks
try:
checks_found = loader.load_all()
except OSError as e:
raise errors.ReframeError from e

# Filter checks by name
checks_matched = checks_found
if options.exclude_names:
for name in options.exclude_names:
checks_matched = filter(filters.have_not_name(name),
checks_matched)

if options.names:
checks_matched = filter(filters.have_name('|'.join(options.names)),
checks_matched)

# Filter checks by tags
for tag in options.tags:
checks_matched = filter(filters.have_tag(tag), checks_matched)

# Filter checks by prgenv
if not options.skip_prgenv_check:
for prgenv in options.prgenv:
checks_matched = filter(filters.have_prgenv(prgenv),
checks_matched)

# Filter checks by system
if not options.skip_system_check:
checks_matched = filter(
filters.have_partition(rt.system.partitions), checks_matched)

# Filter checks further
if options.gpu_only and options.cpu_only:
printer.error("options `--gpu-only' and `--cpu-only' "
"are mutually exclusive")
sys.exit(1)

if options.gpu_only:
checks_matched = filter(filters.have_gpu_only(), checks_matched)
elif options.cpu_only:
checks_matched = filter(filters.have_cpu_only(), checks_matched)

# Determine the allowed programming environments
allowed_environs = {e.name
for env_patt in options.prgenv
for p in rt.system.partitions
for e in p.environs if re.match(env_patt, e.name)}

# Generate the test cases, validate dependencies and sort them
checks_matched = list(checks_matched)
# TODO: The -l flag is processed after this point, so the pipeline file is generated even for listing-only runs!
if ci_tags:
generate_ci_pipeline_file(ci_pipeline_file, ci_tags, checks_found, options, rt, loader, site_config)
sys.exit(0)

checks_matched, allowed_environs = filter_checks(checks_found, options, rt, loader)
# Disable hooks
for c in checks_matched:
for h in options.hooks:
type(c).disable_hook(h)

testcases = generate_testcases(checks_matched,
options.skip_system_check,
options.skip_prgenv_check,
allowed_environs)
testgraph = dependency.build_deps(testcases)
dependency.validate_deps(testgraph)
testcases = dependency.toposort(testgraph)


# Manipulate ReFrame's environment
if site_config.get('general/0/purge_environment'):
rt.modules_system.unload_all()