diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 0000000000..f7a0a25ab4 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,22 @@ +name: Static Analysis +on: [push, pull_request] +jobs: + python-linting: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v2 + + - name: set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + + - name: install Python packages + run: | + pip install --upgrade pip + pip install --upgrade flake8 + + - name: Run flake8 + run: | + flake8 easybuild/tools + \ No newline at end of file diff --git a/.gitignore b/.gitignore index c8b95e4482..593345db93 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ build/ dist/ *egg-info/ *.swp +.mypy_cache/ Dockerfile.* Singularity.* diff --git a/RELEASE_NOTES b/RELEASE_NOTES index 1b71f89630..605ce561e2 100644 --- a/RELEASE_NOTES +++ b/RELEASE_NOTES @@ -3,6 +3,41 @@ For more detailed information, please see the git log. These release notes can also be consulted at https://easybuild.readthedocs.io/en/latest/Release_notes.html. +v4.2.1 (May 20th 2020) +---------------------- + +update/bugfix release + +- various enhancements, including: + - also mention CPU architecture (x86_64, POWER) in comment for test reports (#3281) + - add support for enhancing existing sanity check in easyconfigs, as opposed to overwriting paths/commands (#3288) + - clean up locks when EasyBuild session is cancelled with a signal like SIGTERM (#3291, #3321) + - add 'find_glob_pattern' function to filetools module (#3297) + - add constants for common OS dependencies (OS_PKG_IBVERBS_DEV, OS_PKG_OPENSSL_DEV, ...) (#3309, #3334) + - flesh out get_mpi_cmd_template function from Mpi.mpi_cmd_for method (#3312) + - add variable 'moddependpaths' to specify extra $MODULEPATH entry to consider for loading dependency modules (#3324) + - allow copying of tweaked easyconfigs when using --try-* with --copy-ec (#3332) +- various bug fixes, including: + - make ModulesTool.exist more robust w.r.t. module wrappers, aliases, defaults, etc. 
(#3216, #3337) + - clean up rst output of --list-toolchains (#3246) + - cast CPU arch name provided by archspec to a regular string (#3286) + - get pr_title and pr_descr built_options in new_pr_from_branch instead of new_pr (and commit_msg in both) (#3298) + - make pypi_source_urls more robust by using HTMLParser rather than xml.etree.ElementTree (#3303, #3329) + - fix broken test for --include-easyblocks-from-pr (#3304) + - don't use distutils.dir_util in copy_dir (#3310) + - print trace message for sanity check command before running it (#3316) + - fix problems with processing of easyconfigs using a Cray* toolchain when there are no actual external modules (#3319) + - make test_find_eb_script more robust in case $EB_SCRIPT_PATH is already set (#3320) + - fix several small problems with --try-update-deps (experimental feature) (#3325, #3326, #3330) + - add --disable-job in 'eb' command used in jobs, to prevent infinite job cycle (#3328) + - avoid empty entries in $LD_LIBRARY_PATH and other path-like environment variables (#3333) +- other changes: + - fix code style issues in easybuild.tools + add flake8 linting test (#3282) + - introduce contextmanager for disabling templating and reduce resolving errors (#3287) + - add 'change_into_dir' named argument to 'extract_file' + print deprecation warning if it's not specified (#3292) + - improve install_eb_dep.sh script to install EasyBuild dependencies in CI environment (#3314) + + v4.2.0 (April 14th 2020) ------------------------ diff --git a/easybuild/framework/easyblock.py b/easybuild/framework/easyblock.py index 1aba3187d2..5cdfbae2f5 100644 --- a/easybuild/framework/easyblock.py +++ b/easybuild/framework/easyblock.py @@ -71,12 +71,12 @@ from easybuild.tools.config import install_path, log_path, package_path, source_paths from easybuild.tools.environment import restore_env, sanitize_env from easybuild.tools.filetools import CHECKSUM_TYPE_MD5, CHECKSUM_TYPE_SHA256 -from easybuild.tools.filetools import adjust_permissions, apply_patch, back_up_file -from easybuild.tools.filetools import change_dir, convert_name, compute_checksum, copy_file, derive_alt_pypi_url -from easybuild.tools.filetools import diff_files, download_file, encode_class_name, extract_file +from easybuild.tools.filetools import adjust_permissions, apply_patch, back_up_file, change_dir, convert_name +from easybuild.tools.filetools import compute_checksum, copy_file, check_lock, create_lock, derive_alt_pypi_url +from easybuild.tools.filetools import diff_files, dir_contains_files, download_file, encode_class_name, extract_file from easybuild.tools.filetools import find_backup_name_candidate, get_source_tarball_from_git, is_alt_pypi_url from easybuild.tools.filetools import is_binary, is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir -from easybuild.tools.filetools import remove_file, verify_checksum, weld_paths, write_file, dir_contains_files +from easybuild.tools.filetools import remove_file, remove_lock, verify_checksum, weld_paths, write_file from easybuild.tools.hooks import BUILD_STEP, CLEANUP_STEP, CONFIGURE_STEP, EXTENSIONS_STEP, FETCH_STEP, INSTALL_STEP from easybuild.tools.hooks import MODULE_STEP, PACKAGE_STEP, PATCH_STEP, PERMISSIONS_STEP, POSTITER_STEP, POSTPROC_STEP from easybuild.tools.hooks import PREPARE_STEP, READY_STEP, SANITYCHECK_STEP, SOURCE_STEP, TEST_STEP, TESTCASES_STEP @@ -1014,6 +1014,25 @@ def make_devel_module(self, create_in_builddir=False): # cleanup: unload fake module, remove fake module dir
self.clean_up_fake_module(fake_mod_data) + def make_module_deppaths(self): + """ + Add specific 'module use' actions to module file, in order to find + dependencies outside the end user's MODULEPATH. + """ + deppaths = self.cfg['moddependpaths'] + if not deppaths: + return '' + elif not isinstance(deppaths, (str, list, tuple)): + raise EasyBuildError("moddependpaths value %s (type: %s) is not a string, list or tuple", + deppaths, type(deppaths)) + + if isinstance(deppaths, str): + txt = self.module_generator.use([deppaths], guarded=True) + else: + txt = self.module_generator.use(deppaths, guarded=True) + + return txt + def make_module_dep(self, unload_info=None): """ Make the dependencies for the module file. @@ -1194,7 +1213,8 @@ def make_module_extra_extensions(self): lines = [self.module_extra_extensions] # set environment variable that specifies list of extensions - exts_list = ','.join(['%s-%s' % (ext[0], ext[1]) for ext in self.cfg['exts_list']]) + # We need only name and version, so don't resolve templates + exts_list = ','.join(['-'.join(ext[:2]) for ext in self.cfg.get_ref('exts_list')]) env_var_name = convert_name(self.name, upper=True) lines.append(self.module_generator.set_environment('EBEXTSLIST%s' % env_var_name, exts_list)) @@ -1207,7 +1227,7 @@ def make_module_footer(self): footer = [self.module_generator.comment("Built with EasyBuild version %s" % VERBOSE_VERSION)] # add extra stuff for extensions (if any) - if self.cfg['exts_list']: + if self.cfg.get_ref('exts_list'): footer.append(self.make_module_extra_extensions()) # include modules footer if one is specified @@ -1791,7 +1811,7 @@ def fetch_step(self, skip_checksums=False): trace_msg(msg) # fetch extensions - if self.cfg['exts_list']: + if self.cfg.get_ref('exts_list'): self.exts = self.fetch_extension_sources(skip_checksums=skip_checksums) # create parent dirs in install and modules path already @@ -1911,7 +1931,9 @@ def extract_step(self): """ for src in self.src: self.log.info("Unpacking source %s" % src['name']) - srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], extra_options=self.cfg['unpack_options']) + srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], + extra_options=self.cfg['unpack_options'], change_into_dir=False) + change_dir(srcdir) if srcdir: self.src[self.src.index(src)]['finalpath'] = srcdir else: @@ -2063,7 +2085,7 @@ def extensions_step(self, fetch=False): - find source for extensions, in 'extensions' (and 'packages' for legacy reasons) - run extra_extensions """ - if len(self.cfg['exts_list']) == 0: + if not self.cfg.get_ref('exts_list'): self.log.debug("No extensions in exts_list") return @@ -2409,37 +2431,71 @@ def _sanity_check_step_common(self, custom_paths, custom_commands): SANITY_CHECK_PATHS_DIRS: ("(non-empty) directory", lambda dp: os.path.isdir(dp) and os.listdir(dp)), } - # prepare sanity check paths - paths = self.cfg['sanity_check_paths'] - if not paths: + enhance_sanity_check = self.cfg['enhance_sanity_check'] + ec_commands = self.cfg['sanity_check_commands'] + ec_paths = self.cfg['sanity_check_paths'] + + # if enhance_sanity_check is not enabled, only sanity_check_paths specified in the easyconfig file are used, + # the ones provided by the easyblock (via custom_paths) are ignored + if ec_paths and not enhance_sanity_check: + paths = ec_paths + self.log.info("Using (only) sanity check paths specified by easyconfig file: %s", paths) + else: + # if no sanity_check_paths are specified in easyconfig, + # we fall back to the ones provided by the easyblock via 
custom_paths if custom_paths: paths = custom_paths - self.log.info("Using customized sanity check paths: %s" % paths) + self.log.info("Using customized sanity check paths: %s", paths) + # if custom_paths is empty, we fall back to a generic set of paths: + # non-empty bin/ + /lib or /lib64 directories else: paths = {} for key in path_keys_and_check: paths.setdefault(key, []) paths.update({SANITY_CHECK_PATHS_DIRS: ['bin', ('lib', 'lib64')]}) - self.log.info("Using default sanity check paths: %s" % paths) + self.log.info("Using default sanity check paths: %s", paths) + + # if enhance_sanity_check is enabled *and* sanity_check_paths are specified in the easyconfig, + # those paths are used to enhance the paths provided by the easyblock + if enhance_sanity_check and ec_paths: + for key in ec_paths: + val = ec_paths[key] + if isinstance(val, list): + paths[key] = paths.get(key, []) + val + else: + error_pattern = "Incorrect value type in sanity_check_paths, should be a list: " + error_pattern += "%s (type: %s)" % (val, type(val)) + raise EasyBuildError(error_pattern) + self.log.info("Enhanced sanity check paths after taking into account easyconfig file: %s", paths) + + sorted_keys = sorted(paths.keys()) + known_keys = sorted(path_keys_and_check.keys()) + + # verify sanity_check_paths value: only known keys, correct value types, at least one non-empty value + only_list_values = all(isinstance(x, list) for x in paths.values()) + only_empty_lists = all(not x for x in paths.values()) + if sorted_keys != known_keys or not only_list_values or only_empty_lists: + error_msg = "Incorrect format for sanity_check_paths: should (only) have %s keys, " + error_msg += "values should be lists (at least one non-empty)." + raise EasyBuildError(error_msg % ', '.join("'%s'" % k for k in known_keys)) + + # if enhance_sanity_check is not enabled, only sanity_check_commands specified in the easyconfig file are used, + # the ones provided by the easyblock (via custom_commands) are ignored + if ec_commands and not enhance_sanity_check: + commands = ec_commands + self.log.info("Using (only) sanity check commands specified by easyconfig file: %s", commands) else: - self.log.info("Using specified sanity check paths: %s" % paths) - - ks = sorted(paths.keys()) - valnottypes = [not isinstance(x, list) for x in paths.values()] - lenvals = [len(x) for x in paths.values()] - req_keys = sorted(path_keys_and_check.keys()) - if not ks == req_keys or sum(valnottypes) > 0 or sum(lenvals) == 0: - raise EasyBuildError("Incorrect format for sanity_check_paths (should (only) have %s keys, " - "values should be lists (at least one non-empty)).", ','.join(req_keys)) - - commands = self.cfg['sanity_check_commands'] - if not commands: if custom_commands: commands = custom_commands - self.log.info("Using customised sanity check commands: %s" % commands) + self.log.info("Using customised sanity check commands: %s", commands) else: commands = [] - self.log.info("Using specified sanity check commands: %s" % commands) + + # if enhance_sanity_check is enabled, the sanity_check_commands specified in the easyconfig file + # are combined with those provided by the easyblock via custom_commands + if enhance_sanity_check and ec_commands: + commands = commands + ec_commands + self.log.info("Enhanced sanity check commands after taking into account easyconfig file: %s", commands) for i, command in enumerate(commands): # set command to default. 
This allows for config files with @@ -2475,9 +2531,17 @@ def _sanity_check_step_dry_run(self, custom_paths=None, custom_commands=None, ** """ paths, path_keys_and_check, commands = self._sanity_check_step_common(custom_paths, custom_commands) - for key, (typ, _) in path_keys_and_check.items(): + for key in [SANITY_CHECK_PATHS_FILES, SANITY_CHECK_PATHS_DIRS]: + (typ, _) = path_keys_and_check[key] self.dry_run_msg("Sanity check paths - %s ['%s']", typ, key) - if paths[key]: + entries = paths[key] + if entries: + # some entries may be tuple values, + # we need to convert them to strings first so we can print them sorted + for idx, entry in enumerate(entries): + if isinstance(entry, tuple): + entries[idx] = ' or '.join(entry) + for path in sorted(paths[key]): self.dry_run_msg(" * %s", str(path)) else: @@ -2608,6 +2672,9 @@ def xs2str(xs): # run sanity check commands for command in commands: + + trace_msg("running command '%s' ..." % command) + out, ec = run_cmd(command, simple=False, log_ok=False, log_all=False, trace=False) if ec != 0: fail_msg = "sanity check command %s exited with code %s (output: %s)" % (command, ec, out) @@ -2616,7 +2683,7 @@ def xs2str(xs): else: self.log.info("sanity check command %s ran successfully! (output: %s)" % (command, out)) - trace_msg("running command '%s': %s" % (command, ('FAILED', 'OK')[ec == 0])) + trace_msg("result for command '%s': %s" % (command, ('FAILED', 'OK')[ec == 0])) # also run sanity check for extensions (unless we are an extension ourselves) if not extension: @@ -2723,6 +2790,7 @@ def make_module_step(self, fake=False): txt += self.make_module_description() txt += self.make_module_group_check() + txt += self.make_module_deppaths() txt += self.make_module_dep() txt += self.make_module_extend_modpath() txt += self.make_module_req() @@ -3049,30 +3117,14 @@ def run_all_steps(self, run_test_cases): if ignore_locks: self.log.info("Ignoring locks...") else: - locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') - lock_path = os.path.join(locks_dir, '%s.lock' % self.installdir.replace('/', '_')) - - # if lock already exists, either abort or wait until it disappears - if os.path.exists(lock_path): - wait_on_lock = build_option('wait_on_lock') - if wait_on_lock: - while os.path.exists(lock_path): - print_msg("lock %s exists, waiting %d seconds..." 
% (lock_path, wait_on_lock), - silent=self.silent) - time.sleep(wait_on_lock) - else: - raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + lock_name = self.installdir.replace('/', '_') - # create lock to avoid that another installation running in parallel messes things up; - # we use a directory as a lock, since that's atomically created - try: - mkdir(lock_path, parents=True) - except EasyBuildError as err: - # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes - stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') - raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + # check if lock already exists; + # either aborts with an error or waits until it disappears (depends on --wait-on-lock) + check_lock(lock_name) - self.log.info("Lock created: %s", lock_path) + # create lock to avoid that another installation running in parallel messes things up + create_lock(lock_name) try: for (step_name, descr, step_methods, skippable) in steps: @@ -3090,8 +3142,7 @@ def run_all_steps(self, run_test_cases): pass finally: if not ignore_locks: - remove_dir(lock_path) - self.log.info("Lock removed: %s", lock_path) + remove_lock(lock_name) # return True for successful build (or stopped build) return True diff --git a/easybuild/framework/easyconfig/constants.py b/easybuild/framework/easyconfig/constants.py index 05fcb80d97..86587c816a 100644 --- a/easybuild/framework/easyconfig/constants.py +++ b/easybuild/framework/easyconfig/constants.py @@ -51,4 +51,15 @@ 'OS_VERSION': (get_os_version(), "System version"), 'SYS_PYTHON_VERSION': (platform.python_version(), "System Python version (platform.python_version())"), 'SYSTEM': ({'name': 'system', 'version': 'system'}, "System toolchain"), + + 'OS_PKG_IBVERBS_DEV': (('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), + "OS packages providing ibverbs/infiniband development support"), + 'OS_PKG_OPENSSL_BIN': (('openssl'), + "OS packages providing the OpenSSL binary"), + 'OS_PKG_OPENSSL_LIB': (('libssl', 'libopenssl'), + "OS packages providing OpenSSL libraries"), + 'OS_PKG_OPENSSL_DEV': (('openssl-devel', 'libssl-dev', 'libopenssl-devel'), + "OS packages providing OpenSSL development support"), + 'OS_PKG_PAM_DEV': (('pam-devel', 'libpam0g-dev'), + "OS packages providing Pluggable Authentication Module (PAM) development support"), } diff --git a/easybuild/framework/easyconfig/default.py b/easybuild/framework/easyconfig/default.py index 920e27d1bd..904a7d7bd7 100644 --- a/easybuild/framework/easyconfig/default.py +++ b/easybuild/framework/easyconfig/default.py @@ -90,6 +90,8 @@ 'easyblock': [None, "EasyBlock to use for building; if set to None, an easyblock is selected " "based on the software name", BUILD], 'easybuild_version': [None, "EasyBuild-version this spec-file was written for", BUILD], + 'enhance_sanity_check': [False, "Indicate that additional sanity check commands & paths should enhance " + "the existing sanity check, not replace it", BUILD], 'fix_perl_shebang_for': [None, "List of files for which Perl shebang should be fixed " "to '#!/usr/bin/env perl' (glob patterns supported)", BUILD], 'fix_python_shebang_for': [None, "List of files for which Python shebang should be fixed " @@ -152,6 +154,7 @@ 'multi_deps': [{}, "Dict of lists of dependency versions over which to iterate", DEPENDENCIES], 'multi_deps_load_default': [True, "Load module for first version listed in multi_deps by default", DEPENDENCIES], 'osdependencies': [[], "OS
dependencies that should be present on the system", DEPENDENCIES], + 'moddependpaths': [None, "Absolute path(s) to prepend to MODULEPATH before loading dependencies", DEPENDENCIES], # LICENSE easyconfig parameters 'group': [None, "Name of the user group for which the software should be available; " diff --git a/easybuild/framework/easyconfig/easyconfig.py b/easybuild/framework/easyconfig/easyconfig.py index 3faeb84b90..d79eed817b 100644 --- a/easybuild/framework/easyconfig/easyconfig.py +++ b/easybuild/framework/easyconfig/easyconfig.py @@ -45,6 +45,7 @@ import os import re from distutils.version import LooseVersion +from contextlib import contextmanager import easybuild.tools.filetools as filetools from easybuild.base import fancylogger @@ -383,6 +384,23 @@ def get_toolchain_hierarchy(parent_toolchain, incl_capabilities=False): return toolchain_hierarchy +@contextmanager +def disable_templating(ec): + """Temporarily disable templating on the given EasyConfig + + Usage: + with disable_templating(ec): + # Do what you want without templating + # Templating set to previous value + """ + old_enable_templating = ec.enable_templating + ec.enable_templating = False + try: + yield old_enable_templating + finally: + ec.enable_templating = old_enable_templating + + class EasyConfig(object): """ Class which handles loading, reading, validation of easyconfigs @@ -592,18 +610,15 @@ def set_keys(self, params): """ # disable templating when setting easyconfig parameters # required to avoid problems with values that need more parsing to be done (e.g. dependencies) - prev_enable_templating = self.enable_templating - self.enable_templating = False - - for key in sorted(params.keys()): - # validations are skipped, just set in the config - if key in self._config.keys(): - self[key] = params[key] - self.log.info("setting easyconfig parameter %s: value %s (type: %s)", key, self[key], type(self[key])) - else: - raise EasyBuildError("Unknown easyconfig parameter: %s (value '%s')", key, params[key]) - - self.enable_templating = prev_enable_templating + with disable_templating(self): + for key in sorted(params.keys()): + # validations are skipped, just set in the config + if key in self._config.keys(): + self[key] = params[key] + self.log.info("setting easyconfig parameter %s: value %s (type: %s)", + key, self[key], type(self[key])) + else: + raise EasyBuildError("Unknown easyconfig parameter: %s (value '%s')", key, params[key]) def parse(self): """ @@ -647,42 +662,39 @@ def parse(self): # templating is disabled when parse_hook is called to allow for easy updating of mutable easyconfig parameters # (see also comment in resolve_template) - prev_enable_templating = self.enable_templating - self.enable_templating = False - - # if any lists of dependency versions are specified over which we should iterate, - # deal with them now, before calling parse hook, parsing of dependencies & iterative easyconfig parameters... - self.handle_multi_deps() - - parse_hook_msg = None - if self.path: - parse_hook_msg = "Running %s hook for %s..." % (PARSE, os.path.basename(self.path)) - - # trigger parse hook - hooks = load_hooks(build_option('hooks')) - run_hook(PARSE, hooks, args=[self], msg=parse_hook_msg) - - # parse dependency specifications - # it's important that templating is still disabled at this stage! 
- self.log.info("Parsing dependency specifications...") - self['dependencies'] = [self._parse_dependency(dep) for dep in self['dependencies']] - self['hiddendependencies'] = [self._parse_dependency(dep, hidden=True) for dep in self['hiddendependencies']] - - # need to take into account that builddependencies may need to be iterated over, - # i.e. when the value is a list of lists of tuples - builddeps = self['builddependencies'] - if builddeps and all(isinstance(x, (list, tuple)) for b in builddeps for x in b): - self.iterate_options.append('builddependencies') - builddeps = [[self._parse_dependency(dep, build_only=True) for dep in x] for x in builddeps] - else: - builddeps = [self._parse_dependency(dep, build_only=True) for dep in builddeps] - self['builddependencies'] = builddeps + with disable_templating(self): + # if any lists of dependency versions are specified over which we should iterate, + # deal with them now, before calling parse hook, parsing of dependencies & iterative easyconfig parameters + self.handle_multi_deps() - # keep track of parsed multi deps, they'll come in handy during sanity check & module steps... - self.multi_deps = self.get_parsed_multi_deps() + parse_hook_msg = None + if self.path: + parse_hook_msg = "Running %s hook for %s..." % (PARSE, os.path.basename(self.path)) + + # trigger parse hook + hooks = load_hooks(build_option('hooks')) + run_hook(PARSE, hooks, args=[self], msg=parse_hook_msg) + + # parse dependency specifications + # it's important that templating is still disabled at this stage! + self.log.info("Parsing dependency specifications...") + self['dependencies'] = [self._parse_dependency(dep) for dep in self['dependencies']] + self['hiddendependencies'] = [ + self._parse_dependency(dep, hidden=True) for dep in self['hiddendependencies'] + ] + + # need to take into account that builddependencies may need to be iterated over, + # i.e. when the value is a list of lists of tuples + builddeps = self['builddependencies'] + if builddeps and all(isinstance(x, (list, tuple)) for b in builddeps for x in b): + self.iterate_options.append('builddependencies') + builddeps = [[self._parse_dependency(dep, build_only=True) for dep in x] for x in builddeps] + else: + builddeps = [self._parse_dependency(dep, build_only=True) for dep in builddeps] + self['builddependencies'] = builddeps - # restore templating - self.enable_templating = prev_enable_templating + # keep track of parsed multi deps, they'll come in handy during sanity check & module steps... 
+ self.multi_deps = self.get_parsed_multi_deps() # update templating dictionary self.generate_template_values() @@ -1108,63 +1120,57 @@ def dump(self, fp, always_overwrite=True, backup=False, explicit_toolchains=Fals :param always_overwrite: overwrite existing file at specified location without use of --force :param backup: create backup of existing file before overwriting it """ - orig_enable_templating = self.enable_templating - # templated values should be dumped unresolved - self.enable_templating = False - - # build dict of default values - default_values = dict([(key, DEFAULT_CONFIG[key][0]) for key in DEFAULT_CONFIG]) - default_values.update(dict([(key, self.extra_options[key][0]) for key in self.extra_options])) + with disable_templating(self): + # build dict of default values + default_values = dict([(key, DEFAULT_CONFIG[key][0]) for key in DEFAULT_CONFIG]) + default_values.update(dict([(key, self.extra_options[key][0]) for key in self.extra_options])) + + self.generate_template_values() + templ_const = dict([(quote_py_str(const[1]), const[0]) for const in TEMPLATE_CONSTANTS]) + + # create reverse map of templates, to inject template values where possible + # longer template values are considered first, shorter template keys get preference over longer ones + sorted_keys = sorted(self.template_values, key=lambda k: (len(self.template_values[k]), -len(k)), + reverse=True) + templ_val = OrderedDict([]) + for key in sorted_keys: + # shortest template 'key' is retained in case of duplicates + # ('namelower' is preferred over 'github_account') + # only template values longer than 2 characters are retained + if self.template_values[key] not in templ_val and len(self.template_values[key]) > 2: + templ_val[self.template_values[key]] = key + + toolchain_hierarchy = None + if not explicit_toolchains: + try: + toolchain_hierarchy = get_toolchain_hierarchy(self['toolchain']) + except EasyBuildError as err: + # don't fail hard just because we can't get the hierarchy + self.log.warning('Could not generate toolchain hierarchy for %s to use in easyconfig dump method, ' + 'error:\n%s', self['toolchain'], str(err)) - self.generate_template_values() - templ_const = dict([(quote_py_str(const[1]), const[0]) for const in TEMPLATE_CONSTANTS]) - - # create reverse map of templates, to inject template values where possible - # longer template values are considered first, shorter template keys get preference over longer ones - sorted_keys = sorted(self.template_values, key=lambda k: (len(self.template_values[k]), -len(k)), reverse=True) - templ_val = OrderedDict([]) - for key in sorted_keys: - # shortest template 'key' is retained in case of duplicates ('namelower' is preferred over 'github_account') - # only template values longer than 2 characters are retained - if self.template_values[key] not in templ_val and len(self.template_values[key]) > 2: - templ_val[self.template_values[key]] = key - - toolchain_hierarchy = None - if not explicit_toolchains: try: - toolchain_hierarchy = get_toolchain_hierarchy(self['toolchain']) - except EasyBuildError as err: - # don't fail hard just because we can't get the hierarchy - self.log.warning('Could not generate toolchain hierarchy for %s to use in easyconfig dump method, ' - 'error:\n%s', self['toolchain'], str(err)) + ectxt = self.parser.dump(self, default_values, templ_const, templ_val, + toolchain_hierarchy=toolchain_hierarchy) + except NotImplementedError as err: + raise NotImplementedError(err) - try: - ectxt = self.parser.dump(self, default_values, templ_const, 
templ_val, - toolchain_hierarchy=toolchain_hierarchy) - except NotImplementedError as err: - # need to restore enable_templating value in case this method is caught in a try/except block and ignored - # (the ability to dump is not a hard requirement for build success) - self.enable_templating = orig_enable_templating - raise NotImplementedError(err) + self.log.debug("Dumped easyconfig: %s", ectxt) - self.log.debug("Dumped easyconfig: %s", ectxt) + if build_option('dump_autopep8'): + autopep8_opts = { + 'aggressive': 1, # enable non-whitespace changes, but don't be too aggressive + 'max_line_length': 120, + } + self.log.info("Reformatting dumped easyconfig using autopep8 (options: %s)", autopep8_opts) + ectxt = autopep8.fix_code(ectxt, options=autopep8_opts) + self.log.debug("Dumped easyconfig after autopep8 reformatting: %s", ectxt) - if build_option('dump_autopep8'): - autopep8_opts = { - 'aggressive': 1, # enable non-whitespace changes, but don't be too aggressive - 'max_line_length': 120, - } - self.log.info("Reformatting dumped easyconfig using autopep8 (options: %s)", autopep8_opts) - ectxt = autopep8.fix_code(ectxt, options=autopep8_opts) - self.log.debug("Dumped easyconfig after autopep8 reformatting: %s", ectxt) + if not ectxt.endswith('\n'): + ectxt += '\n' - if not ectxt.endswith('\n'): - ectxt += '\n' - - write_file(fp, ectxt, always_overwrite=always_overwrite, backup=backup, verbose=backup) - - self.enable_templating = orig_enable_templating + write_file(fp, ectxt, always_overwrite=always_overwrite, backup=backup, verbose=backup) def _validate(self, attr, values): # private method """ @@ -1473,7 +1479,7 @@ def _parse_dependency(self, dep, hidden=False, build_only=False): # (true) boolean value simply indicates that a system toolchain is used elif isinstance(tc_spec, bool) and tc_spec: - tc = {'name': SYSTEM_TOOLCHAIN_NAME, 'version': ''} + tc = {'name': SYSTEM_TOOLCHAIN_NAME, 'version': ''} # two-element list/tuple value indicates custom toolchain specification elif isinstance(tc_spec, (list, tuple,)): @@ -1593,17 +1599,12 @@ def _generate_template_values(self, ignore=None): # step 1-3 work with easyconfig.templates constants # disable templating with creating dict with template values to avoid looping back to here via __getitem__ - prev_enable_templating = self.enable_templating - - self.enable_templating = False - - if self.template_values is None: - # if no template values are set yet, initiate with a minimal set of template values; - # this is important for easyconfig that use %(version_minor)s to define 'toolchain', - # which is a pretty weird use case, but fine... - self.template_values = template_constant_dict(self, ignore=ignore) - - self.enable_templating = prev_enable_templating + with disable_templating(self): + if self.template_values is None: + # if no template values are set yet, initiate with a minimal set of template values; + # this is important for easyconfig that use %(version_minor)s to define 'toolchain', + # which is a pretty weird use case, but fine... 
+ self.template_values = template_constant_dict(self, ignore=ignore) # grab toolchain instance with templating support enabled, # which is important in case the Toolchain instance was not created yet @@ -1611,9 +1612,8 @@ def _generate_template_values(self, ignore=None): # get updated set of template values, now with toolchain instance # (which is used to define the %(mpi_cmd_prefix)s template) - self.enable_templating = False - template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) - self.enable_templating = prev_enable_templating + with disable_templating(self): + template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) # update the template_values dict self.template_values.update(template_values) @@ -1656,13 +1656,8 @@ def get_ref(self, key): # see also comments in resolve_template # temporarily disable templating - prev_enable_templating = self.enable_templating - self.enable_templating = False - - ref = self[key] - - # restore previous value for 'enable_templating' - self.enable_templating = prev_enable_templating + with disable_templating(self): + ref = self[key] return ref diff --git a/easybuild/framework/easyconfig/format/one.py b/easybuild/framework/easyconfig/format/one.py index 88d043bc61..350afdbb6c 100644 --- a/easybuild/framework/easyconfig/format/one.py +++ b/easybuild/framework/easyconfig/format/one.py @@ -97,6 +97,13 @@ class FormatOneZero(EasyConfigFormatConfigObj): PYHEADER_MANDATORY = ['version', 'name', 'toolchain', 'homepage', 'description'] PYHEADER_BLACKLIST = [] + def __init__(self, *args, **kwargs): + """FormatOneZero constructor.""" + super(FormatOneZero, self).__init__(*args, **kwargs) + + self.log = fancylogger.getLogger(self.__class__.__name__, fname=False) + self.strict_sanity_check_paths_keys = True + def validate(self): """Format validation""" # minimal checks @@ -168,11 +175,14 @@ def _reformat_line(self, param_name, param_val, outer=False, addlen=0): for item_key in ordered_item_keys: if item_key in param_val: item_val = param_val[item_key] + item_comments = self._get_item_comments(param_name, item_val) + elif param_name == 'sanity_check_paths' and not self.strict_sanity_check_paths_keys: + item_val = [] + item_comments = {} + self.log.info("Using default value for '%s' in sanity_check_paths: %s", item_key, item_val) else: raise EasyBuildError("Missing mandatory key '%s' in %s.", item_key, param_name) - item_comments = self._get_item_comments(param_name, item_val) - inline_comment = item_comments.get('inline', '') item_tmpl_dict = {'inline_comment': inline_comment} @@ -317,6 +327,10 @@ def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy :param templ_val: known template values :param toolchain_hierarchy: hierarchy of toolchains for easyconfig """ + # figure out whether we should be strict about the format of sanity_check_paths; + # if enhance_sanity_check is set, then both files/dirs keys are not strictly required...
+ self.strict_sanity_check_paths_keys = not ecfg['enhance_sanity_check'] + # include header comments first dump = self.comments['header'][:] diff --git a/easybuild/framework/easyconfig/tweak.py b/easybuild/framework/easyconfig/tweak.py index e39dfae559..b54bb55b2b 100644 --- a/easybuild/framework/easyconfig/tweak.py +++ b/easybuild/framework/easyconfig/tweak.py @@ -52,11 +52,13 @@ from easybuild.framework.easyconfig.format.format import DEPENDENCY_PARAMETERS from easybuild.framework.easyconfig.parser import fetch_parameters_from_easyconfig from easybuild.framework.easyconfig.tools import alt_easyconfig_paths +from easybuild.toolchains.compiler.systemcompiler import TC_CONSTANT_SYSTEM from easybuild.toolchains.gcccore import GCCcore from easybuild.tools.build_log import EasyBuildError, print_warning from easybuild.tools.config import build_option from easybuild.tools.filetools import read_file, write_file from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version +from easybuild.tools.py2vs3 import string_type from easybuild.tools.robot import resolve_dependencies, robot_find_easyconfig, search_easyconfigs from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME from easybuild.tools.toolchain.toolchain import TOOLCHAIN_CAPABILITIES @@ -907,8 +909,11 @@ def map_common_versionsuffixes(software_name, original_toolchain, toolchain_mapp if original_suffix in versionsuffix_mappings: if mapped_suffix != versionsuffix_mappings[original_suffix]: raise EasyBuildError("No unique versionsuffix mapping for %s in %s toolchain " - "hierarchy to %s toolchain hierarchy", original_suffix, - original_toolchain, toolchain_mapping[original_toolchain['name']]) + "hierarchy to %s toolchain hierarchy (mapped suffix was %s but " + "versionsuffix mappings were %s)", + original_suffix, original_toolchain, + toolchain_mapping[original_toolchain['name']], mapped_suffix, + versionsuffix_mappings) else: versionsuffix_mappings[original_suffix] = mapped_suffix @@ -951,8 +956,9 @@ def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir= parsed_ec = process_easyconfig(ec_spec, validate=False)[0]['ec'] versonsuffix_mapping = {} - - if update_dep_versions: + # We only need to map versionsuffixes if we are updating dependency versions and if there are + # versionsuffixes being used in dependencies + if update_dep_versions and (list_deps_versionsuffixes(ec_spec) or parsed_ec['versionsuffix']): # We may need to update the versionsuffix if it is like, for example, `-Python-2.7.8` versonsuffix_mapping = map_common_versionsuffixes('Python', parsed_ec['toolchain'], toolchain_mapping) @@ -1059,6 +1065,31 @@ def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir= return tweaked_spec +def list_deps_versionsuffixes(ec_spec): + """ + Take an easyconfig spec, parse it, extracts the list of version suffixes used in its dependencies + + :param ec_spec: location of original easyconfig file + + :return: The list of versionsuffixes used by the dependencies of this recipe + """ + # Fully parse the original easyconfig + parsed_ec = process_easyconfig(ec_spec, validate=False)[0]['ec'] + + versionsuffix_list = [] + for key in DEPENDENCY_PARAMETERS: + val = parsed_ec[key] + + if key in parsed_ec.iterate_options: + val = flatten(val) + + for dep in val: + if dep['versionsuffix']: + versionsuffix_list += [dep['versionsuffix']] + + return list(set(versionsuffix_list)) + + def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mapping=None, 
highest_versions_only=True): """ Find potential version mapping for a dependency in a new hierarchy @@ -1089,6 +1120,9 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin # Figure out the main versionsuffix (altered depending on toolchain in the loop below) versionsuffix = dep.get('versionsuffix', '') + # If versionsuffix is equal to None, it should be put to empty string + if versionsuffix is None: + versionsuffix = '' # If versionsuffix is in our mapping then we expect it to be updated if versionsuffix in versionsuffix_mapping: versionsuffix = versionsuffix_mapping[versionsuffix] @@ -1105,14 +1139,12 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin if len(version_components) > 1: # Have at least major.minor candidate_ver_list.append(r'%s\..*' % major_version) candidate_ver_list.append(r'.*') # Include a major version search - potential_version_mappings, highest_version = [], None for candidate_ver in candidate_ver_list: # if any potential version mappings were found already at this point, we don't add more if not potential_version_mappings: - for toolchain in toolchain_hierarchy: # determine search pattern based on toolchain, version prefix/suffix & version regex @@ -1129,6 +1161,21 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin tweaked_ecs_paths, _ = alt_easyconfig_paths(tempfile.gettempdir(), tweaked_ecs=True) cand_paths = [path for path in cand_paths if not path.startswith(tweaked_ecs_paths)] + # if SYSTEM_TOOLCHAIN_NAME is used, it produces regex of the form + # -.eb, which can map to incompatible toolchains. + # For example Boost-1.68\..*.eb would match Boost-1.68.0-intel-2019a.eb + # This filters out such matches unless the toolchain in the easyconfig matches a system toolchain + if toolchain['name'] == SYSTEM_TOOLCHAIN_NAME: + cand_paths_filtered = [] + for path in cand_paths: + tc_candidate = fetch_parameters_from_easyconfig(read_file(path), ['toolchain'])[0] + if isinstance(tc_candidate, dict) and tc_candidate['name'] == SYSTEM_TOOLCHAIN_NAME: + cand_paths_filtered += [path] + if isinstance(tc_candidate, string_type) and tc_candidate == TC_CONSTANT_SYSTEM: + cand_paths_filtered += [path] + + cand_paths = cand_paths_filtered + # add what is left to the possibilities for path in cand_paths: version = fetch_parameters_from_easyconfig(read_file(path), ['version'])[0] diff --git a/easybuild/framework/extension.py b/easybuild/framework/extension.py index b44d5759fe..90ba521ecd 100644 --- a/easybuild/framework/extension.py +++ b/easybuild/framework/extension.py @@ -127,14 +127,14 @@ def __init__(self, mself, ext, extra_params=None): # make sure they are merged into self.cfg so they can be queried; # unknown easyconfig parameters are ignored since self.options may include keys only there for extensions; # this allows to specify custom easyconfig parameters on a per-extension basis - for key in self.options: + for key, value in self.options.items(): if key in self.cfg: - self.cfg[key] = resolve_template(self.options[key], self.cfg.template_values) + self.cfg[key] = value self.log.debug("Customising known easyconfig parameter '%s' for extension %s/%s: %s", - key, name, version, self.cfg[key]) + key, name, version, value) else: self.log.debug("Skipping unknown custom easyconfig parameter '%s' for extension %s/%s: %s", - key, name, version, self.options[key]) + key, name, version, value) self.sanity_check_fail_msgs = [] diff --git a/easybuild/framework/extensioneasyblock.py 
b/easybuild/framework/extensioneasyblock.py index 35b1bf4407..277a59fb45 100644 --- a/easybuild/framework/extensioneasyblock.py +++ b/easybuild/framework/extensioneasyblock.py @@ -103,7 +103,9 @@ def run(self, unpack_src=False): # unpack file if desired if unpack_src: targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name)) - self.ext_dir = extract_file("%s" % self.src, targetdir, extra_options=self.unpack_options) + self.ext_dir = extract_file(self.src, targetdir, extra_options=self.unpack_options, + change_into_dir=False) + change_dir(self.ext_dir) if self.start_dir and os.path.isdir(self.start_dir): self.log.debug("Using start_dir: %s", self.start_dir) diff --git a/easybuild/main.py b/easybuild/main.py index 415321dc9a..f15c648b73 100644 --- a/easybuild/main.py +++ b/easybuild/main.py @@ -47,6 +47,7 @@ from easybuild.framework.easyblock import build_and_install_one, inject_checksums from easybuild.framework.easyconfig import EASYCONFIGS_PKG_SUBDIR +from easybuild.framework.easyconfig.easyconfig import clean_up_easyconfigs from easybuild.framework.easyconfig.easyconfig import fix_deprecated_easyconfigs, verify_easyconfig_filename from easybuild.framework.easyconfig.style import cmdline_easyconfigs_style_check from easybuild.framework.easyconfig.tools import categorize_files_by_type, dep_graph @@ -57,7 +58,7 @@ from easybuild.tools.containers.common import containerize from easybuild.tools.docs import list_software from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, dump_index, load_index -from easybuild.tools.filetools import read_file, write_file +from easybuild.tools.filetools import read_file, register_lock_cleanup_signal_handlers, write_file from easybuild.tools.github import check_github, close_pr, new_branch_github, find_easybuild_easyconfig from easybuild.tools.github import install_github_token, list_prs, new_pr, new_pr_from_branch, merge_pr from easybuild.tools.github import sync_branch_with_develop, sync_pr_with_develop, update_branch, update_pr @@ -189,6 +190,9 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): :param do_build: whether or not to actually perform the build :param testing: enable testing mode """ + + register_lock_cleanup_signal_handlers() + # if $CDPATH is set, unset it, it'll only cause trouble... 
# see https://github.com/easybuilders/easybuild-framework/issues/2944 if 'CDPATH' in os.environ: @@ -317,7 +321,7 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): # determine paths to easyconfigs determined_paths = det_easyconfig_paths(categorized_paths['easyconfigs']) - if options.copy_ec or options.fix_deprecated_easyconfigs or options.show_ec: + if (options.copy_ec and not tweaked_ecs_paths) or options.fix_deprecated_easyconfigs or options.show_ec: if options.copy_ec: if len(determined_paths) == 1: @@ -417,6 +421,17 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): print_msg("No easyconfigs left to be built.", log=_log, silent=testing) ordered_ecs = [] + if options.copy_ec and tweaked_ecs_paths: + all_specs = [spec['spec'] for spec in + resolve_dependencies(easyconfigs, modtool, retain_all_deps=True, raise_error_missing_ecs=False)] + tweaked_ecs_in_all_ecs = [path for path in all_specs if + any(tweaked_ecs_path in path for tweaked_ecs_path in tweaked_ecs_paths)] + if tweaked_ecs_in_all_ecs: + # Clean them, then copy them + clean_up_easyconfigs(tweaked_ecs_in_all_ecs) + copy_files(tweaked_ecs_in_all_ecs, target_path) + print_msg("%d file(s) copied to %s" % (len(tweaked_ecs_in_all_ecs), target_path), prefix=False) + # creating/updating PRs if pr_options: if options.new_pr: @@ -518,5 +533,5 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): main() except EasyBuildError as err: print_error(err.msg) - except KeyboardInterrupt: - print_error("Cancelled by user (keyboard interrupt)") + except KeyboardInterrupt as err: + print_error("Cancelled by user: %s" % err) diff --git a/easybuild/scripts/install_eb_dep.sh b/easybuild/scripts/install_eb_dep.sh index ccc6c74b88..82c34b774d 100755 --- a/easybuild/scripts/install_eb_dep.sh +++ b/easybuild/scripts/install_eb_dep.sh @@ -1,77 +1,81 @@ #!/bin/bash -set -e - if [ $# -ne 2 ]; then echo "Usage: $0 - " exit 1 fi -PKG=$1 -PREFIX=$2 -PKG_NAME=`echo $PKG | sed 's/-[^-]*$//g'` -PKG_VERSION=`echo $PKG | sed 's/.*-//g'` +set -eu + +PKG="$1" +PREFIX="$2" + +PKG_NAME="${PKG%-*}" +PKG_VERSION="${PKG##*-}" CONFIG_OPTIONS= PRECONFIG_CMD= -if [ x$PKG_NAME == 'xmodules' ] && [ x$PKG_VERSION == 'x3.2.10' ]; then +if [ "$PKG_NAME" == 'modules' ] && [ "$PKG_VERSION" == '3.2.10' ]; then PKG_URL="http://prdownloads.sourceforge.net/modules/${PKG}.tar.gz" BACKUP_PKG_URL="https://easybuilders.github.io/easybuild/files/${PKG}.tar.gz" - export PATH=$PREFIX/Modules/$PKG_VERSION/bin:$PATH - export MOD_INIT=$HOME/Modules/$PKG_VERSION/init/bash + export PATH="$PREFIX/Modules/$PKG_VERSION/bin:$PATH" + export MOD_INIT="$PREFIX/Modules/$PKG_VERSION/init/bash" -elif [ x$PKG_NAME == 'xmodules' ]; then +elif [ "$PKG_NAME" == 'modules' ]; then PKG_URL="http://prdownloads.sourceforge.net/modules/${PKG}.tar.gz" - export PATH=$PREFIX/bin:$PATH - export MOD_INIT=$HOME/init/bash + export PATH="$PREFIX/bin:$PATH" + export MOD_INIT="$PREFIX/init/bash" -elif [ x$PKG_NAME == 'xlua' ]; then +elif [ "$PKG_NAME" == 'lua' ]; then PKG_URL="http://downloads.sourceforge.net/project/lmod/${PKG}.tar.gz" BACKUP_PKG_URL="https://easybuilders.github.io/easybuild/files/${PKG}.tar.gz" PRECONFIG_CMD="make clean" CONFIG_OPTIONS='--with-static=yes' - export PATH=$PWD/$PKG:$PREFIX/bin:$PATH + export PATH="$PWD/$PKG:$PREFIX/bin:$PATH" -elif [ x$PKG_NAME == 'xLmod' ]; then +elif [ "$PKG_NAME" == 'Lmod' ]; then PKG_URL="https://github.com/TACC/Lmod/archive/${PKG_VERSION}.tar.gz" - export 
PATH=$PREFIX/lmod/$PKG_VERSION/libexec:$PATH - export MOD_INIT=$HOME/lmod/$PKG_VERSION/init/bash + export PATH="$PREFIX/lmod/$PKG_VERSION/libexec:$PATH" + export MOD_INIT="$PREFIX/lmod/$PKG_VERSION/init/bash" -elif [ x$PKG_NAME == 'xmodules-tcl' ]; then +elif [ "$PKG_NAME" == 'modules-tcl' ]; then # obtain tarball from upstream via http://modules.cvs.sourceforge.net/viewvc/modules/modules/?view=tar&revision=1.147 PKG_URL="https://easybuilders.github.io/easybuild/files/modules-tcl-${PKG_VERSION}.tar.gz" - export MODULESHOME=$PREFIX/$PKG/tcl # required by init/bash source script - export PATH=$MODULESHOME:$PATH - export MOD_INIT=$MODULESHOME/init/bash.in + export MODULESHOME="$PREFIX/$PKG/tcl" # required by init/bash source script + export PATH="$MODULESHOME:$PATH" + export MOD_INIT="$MODULESHOME/init/bash.in" else echo "ERROR: Unknown package name '$PKG_NAME'" exit 2 fi echo "Installing ${PKG} @ ${PREFIX}..." -mkdir -p ${PREFIX} -set +e -wget ${PKG_URL} && tar xfz *${PKG_VERSION}.tar.gz -if [ $? -ne 0 ] && [ ! -z $BACKUP_PKG_URL ]; then - rm -f *${PKG_VERSION}.tar.gz - wget ${BACKUP_PKG_URL} && tar xfz *${PKG_VERSION}.tar.gz +mkdir -p "${PREFIX}" +if ! wget "${PKG_URL}" && [ -n "$BACKUP_PKG_URL" ]; then + rm -f ./*"${PKG_VERSION}".tar.gz + wget "${BACKUP_PKG_URL}" fi -set -e + +tar xfz ./*"${PKG_VERSION}".tar.gz +rm ./*"${PKG_VERSION}".tar.gz # environment-modules needs a patch to work with Tcl8.6 -if [ x$PKG_NAME == 'xmodules' ] && [ x$PKG_VERSION == 'x3.2.10' ]; then +if [ "$PKG_NAME" == 'modules' ] && [ "$PKG_VERSION" == '3.2.10' ]; then wget -O 'modules-tcl8.6.patch' 'https://easybuilders.github.io/easybuild/files/modules-3.2.10-tcl8.6.patch' - patch ${PKG}/cmdModule.c modules-tcl8.6.patch + patch "${PKG}/cmdModule.c" modules-tcl8.6.patch fi -if [ x$PKG_NAME == 'xmodules-tcl' ]; then - mv modules $PREFIX/${PKG} +if [ "$PKG_NAME" == 'modules-tcl' ]; then + mv modules "$PREFIX/${PKG}" else - cd ${PKG} - if [[ ! 
-z $PRECONFIG_CMD ]]; then - eval ${PRECONFIG_CMD} + cd "${PKG}" + if [[ -n "$PRECONFIG_CMD" ]]; then + eval "${PRECONFIG_CMD}" fi - ./configure $CONFIG_OPTIONS --prefix=$PREFIX && make && make install + ./configure $CONFIG_OPTIONS --prefix="$PREFIX" && make && make install cd - > /dev/null + rm -r "${PKG}" fi + +set +eu diff --git a/easybuild/toolchains/mpi/craympich.py b/easybuild/toolchains/mpi/craympich.py index cf32237451..bd01662002 100644 --- a/easybuild/toolchains/mpi/craympich.py +++ b/easybuild/toolchains/mpi/craympich.py @@ -40,7 +40,7 @@ class CrayMPICH(Mpi): """Generic support for using Cray compiler wrappers""" # MPI support # no separate module, Cray compiler drivers always provide MPI support - MPI_MODULE_NAME = [] + MPI_MODULE_NAME = None MPI_FAMILY = TC_CONSTANT_MPICH MPI_TYPE = TC_CONSTANT_MPI_TYPE_MPICH diff --git a/easybuild/tools/asyncprocess.py b/easybuild/tools/asyncprocess.py index 17e7e132e4..b96b7870f7 100644 --- a/easybuild/tools/asyncprocess.py +++ b/easybuild/tools/asyncprocess.py @@ -66,16 +66,15 @@ """ import errno +import fcntl import os +import select import subprocess import time PIPE = subprocess.PIPE STDOUT = subprocess.STDOUT -import select #@UnresolvedImport -import fcntl #@UnresolvedImport - class Popen(subprocess.Popen): @@ -117,7 +116,7 @@ def send(self, inp): try: written = os.write(self.stdin.fileno(), inp.encode()) except OSError as why: - if why[0] == errno.EPIPE: #broken pipe + if why[0] == errno.EPIPE: # broken pipe return self._close('stdin') raise @@ -147,8 +146,10 @@ def _recv(self, which, maxsize): if not conn.closed: fcntl.fcntl(conn, fcntl.F_SETFL, flags) + message = "Other end disconnected!" + def recv_some(p, t=.2, e=1, tr=5, stderr=0): if tr < 1: tr = 1 @@ -171,6 +172,7 @@ def recv_some(p, t=.2, e=1, tr=5, stderr=0): time.sleep(max((x - time.time()) / tr, 0)) return b''.join(y) + def send_all(p, data): while len(data): sent = p.send(data) diff --git a/easybuild/tools/config.py b/easybuild/tools/config.py index 0bcf31ab8b..3bca0194b7 100644 --- a/easybuild/tools/config.py +++ b/easybuild/tools/config.py @@ -102,6 +102,8 @@ DEFAULT_PNS = 'EasyBuildPNS' DEFAULT_PREFIX = os.path.join(os.path.expanduser('~'), ".local", "easybuild") DEFAULT_REPOSITORY = 'FileRepository' +DEFAULT_WAIT_ON_LOCK_INTERVAL = 60 +DEFAULT_WAIT_ON_LOCK_LIMIT = 0 EBROOT_ENV_VAR_ACTIONS = [ERROR, IGNORE, UNSET, WARN] LOADED_MODULES_ACTIONS = [ERROR, IGNORE, PURGE, UNLOAD, WARN] @@ -211,6 +213,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'subdir_user_modules', 'test_report_env_filter', 'testoutput', + 'wait_on_lock', 'umask', 'zip_logs', ], @@ -256,7 +259,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'use_f90cache', 'use_existing_modules', 'set_default_module', - 'wait_on_lock', + 'wait_on_lock_limit', ], True: [ 'cleanup_builddir', @@ -305,6 +308,9 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): DEFAULT_ALLOW_LOADED_MODULES: [ 'allow_loaded_modules', ], + DEFAULT_WAIT_ON_LOCK_INTERVAL: [ + 'wait_on_lock_interval', + ], } # build option that do not have a perfectly matching command line option BUILD_OPTIONS_OTHER = { diff --git a/easybuild/tools/configobj.py b/easybuild/tools/configobj.py index f96c404ac0..ea418ce750 100644 --- a/easybuild/tools/configobj.py +++ b/easybuild/tools/configobj.py @@ -41,7 +41,7 @@ BOM_UTF16_BE: ('utf16_be', 'utf_16'), BOM_UTF16_LE: ('utf16_le', 'utf_16'), BOM_UTF16: ('utf_16', 'utf_16'), - } +} # All legal variants of the BOM codecs. 
# TODO: the list of aliases is not meant to be exhaustive, is there a # better way ? @@ -61,7 +61,7 @@ 'utf': 'utf_8', 'utf8': 'utf_8', 'utf-8': 'utf_8', - } +} # Map of encodings to the BOM to write. BOM_SET = { @@ -70,7 +70,7 @@ 'utf16_be': BOM_UTF16_BE, 'utf16_le': BOM_UTF16_LE, None: BOM_UTF8 - } +} def match_utf8(encoding): @@ -143,7 +143,6 @@ def any(iterable): } - def getObj(s): global compiler if compiler is None: @@ -222,12 +221,12 @@ def unrepr(s): return _builder.build(getObj(s)) - class ConfigObjError(SyntaxError): """ This is the base class for all errors that ConfigObj raises. It is a subclass of SyntaxError. """ + def __init__(self, message='', line_number=None, line=''): self.line = line self.line_number = line_number @@ -253,6 +252,7 @@ class ReloadError(IOError): A 'reload' operation failed. This exception is a subclass of ``IOError``. """ + def __init__(self): IOError.__init__(self, 'reload failed, filename is not set.') @@ -291,6 +291,7 @@ class RepeatSectionError(ConfigObjError): class MissingInterpolationOption(InterpolationError): """A value specified for interpolation was missing.""" + def __init__(self, option): msg = 'missing option "%s" in interpolation.' % option InterpolationError.__init__(self, msg) @@ -300,7 +301,6 @@ class UnreprError(ConfigObjError): """An error parsing in unrepr mode.""" - class InterpolationEngine(object): """ A helper class to help perform string interpolation. @@ -317,10 +317,9 @@ def __init__(self, section): # the Section instance that "owns" this engine self.section = section - def interpolate(self, key, value): # short-cut - if not self._cookie in value: + if self._cookie not in value: return value def recursive_interpolate(key, value, section, backtrail): @@ -370,7 +369,6 @@ def recursive_interpolate(key, value, section, backtrail): value = recursive_interpolate(key, value, self.section, {}) return value - def _fetch(self, key): """Helper function to fetch values from owning section. @@ -404,7 +402,6 @@ def _fetch(self, key): raise MissingInterpolationOption(key) return val, current_section - def _parse_match(self, match): """Implementation-dependent helper function. @@ -424,7 +421,6 @@ def _parse_match(self, match): raise NotImplementedError() - class ConfigParserInterpolation(InterpolationEngine): """Behaves like ConfigParser.""" _cookie = '%' @@ -436,7 +432,6 @@ def _parse_match(self, match): return key, value, section - class TemplateInterpolation(InterpolationEngine): """Behaves like string.Template.""" _cookie = '$' @@ -473,6 +468,7 @@ def __newobj__(cls, *args): # Hack for pickle return cls.__new__(cls, *args) + class Section(dict): """ A dictionary-like object that represents a section in a config file. @@ -491,7 +487,6 @@ class Section(dict): Iteration follows the order: scalars, then sections. """ - def __setstate__(self, state): dict.update(self, state[0]) self.__dict__.update(state[1]) @@ -500,7 +495,6 @@ def __reduce__(self): state = (dict(self), self.__dict__) return (__newobj__, (self.__class__,), state) - def __init__(self, parent, depth, main, indict=None, name=None): """ * parent is the section above @@ -542,7 +536,6 @@ def _initialise(self): self.extra_values = [] self._created = False - def _interpolate(self, key, value): try: # do we already have an interpolation engine? 
@@ -550,7 +543,7 @@ def _interpolate(self, key, value): except AttributeError: # not yet: first time running _interpolate(), so pick the engine name = self.main.interpolation - if name == True: # note that "if name:" would be incorrect here + if name is True: # note that "if name:" would be incorrect here # backwards-compatibility: interpolation=True means use default name = DEFAULT_INTERPOLATION name = name.lower() # so that "Template", "template", etc. all work @@ -565,7 +558,6 @@ def _interpolate(self, key, value): # let the engine do the actual work return engine.interpolate(key, value) - def __getitem__(self, key): """Fetch the item and do string interpolation.""" val = dict.__getitem__(self, key) @@ -582,7 +574,6 @@ def _check(entry): return new return val - def __setitem__(self, key, value, unrepr=False): """ Correctly set a value. @@ -641,7 +632,6 @@ def __setitem__(self, key, value, unrepr=False): raise TypeError('Value is not a string "%s".' % value) dict.__setitem__(self, key, value) - def __delitem__(self, key): """Remove items from the sequence when deleting.""" dict. __delitem__(self, key) @@ -652,7 +642,6 @@ def __delitem__(self, key): del self.comments[key] del self.inline_comments[key] - def get(self, key, default=None): """A version of ``get`` that doesn't bypass string interpolation.""" try: @@ -660,7 +649,6 @@ def get(self, key, default=None): except KeyError: return default - def update(self, indict): """ A version of update that uses our ``__setitem__``. @@ -668,7 +656,6 @@ def update(self, indict): for entry in indict: self[entry] = indict[entry] - def pop(self, key, default=MISSING): """ 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value. @@ -684,7 +671,6 @@ def pop(self, key, default=MISSING): del self[key] return val - def popitem(self): """Pops the first (key,val)""" sequence = (self.scalars + self.sections) @@ -695,7 +681,6 @@ def popitem(self): del self[key] return key, val - def clear(self): """ A version of clear that also affects scalars/sections @@ -713,7 +698,6 @@ def clear(self): self.defaults = [] self.extra_values = [] - def setdefault(self, key, default=None): """A version of setdefault that sets sequence if appropriate.""" try: @@ -722,39 +706,32 @@ def setdefault(self, key, default=None): self[key] = default return self[key] - def items(self): """D.items() -> list of D's (key, value) pairs, as 2-tuples""" return zip((self.scalars + self.sections), self.values()) - def keys(self): """D.keys() -> list of D's keys""" return (self.scalars + self.sections) - def values(self): """D.values() -> list of D's values""" return [self[key] for key in (self.scalars + self.sections)] - def iteritems(self): """D.iteritems() -> an iterator over the (key, value) items of D""" return iter(self.items()) - def iterkeys(self): """D.iterkeys() -> an iterator over the keys of D""" return iter((self.scalars + self.sections)) __iter__ = iterkeys - def itervalues(self): """D.itervalues() -> an iterator over the values of D""" return iter(self.values()) - def __repr__(self): """x.__repr__() <==> repr(x)""" def _getval(key): @@ -763,12 +740,11 @@ def _getval(key): except MissingInterpolationOption: return dict.__getitem__(self, key) return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) - for key in (self.scalars + self.sections)]) + for key in (self.scalars + self.sections)]) __str__ = __repr__ __str__.__doc__ = "x.__str__() <==> str(x)" - # Extra methods - not in a normal dictionary def dict(self): @@ -798,7 +774,6 @@ def dict(self): 
newdict[entry] = this_entry return newdict - def merge(self, indict): """ A recursive update - useful for merging config files. @@ -820,12 +795,11 @@ def merge(self, indict): """ for key, val in indict.items(): if (key in self and isinstance(self[key], dict) and - isinstance(val, dict)): + isinstance(val, dict)): self[key].merge(val) else: self[key] = val - def rename(self, oldkey, newkey): """ Change a keyname to another, without changing position in sequence. @@ -855,9 +829,8 @@ def rename(self, oldkey, newkey): self.comments[newkey] = comm self.inline_comments[newkey] = inline_comment - def walk(self, function, raise_errors=True, - call_on_sections=False, **keywargs): + call_on_sections=False, **keywargs): """ Walk every member and call a function on the keyword and value. @@ -940,7 +913,6 @@ def walk(self, function, raise_errors=True, **keywargs) return out - def as_bool(self, key): """ Accepts a key as input. The corresponding value must be a string or @@ -970,10 +942,8 @@ def as_bool(self, key): 0 """ val = self[key] - if val == True: - return True - elif val == False: - return False + if val is True or val is False: + return val else: try: if not isinstance(val, string_type): @@ -984,7 +954,6 @@ def as_bool(self, key): except KeyError: raise ValueError('Value "%s" is neither True nor False' % val) - def as_int(self, key): """ A convenience method which coerces the specified value to an integer. @@ -1007,7 +976,6 @@ def as_int(self, key): """ return int(self[key]) - def as_float(self, key): """ A convenience method which coerces the specified value to a float. @@ -1029,7 +997,6 @@ def as_float(self, key): """ return float(self[key]) - def as_list(self, key): """ A convenience method which fetches the specified value, guaranteeing @@ -1051,7 +1018,6 @@ def as_list(self, key): return list(result) return [result] - def restore_default(self, key): """ Restore (and return) default value for the specified key. @@ -1067,7 +1033,6 @@ def restore_default(self, key): self.defaults.append(key) return default - def restore_defaults(self): """ Recursively restore default values to all members @@ -1099,7 +1064,7 @@ class ConfigObj(Section): (.*) # value (including list values and comments) $ # line end ''', - re.VERBOSE) + re.VERBOSE) _sectionmarker = re.compile(r'''^ (\s*) # 1: indentation @@ -1112,7 +1077,7 @@ class ConfigObj(Section): ((?:\s*\])+) # 4: section marker close \s*(\#.*)? # 5: optional comment $''', - re.VERBOSE) + re.VERBOSE) # this regexp pulls list values out as a single string # or single values and comments @@ -1142,7 +1107,7 @@ class ConfigObj(Section): ) \s*(\#.*)? # optional comment $''', - re.VERBOSE) + re.VERBOSE) # use findall to get the members of a list value _listvalueexp = re.compile(r''' @@ -1153,7 +1118,7 @@ class ConfigObj(Section): ) \s*,\s* # comma ''', - re.VERBOSE) + re.VERBOSE) # this regexp is used for the value # when lists are switched off @@ -1166,7 +1131,7 @@ class ConfigObj(Section): ) \s*(\#.*)? 
# optional comment $''', - re.VERBOSE) + re.VERBOSE) # regexes for finding triple quoted values on one line _single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$") @@ -1185,8 +1150,7 @@ class ConfigObj(Section): 'on': True, 'off': False, '1': True, '0': False, 'true': True, 'false': False, - } - + } def __init__(self, infile=None, options=None, configspec=None, encoding=None, interpolation=True, raise_errors=False, list_values=True, @@ -1245,7 +1209,6 @@ def __init__(self, infile=None, options=None, configspec=None, encoding=None, self._original_configspec = configspec self._load(infile, configspec) - def _load(self, infile, configspec): if isinstance(infile, string_type): self.filename = infile @@ -1342,7 +1305,6 @@ def set_section(in_section, this_section): else: self._handle_configspec(configspec) - def _initialise(self, options=None): if options is None: options = OPTION_DEFAULTS @@ -1374,7 +1336,6 @@ def _initialise(self, options=None): # Clear section attributes as well Section._initialise(self) - def __repr__(self): def _getval(key): try: @@ -1383,8 +1344,7 @@ def _getval(key): return dict.__getitem__(self, key) return ('ConfigObj({%s})' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) - for key in (self.scalars + self.sections)])) - + for key in (self.scalars + self.sections)])) def _handle_bom(self, infile): """ @@ -1409,7 +1369,7 @@ def _handle_bom(self, infile): passed in as a single string. """ if ((self.encoding is not None) and - (self.encoding.lower() not in BOM_LIST)): + (self.encoding.lower() not in BOM_LIST)): # No need to check for a BOM # the encoding specified doesn't have one # just decode @@ -1486,7 +1446,6 @@ def _handle_bom(self, infile): return infile.splitlines(True) return infile - def _a_to_u(self, aString): """Decode ASCII strings to unicode if a self.encoding is specified.""" if self.encoding: @@ -1494,7 +1453,6 @@ def _a_to_u(self, aString): else: return aString - def _decode(self, infile, encoding): """ Decode infile to unicode. Using the specified encoding. @@ -1506,14 +1464,13 @@ def _decode(self, infile, encoding): # NOTE: Could raise a ``UnicodeDecodeError`` return infile.decode(encoding).splitlines(True) for i, line in enumerate(infile): - if not isinstance(line, unicode): + if isinstance(line, str): # NOTE: The isinstance test here handles mixed lists of unicode/string # NOTE: But the decode will break on any non-string values # NOTE: Or could raise a ``UnicodeDecodeError`` infile[i] = line.decode(encoding) return infile - def _decode_element(self, line): """Decode element to unicode if necessary.""" if not self.encoding: @@ -1522,7 +1479,6 @@ def _decode_element(self, line): return line.decode(self.default_encoding) return line - def _str(self, value): """ Used by ``stringify`` within validate, to turn non-string values @@ -1533,7 +1489,6 @@ def _str(self, value): else: return value - def _parse(self, infile): """Actually parse the config file.""" temp_list_values = self.list_values @@ -1646,12 +1601,12 @@ def _parse(self, infile): try: value = unrepr(value) except Exception as e: - if type(e) == UnknownType: + if isinstance(e, UnknownType): msg = 'Unknown name or type in value at line %s.' else: msg = 'Parse error in value at line %s.' self._handle_error(msg, UnreprError, infile, - cur_index) + cur_index) continue else: if self.unrepr: @@ -1664,7 +1619,7 @@ def _parse(self, infile): else: msg = 'Parse error in value at line %s.' 
self._handle_error(msg, UnreprError, infile, - cur_index) + cur_index) continue else: # extract comment and lists @@ -1701,7 +1656,6 @@ def _parse(self, infile): self.final_comment = comment_list self.list_values = temp_list_values - def _match_depth(self, sect, depth): """ Given a section and a depth level, walk back through the sections @@ -1720,7 +1674,6 @@ def _match_depth(self, sect, depth): # shouldn't get here raise SyntaxError() - def _handle_error(self, text, ErrorClass, infile, cur_index): """ Handle an error according to the error settings. @@ -1739,7 +1692,6 @@ def _handle_error(self, text, ErrorClass, infile, cur_index): # reraise when parsing has finished self._errors.append(error) - def _unquote(self, value): """Return an unquoted version of a value""" if not value: @@ -1749,7 +1701,6 @@ def _unquote(self, value): value = value[1:-1] return value - def _quote(self, value, multiline=True): """ Return a safely quoted version of a value. @@ -1780,7 +1731,7 @@ def _quote(self, value, multiline=True): elif len(value) == 1: return self._quote(value[0], multiline=False) + ',' return ', '.join([self._quote(val, multiline=False) - for val in value]) + for val in value]) if not isinstance(value, string_type): if self.stringify: value = str(value) @@ -1818,7 +1769,6 @@ def _quote(self, value, multiline=True): return quot % value - def _get_single_quote(self, value): if ("'" in value) and ('"' in value): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) @@ -1828,7 +1778,6 @@ def _get_single_quote(self, value): quot = dquot return quot - def _get_triple_quote(self, value): if (value.find('"""') != -1) and (value.find("'''") != -1): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) @@ -1838,7 +1787,6 @@ def _get_triple_quote(self, value): quot = tsquot return quot - def _handle_value(self, value): """ Given a value string, unquote, remove comment, @@ -1887,7 +1835,6 @@ def _handle_value(self, value): the_list += [single] return (the_list, comment) - def _multiline(self, value, infile, cur_index, maxline): """Extract the value, where we are in a multiline situation.""" quot = value[:3] @@ -1922,7 +1869,6 @@ def _multiline(self, value, infile, cur_index, maxline): (value, comment) = mat.groups() return (newvalue + value, comment, cur_index) - def _handle_configspec(self, configspec): """Parse the configspec.""" # FIXME: Should we check that the configspec was created with the @@ -1942,8 +1888,6 @@ def _handle_configspec(self, configspec): self.configspec = configspec - - def _set_configspec(self, section, copy): """ Called by validate. Handles setting the configspec on subsections @@ -1971,7 +1915,6 @@ def _set_configspec(self, section, copy): if isinstance(section[entry], Section): section[entry].configspec = configspec[entry] - def _write_line(self, indent_string, entry, this_entry, comment): """Write an individual line, for the write method""" # NOTE: the calls to self._quote here handles non-StringType values. 
@@ -1985,7 +1928,6 @@ def _write_line(self, indent_string, entry, this_entry, comment): val, self._decode_element(comment)) - def _write_marker(self, indent_string, depth, entry, comment): """Write a section marker line""" return '%s%s%s%s%s' % (indent_string, @@ -1994,7 +1936,6 @@ def _write_marker(self, indent_string, depth, entry, comment): self._a_to_u(']' * depth), self._decode_element(comment)) - def _handle_comment(self, comment): """Deal with a comment.""" if not comment: @@ -2004,7 +1945,6 @@ def _handle_comment(self, comment): start += self._a_to_u(' # ') return (start + comment) - # Public methods def write(self, outfile=None, section=None): @@ -2085,9 +2025,9 @@ def write(self, outfile=None, section=None): # might need to encode # NOTE: This will *screw* UTF16, each line will start with the BOM if self.encoding: - out = [l.encode(self.encoding) for l in out] + out = [line.encode(self.encoding) for line in out] if (self.BOM and ((self.encoding is None) or - (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): + (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): # Add the UTF8 BOM if not out: out.append('') @@ -2097,7 +2037,7 @@ def write(self, outfile=None, section=None): # Turn the list to a string, joined with correct newlines newline = self.newlines or os.linesep if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w' - and sys.platform == 'win32' and newline == '\r\n'): + and sys.platform == 'win32' and newline == '\r\n'): # Windows specific hack to avoid writing '\r\r\n' newline = '\n' output = self._a_to_u(newline).join(out) @@ -2116,7 +2056,6 @@ def write(self, outfile=None, section=None): h.write(output) h.close() - def validate(self, validator, preserve_errors=False, copy=False, section=None): """ @@ -2177,7 +2116,6 @@ def validate(self, validator, preserve_errors=False, copy=False, configspec = section.configspec self._set_configspec(section, copy) - def validate_entry(entry, spec, val, missing, ret_true, ret_false): section.default_values.pop(entry, None) @@ -2235,7 +2173,7 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): if entry in ('__many__', '___many___'): # reserved names continue - if (not entry in section.scalars) or (entry in section.defaults): + if (entry not in section.scalars) or (entry in section.defaults): # missing entries # or entries from defaults missing = True @@ -2298,9 +2236,9 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): section.inline_comments[entry] = configspec.inline_comments.get(entry, '') check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry]) out[entry] = check - if check == False: + if check is False: ret_true = False - elif check == True: + elif check is True: ret_false = False else: ret_true = False @@ -2323,7 +2261,6 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): return False return out - def reset(self): """Clear ConfigObj instance and restore to 'freshly created' state.""" self.clear() @@ -2334,7 +2271,6 @@ def reset(self): # Just to be sure ;-) self._original_configspec = None - def reload(self): """ Reload a ConfigObj from file. @@ -2360,7 +2296,6 @@ def reload(self): self._load(filename, configspec) - class SimpleVal(object): """ A simple validator. 
@@ -2419,15 +2354,15 @@ def flatten_errors(cfg, res, levels=None, results=None): # first time called levels = [] results = [] - if res == True: + if res is True: return results - if res == False or isinstance(res, Exception): + if res is False or isinstance(res, Exception): results.append((levels[:], None, res)) if levels: levels.pop() return results for (key, val) in res.items(): - if val == True: + if val is True: continue if isinstance(cfg.get(key), dict): # Go down one level diff --git a/easybuild/tools/containers/utils.py b/easybuild/tools/containers/utils.py index b259763d56..4902d2c114 100644 --- a/easybuild/tools/containers/utils.py +++ b/easybuild/tools/containers/utils.py @@ -78,7 +78,7 @@ def check_tool(tool_name, min_tool_version=None): out, ec = run_cmd(version_cmd, simple=False, trace=False, force_in_dry_run=True) if ec: raise EasyBuildError("Error running '{0}' for tool {1} with output: {2}".format(version_cmd, tool_name, out)) - res = re.search("\d+\.\d+(\.\d+)?", out.strip()) + res = re.search(r"\d+\.\d+(\.\d+)?", out.strip()) if not res: raise EasyBuildError("Error parsing version for tool {0}".format(tool_name)) tool_version = res.group(0) diff --git a/easybuild/tools/docs.py b/easybuild/tools/docs.py index 1d1342e2c1..3770ed34d4 100644 --- a/easybuild/tools/docs.py +++ b/easybuild/tools/docs.py @@ -490,7 +490,7 @@ def add_class(classes, cls): txt.append(format_strings['root_templ'] % root) if format_strings.get('newline') is not None: - txt.append(format_strings['newline']) + txt.append(format_strings['newline']) if 'children' in classes[root]: txt.extend(avail_classes_tree(classes, classes[root]['children'], locations, detailed, format_strings)) if format_strings.get('newline') is not None: @@ -522,7 +522,7 @@ def list_software(output_format=FORMAT_TXT, detailed=False, only_installed=False ecs.append(ec) print_msg('\r', prefix=False, newline=False, silent=silent) - print_msg("Processed %d/%d easyconfigs..." % (idx+1, cnt), newline=False, silent=silent) + print_msg("Processed %d/%d easyconfigs..." 
% (idx + 1, cnt), newline=False, silent=silent) print_msg('', prefix=False, silent=silent) software = {} @@ -602,7 +602,7 @@ def list_software_rst(software, detailed=False): def key_to_ref(name): """Create a reference label for the specified software name.""" - return 'list_software_%s_%d' % (name, sum(ord(l) for l in name)) + return 'list_software_%s_%d' % (name, sum(ord(letter) for letter in name)) letter = None sorted_keys = sorted(software.keys(), key=lambda x: x.lower()) @@ -744,28 +744,59 @@ def list_toolchains_rst(tcs): """ Returns overview of all toolchains in rst format """ title = "List of known toolchains" - # figure out column names - table_titles = ['name', 'compiler', 'MPI'] - for tc in tcs.values(): - table_titles.extend(tc.keys()) + # Specify the column names for the table + table_titles = ['NAME', 'COMPILER', 'MPI', 'LINALG', 'FFT'] + # Set up column name : display name pairs col_names = { - 'COMPILER_CUDA': 'CUDA compiler', - 'SCALAPACK': 'ScaLAPACK', + 'NAME': 'Name', + 'COMPILER': 'Compiler(s)', + 'LINALG': "Linear algebra", } - table_titles = nub(table_titles) + # Create sorted list of toolchain names + sorted_tc_names = sorted(tcs.keys(), key=str.lower) - table_values = [[] for i in range(len(table_titles))] - table_values[0] = ['**%s**' % tcname for tcname in tcs.keys()] + # Create text placeholder to use for missing entries + none_txt = '*(none)*' - for idx in range(1, len(table_titles)): - for tc in tcs.values(): - table_values[idx].append(', '.join(tc.get(table_titles[idx].upper(), []))) + # Initialize an empty list of lists for the table data + table_values = [[] for i in range(len(table_titles))] + for col_id, col_name in enumerate(table_titles): + if col_name == 'NAME': + # toolchain names column gets bold face entry + table_values[col_id] = ['**%s**' % tcname for tcname in sorted_tc_names] + else: + for tc_name in sorted_tc_names: + tc = tcs[tc_name] + if 'cray' in tc_name.lower(): + if col_name == 'COMPILER': + entry = ', '.join(tc[col_name.upper()]) + elif col_name == 'MPI': + entry = 'cray-mpich' + elif col_name == 'LINALG': + entry = 'cray-libsci' + # Combine the linear algebra libraries into a single column + elif col_name == 'LINALG': + linalg = [] + for col in ['BLAS', 'LAPACK', 'SCALAPACK']: + linalg.extend(tc.get(col, [])) + entry = ', '.join(nub(linalg)) or none_txt + else: + # for other columns, we can grab the values via 'tc' + # key = col_name + entry = ', '.join(tc.get(col_name, [])) or none_txt + table_values[col_id].append(entry) + + # Set the table titles to the pretty ones table_titles = [col_names.get(col, col) for col in table_titles] + + # Pass the data to the rst formatter, which is returned as a list, each element + # is an rst formatted text row. doc = rst_title_and_table(title, table_titles, table_values) + # Make a string with line endings suitable to write to document file return '\n'.join(doc) @@ -871,7 +902,7 @@ def gen_easyblock_doc_section_rst(eb_class, path_to_examples, common_params, doc '.. _' + classname + ':', '', '``' + classname + '``', - '=' * (len(classname)+4), + '=' * (len(classname) + 4), '', ] @@ -936,7 +967,7 @@ def gen_easyblock_doc_section_rst(eb_class, path_to_examples, common_params, doc if os.path.exists(os.path.join(path_to_examples, '%s.eb' % classname)): title = 'Example easyconfig for ``' + classname + '`` easyblock' doc.extend([title, '-' * len(title), '', '.. 
code::', '']) - for line in read_file(os.path.join(path_to_examples, classname+'.eb')).split('\n'): + for line in read_file(os.path.join(path_to_examples, classname + '.eb')).split('\n'): doc.append(INDENT_4SPACES + line) doc.append('') # empty line after literal block diff --git a/easybuild/tools/environment.py b/easybuild/tools/environment.py index 18e8c798d5..5a6e2e39dc 100644 --- a/easybuild/tools/environment.py +++ b/easybuild/tools/environment.py @@ -189,18 +189,39 @@ def sanitize_env(): """ Sanitize environment. - This function undefines all $PYTHON* environment variables, - since they may affect the build/install procedure of Python packages. + This function: - cfr. https://docs.python.org/2/using/cmdline.html#environment-variables + * Filters out empty entries from environment variables like $PATH, $LD_LIBRARY_PATH, etc. + Empty entries make no sense, and can cause problems, + see for example https://github.com/easybuilders/easybuild-easyconfigs/issues/9843 . - While the $PYTHON* environment variables may be relevant/required for EasyBuild itself, - and for any non-stdlib Python packages it uses, - they are irrelevant (and potentially harmful) when installing Python packages. + * Undefines all $PYTHON* environment variables, + since they may affect the build/install procedure of Python packages. - Note that this is not an airtight protection against the Python being used in the build/install procedure - picking up non-stdlib Python packages (e.g., setuptools, vsc-base, ...), thanks to the magic of .pth files, - cfr. https://docs.python.org/2/library/site.html . + cfr. https://docs.python.org/2/using/cmdline.html#environment-variables + + While the $PYTHON* environment variables may be relevant/required for EasyBuild itself, + and for any non-stdlib Python packages it uses, + they are irrelevant (and potentially harmful) when installing Python packages. + + Note that this is not an airtight protection against the Python being used in the build/install procedure + picking up non-stdlib Python packages (e.g., setuptools, vsc-base, ...), thanks to the magic of .pth files, + cfr. https://docs.python.org/2/library/site.html . 
""" + + # remove empty entries from $*PATH variables + for key in ['CPATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH', 'LD_PRELOAD', 'PATH']: + val = os.getenv(key) + if val: + entries = val.split(os.pathsep) + if '' in entries: + _log.info("Found %d empty entries in $%s, filtering them out...", entries.count(''), key) + newval = os.pathsep.join(x for x in entries if x) + if newval: + setvar(key, newval) + else: + unset_env_vars([key], verbose=False) + + # unset all $PYTHON* environment variables keys_to_unset = [key for key in os.environ if key.startswith('PYTHON')] unset_env_vars(keys_to_unset, verbose=False) diff --git a/easybuild/tools/filetools.py b/easybuild/tools/filetools.py index 20b8cd6335..a662169a3c 100644 --- a/easybuild/tools/filetools.py +++ b/easybuild/tools/filetools.py @@ -40,7 +40,6 @@ """ import datetime import difflib -import distutils.dir_util import fileinput import glob import hashlib @@ -49,19 +48,19 @@ import os import re import shutil +import signal import stat import sys import tempfile import time import zlib -from xml.etree import ElementTree from easybuild.base import fancylogger from easybuild.tools import run # import build_log must stay, to use of EasyBuildLog from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg, print_warning -from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, build_option -from easybuild.tools.py2vs3 import std_urllib, string_type +from easybuild.tools.config import DEFAULT_WAIT_ON_LOCK_INTERVAL, GENERIC_EASYBLOCK_PKG, build_option, install_path +from easybuild.tools.py2vs3 import HTMLParser, std_urllib, string_type from easybuild.tools.utilities import nub, remove_unwanted_chars try: @@ -156,12 +155,16 @@ '.tar.z': "tar xzf %(filepath)s", } +# global set of names of locks that were created in this session +global_lock_names = set() + class ZlibChecksum(object): """ wrapper class for adler32 and crc32 checksums to match the interface of the hashlib module """ + def __init__(self, algorithm): self.algorithm = algorithm self.checksum = algorithm(b'') # use the same starting point as the module @@ -372,7 +375,7 @@ def change_dir(path): return cwd -def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced=False): +def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced=False, change_into_dir=None): """ Extract file at given path to specified directory :param fn: path to file to extract :param dest: path to directory to extract file into :param cmd: extract command to use (derived from filename if not specified) :param extra_options: extra options to pass to extract command :param overwrite: overwrite existing unpacked file :param forced: force extraction in (extended) dry run mode + :param change_into_dir: change into resulting directory; + None (current default) implies True, but this is deprecated, + this named argument should be set to False or True explicitly + (in a future major release, default will be changed to False) :return: path to directory (in case of success) """ + if change_into_dir is None: + _log.deprecated("extract_file function was called without specifying value for change_into_dir", '5.0') + change_into_dir = True + if not os.path.isfile(fn) and not build_option('extended_dry_run'): raise EasyBuildError("Can't extract file %s: no such file", fn) @@ -392,8 +403,8 @@ def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced abs_dest = os.path.abspath(dest) # change working directory - _log.debug("Unpacking %s in directory %s.", fn, abs_dest) - 
change_dir(abs_dest) + _log.debug("Unpacking %s in directory %s", fn, abs_dest) + cwd = change_dir(abs_dest) if not cmd: cmd = extract_cmd(fn, overwrite=overwrite) @@ -408,7 +419,18 @@ def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced run.run_cmd(cmd, simple=True, force_in_dry_run=forced) - return find_base_dir() + # note: find_base_dir also changes into the base dir! + base_dir = find_base_dir() + + # if changing into obtained directory is not desired, + # change back to where we came from (unless that was a non-existing directory) + if not change_into_dir: + if cwd is None: + raise EasyBuildError("Can't change back to non-existing directory after extracting %s in %s", fn, dest) + else: + change_dir(cwd) + + return base_dir def which(cmd, retain_all=False, check_perms=True, log_ok=True, log_error=True): @@ -495,11 +517,23 @@ def pypi_source_urls(pkg_name): _log.debug("Failed to download %s to determine available PyPI URLs for %s", simple_url, pkg_name) res = [] else: - parsed_html = ElementTree.parse(urls_html) - if hasattr(parsed_html, 'iter'): - res = [a.attrib['href'] for a in parsed_html.iter('a')] - else: - res = [a.attrib['href'] for a in parsed_html.getiterator('a')] + urls_txt = read_file(urls_html) + + res = [] + + # note: don't use xml.etree.ElementTree to parse HTML page served by PyPI's simple API + # cfr. https://github.com/pypa/warehouse/issues/7886 + class HrefHTMLParser(HTMLParser): + """HTML parser to extract 'href' attribute values from anchor tags (<a>).""" + + def handle_starttag(self, tag, attrs): + if tag == 'a': + attrs = dict(attrs) + if 'href' in attrs: + res.append(attrs['href']) + + parser = HrefHTMLParser() + parser.feed(urls_txt) # links are relative, transform them into full URLs; for example: # from: ../../packages////easybuild-.tar.gz#md5= @@ -758,6 +792,18 @@ def find_easyconfigs(path, ignore_dirs=None): return files +def find_glob_pattern(glob_pattern, fail_on_no_match=True): + """Find unique file/dir matching glob_pattern (raises error if more than one match is found)""" + if build_option('extended_dry_run'): + return glob_pattern + res = glob.glob(glob_pattern) + if len(res) == 0 and not fail_on_no_match: + return None + if len(res) != 1: + raise EasyBuildError("Was expecting exactly one match for '%s', found %d: %s", glob_pattern, len(res), res) + return res[0] + + def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filename_only=False, terse=False, case_sensitive=False): """ @@ -1186,7 +1232,8 @@ def apply_patch(patch_file, dest, fn=None, copy=False, level=None, use_git_am=Fa workdir = tempfile.mkdtemp(prefix='eb-patch-') _log.debug("Extracting the patch to: %s", workdir) # extracting the patch - apatch_dir = extract_file(apatch, workdir) + apatch_dir = extract_file(apatch, workdir, change_into_dir=False) + change_dir(apatch_dir) apatch = os.path.join(apatch_dir, apatch_name) if level is None and build_option('extended_dry_run'): @@ -1476,6 +1523,131 @@ def mkdir(path, parents=False, set_gid=None, sticky=None): _log.debug("Not creating existing path %s" % path) +def det_lock_path(lock_name): + """ + Determine full path for lock with specified name. 
+ """ + locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') + return os.path.join(locks_dir, lock_name + '.lock') + + +def create_lock(lock_name): + """Create lock with specified name.""" + + lock_path = det_lock_path(lock_name) + _log.info("Creating lock at %s...", lock_path) + try: + # we use a directory as a lock, since that's atomically created + mkdir(lock_path, parents=True) + global_lock_names.add(lock_name) + except EasyBuildError as err: + # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes + stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') + raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + _log.info("Lock created: %s", lock_path) + + +def check_lock(lock_name): + """ + Check whether a lock with specified name already exists. + + If it exists, either wait until it's released, or raise an error + (depending on --wait-on-lock configuration option). + """ + lock_path = det_lock_path(lock_name) + if os.path.exists(lock_path): + _log.info("Lock %s exists!", lock_path) + + wait_interval = build_option('wait_on_lock_interval') + wait_limit = build_option('wait_on_lock_limit') + + # --wait-on-lock is deprecated, should use --wait-on-lock-limit and --wait-on-lock-interval instead + wait_on_lock = build_option('wait_on_lock') + if wait_on_lock is not None: + depr_msg = "Use of --wait-on-lock is deprecated, use --wait-on-lock-limit and --wait-on-lock-interval" + _log.deprecated(depr_msg, '5.0') + + # if --wait-on-lock-interval has default value and --wait-on-lock is specified too, the latter wins + # (required for backwards compatibility) + if wait_interval == DEFAULT_WAIT_ON_LOCK_INTERVAL and wait_on_lock > 0: + wait_interval = wait_on_lock + + # if --wait-on-lock-limit is not specified we need to wait indefinitely if --wait-on-lock is specified, + # since the original semantics of --wait-on-lock was that it specified the waiting time interval (no limit) + if not wait_limit: + wait_limit = -1 + + # wait limit could be zero (no waiting), -1 (no waiting limit) or non-zero value (waiting limit in seconds) + if wait_limit != 0: + wait_time = 0 + while os.path.exists(lock_path) and (wait_limit == -1 or wait_time < wait_limit): + print_msg("lock %s exists, waiting %d seconds..." % (lock_path, wait_interval), + silent=build_option('silent')) + time.sleep(wait_interval) + wait_time += wait_interval + + if os.path.exists(lock_path) and wait_limit != -1 and wait_time >= wait_limit: + error_msg = "Maximum wait time for lock %s to be released reached: %s sec >= %s sec" + raise EasyBuildError(error_msg, lock_path, wait_time, wait_limit) + else: + _log.info("Lock %s was released!", lock_path) + else: + raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + else: + _log.info("Lock %s does not exist", lock_path) + + +def remove_lock(lock_name): + """ + Remove lock with specified name. + """ + lock_path = det_lock_path(lock_name) + _log.info("Removing lock %s...", lock_path) + remove_dir(lock_path) + if lock_name in global_lock_names: + global_lock_names.remove(lock_name) + _log.info("Lock removed: %s", lock_path) + + +def clean_up_locks(): + """ + Clean up all still existing locks that were created in this session. + """ + for lock_name in list(global_lock_names): + remove_lock(lock_name) + + +def clean_up_locks_signal_handler(signum, frame): + """ + Signal handler, cleans up locks & exits with received signal number. 
+ """ + + if not build_option('silent'): + print_warning("signal received (%s), cleaning up locks (%s)..." % (signum, ', '.join(global_lock_names))) + clean_up_locks() + + # by default, a KeyboardInterrupt is raised with SIGINT, so keep doing so + if signum == signal.SIGINT: + raise KeyboardInterrupt("keyboard interrupt") + else: + sys.exit(signum) + + +def register_lock_cleanup_signal_handlers(): + """ + Register signal handler for signals that cancel the current EasyBuild session, + so we can clean up the locks that were created first. + """ + signums = [ + signal.SIGABRT, + signal.SIGINT, # Ctrl-C + signal.SIGTERM, # signal 15, soft kill (like when Slurm job is cancelled or received timeout) + signal.SIGQUIT, # kinda like Ctrl-C + ] + for signum in signums: + signal.signal(signum, clean_up_locks_signal_handler) + + def expand_glob_paths(glob_paths): """Expand specified glob paths to a list of unique non-glob paths to only files.""" paths = [] @@ -1637,83 +1809,9 @@ def cleanup(logfile, tempdir, testing, silent=False): def copytree(src, dst, symlinks=False, ignore=None): - """ - Copied from Lib/shutil.py in python 2.7, since we need this to work for python2.4 aswell - and this code can be improved... - - Recursively copy a directory tree using copy2(). - - The destination directory must not already exist. - If exception(s) occur, an Error is raised with a list of reasons. - - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. - - The optional ignore argument is a callable. If given, it - is called with the `src` parameter, which is the directory - being visited by copytree(), and `names` which is the list of - `src` contents, as returned by os.listdir(): - - callable(src, names) -> ignored_names - - Since copytree() is called recursively, the callable will be - called once for each directory that is copied. It returns a - list of names relative to the `src` directory that should - not be copied. - - XXX Consider this example code rather than the ultimate tool. - - """ + """DEPRECATED and removed. 
Use copy_dir""" _log.deprecated("Use 'copy_dir' rather than 'copytree'", '4.0') - class Error(EnvironmentError): - pass - try: - WindowsError # @UndefinedVariable - except NameError: - WindowsError = None - - names = os.listdir(src) - if ignore is not None: - ignored_names = ignore(src, names) - else: - ignored_names = set() - _log.debug("copytree: skipping copy of %s" % ignored_names) - os.makedirs(dst) - errors = [] - for name in names: - if name in ignored_names: - continue - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if symlinks and os.path.islink(srcname): - linkto = os.readlink(srcname) - os.symlink(linkto, dstname) - elif os.path.isdir(srcname): - copytree(srcname, dstname, symlinks, ignore) - else: - # Will raise a SpecialFileError for unsupported file types - shutil.copy2(srcname, dstname) - # catch the Error from the recursive copytree so that we can - # continue with other files - except Error as err: - errors.extend(err.args[0]) - except EnvironmentError as why: - errors.append((srcname, dstname, str(why))) - try: - shutil.copystat(src, dst) - except OSError as why: - if WindowsError is not None and isinstance(why, WindowsError): - # Copying file access times may fail on Windows - pass - else: - errors.extend((src, dst, str(why))) - if errors: - raise Error(errors) - def encode_string(name): """ @@ -1907,7 +2005,12 @@ def copy_file(path, target_path, force_in_dry_run=False): _log.info("Copied contents of file %s to %s", path, target_path) else: mkdir(os.path.dirname(target_path), parents=True) - shutil.copy2(path, target_path) + if os.path.exists(path): + shutil.copy2(path, target_path) + elif os.path.islink(path): + # special care for copying broken symlinks + link_target = os.readlink(path) + symlink(link_target, target_path) _log.info("%s copied to %s", path, target_path) except (IOError, OSError, shutil.Error) as err: raise EasyBuildError("Failed to copy file %s to %s: %s", path, target_path, err) @@ -1942,16 +2045,13 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k :param path: the original directory path :param target_path: path to copy the directory to :param force_in_dry_run: force running the command during dry run - :param dirs_exist_ok: wrapper around shutil.copytree option, which was added in Python 3.8 - - On Python >= 3.8 shutil.copytree is always used - On Python < 3.8 if 'dirs_exist_ok' is False - shutil.copytree is used - On Python < 3.8 if 'dirs_exist_ok' is True - distutils.dir_util.copy_tree is used + :param dirs_exist_ok: boolean indicating whether it's OK if the target directory already exists - Additional specified named arguments are passed down to shutil.copytree if used. + shutil.copytree is used if the target path does not exist yet; + if the target path already exists, the 'copy' function will be used to copy the contents of + the source path to the target path - Because distutils.dir_util.copy_tree supports only 'symlinks' named argument, - using any other will raise EasyBuildError. + Additional specified named arguments are passed down to shutil.copytree/copy if used. 
""" if not force_in_dry_run and build_option('extended_dry_run'): dry_run_msg("copied directory %s to %s" % (path, target_path)) @@ -1960,38 +2060,49 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k if not dirs_exist_ok and os.path.exists(target_path): raise EasyBuildError("Target location %s to copy %s to already exists", target_path, path) - if sys.version_info >= (3, 8): - # on Python >= 3.8, shutil.copytree works fine, thanks to availability of dirs_exist_ok named argument - shutil.copytree(path, target_path, dirs_exist_ok=dirs_exist_ok, **kwargs) + # note: in Python >= 3.8 shutil.copytree works just fine thanks to the 'dirs_exist_ok' argument, + # but since we need to be more careful in earlier Python versions we use our own implementation + # in case the target directory exists and 'dirs_exist_ok' is enabled + if dirs_exist_ok and os.path.exists(target_path): + # if target directory already exists (and that's allowed via dirs_exist_ok), + # we need to be more careful, since shutil.copytree will fail (in Python < 3.8) + # if target directory already exists; + # so, recurse via 'copy' function to copy files/dirs in source path to target path + # (NOTE: don't use distutils.dir_util.copy_tree here, see + # https://github.com/easybuilders/easybuild-framework/issues/3306) - elif dirs_exist_ok: - # use distutils.dir_util.copy_tree with Python < 3.8 if dirs_exist_ok is enabled + entries = os.listdir(path) - # first get value for symlinks named argument (if any) - preserve_symlinks = kwargs.pop('symlinks', False) + # take into account 'ignore' function that is supported by shutil.copytree + # (but not by 'copy_file' function used by 'copy') + ignore = kwargs.get('ignore') + if ignore: + ignored_entries = ignore(path, entries) + entries = [x for x in entries if x not in ignored_entries] - # check if there are other named arguments (there shouldn't be, only 'symlinks' is supported) - if kwargs: - raise EasyBuildError("Unknown named arguments passed to copy_dir with dirs_exist_ok=True: %s", - ', '.join(sorted(kwargs.keys()))) - distutils.dir_util.copy_tree(path, target_path, preserve_symlinks=preserve_symlinks) + # determine list of paths to copy + paths_to_copy = [os.path.join(path, x) for x in entries] + + copy(paths_to_copy, target_path, + force_in_dry_run=force_in_dry_run, dirs_exist_ok=dirs_exist_ok, **kwargs) else: - # if dirs_exist_ok is not enabled, just use shutil.copytree + # if dirs_exist_ok is not enabled or target directory doesn't exist, just use shutil.copytree shutil.copytree(path, target_path, **kwargs) _log.info("%s copied to %s", path, target_path) - except (IOError, OSError) as err: + except (IOError, OSError, shutil.Error) as err: raise EasyBuildError("Failed to copy directory %s to %s: %s", path, target_path, err) -def copy(paths, target_path, force_in_dry_run=False): +def copy(paths, target_path, force_in_dry_run=False, **kwargs): """ Copy single file/directory or list of files and directories to specified location :param paths: path(s) to copy :param target_path: target location :param force_in_dry_run: force running the command during dry run + :param kwargs: additional named arguments to pass down to copy_dir """ if isinstance(paths, string_type): paths = [paths] @@ -2002,10 +2113,11 @@ def copy(paths, target_path, force_in_dry_run=False): full_target_path = os.path.join(target_path, os.path.basename(path)) mkdir(os.path.dirname(full_target_path), parents=True) - if os.path.isfile(path): + # copy broken symlinks only if 'symlinks=True' 
is used + if os.path.isfile(path) or (os.path.islink(path) and kwargs.get('symlinks')): copy_file(path, full_target_path, force_in_dry_run=force_in_dry_run) elif os.path.isdir(path): - copy_dir(path, full_target_path, force_in_dry_run=force_in_dry_run) + copy_dir(path, full_target_path, force_in_dry_run=force_in_dry_run, **kwargs) else: raise EasyBuildError("Specified path to copy is not an existing file or directory: %s", path) @@ -2116,8 +2228,8 @@ def diff_files(path1, path2): """ Return unified diff between two files """ - file1_lines = ['%s\n' % l for l in read_file(path1).split('\n')] - file2_lines = ['%s\n' % l for l in read_file(path2).split('\n')] + file1_lines = ['%s\n' % line for line in read_file(path1).split('\n')] + file2_lines = ['%s\n' % line for line in read_file(path2).split('\n')] return ''.join(difflib.unified_diff(file1_lines, file2_lines, fromfile=path1, tofile=path2)) diff --git a/easybuild/tools/github.py b/easybuild/tools/github.py index 9eb9219dd8..816650d2f5 100644 --- a/easybuild/tools/github.py +++ b/easybuild/tools/github.py @@ -50,7 +50,7 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser from easybuild.tools.build_log import EasyBuildError, print_msg, print_warning from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_patch, copy_dir, copy_easyblocks, copy_framework_files +from easybuild.tools.filetools import apply_patch, change_dir, copy_dir, copy_easyblocks, copy_framework_files from easybuild.tools.filetools import det_patched_files, download_file, extract_file from easybuild.tools.filetools import get_easyblock_class_name, mkdir, read_file, symlink, which, write_file from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters, urlopen @@ -360,7 +360,9 @@ def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_ download_file(base_name, url, target_path, forced=True) _log.debug("%s downloaded to %s, extracting now" % (base_name, path)) - extracted_path = os.path.join(extract_file(target_path, path, forced=True), extracted_dir_name) + base_dir = extract_file(target_path, path, forced=True, change_into_dir=False) + change_dir(base_dir) + extracted_path = os.path.join(base_dir, extracted_dir_name) # check if extracted_path exists if not os.path.isdir(extracted_path): @@ -1008,7 +1010,7 @@ def find_software_name_for_patch(patch_name, ec_dirs): break except EasyBuildError as err: _log.debug("Ignoring easyconfig %s that fails to parse: %s", path, err) - sys.stdout.write('\r%s of %s easyconfigs checked' % (idx+1, nr_of_ecs)) + sys.stdout.write('\r%s of %s easyconfigs checked' % (idx + 1, nr_of_ecs)) sys.stdout.flush() sys.stdout.write('\n') @@ -1333,11 +1335,18 @@ def new_branch_github(paths, ecs, commit_msg=None): @only_if_module_is_available('git', pkgname='GitPython') -def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None): +def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None, commit_msg=None): """ Create new pull request from specified branch on GitHub. 
""" + if descr is None: + descr = build_option('pr_descr') + if commit_msg is None: + commit_msg = build_option('pr_commit_msg') + if title is None: + title = build_option('pr_title') or commit_msg + pr_target_account = build_option('pr_target_account') pr_target_branch = build_option('pr_target_branch') if pr_target_repo is None: @@ -1550,19 +1559,15 @@ def new_pr(paths, ecs, title=None, descr=None, commit_msg=None): :param commit_msg: commit message to use """ - if descr is None: - descr = build_option('pr_descr') if commit_msg is None: commit_msg = build_option('pr_commit_msg') - if title is None: - title = build_option('pr_title') or commit_msg # create new branch in GitHub res = new_branch_github(paths, ecs, commit_msg=commit_msg) file_info, deleted_paths, _, branch_name, diff_stat, pr_target_repo = res new_pr_from_branch(branch_name, title=title, descr=descr, pr_target_repo=pr_target_repo, - pr_metadata=(file_info, deleted_paths, diff_stat)) + pr_metadata=(file_info, deleted_paths, diff_stat), commit_msg=commit_msg) def det_account_branch_for_pr(pr_id, github_user=None, pr_target_repo=None): diff --git a/easybuild/tools/include.py b/easybuild/tools/include.py index 2e85d99e20..67ded09f53 100644 --- a/easybuild/tools/include.py +++ b/easybuild/tools/include.py @@ -68,7 +68,7 @@ import pkgutil # extend path so Python finds our easyblocks in the subdirectories where they are located -subdirs = [chr(l) for l in range(ord('a'), ord('z') + 1)] + ['0'] +subdirs = [chr(char) for char in range(ord('a'), ord('z') + 1)] + ['0'] for subdir in subdirs: __path__ = pkgutil.extend_path(__path__, '%s.%s' % (__name__, subdir)) @@ -143,7 +143,7 @@ def verify_imports(pymods, pypkg, from_path): def is_software_specific_easyblock(module): """Determine whether Python module at specified location is a software-specific easyblock.""" - return bool(re.search('^class EB_.*\(.*\):\s*$', read_file(module), re.M)) + return bool(re.search(r'^class EB_.*\(.*\):\s*$', read_file(module), re.M)) def include_easyblocks(tmpdir, paths): diff --git a/easybuild/tools/job/gc3pie.py b/easybuild/tools/job/gc3pie.py index 80593a037e..79a9728131 100644 --- a/easybuild/tools/job/gc3pie.py +++ b/easybuild/tools/job/gc3pie.py @@ -69,7 +69,7 @@ class AbortingDependentTaskCollection(AbortOnError, DependentTaskCollection): """ pass -except ImportError as err: +except ImportError: _log.debug("Failed to import gc3libs from GC3Pie." " Silently ignoring, this is a real issue only when GC3Pie is used as backend for --job") diff --git a/easybuild/tools/job/pbs_python.py b/easybuild/tools/job/pbs_python.py index 6e427b6e2c..d91f2fa915 100644 --- a/easybuild/tools/job/pbs_python.py +++ b/easybuild/tools/job/pbs_python.py @@ -54,7 +54,7 @@ from PBSQuery import PBSQuery KNOWN_HOLD_TYPES = [pbs.USER_HOLD, pbs.OTHER_HOLD, pbs.SYSTEM_HOLD] -except ImportError as err: +except ImportError: _log.debug("Failed to import pbs/PBSQuery from pbs_python." 
" Silently ignoring, this is a real issue only when pbs_python is used as backend for --job") @@ -171,7 +171,8 @@ def _get_ppn(self): # return most frequent freq_count, freq_np = max([(j, i) for i, j in res.items()]) - self.log.debug("Found most frequent np %s (%s times) in interesting nodes %s" % (freq_np, freq_count, interesting_nodes)) + self.log.debug("Found most frequent np %s (%s times) in interesting nodes %s", + freq_np, freq_count, interesting_nodes) self._ppn = freq_np @@ -346,7 +347,8 @@ def _submit(self): self.log.debug("Going to submit to queue %s" % self.queue) - # job submission sometimes fails without producing an error, e.g. when one of the dependency jobs has already finished + # job submission sometimes fails without producing an error, + # e.g. when one of the dependency jobs has already finished # when that occurs, None will be returned by pbs_submit as job id jobid = pbs.pbs_submit(self.pbsconn, pbs_attributes, scriptfn, self.queue, NULL) is_error, errormsg = pbs.error() @@ -453,7 +455,7 @@ def info(self, types=None): return None # convert single type into list - if type(types) is str: + if isinstance(types, str): types = [types] self.log.debug("Return info types %s" % types) diff --git a/easybuild/tools/module_generator.py b/easybuild/tools/module_generator.py index e3859880e1..32458cc829 100644 --- a/easybuild/tools/module_generator.py +++ b/easybuild/tools/module_generator.py @@ -524,7 +524,8 @@ def _generate_extension_list(self): """ Generate a string with a comma-separated list of extensions. """ - exts_list = self.app.cfg['exts_list'] + # We need only name and version, so don't resolve templates + exts_list = self.app.cfg.get_ref('exts_list') extensions = ', '.join(sorted(['-'.join(ext[:2]) for ext in exts_list], key=str.lower)) return extensions @@ -735,7 +736,7 @@ def get_description(self, conflict=True): """ txt = '\n'.join([ "proc ModulesHelp { } {", - " puts stderr {%s" % re.sub('([{}\[\]])', r'\\\1', self._generate_help_text()), + " puts stderr {%s" % re.sub(r'([{}\[\]])', r'\\\1', self._generate_help_text()), " }", '}', '', @@ -762,7 +763,10 @@ def get_description(self, conflict=True): # - 'conflict Compiler/GCC/4.8.2/OpenMPI' for 'Compiler/GCC/4.8.2/OpenMPI/1.6.4' lines.extend(['', "conflict %s" % os.path.dirname(self.app.short_mod_name)]) - whatis_lines = ["module-whatis {%s}" % re.sub(r'([{}\[\]])', r'\\\1', l) for l in self._generate_whatis_lines()] + whatis_lines = [ + "module-whatis {%s}" % re.sub(r'([{}\[\]])', r'\\\1', line) + for line in self._generate_whatis_lines() + ] txt += '\n'.join([''] + lines + ['']) % { 'name': self.app.name, 'version': self.app.version, diff --git a/easybuild/tools/module_naming_scheme/categorized_mns.py b/easybuild/tools/module_naming_scheme/categorized_mns.py index 46f0b0e86d..3cc0ae1746 100644 --- a/easybuild/tools/module_naming_scheme/categorized_mns.py +++ b/easybuild/tools/module_naming_scheme/categorized_mns.py @@ -35,6 +35,7 @@ from easybuild.tools.module_naming_scheme.mns import ModuleNamingScheme from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version + class CategorizedModuleNamingScheme(ModuleNamingScheme): """Class implementing the categorized module naming scheme.""" @@ -55,11 +56,10 @@ def is_short_modname_for(self, short_modname, name): Default implementation checks via a strict regex pattern, and assumes short module names are of the form: /[-] """ - modname_regex = re.compile('^[^/]+/%s/\S+$' % re.escape(name)) + modname_regex = re.compile(r'^[^/]+/%s/\S+$' % re.escape(name)) res 
= bool(modname_regex.match(short_modname)) tup = (short_modname, name, modname_regex.pattern, res) self.log.debug("Checking whether '%s' is a module name for software with name '%s' via regex %s: %s" % tup) return res - diff --git a/easybuild/tools/module_naming_scheme/hierarchical_mns.py b/easybuild/tools/module_naming_scheme/hierarchical_mns.py index f6d7cf1423..1647f4c96e 100644 --- a/easybuild/tools/module_naming_scheme/hierarchical_mns.py +++ b/easybuild/tools/module_naming_scheme/hierarchical_mns.py @@ -41,9 +41,11 @@ CORE = 'Core' COMPILER = 'Compiler' MPI = 'MPI' +TOOLCHAIN = 'Toolchain' MODULECLASS_COMPILER = 'compiler' MODULECLASS_MPI = 'mpi' +MODULECLASS_TOOLCHAIN = 'toolchain' GCCCORE = GCCcore.NAME @@ -107,7 +109,11 @@ def det_toolchain_compilers_name_version(self, tc_comps): # no compiler in toolchain, system toolchain res = None elif len(tc_comps) == 1: - res = (tc_comps[0]['name'], self.det_full_version(tc_comps[0])) + tc_comp = tc_comps[0] + if tc_comp is None: + res = None + else: + res = (tc_comp['name'], self.det_full_version(tc_comp)) else: comp_versions = dict([(comp['name'], self.det_full_version(comp)) for comp in tc_comps]) comp_names = comp_versions.keys() @@ -135,6 +141,10 @@ def det_module_subdir(self, ec): if tc_comps is None: # no compiler in toolchain, system toolchain => Core module subdir = CORE + elif tc_comps == [None]: + # no info on toolchain compiler (cfr. Cray toolchains), + # then use toolchain name/version + subdir = os.path.join(TOOLCHAIN, ec.toolchain.name, ec.toolchain.version) else: tc_comp_name, tc_comp_ver = self.det_toolchain_compilers_name_version(tc_comps) tc_mpi = det_toolchain_mpi(ec) @@ -223,6 +233,10 @@ def det_modpath_extensions(self, ec): fullver = self.det_full_version(ec) paths.append(os.path.join(MPI, tc_comp_name, tc_comp_ver, ec['name'], fullver)) + # special case for Cray toolchains + elif modclass == MODULECLASS_TOOLCHAIN and tc_comp_info is None and ec.name.startswith('Cray'): + paths.append(os.path.join(TOOLCHAIN, ec.name, ec.version)) + return paths def expand_toolchain_load(self, ec=None): diff --git a/easybuild/tools/module_naming_scheme/migrate_from_eb_to_hmns.py b/easybuild/tools/module_naming_scheme/migrate_from_eb_to_hmns.py index 7dfdfc38e4..70629b7628 100644 --- a/easybuild/tools/module_naming_scheme/migrate_from_eb_to_hmns.py +++ b/easybuild/tools/module_naming_scheme/migrate_from_eb_to_hmns.py @@ -30,6 +30,7 @@ from easybuild.tools.module_naming_scheme.easybuild_mns import EasyBuildMNS from easybuild.tools.module_naming_scheme.hierarchical_mns import HierarchicalMNS + class MigrateFromEBToHMNS(HierarchicalMNS, EasyBuildMNS): def det_install_subdir(self, ec): diff --git a/easybuild/tools/module_naming_scheme/mns.py b/easybuild/tools/module_naming_scheme/mns.py index d180551771..b4c854d582 100644 --- a/easybuild/tools/module_naming_scheme/mns.py +++ b/easybuild/tools/module_naming_scheme/mns.py @@ -162,7 +162,7 @@ def is_short_modname_for(self, short_modname, name): Default implementation checks via a strict regex pattern, and assumes short module names are of the form: /[-] """ - modname_regex = re.compile('^%s(/\S+)?$' % re.escape(name)) + modname_regex = re.compile(r'^%s(/\S+)?$' % re.escape(name)) res = bool(modname_regex.match(short_modname)) self.log.debug("Checking whether '%s' is a module name for software with name '%s' via regex %s: %s", diff --git a/easybuild/tools/modules.py b/easybuild/tools/modules.py index 8a5323434d..dda1eb40f3 100644 --- a/easybuild/tools/modules.py +++ b/easybuild/tools/modules.py 
@@ -444,9 +444,9 @@ def check_module_path(self): idx = 1 while(curr_mod_paths[-idx:] == self.mod_paths[-idx:]): idx += 1 - self.log.debug("Not prepending %d last entries of %s", idx-1, self.mod_paths) + self.log.debug("Not prepending %d last entries of %s", idx - 1, self.mod_paths) - for mod_path in self.mod_paths[::-1][idx-1:]: + for mod_path in self.mod_paths[::-1][idx - 1:]: self.prepend_module_path(mod_path) self.log.info("$MODULEPATH set via list of module paths (w/ 'module use'): %s" % os.environ['MODULEPATH']) @@ -488,6 +488,7 @@ def module_wrapper_exists(self, mod_name, modulerc_fn='.modulerc', mod_wrapper_r Determine whether a module wrapper with specified name exists. Only .modulerc file in Tcl syntax is considered here. """ + if mod_wrapper_regex_template is None: mod_wrapper_regex_template = "^[ ]*module-version (?P[^ ]*) %s$" @@ -528,24 +529,40 @@ def module_wrapper_exists(self, mod_name, modulerc_fn='.modulerc', mod_wrapper_r return wrapped_mod - def exist(self, mod_names, mod_exists_regex_template=r'^\s*\S*/%s.*:\s*$', skip_avail=False, maybe_partial=True): + def exist(self, mod_names, mod_exists_regex_template=None, skip_avail=False, maybe_partial=True): """ Check if modules with specified names exists. :param mod_names: list of module names - :param mod_exists_regex_template: template regular expression to search 'module show' output with + :param mod_exists_regex_template: DEPRECATED and unused :param skip_avail: skip checking through 'module avail', only check via 'module show' :param maybe_partial: indicates if the module name may be a partial module name """ + if mod_exists_regex_template is not None: + self.log.deprecated('mod_exists_regex_template is no longer used', '5.0') + def mod_exists_via_show(mod_name): """ Helper function to check whether specified module name exists through 'module show'. :param mod_name: module name """ - mod_exists_regex = mod_exists_regex_template % re.escape(mod_name) - txt = self.show(mod_name) - return bool(re.search(mod_exists_regex, txt, re.M)) + stderr = self.show(mod_name) + res = False + # Parse the output: + # - Skip whitespace + # - Any error -> Module does not exist + # - Check first non-whitespace line for something that looks like an absolute path terminated by a colon + mod_exists_regex = r'\s*/.+:\s*' + for line in stderr.split('\n'): + if OUTPUT_MATCHES['whitespace'].search(line): + continue + if OUTPUT_MATCHES['error'].search(line): + break + if re.match(mod_exists_regex, line): + res = True + break + return res if skip_avail: avail_mod_names = [] @@ -571,6 +588,10 @@ def mod_exists_via_show(mod_name): mod_exists = mod_exists_via_show(mod_name) # if no module file was found, check whether specified module name can be a 'wrapper' module... 
+ # this fallback mechanism is important when using a hierarchical module naming scheme, + # where "full" module names (like Core/Java/11) are used to check whether modules exist already; + # Lmod will report module wrappers as non-existent when full module name is used, + # see https://github.com/TACC/Lmod/issues/446 if not mod_exists: self.log.debug("Module %s not found via module avail/show, checking whether it is a wrapper", mod_name) wrapped_mod = self.module_wrapper_exists(mod_name) @@ -643,7 +664,7 @@ def show(self, mod_name): ans = MODULE_SHOW_CACHE[key] self.log.debug("Found cached result for 'module show %s' with key '%s': %s", mod_name, key, ans) else: - ans = self.run_module('show', mod_name, check_output=False, return_output=True) + ans = self.run_module('show', mod_name, check_output=False, return_stderr=True) MODULE_SHOW_CACHE[key] = ans self.log.debug("Cached result for 'module show %s' with key '%s': %s", mod_name, key, ans) @@ -759,13 +780,15 @@ def run_module(self, *args, **kwargs): # also catch and check exit code exit_code = proc.returncode if kwargs.get('check_exit_code', True) and exit_code != 0: - raise EasyBuildError("Module command 'module %s' failed with exit code %s; stderr: %s; stdout: %s", - ' '.join(cmd_list[2:]), exit_code, stderr, stdout) + raise EasyBuildError("Module command '%s' failed with exit code %s; stderr: %s; stdout: %s", + ' '.join(cmd_list), exit_code, stderr, stdout) if kwargs.get('check_output', True): self.check_module_output(full_cmd, stdout, stderr) - if kwargs.get('return_output', False): + if kwargs.get('return_stderr', False): + return stderr + elif kwargs.get('return_output', False): return stdout + stderr else: # the module command was run with an outdated selected environment variables (see LD_ENV_VAR_KEYS list) @@ -1058,7 +1081,7 @@ def path_to_top_of_module_tree(self, top_paths, mod_name, full_mod_subdir, deps, if path_matches(full_mod_subdir, full_modpath_exts): # full path to module subdir of dependency is simply path to module file without (short) module name - dep_full_mod_subdir = self.modulefile_path(dep, strip_ext=True)[:-len(dep)-1] + dep_full_mod_subdir = self.modulefile_path(dep, strip_ext=True)[:-len(dep) - 1] full_mod_subdirs.append(dep_full_mod_subdir) mods_to_top.append(dep) @@ -1410,19 +1433,6 @@ def module_wrapper_exists(self, mod_name): return res - def exist(self, mod_names, skip_avail=False, maybe_partial=True): - """ - Check if modules with specified names exists. - - :param mod_names: list of module names - :param skip_avail: skip checking through 'module avail', only check via 'module show' - """ - # module file may be either in Tcl syntax (no file extension) or Lua sytax (.lua extension); - # the current configuration for matters little, since the module may have been installed with a different cfg; - # Lmod may pick up both Tcl and Lua module files, regardless of the EasyBuild configuration - return super(Lmod, self).exist(mod_names, mod_exists_regex_template=r'^\s*\S*/%s.*(\.lua)?:\s*$', - skip_avail=skip_avail, maybe_partial=maybe_partial) - def get_setenv_value_from_modulefile(self, mod_name, var_name): """ Get value for specific 'setenv' statement from module file for the specified module. 
@@ -1599,6 +1609,7 @@ def invalidate_module_caches_for(path): class Modules(EnvironmentModulesC): """NO LONGER SUPPORTED: interface to modules tool, use modules_tool from easybuild.tools.modules instead""" + def __init__(self, *args, **kwargs): _log.nosupport("modules.Modules class is now an abstract interface, use modules.modules_tool instead", '2.0') @@ -1611,7 +1622,7 @@ def __init__(self, *args, **kwargs): def exist(self, mod_names, *args, **kwargs): """No modules, so nothing exists""" - return [False]*len(mod_names) + return [False] * len(mod_names) def check_loaded_modules(self): """Nothing to do since no modules""" diff --git a/easybuild/tools/multidiff.py b/easybuild/tools/multidiff.py index 63967244ee..a102a3de74 100644 --- a/easybuild/tools/multidiff.py +++ b/easybuild/tools/multidiff.py @@ -69,6 +69,7 @@ class MultiDiff(object): """ Class representing a multi-diff. """ + def __init__(self, base_fn, base_lines, files, colored=True): """ MultiDiff constructor diff --git a/easybuild/tools/options.py b/easybuild/tools/options.py index 2a09600f78..ebad095341 100644 --- a/easybuild/tools/options.py +++ b/easybuild/tools/options.py @@ -64,10 +64,10 @@ from easybuild.tools.config import DEFAULT_JOB_BACKEND, DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS from easybuild.tools.config import DEFAULT_MNS, DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES from easybuild.tools.config import DEFAULT_PATH_SUBDIRS, DEFAULT_PKG_RELEASE, DEFAULT_PKG_TOOL, DEFAULT_PKG_TYPE -from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, EBROOT_ENV_VAR_ACTIONS, ERROR -from easybuild.tools.config import FORCE_DOWNLOAD_CHOICES, GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR -from easybuild.tools.config import JOB_DEPS_TYPE_ALWAYS_RUN, LOADED_MODULES_ACTIONS, WARN -from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS +from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, DEFAULT_WAIT_ON_LOCK_INTERVAL +from easybuild.tools.config import DEFAULT_WAIT_ON_LOCK_LIMIT, EBROOT_ENV_VAR_ACTIONS, ERROR, FORCE_DOWNLOAD_CHOICES +from easybuild.tools.config import GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR, JOB_DEPS_TYPE_ALWAYS_RUN +from easybuild.tools.config import LOADED_MODULES_ACTIONS, LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS, WARN from easybuild.tools.config import get_pretend_installpath, init, init_build_options, mk_full_default_path from easybuild.tools.configobj import ConfigObj, ConfigObjError from easybuild.tools.docs import FORMAT_TXT, FORMAT_RST @@ -76,9 +76,8 @@ from easybuild.tools.docs import list_easyblocks, list_toolchains from easybuild.tools.environment import restore_env, unset_env_vars from easybuild.tools.filetools import CHECKSUM_TYPE_SHA256, CHECKSUM_TYPES, install_fake_vsc, move_file, which -from easybuild.tools.github import GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO -from easybuild.tools.github import GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED, GITHUB_PR_STATE_OPEN -from easybuild.tools.github import GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS +from easybuild.tools.github import GITHUB_EB_MAIN, GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED +from easybuild.tools.github import GITHUB_PR_STATE_OPEN, GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS from easybuild.tools.github import HAVE_GITHUB_API, HAVE_KEYRING, VALID_CLOSE_PR_REASONS from easybuild.tools.github import fetch_easyblocks_from_pr, fetch_github_token from 
easybuild.tools.hooks import KNOWN_HOOKS @@ -442,8 +441,15 @@ def override_options(self): None, 'store_true', False), 'verify-easyconfig-filenames': ("Verify whether filename of specified easyconfigs matches with contents", None, 'store_true', False), - 'wait-on-lock': ("Wait interval (in seconds) to use when waiting for existing lock to be removed " - "(0: implies no waiting, but exiting with an error)", int, 'store', 0), + 'wait-on-lock': ("Wait for lock to be released; 0 implies no waiting (exit with an error if the lock " + "already exists), non-zero value specified waiting interval [DEPRECATED: " + "use --wait-on-lock-interval and --wait-on-lock-limit instead]", + int, 'store_or_None', None), + 'wait-on-lock-interval': ("Wait interval (in seconds) to use when waiting for existing lock to be removed", + int, 'store', DEFAULT_WAIT_ON_LOCK_INTERVAL), + 'wait-on-lock-limit': ("Maximum amount of time (in seconds) to wait until lock is released (0 means no " + "waiting at all, exit with error; -1 means no waiting limit, keep waiting)", + int, 'store', DEFAULT_WAIT_ON_LOCK_LIMIT), 'zip-logs': ("Zip logs that are copied to install directory, using specified command", None, 'store_or_None', 'gzip'), diff --git a/easybuild/tools/ordereddict.py b/easybuild/tools/ordereddict.py index 5cdd3da4f9..34b2a771fa 100644 --- a/easybuild/tools/ordereddict.py +++ b/easybuild/tools/ordereddict.py @@ -1,4 +1,4 @@ -## http://code.activestate.com/recipes/576693/ (r9) +# http://code.activestate.com/recipes/576693/ (r9) # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. # Passes Python2.7's test suite and incorporates all the latest updates. @@ -170,7 +170,7 @@ def update(self, *args, **kwds): ''' if len(args) > 1: raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (1+len(args),)) + 'arguments (%d given)' % (1 + len(args),)) # Make progressively weaker assumptions about "other" other = () if len(args) == 1: @@ -255,7 +255,7 @@ def __eq__(self, other): ''' if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() + return len(self) == len(other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): @@ -274,4 +274,3 @@ def viewvalues(self): def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) - diff --git a/easybuild/tools/parallelbuild.py b/easybuild/tools/parallelbuild.py index 8a7348fb18..4109492fde 100644 --- a/easybuild/tools/parallelbuild.py +++ b/easybuild/tools/parallelbuild.py @@ -127,11 +127,17 @@ def submit_jobs(ordered_ecs, cmd_line_opts, testing=False, prepare_first=True): curdir = os.getcwd() # regex pattern for options to ignore (help options can't reach here) - ignore_opts = re.compile('^--robot$|^--job$|^--try-.*$') + ignore_opts = re.compile('^--robot$|^--job|^--try-.*$') # generate_cmd_line returns the options in form --longopt=value opts = [o for o in cmd_line_opts if not ignore_opts.match(o.split('=')[0])] + # add --disable-job to make sure the submitted job doesn't submit a job itself, + # resulting in an infinite cycle of jobs; + # this can happen if job submission is enabled via a configuration file or via $EASYBUILD_JOB, + # cfr. 
https://github.com/easybuilders/easybuild-framework/issues/3307 + opts.append('--disable-job') + # compose string with command line options, properly quoted and with '%' characters escaped opts_str = ' '.join(opts).replace('%', '%%') diff --git a/easybuild/tools/py2vs3/py2.py b/easybuild/tools/py2vs3/py2.py index 7dbd9161dc..8558d9f917 100644 --- a/easybuild/tools/py2vs3/py2.py +++ b/easybuild/tools/py2vs3/py2.py @@ -34,6 +34,7 @@ import json import subprocess import urllib2 as std_urllib # noqa +from HTMLParser import HTMLParser # noqa from string import letters as ascii_letters # noqa from string import lowercase as ascii_lowercase # noqa from StringIO import StringIO # noqa @@ -64,7 +65,7 @@ def subprocess_popen_text(cmd, **kwargs): def raise_with_traceback(exception_class, message, traceback): """Raise exception of specified class with given message and traceback.""" - raise exception_class, message, traceback + raise exception_class, message, traceback # noqa: E999 def extract_method_name(method_func): diff --git a/easybuild/tools/py2vs3/py3.py b/easybuild/tools/py2vs3/py3.py index 3ff66d6802..f1e8f9ec88 100644 --- a/easybuild/tools/py2vs3/py3.py +++ b/easybuild/tools/py2vs3/py3.py @@ -38,6 +38,7 @@ from collections import OrderedDict # noqa from distutils.version import LooseVersion from functools import cmp_to_key +from html.parser import HTMLParser # noqa from itertools import zip_longest from io import StringIO # noqa from string import ascii_letters, ascii_lowercase # noqa @@ -94,7 +95,7 @@ class WrapperBase(object, metaclass=metaclass): def safe_cmp_looseversions(v1, v2): """Safe comparison function for two (values containing) LooseVersion instances.""" - if type(v1) != type(v2): + if not isinstance(v1, type(v2)): raise TypeError("Can't compare values of different types: %s (%s) vs %s (%s)" % (v1, type(v1), v2, type(v2))) # if we receive two iterative values, we need to recurse @@ -123,7 +124,7 @@ def simple_compare(x1, x2): # implementation based on '14894.patch' patch file provided in https://bugs.python.org/issue14894 for ver1_part, ver2_part in zip_longest(v1.version, v2.version, fillvalue=''): # use string comparison if version parts have different type - if type(ver1_part) != type(ver2_part): + if not isinstance(ver1_part, type(ver2_part)): ver1_part = str(ver1_part) ver2_part = str(ver2_part) diff --git a/easybuild/tools/repository/hgrepo.py b/easybuild/tools/repository/hgrepo.py index cb121f5cb2..0b34a82db5 100644 --- a/easybuild/tools/repository/hgrepo.py +++ b/easybuild/tools/repository/hgrepo.py @@ -100,7 +100,7 @@ def create_working_copy(self): # try to get a copy of try: - client = hglib.clone(self.repo, self.wc) + hglib.clone(self.repo, self.wc) self.log.debug("repo %s cloned in %s" % (self.repo, self.wc)) except (HgCommandError, OSError) as err: # it might already have existed diff --git a/easybuild/tools/robot.py b/easybuild/tools/robot.py index cd3a2dca55..a205f6c5b6 100644 --- a/easybuild/tools/robot.py +++ b/easybuild/tools/robot.py @@ -219,7 +219,7 @@ def check_conflict(parent, dep1, dep2): for runtime_deps in lists_of_runtime_deps: # also check whether module itself clashes with any of its dependencies for i, dep1 in enumerate(build_deps + runtime_deps + [key]): - for dep2 in (build_deps + runtime_deps)[i+1:]: + for dep2 in (build_deps + runtime_deps)[i + 1:]: # don't worry about conflicts between module itself and any of its build deps if dep1 != key or dep2 not in build_deps: res |= check_conflict(key, dep1, dep2) diff --git a/easybuild/tools/run.py 
b/easybuild/tools/run.py index a3471abc04..3a9cb05d1f 100644 --- a/easybuild/tools/run.py +++ b/easybuild/tools/run.py @@ -338,7 +338,7 @@ def run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, re def escape_special(string): return re.sub(r"([\+\?\(\)\[\]\*\.\\\$])", r"\\\1", string) - split = '[\s\n]+' + split = r'[\s\n]+' regSplit = re.compile(r"" + split) def process_QA(q, a_s): @@ -569,10 +569,10 @@ def parse_log_for_error(txt, regExp=None, stdout=True, msg=None): """ global errors_found_in_log - if regExp and type(regExp) == bool: + if regExp and isinstance(regExp, bool): regExp = r"(?[^\s]+)\s+", out) + find_version = re.search(r"^gcc\s+\([^)]+\)\s+(?P[^\s]+)\s+", out) if find_version: res = find_version.group('version') _log.debug("Found GCC version: %s from %s", res, out) @@ -770,6 +769,7 @@ def get_system_info(): return { 'core_count': get_avail_core_count(), 'total_memory': get_total_memory(), + 'cpu_arch': get_cpu_architecture(), 'cpu_arch_name': get_cpu_arch_name(), 'cpu_model': get_cpu_model(), 'cpu_speed': get_cpu_speed(), diff --git a/easybuild/tools/testing.py b/easybuild/tools/testing.py index ec7d83ba37..7086c692fb 100644 --- a/easybuild/tools/testing.py +++ b/easybuild/tools/testing.py @@ -268,12 +268,11 @@ def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, if system_info['cpu_arch_name'] != UNKNOWN: system_info['cpu_model'] += " (%s)" % system_info['cpu_arch_name'] - short_system_info = "%(hostname)s - %(os_type)s %(os_name)s %(os_version)s, %(cpu_model)s, Python %(pyver)s" % { + os_info = '%(hostname)s - %(os_type)s %(os_name)s %(os_version)s' % system_info + short_system_info = "%(os_info)s, %(cpu_arch)s, %(cpu_model)s, Python %(pyver)s" % { + 'os_info': os_info, + 'cpu_arch': system_info['cpu_arch'], 'cpu_model': system_info['cpu_model'], - 'hostname': system_info['hostname'], - 'os_name': system_info['os_name'], - 'os_type': system_info['os_type'], - 'os_version': system_info['os_version'], 'pyver': system_info['python_version'].split(' ')[0], } diff --git a/easybuild/tools/toolchain/constants.py b/easybuild/tools/toolchain/constants.py index 0ddd41de70..423154e6b9 100644 --- a/easybuild/tools/toolchain/constants.py +++ b/easybuild/tools/toolchain/constants.py @@ -203,5 +203,3 @@ BLAS_MAP_CLASS, LAPACK_MAP_CLASS, BLACS_MAP_CLASS, SCALAPACK_MAP_CLASS, FFT_MAP_CLASS, FFTW_MAP_CLASS, CO_COMPILER_MAP_CLASS, ] - - diff --git a/easybuild/tools/toolchain/linalg.py b/easybuild/tools/toolchain/linalg.py index 8edafd839c..c001bcf2ee 100644 --- a/easybuild/tools/toolchain/linalg.py +++ b/easybuild/tools/toolchain/linalg.py @@ -87,7 +87,7 @@ def __init__(self, *args, **kwargs): def set_variables(self): """Set the variables""" - ## TODO is link order fully preserved with this order ? + # TODO is link order fully preserved with this order ? 
self._set_blas_variables() self._set_lapack_variables() if getattr(self, 'MPI_MODULE_NAME', None): @@ -110,7 +110,7 @@ def _set_blas_variables(self): if 'FLIBS' in self.variables: self.variables.join('LIBBLAS', 'FLIBS') - ## multi-threaded + # multi-threaded if self.BLAS_LIB_MT is None: self.variables.join('LIBBLAS_MT', 'LIBBLAS') else: @@ -133,7 +133,7 @@ def _set_blas_variables(self): self.variables.append_exists('BLAS_LIB_DIR', root, self.BLAS_LIB_DIR) self.variables.append_exists('BLAS_INC_DIR', root, self.BLAS_INCLUDE_DIR) - ## add general dependency variables + # add general dependency variables self._add_dependency_variables(self.BLAS_MODULE_NAME, ld=self.BLAS_LIB_DIR, cpp=self.BLAS_INCLUDE_DIR) def _set_lapack_variables(self): @@ -160,14 +160,14 @@ def _set_lapack_variables(self): toggle_staticdynamic=self.LAPACK_LIB_STATIC) if self.LAPACK_LIB_MT is None: - ## reuse LAPACK variables + # reuse LAPACK variables self.variables.join('LIBLAPACK_MT_ONLY', 'LIBLAPACK_ONLY') else: self.variables.nappend('LIBLAPACK_MT_ONLY', self.LAPACK_LIB_MT) if getattr(self, 'LIB_MULTITHREAD', None) is not None: self.variables.nappend('LIBLAPACK_MT_ONLY', self.LIB_MULTITHREAD) - ## need BLAS for LAPACK ? + # need BLAS for LAPACK ? if self.LAPACK_REQUIRES is not None: self.variables.join('LIBLAPACK', 'LIBLAPACK_ONLY', *self.LAPACK_REQUIRES) lapack_mt = ["%s_MT" % x for x in self.LAPACK_REQUIRES] @@ -195,7 +195,7 @@ def _set_lapack_variables(self): self.variables.join('BLAS_LAPACK_STATIC_LIBS', 'LAPACK_STATIC_LIBS', 'BLAS_STATIC_LIBS') self.variables.join('BLAS_LAPACK_MT_STATIC_LIBS', 'LAPACK_MT_STATIC_LIBS', 'BLAS_MT_STATIC_LIBS') - ## add general dependency variables + # add general dependency variables self._add_dependency_variables(self.LAPACK_MODULE_NAME, ld=self.LAPACK_LIB_DIR, cpp=self.LAPACK_INCLUDE_DIR) def _set_blacs_variables(self): @@ -207,8 +207,7 @@ def _set_blacs_variables(self): if hasattr(self, 'BLACS_LIB_MAP') and self.BLACS_LIB_MAP is not None: lib_map.update(self.BLACS_LIB_MAP) - - ## BLACS + # BLACS self.BLACS_LIB = self.variables.nappend('LIBBLACS', [x % lib_map for x in self.BLACS_LIB]) if self.BLACS_LIB is not None: self.variables.add_begin_end_linkerflags(self.BLACS_LIB, @@ -218,7 +217,8 @@ def _set_blacs_variables(self): if self.BLACS_LIB_MT is None: self.variables.join('LIBBLACS_MT', 'LIBBLACS') else: - self.BLACS_LIB_MT = self.variables.nappend('LIBBLACS_MT', [x % self.BLACS_LIB_MAP for x in self.BLACS_LIB_MT]) + self.BLACS_LIB_MT = self.variables.nappend( + 'LIBBLACS_MT', [x % self.BLACS_LIB_MAP for x in self.BLACS_LIB_MT]) if self.BLACS_LIB_MT is not None: self.variables.add_begin_end_linkerflags(self.BLACS_LIB_MT, toggle_startstopgroup=self.BLACS_LIB_GROUP, @@ -234,7 +234,7 @@ def _set_blacs_variables(self): self.variables.append_exists('BLACS_LIB_DIR', root, self.BLACS_LIB_DIR) self.variables.append_exists('BLACS_INC_DIR', root, self.BLACS_INCLUDE_DIR) - ## add general dependency variables + # add general dependency variables self._add_dependency_variables(self.BLACS_MODULE_NAME, ld=self.BLACS_LIB_DIR, cpp=self.BLACS_INCLUDE_DIR) def _set_scalapack_variables(self): @@ -259,13 +259,13 @@ def _set_scalapack_variables(self): if 'FLIBS' in self.variables: self.variables.join('LIBSCALAPACK_ONLY', 'FLIBS') - ## multi-threaded + # multi-threaded if self.SCALAPACK_LIB_MT is None: - ## reuse BLAS variables + # reuse BLAS variables self.variables.join('LIBSCALAPACK_MT_ONLY', 'LIBSCALAPACK_ONLY') else: self.SCALAPACK_LIB_MT = self.variables.nappend('LIBSCALAPACK_MT_ONLY', - [x % 
lib_map for x in self.SCALAPACK_LIB_MT]) + [x % lib_map for x in self.SCALAPACK_LIB_MT]) self.variables.add_begin_end_linkerflags(self.SCALAPACK_LIB_MT, toggle_startstopgroup=self.SCALAPACK_LIB_GROUP, toggle_staticdynamic=self.SCALAPACK_LIB_STATIC) diff --git a/easybuild/tools/toolchain/mpi.py b/easybuild/tools/toolchain/mpi.py index 052c3e061d..d67c9f96c5 100644 --- a/easybuild/tools/toolchain/mpi.py +++ b/easybuild/tools/toolchain/mpi.py @@ -28,10 +28,12 @@ :author: Stijn De Weirdt (Ghent University) :author: Kenneth Hoste (Ghent University) """ +import copy import os import tempfile from distutils.version import LooseVersion +from easybuild.base import fancylogger import easybuild.tools.environment as env import easybuild.tools.toolchain as toolchain from easybuild.tools.build_log import EasyBuildError @@ -41,6 +43,95 @@ from easybuild.tools.toolchain.toolchain import Toolchain +_log = fancylogger.getLogger('tools.toolchain.mpi', fname=False) + + +def get_mpi_cmd_template(mpi_family, params, mpi_version=None): + """ + Return template for MPI command, for specified MPI family. + + :param mpi_family: MPI family to use to determine MPI command template + """ + + params = copy.deepcopy(params) + + mpi_cmd_template = build_option('mpi_cmd_template') + if mpi_cmd_template: + _log.info("Using specified template for MPI commands: %s", mpi_cmd_template) + else: + # different known mpirun commands + mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" + mpi_cmds = { + toolchain.OPENMPI: mpirun_n_cmd, + toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", + toolchain.INTELMPI: mpirun_n_cmd, + toolchain.MVAPICH2: mpirun_n_cmd, + toolchain.MPICH: mpirun_n_cmd, + toolchain.MPICH2: mpirun_n_cmd, + } + + # Intel MPI mpirun needs more work + if mpi_cmd_template is None: + + if mpi_family == toolchain.INTELMPI: + + if mpi_version is None: + raise EasyBuildError("Intel MPI version unknown, can't determine MPI command template!") + + # for old versions of Intel MPI, we need to use MPD + if LooseVersion(mpi_version) <= LooseVersion('4.1'): + + mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" + + # set temporary dir for MPD + # note: this needs to be kept *short*, + # to avoid mpirun failing with "socket.error: AF_UNIX path too long" + # exact limit is unknown, but ~20 characters seems to be OK + env.setvar('I_MPI_MPD_TMPDIR', tempfile.gettempdir()) + mpd_tmpdir = os.environ['I_MPI_MPD_TMPDIR'] + if len(mpd_tmpdir) > 20: + _log.warning("$I_MPI_MPD_TMPDIR should be (very) short to avoid problems: %s", mpd_tmpdir) + + # temporary location for mpdboot and nodes files + tmpdir = tempfile.mkdtemp(prefix='mpi_cmd_for-') + + # set PBS_ENVIRONMENT, so that --file option for mpdboot isn't stripped away + env.setvar('PBS_ENVIRONMENT', "PBS_BATCH_MPI") + + # make sure we're always using mpd as process manager + # only required for/picked up by Intel MPI v4.1 or higher, no harm done for others + env.setvar('I_MPI_PROCESS_MANAGER', 'mpd') + + # create mpdboot file + mpdboot = os.path.join(tmpdir, 'mpdboot') + write_file(mpdboot, "localhost ifhn=localhost") + + params.update({'mpdbf': "--file=%s" % mpdboot}) + + # create nodes file + nodes = os.path.join(tmpdir, 'nodes') + write_file(nodes, "localhost\n" * int(params['nr_ranks'])) + + params.update({'nodesfile': "-machinefile %s" % nodes}) + + if mpi_family in mpi_cmds: + mpi_cmd_template = mpi_cmds[mpi_family] + _log.info("Using template MPI command '%s' for MPI family '%s'", mpi_cmd_template, mpi_family) + else: + raise 
EasyBuildError("Don't know which template MPI command to use for MPI family '%s'", mpi_family) + + missing = [] + for key in sorted(params.keys()): + tmpl = '%(' + key + ')s' + if tmpl not in mpi_cmd_template: + missing.append(tmpl) + if missing: + raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", + mpi_cmd_template, ', '.join(missing)) + + return mpi_cmd_template, params + + class Mpi(Toolchain): """General MPI-like class can't be used without creating new class M(Mpi) @@ -60,10 +151,10 @@ class Mpi(Toolchain): MPI_UNIQUE_OPTION_MAP = None MPI_SHARED_OPTION_MAP = { '_opt_MPICC': 'cc=%(CC_base)s', - '_opt_MPICXX':'cxx=%(CXX_base)s', - '_opt_MPIF77':'fc=%(F77_base)s', - '_opt_MPIF90':'f90=%(F90_base)s', - '_opt_MPIFC':'fc=%(FC_base)s', + '_opt_MPICXX': 'cxx=%(CXX_base)s', + '_opt_MPIF77': 'fc=%(F77_base)s', + '_opt_MPIF90': 'f90=%(F90_base)s', + '_opt_MPIFC': 'fc=%(FC_base)s', } MPI_COMPILER_MPICC = 'mpicc' @@ -82,7 +173,6 @@ def __init__(self, *args, **kwargs): super(Mpi, self).__init__(*args, **kwargs) - def _set_mpi_options(self): self.options.add_options(self.MPI_SHARED_OPTS, self.MPI_SHARED_OPTION_MAP) @@ -90,7 +180,6 @@ def _set_mpi_options(self): self.log.devel('_set_mpi_options: all current options %s', self.options) - def set_variables(self): """Set the variables""" self._set_mpi_compiler_variables() @@ -107,7 +196,7 @@ def _set_mpi_compiler_variables(self): for var_tuple in COMPILER_VARIABLES: c_var = var_tuple[0] # [1] is the description - var = MPI_COMPILER_TEMPLATE % {'c_var':c_var} + var = MPI_COMPILER_TEMPLATE % {'c_var': c_var} value = getattr(self, 'MPI_COMPILER_%s' % var.upper(), None) if value is None: @@ -116,7 +205,7 @@ def _set_mpi_compiler_variables(self): # complete compiler variable template to produce e.g. 
'mpicc -cc=icc -X -Y' from 'mpicc -cc=%(CC_base)' templatedict = { - c_var:str(self.variables[c_var]), + c_var: str(self.variables[c_var]), '%s_base' % c_var: str(self.variables[c_var].get_first()), } @@ -133,7 +222,6 @@ def _set_mpi_compiler_variables(self): c_var, self.variables[c_var], var, self.variables[var]) self.variables[c_var] = self.variables[var] - if self.options.get('cciscxx', None): self.log.debug("_set_mpi_compiler_variables: cciscxx set: switching MPICXX %s for MPICC value %s", self.variables['MPICXX'], self.variables['MPICC']) @@ -151,7 +239,8 @@ def _set_mpi_variables(self): if not self.options.get('32bit', None): suffix = '64' - for root in self.get_software_root(self.MPI_MODULE_NAME): + # take into account that MPI_MODULE_NAME could be None (see Cray toolchains) + for root in self.get_software_root(self.MPI_MODULE_NAME or []): self.variables.append_exists('MPI_LIB_STATIC', root, lib_dir, filename="lib%s.a" % self.MPI_LIBRARY_NAME, suffix=suffix) self.variables.append_exists('MPI_LIB_SHARED', root, lib_dir, filename="lib%s.so" % self.MPI_LIBRARY_NAME, @@ -191,79 +280,15 @@ def mpi_cmd_for(self, cmd, nr_ranks): 'cmd': cmd, } - mpi_cmd_template = build_option('mpi_cmd_template') - if mpi_cmd_template: - self.log.info("Using specified template for MPI commands: %s", mpi_cmd_template) - else: - # different known mpirun commands - mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" - mpi_cmds = { - toolchain.OPENMPI: mpirun_n_cmd, - toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", - toolchain.INTELMPI: mpirun_n_cmd, - toolchain.MVAPICH2: mpirun_n_cmd, - toolchain.MPICH: mpirun_n_cmd, - toolchain.MPICH2: mpirun_n_cmd, - } - mpi_family = self.mpi_family() - # Intel MPI mpirun needs more work - if mpi_cmd_template is None: - - if mpi_family == toolchain.INTELMPI: - - # for old versions of Intel MPI, we need to use MPD - impi_ver = self.get_software_version(self.MPI_MODULE_NAME)[0] - if LooseVersion(impi_ver) <= LooseVersion('4.1'): - - mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" - - # set temporary dir for MPD - # note: this needs to be kept *short*, - # to avoid mpirun failing with "socket.error: AF_UNIX path too long" - # exact limit is unknown, but ~20 characters seems to be OK - env.setvar('I_MPI_MPD_TMPDIR', tempfile.gettempdir()) - mpd_tmpdir = os.environ['I_MPI_MPD_TMPDIR'] - if len(mpd_tmpdir) > 20: - self.log.warning("$I_MPI_MPD_TMPDIR should be (very) short to avoid problems: %s", mpd_tmpdir) - - # temporary location for mpdboot and nodes files - tmpdir = tempfile.mkdtemp(prefix='mpi_cmd_for-') - - # set PBS_ENVIRONMENT, so that --file option for mpdboot isn't stripped away - env.setvar('PBS_ENVIRONMENT', "PBS_BATCH_MPI") - - # make sure we're always using mpd as process manager - # only required for/picked up by Intel MPI v4.1 or higher, no harm done for others - env.setvar('I_MPI_PROCESS_MANAGER', 'mpd') - - # create mpdboot file - mpdboot = os.path.join(tmpdir, 'mpdboot') - write_file(mpdboot, "localhost ifhn=localhost") - - params.update({'mpdbf': "--file=%s" % mpdboot}) - - # create nodes file - nodes = os.path.join(tmpdir, 'nodes') - write_file(nodes, "localhost\n" * int(nr_ranks)) - - params.update({'nodesfile': "-machinefile %s" % nodes}) - - if mpi_family in mpi_cmds.keys(): - mpi_cmd_template = mpi_cmds[mpi_family] - self.log.info("Using template MPI command '%s' for MPI family '%s'", mpi_cmd_template, mpi_family) - else: - raise EasyBuildError("Don't know which template MPI command to use for MPI 
family '%s'", mpi_family) + if mpi_family == toolchain.INTELMPI: + mpi_version = self.get_software_version(self.MPI_MODULE_NAME)[0] + else: + mpi_version = None - missing = [] - for key in sorted(params.keys()): - tmpl = '%(' + key + ')s' - if tmpl not in mpi_cmd_template: - missing.append(tmpl) - if missing: - raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", - mpi_cmd_template, ', '.join(missing)) + mpi_cmd_template, params = get_mpi_cmd_template(mpi_family, params, mpi_version=mpi_version) + self.log.info("Using MPI command template '%s' (params: %s)", mpi_cmd_template, params) try: res = mpi_cmd_template % params diff --git a/easybuild/tools/toolchain/toolchain.py b/easybuild/tools/toolchain/toolchain.py index d05fb4a030..9043676b46 100644 --- a/easybuild/tools/toolchain/toolchain.py +++ b/easybuild/tools/toolchain/toolchain.py @@ -331,8 +331,7 @@ def show_variables(self, offset='', sep='\n', verbose=False): if self.vars is None: self.generate_vars() - var_names = self.variables.keys() - var_names.sort() + var_names = sorted(self.variables.keys()) res = [] for v in var_names: res.append("%s=%s" % (v, self.variables[v])) @@ -661,8 +660,8 @@ def _load_modules(self, silent=False): raise EasyBuildError("No module found for toolchain: %s", self.mod_short_name) if self.is_system_toolchain(): - self.log.info("Loading dependencies using system toolchain...") - self._load_dependencies_modules(silent=silent) + self.log.info("Loading dependencies using system toolchain...") + self._load_dependencies_modules(silent=silent) else: # load the toolchain and dependencies modules self.log.debug("Loading toolchain module and dependencies...") @@ -675,7 +674,7 @@ def _load_modules(self, silent=False): dry_run_msg("\nFull list of loaded modules:", silent=silent) if loaded_mods: for i, mod_name in enumerate([m['mod_name'] for m in loaded_mods]): - dry_run_msg(" %d) %s" % (i+1, mod_name), silent=silent) + dry_run_msg(" %d) %s" % (i + 1, mod_name), silent=silent) else: dry_run_msg(" (none)", silent=silent) dry_run_msg('', silent=silent) diff --git a/easybuild/tools/toolchain/variables.py b/easybuild/tools/toolchain/variables.py index 39d2d0f13c..204b6f9e02 100644 --- a/easybuild/tools/toolchain/variables.py +++ b/easybuild/tools/toolchain/variables.py @@ -54,6 +54,7 @@ class CommandFlagList(FlagList): First of the list has no prefix (i.e. 
the executable) The remainder of the options are considered flags """ + def _str_self(self): """Like a regular flag list, but set first element to original value""" tmp_str = [self.str_convert(x) for x in self if self._str_ok(x)] @@ -66,7 +67,7 @@ class LibraryList(StrList): """Link library list""" PREFIX = "-l" - SANITIZE_REMOVE_DUPLICATE_KEEP = -1 # sanitize from end + SANITIZE_REMOVE_DUPLICATE_KEEP = -1 # sanitize from end JOIN_BEGIN_END = True @@ -174,5 +175,3 @@ def sanitize(self): self.IS_BEGIN, self.PREFIX, self.BEGIN) super(LinkerFlagList, self).sanitize() - - diff --git a/easybuild/tools/variables.py b/easybuild/tools/variables.py index d1bdab9301..210290a17d 100644 --- a/easybuild/tools/variables.py +++ b/easybuild/tools/variables.py @@ -52,7 +52,7 @@ def get_class(name, default_class, map_class=None): if name is not None: try: klass = map_class[name] - except: + except BaseException: for k, v in map_class.items(): if type(k) in (type,) and name in v: klass = k @@ -154,7 +154,7 @@ class CommaList(StrList): class AbsPathList(StrList): """Absolute paths (eg -L or -I)""" - SANITIZE_REMOVE_DUPLICATE_KEEP = -1 # sanitize from end + SANITIZE_REMOVE_DUPLICATE_KEEP = -1 # sanitize from end def append_exists(self, prefix, paths, suffix=None, filename=None, append_all=False): """ @@ -225,11 +225,11 @@ def __init__(self, *args, **kwargs): self.protected_classes = self.PROTECTED_CLASSES[:] if self.PROTECT_CLASS_SELF: - if not self.DEFAULT_CLASS in self.protected_classes: + if self.DEFAULT_CLASS not in self.protected_classes: self.protected_classes.append(self.DEFAULT_CLASS) self.protected_instances = self.PROTECTED_INSTANCES[:] if self.PROTECT_INSTANCE_SELF: - if not self.DEFAULT_CLASS in self.protected_instances: + if self.DEFAULT_CLASS not in self.protected_instances: self.protected_instances.append(self.DEFAULT_CLASS) def append_empty(self): @@ -287,11 +287,11 @@ def nappend(self, value, **kwargs): try: # this might work, but probably not newvalue = klass(value, **kwargs) - except: + except BaseException: newvalue = klass(**kwargs) if value is not None: newvalue.append(value) - if not position is None: + if position is not None: newvalue.POSITION = position if self._str_ok(newvalue) or append_empty: self.append(newvalue) @@ -328,7 +328,7 @@ def nextend(self, value=None, **kwargs): try: # this might work, but probably not newvalue = klass(el) - except: + except BaseException: newvalue = klass() if value is not None: newvalue.append(el) @@ -378,7 +378,8 @@ def sanitize(self): to_remove = [] # work in reversed order; don't check last one (ie real el 0), it has no next element for idx in range(1, len(self))[::-1]: - if self[idx].BEGIN is None or self[idx].END is None: continue + if self[idx].BEGIN is None or self[idx].END is None: + continue self.log.devel("idx %s len %s", idx, len(self)) # do check POSITION, sorting already done if self[idx].BEGIN == self[idx - 1].BEGIN and self[idx].END == self[idx - 1].END: @@ -564,4 +565,3 @@ def _passthrough(name, *args, **kwargs): return _passthrough else: return super(Variables, self).__getattribute__(attr_name) - diff --git a/easybuild/tools/version.py b/easybuild/tools/version.py index 79824ff42b..fe72d66d59 100644 --- a/easybuild/tools/version.py +++ b/easybuild/tools/version.py @@ -43,7 +43,7 @@ # recent setuptools versions will *TRANSFORM* something like 'X.Y.Zdev' into 'X.Y.Z.dev0', with a warning like # UserWarning: Normalizing '2.4.0dev' to '2.4.0.dev0' # This causes problems further up the dependency chain... 
-VERSION = LooseVersion('4.2.0') +VERSION = LooseVersion('4.2.1') UNKNOWN = 'UNKNOWN' diff --git a/test/framework/easyblock.py b/test/framework/easyblock.py index 25e9789d14..53eeb4c331 100644 --- a/test/framework/easyblock.py +++ b/test/framework/easyblock.py @@ -46,13 +46,14 @@ from easybuild.tools import config from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import get_module_syntax -from easybuild.tools.filetools import copy_dir, copy_file, mkdir, read_file, remove_file, write_file +from easybuild.tools.filetools import change_dir, copy_dir, copy_file, mkdir, read_file, remove_file, write_file from easybuild.tools.module_generator import module_generator from easybuild.tools.modules import reset_module_caches from easybuild.tools.utilities import time2str from easybuild.tools.version import get_git_revision, this_is_easybuild from easybuild.tools.py2vs3 import string_type + class EasyBlockTest(EnhancedTestCase): """ Baseclass for easyblock testcases """ @@ -515,6 +516,48 @@ def test_make_module_extra(self): for pattern in patterns: self.assertTrue(re.search(pattern, txt, re.M), "Pattern '%s' found in: %s" % (pattern, txt)) + def test_make_module_deppaths(self): + """Test for make_module_deppaths""" + init_config(build_options={'silent': True}) + + self.contents = '\n'.join([ + 'easyblock = "ConfigureMake"', + 'name = "pi"', + 'version = "3.14"', + 'homepage = "http://example.com"', + 'description = "test easyconfig"', + "toolchain = {'name': 'gompi', 'version': '2018a'}", + 'moddependpaths = "/path/to/mods"', + 'dependencies = [', + " ('FFTW', '3.3.7'),", + ']', + ]) + self.writeEC() + eb = EasyBlock(EasyConfig(self.eb_file)) + + eb.installdir = os.path.join(config.install_path(), 'pi', '3.14') + eb.check_readiness_step() + eb.make_builddir() + eb.prepare_step() + + if get_module_syntax() == 'Tcl': + use_load = '\n'.join([ + 'if { [ file isdirectory "/path/to/mods" ] } {', + ' module use "/path/to/mods"', + '}', + ]) + elif get_module_syntax() == 'Lua': + use_load = '\n'.join([ + 'if isDir("/path/to/mods") then', + ' prepend_path("MODULEPATH", "/path/to/mods")', + 'end', + ]) + else: + self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax()) + + expected = use_load + self.assertEqual(eb.make_module_deppaths().strip(), expected) + def test_make_module_dep(self): """Test for make_module_dep""" init_config(build_options={'silent': True}) @@ -1567,8 +1610,13 @@ def test_guess_start_dir(self): test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0] + cwd = os.getcwd() + self.assertTrue(os.path.exists(cwd)) + def check_start_dir(expected_start_dir): """Check start dir.""" + # make sure we're in an existing directory at the start + change_dir(cwd) eb = EasyBlock(ec['ec']) eb.silent = True eb.cfg['stop'] = 'patch' @@ -1928,6 +1976,73 @@ def test_time2str(self): error_pattern = "Incorrect value type provided to time2str, should be datetime.timedelta: <.* 'int'>" self.assertErrorRegex(EasyBuildError, error_pattern, time2str, 123) + def test_sanity_check_paths_verification(self): + """Test verification of sanity_check_paths w.r.t. 
keys & values.""" + + testdir = os.path.abspath(os.path.dirname(__file__)) + toy_ec = os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + eb = EasyBlock(EasyConfig(toy_ec)) + eb.dry_run = True + + error_pattern = r"Incorrect format for sanity_check_paths: " + error_pattern += r"should \(only\) have 'dirs', 'files' keys, " + error_pattern += r"values should be lists \(at least one non-empty\)." + + def run_sanity_check_step(sanity_check_paths, enhance_sanity_check): + """Helper function to run sanity check step, and do trivial check on generated output.""" + self.mock_stderr(True) + self.mock_stdout(True) + eb.cfg['sanity_check_paths'] = sanity_check_paths + eb.cfg['enhance_sanity_check'] = enhance_sanity_check + eb.sanity_check_step() + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertFalse(stderr) + self.assertTrue(stdout.startswith("Sanity check paths")) + + # partial sanity_check_paths, only allowed when using enhance_sanity_check + test_cases = [ + {'dirs': ['foo']}, + {'files': ['bar']}, + {'dirs': []}, + {'files': []}, + {'files': [], 'dirs': []}, + ] + for test_case in test_cases: + # without enhanced sanity check, these are all invalid sanity_check_paths values + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + + # if enhance_sanity_check is enabled, these are acceptable sanity_check_step values + run_sanity_check_step(test_case, True) + + # some inputs are always invalid, regardless of enhance_sanity_check, due to wrong keys/values + test_cases = [ + {'foo': ['bar']}, + {'files': ['foo'], 'dirs': [], 'libs': ['libfoo.a']}, + {'files': ['foo'], 'libs': ['libfoo.a']}, + {'dirs': [], 'libs': ['libfoo.a']}, + ] + for test_case in test_cases: + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, True) + + # non-list values yield different errors with/without enhance_sanity_check + error_pattern_bis = r"Incorrect value type in sanity_check_paths, should be a list: .*" + test_cases = [ + {'files': 123, 'dirs': []}, + {'files': [], 'dirs': 123}, + {'files': 'foo', 'dirs': []}, + {'files': [], 'dirs': 'foo'}, + ] + for test_case in test_cases: + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + self.assertErrorRegex(EasyBuildError, error_pattern_bis, run_sanity_check_step, test_case, True) + + # empty sanity_check_paths is always OK, since then the fallback to default bin + lib/lib64 kicks in + run_sanity_check_step({}, False) + run_sanity_check_step({}, True) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/easyconfigparser.py b/test/framework/easyconfigparser.py index d684950974..be327d51de 100644 --- a/test/framework/easyconfigparser.py +++ b/test/framework/easyconfigparser.py @@ -180,11 +180,13 @@ def test_easyconfig_constants(self): system_constant = constants.pop('SYSTEM') self.assertEqual(system_constant, {'name': 'system', 'version': 'system'}) - # make sure both keys and values are only strings + # make sure both keys and values are of appropriate types for constant_name in constants: self.assertTrue(isinstance(constant_name, string_type), "Constant name %s is a string" % constant_name) val = constants[constant_name] - self.assertTrue(isinstance(val, (string_type, dict)), "Constant value %s is a string or dict" % val) + fail_msg = 
"The constant %s should have an acceptable type, found %s (%s)" % (constant_name, + type(val), str(val)) + self.assertTrue(isinstance(val, (string_type, dict, tuple)), fail_msg) # check a couple of randomly picked constant values self.assertEqual(constants['SOURCE_TAR_GZ'], '%(name)s-%(version)s.tar.gz') diff --git a/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb b/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb new file mode 100644 index 0000000000..607821faf2 --- /dev/null +++ b/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb @@ -0,0 +1,14 @@ +easyblock = 'ConfigureMake' + +name = 'HPL' +version = '2.1' + +homepage = 'http://www.netlib.org/benchmark/hpl/' +description = "HPL, you know, LINPACK" + +toolchain = {'name': 'CrayCCE', 'version': '5.1.29'} + +source_urls = ['http://www.netlib.org/benchmark/%(namelower)s'] +sources = [SOURCELOWER_TAR_GZ] + +moduleclass = 'tools' diff --git a/test/framework/environment.py b/test/framework/environment.py index 7782067d13..7e0ca9ccc8 100644 --- a/test/framework/environment.py +++ b/test/framework/environment.py @@ -131,6 +131,37 @@ def test_unset_env_vars(self): } self.assertEqual(res, expected) + def test_sanitize_env(self): + """Test sanitize_env function.""" + + # define $*PATH variable that include empty entries, those should get filtered out + os.environ['PATH'] = '/bar::/foo:' + self.test_prefix # middle empty entry + os.environ['LD_LIBRARY_PATH'] = '/apps/slurm/default/lib:/usr/lib:' # trailing empty entry + os.environ['LIBRARY_PATH'] = self.test_prefix + ':' + os.environ['HOME'] # no empty entries here + os.environ['CPATH'] = ':' + self.test_prefix # leading empty entry + os.environ['LD_PRELOAD'] = ':::' # only empty entries (should get unset!) 
+ + # define $PYTHON* environment variables, these should be unset by sanitize_env + os.environ['PYTHONNOUSERSITE'] = '1' + os.environ['PYTHONPATH'] = self.test_prefix + os.environ['PYTHONOPTIMIZE'] = '1' + + env.sanitize_env() + + self.assertFalse(any(x for x in os.environ.keys() if x.startswith('PYTHON'))) + + expected = { + 'CPATH': self.test_prefix, + 'LD_LIBRARY_PATH': '/apps/slurm/default/lib:/usr/lib', + 'LIBRARY_PATH': self.test_prefix + ':' + os.environ['HOME'], + 'PATH': '/bar:/foo:' + self.test_prefix, + } + for key in sorted(expected): + self.assertEqual(os.getenv(key), expected[key]) + self.assertEqual(os.environ[key], expected[key]) + + self.assertEqual(os.getenv('LD_PRELOAD'), None) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/filetools.py b/test/framework/filetools.py index 6595a7884d..fd2e4e5c68 100644 --- a/test/framework/filetools.py +++ b/test/framework/filetools.py @@ -29,6 +29,7 @@ @author: Kenneth Hoste (Ghent University) @author: Stijn De Weirdt (Ghent University) @author: Ward Poelmans (Ghent University) +@author: Maxime Boissonneault (Compute Canada, Universite Laval) """ import datetime import glob @@ -148,6 +149,29 @@ def test_find_base_dir(self): os.chdir(tmpdir) self.assertTrue(os.path.samefile(foodir, ft.find_base_dir())) + def test_find_glob_pattern(self): + """test find_glob_pattern function""" + tmpdir = tempfile.mkdtemp() + os.mkdir(os.path.join(tmpdir, 'python2.7')) + os.mkdir(os.path.join(tmpdir, 'python2.7', 'include')) + os.mkdir(os.path.join(tmpdir, 'python3.5m')) + os.mkdir(os.path.join(tmpdir, 'python3.5m', 'include')) + + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python2.7*')), + os.path.join(tmpdir, 'python2.7')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python2.7*', 'include')), + os.path.join(tmpdir, 'python2.7', 'include')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.5*')), + os.path.join(tmpdir, 'python3.5m')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.5*', 'include')), + os.path.join(tmpdir, 'python3.5m', 'include')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.6*'), False), None) + self.assertErrorRegex(EasyBuildError, "Was expecting exactly", ft.find_glob_pattern, + os.path.join(tmpdir, 'python3.6*')) + self.assertErrorRegex(EasyBuildError, "Was expecting exactly", ft.find_glob_pattern, + os.path.join(tmpdir, 'python*')) + + def test_encode_class_name(self): """Test encoding of class names.""" for (class_name, encoded_class_name) in self.class_names: @@ -1292,8 +1316,8 @@ def test_pypi_source_urls(self): eb340_url += 'easybuild-3.4.0.tar.gz#md5=267a056a77a8f77fccfbf56354364045' self.assertTrue(eb340_url, res) pattern = '^https://pypi.python.org/packages/[a-f0-9]{2}/[a-f0-9]{2}/[a-f0-9]{60}/' - pattern_md5 = pattern + 'easybuild-[0-9rc.]+.tar.gz#md5=[a-f0-9]{32}$' - pattern_sha256 = pattern + 'easybuild-[0-9rc.]+.tar.gz#sha256=[a-f0-9]{64}$' + pattern_md5 = pattern + 'easybuild-[0-9a-z.]+.tar.gz#md5=[a-f0-9]{32}$' + pattern_sha256 = pattern + 'easybuild-[0-9a-z.]+.tar.gz#sha256=[a-f0-9]{64}$' regex_md5 = re.compile(pattern_md5) regex_sha256 = re.compile(pattern_sha256) for url in res: @@ -1303,6 +1327,15 @@ def test_pypi_source_urls(self): # more than 50 releases at time of writing test, which always stay there self.assertTrue(len(res) > 50) + # check for Python package that has yanked releases, + # see https://github.com/easybuilders/easybuild-framework/issues/3301 + res = 
ft.pypi_source_urls('ipython') + self.assertTrue(isinstance(res, list) and res) + prefix = 'https://pypi.python.org/packages' + for entry in res: + self.assertTrue(entry.startswith(prefix), "'%s' should start with '%s'" % (entry, prefix)) + self.assertTrue('ipython' in entry, "Pattern 'ipython' should be found in '%s'" % entry) + def test_derive_alt_pypi_url(self): """Test derive_alt_pypi_url() function.""" url = 'https://pypi.python.org/packages/source/e/easybuild/easybuild-2.7.0.tar.gz' @@ -1327,7 +1360,8 @@ def test_apply_patch(self): """ Test apply_patch """ testdir = os.path.dirname(os.path.abspath(__file__)) tmpdir = self.test_prefix - path = ft.extract_file(os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz'), tmpdir) + toy_tar_gz = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz') + path = ft.extract_file(toy_tar_gz, tmpdir, change_into_dir=False) toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' toy_patch = os.path.join(testdir, 'sandbox', 'sources', 'toy', toy_patch_fn) @@ -1475,22 +1509,45 @@ def test_copy_dir(self): ft.copy_dir(to_copy, testdir, dirs_exist_ok=True) self.assertTrue(sorted(os.listdir(to_copy)) == sorted(os.listdir(testdir))) - # if the directory already exists and 'dirs_exist_ok' is True and there is another named argument (ignore) - # we expect clean error on Python < 3.8 and pass the test on Python >= 3.8 - # NOTE: reused ignore from previous test + # check whether use of 'ignore' works if target path already exists and 'dirs_exist_ok' is enabled def ignore_func(_, names): return [x for x in names if '6.4.0-2.28' in x] shutil.rmtree(testdir) ft.mkdir(testdir) - if sys.version_info >= (3, 8): - ft.copy_dir(to_copy, testdir, dirs_exist_ok=True, ignore=ignore_func) - self.assertEqual(sorted(os.listdir(testdir)), expected) - self.assertFalse(os.path.exists(os.path.join(testdir, 'GCC-6.4.0-2.28.eb'))) - else: - error_pattern = "Unknown named arguments passed to copy_dir with dirs_exist_ok=True: ignore" - self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_dir, to_copy, testdir, - dirs_exist_ok=True, ignore=ignore_func) + ft.copy_dir(to_copy, testdir, dirs_exist_ok=True, ignore=ignore_func) + self.assertEqual(sorted(os.listdir(testdir)), expected) + self.assertFalse(os.path.exists(os.path.join(testdir, 'GCC-6.4.0-2.28.eb'))) + + # test copy_dir when broken symlinks are involved + srcdir = os.path.join(self.test_prefix, 'topdir_to_copy') + ft.mkdir(srcdir) + ft.write_file(os.path.join(srcdir, 'test.txt'), '123') + subdir = os.path.join(srcdir, 'subdir') + # introduce broken file symlink + foo_txt = os.path.join(subdir, 'foo.txt') + ft.write_file(foo_txt, 'bar') + ft.symlink(foo_txt, os.path.join(subdir, 'bar.txt')) + ft.remove_file(foo_txt) + # introduce broken dir symlink + subdir_tmp = os.path.join(srcdir, 'subdir_tmp') + ft.mkdir(subdir_tmp) + ft.symlink(subdir_tmp, os.path.join(srcdir, 'subdir_link')) + ft.remove_dir(subdir_tmp) + + target_dir = os.path.join(self.test_prefix, 'target_to_copy_to') + + # trying this without symlinks=True ends in tears, because bar.txt points to a non-existing file + self.assertErrorRegex(EasyBuildError, "Failed to copy directory", ft.copy_dir, srcdir, target_dir) + ft.remove_dir(target_dir) + + ft.copy_dir(srcdir, target_dir, symlinks=True) + + # copying directory with broken symlinks should also work if target directory already exists + ft.remove_dir(target_dir) + ft.mkdir(target_dir) + ft.mkdir(subdir) + ft.copy_dir(srcdir, target_dir, symlinks=True, dirs_exist_ok=True) # 
also test behaviour of copy_file under --dry-run build_options = { @@ -1509,7 +1566,7 @@ def ignore_func(_, names): self.mock_stdout(False) self.assertFalse(os.path.exists(target_dir)) - self.assertTrue(re.search("^copied directory .*/GCC to .*/GCC", txt)) + self.assertTrue(re.search("^copied directory .*/GCC to .*/%s" % os.path.basename(target_dir), txt)) # forced copy, even in dry run mode self.mock_stdout(True) @@ -1597,19 +1654,24 @@ def test_change_dir(self): def test_extract_file(self): """Test extract_file""" + cwd = os.getcwd() + testdir = os.path.dirname(os.path.abspath(__file__)) toy_tarball = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz') self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) - path = ft.extract_file(toy_tarball, self.test_prefix) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=False) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) + # still in same directory as before if change_into_dir is set to False + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) shutil.rmtree(os.path.join(path, 'toy-0.0')) toy_tarball_renamed = os.path.join(self.test_prefix, 'toy_tarball') shutil.copyfile(toy_tarball, toy_tarball_renamed) - path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s") + path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s", change_into_dir=False) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) shutil.rmtree(os.path.join(path, 'toy-0.0')) @@ -1622,17 +1684,56 @@ def test_extract_file(self): init_config(build_options=build_options) self.mock_stdout(True) - path = ft.extract_file(toy_tarball, self.test_prefix) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=False) txt = self.get_stdout() self.mock_stdout(False) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) self.assertTrue(os.path.samefile(path, self.test_prefix)) self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0'))) self.assertTrue(re.search('running command "tar xzf .*/toy-0.0.tar.gz"', txt)) - path = ft.extract_file(toy_tarball, self.test_prefix, forced=True) + path = ft.extract_file(toy_tarball, self.test_prefix, forced=True, change_into_dir=False) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) + + build_options['extended_dry_run'] = False + init_config(build_options=build_options) + + ft.remove_dir(os.path.join(self.test_prefix, 'toy-0.0')) + + # a deprecation warning is printed (which is an error in this context) + # if the 'change_into_dir' named argument was left unspecified + error_pattern = "extract_file function was called without specifying value for change_into_dir" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.extract_file, toy_tarball, self.test_prefix) + self.allow_deprecated_behaviour() + + # make sure we're not in self.test_prefix now (checks below assumes so) + self.assertFalse(os.path.samefile(os.getcwd(), self.test_prefix)) + + # by default, extract_file changes to directory in which source file was unpacked + self.mock_stderr(True) + path = ft.extract_file(toy_tarball, self.test_prefix) + stderr = 
self.get_stderr().strip() + self.mock_stderr(False) + self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), self.test_prefix)) + regex = re.compile("^WARNING: .*extract_file function was called without specifying value for change_into_dir") + self.assertTrue(regex.search(stderr), "Pattern '%s' found in: %s" % (regex.pattern, stderr)) + + ft.change_dir(cwd) + self.assertFalse(os.path.samefile(os.getcwd(), self.test_prefix)) + + # no deprecation warning when change_into_dir is set to True + self.mock_stderr(True) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=True) + stderr = self.get_stderr().strip() + self.mock_stderr(False) + + self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), self.test_prefix)) + self.assertFalse(stderr) def test_remove(self): """Test remove_file, remove_dir and join remove functions.""" @@ -1715,7 +1816,7 @@ def test_index_functions(self): # test with specified path with and without trailing '/'s for path in [test_ecs, test_ecs + '/', test_ecs + '//']: index = ft.create_index(path) - self.assertEqual(len(index), 81) + self.assertEqual(len(index), 82) expected = [ os.path.join('b', 'bzip2', 'bzip2-1.0.6-GCC-4.9.2.eb'), @@ -1926,19 +2027,35 @@ def makedirs_in_test(*paths): def test_find_eb_script(self): """Test find_eb_script function.""" + + # make sure $EB_SCRIPT_PATH is not set already (used as fallback mechanism in find_eb_script) + if 'EB_SCRIPT_PATH' in os.environ: + del os.environ['EB_SCRIPT_PATH'] + self.assertTrue(os.path.exists(ft.find_eb_script('rpath_args.py'))) self.assertTrue(os.path.exists(ft.find_eb_script('rpath_wrapper_template.sh.in'))) self.assertErrorRegex(EasyBuildError, "Script 'no_such_script' not found", ft.find_eb_script, 'no_such_script') # put test script in place relative to location of 'eb' - ft.write_file(os.path.join(self.test_prefix, 'bin', 'eb'), '#!/bin/bash\necho "fake eb"') - ft.adjust_permissions(os.path.join(self.test_prefix, 'bin', 'eb'), stat.S_IXUSR) - os.environ['PATH'] = '%s:%s' % (os.path.join(self.test_prefix, 'bin'), os.getenv('PATH', '')) + fake_eb = os.path.join(self.test_prefix, 'bin', 'eb') + ft.write_file(fake_eb, '#!/bin/bash\necho "fake eb"') + ft.adjust_permissions(fake_eb, stat.S_IXUSR) + os.environ['PATH'] = '%s:%s' % (os.path.dirname(fake_eb), os.getenv('PATH', '')) - justatest = os.path.join(self.test_prefix, 'easybuild', 'scripts', 'justatest.sh') + justatest = os.path.join(self.test_prefix, 'easybuild', 'scripts', 'thisisjustatestscript.sh') ft.write_file(justatest, '#!/bin/bash') - self.assertTrue(os.path.samefile(ft.find_eb_script('justatest.sh'), justatest)) + self.assertTrue(os.path.samefile(ft.find_eb_script('thisisjustatestscript.sh'), justatest)) + + # $EB_SCRIPT_PATH can also be used (overrules 'eb' found via $PATH) + ft.remove_file(fake_eb) + os.environ['EB_SCRIPT_PATH'] = os.path.join(self.test_prefix, 'easybuild', 'scripts') + self.assertTrue(os.path.samefile(ft.find_eb_script('thisisjustatestscript.sh'), justatest)) + + # if script can't be found via either $EB_SCRIPT_PATH or location of 'eb', we get a clean error + del os.environ['EB_SCRIPT_PATH'] + error_pattern = "Script 'thisisjustatestscript.sh' not found at expected location" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.find_eb_script, 'thisisjustatestscript.sh') def test_move_file(self): """Test move_file function""" @@ -2399,6 +2516,97 @@ def test_copy_framework_files(self): 
self.assertEqual(res['new'], expected_new) + def test_locks(self): + """Tests for lock-related functions.""" + + init_config(build_options={'silent': True}) + + # make sure that global list of locks is empty when we start off + self.assertFalse(ft.global_lock_names) + + # use a realistic lock name (cfr. EasyBlock.run_all_steps) + installdir = os.path.join(self.test_installpath, 'software', 'test', '1.2.3-foss-2019b-Python-3.7.4') + lock_name = installdir.replace('/', '_') + + # det_lock_path returns full path to lock with specified name + # (used internally by create_lock, check_lock, remove_lock) + lock_path = ft.det_lock_path(lock_name) + self.assertFalse(os.path.exists(lock_path)) + + locks_dir = os.path.dirname(lock_path) + self.assertFalse(os.path.exists(locks_dir)) + + # if lock doesn't exist yet, check_lock just returns + ft.check_lock(lock_name) + + # create lock, and check whether it actually was created + ft.create_lock(lock_name) + self.assertTrue(os.path.exists(lock_path)) + + # can't use os.path.samefile until locks_dir actually exists + self.assertTrue(os.path.samefile(locks_dir, os.path.join(self.test_installpath, 'software', '.locks'))) + + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + # if lock exists, then check_lock raises an error + self.assertErrorRegex(EasyBuildError, "Lock .* already exists", ft.check_lock, lock_name) + + # remove_lock should... remove the lock + ft.remove_lock(lock_name) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + + # no harm done if remove_lock is called if lock is already gone + ft.remove_lock(lock_name) + + # check_lock just returns again after lock is removed + ft.check_lock(lock_name) + + # global list of locks should be empty at this point + self.assertFalse(ft.global_lock_names) + + # calling clean_up_locks when there are no locks should not cause trouble + ft.clean_up_locks() + + ft.create_lock(lock_name) + self.assertEqual(ft.global_lock_names, set([lock_name])) + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + ft.clean_up_locks() + self.assertFalse(ft.global_lock_names) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + + # no problem with multiple locks + lock_names = [lock_name, 'test123', 'foo@bar%baz'] + lock_paths = [os.path.join(locks_dir, x + '.lock') for x in lock_names] + for ln in lock_names: + ft.create_lock(ln) + for lp in lock_paths: + self.assertTrue(os.path.exists(lp), "Path %s should exist" % lp) + + self.assertEqual(ft.global_lock_names, set(lock_names)) + expected_locks = sorted(ln + '.lock' for ln in lock_names) + self.assertEqual(sorted(os.listdir(locks_dir)), expected_locks) + + ft.clean_up_locks() + for lp in lock_paths: + self.assertFalse(os.path.exists(lp), "Path %s should not exist" % lp) + self.assertFalse(ft.global_lock_names) + self.assertEqual(os.listdir(locks_dir), []) + + # also test signal handler that is supposed to clean up locks + ft.create_lock(lock_name) + self.assertTrue(ft.global_lock_names) + self.assertTrue(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + # clean_up_locks_signal_handler causes sys.exit with specified exit code + self.assertErrorRegex(SystemExit, '15', ft.clean_up_locks_signal_handler, 15, None) + self.assertFalse(ft.global_lock_names) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + def suite(): """ returns all the testcases in this module """ diff --git 
a/test/framework/module_generator.py b/test/framework/module_generator.py index 81ce794218..439091a3a3 100644 --- a/test/framework/module_generator.py +++ b/test/framework/module_generator.py @@ -1273,6 +1273,11 @@ def test_ec(ecfile, short_modname, mod_subdir, modpath_exts, user_modpath_exts, ['MPI/intel-CUDA/%s-5.5.22/impi/5.1.2.150' % iccver], ['MPI/intel-CUDA/%s-5.5.22/impi/5.1.2.150' % iccver], ['Core']), + 'CrayCCE-5.1.29.eb': ('CrayCCE/5.1.29', 'Core', + ['Toolchain/CrayCCE/5.1.29'], + ['Toolchain/CrayCCE/5.1.29'], + ['Core']), + 'HPL-2.1-CrayCCE-5.1.29.eb': ('HPL/2.1', 'Toolchain/CrayCCE/5.1.29', [], [], ['Core']), } for ecfile, mns_vals in test_ecs.items(): test_ec(ecfile, *mns_vals) diff --git a/test/framework/modules.py b/test/framework/modules.py index 93015b4a07..4fe661e9b5 100644 --- a/test/framework/modules.py +++ b/test/framework/modules.py @@ -44,6 +44,7 @@ from easybuild.framework.easyblock import EasyBlock from easybuild.framework.easyconfig.easyconfig import EasyConfig from easybuild.tools.build_log import EasyBuildError +from easybuild.tools.environment import modify_env from easybuild.tools.filetools import adjust_permissions, copy_file, copy_dir, mkdir from easybuild.tools.filetools import read_file, remove_dir, remove_file, symlink, write_file from easybuild.tools.modules import EnvironmentModules, EnvironmentModulesC, EnvironmentModulesTcl, Lmod, NoModulesTool @@ -92,6 +93,104 @@ def test_long_module_path(self): shutil.rmtree(tmpdir) + def test_run_module(self): + """Test for ModulesTool.run_module method.""" + + testdir = os.path.dirname(os.path.abspath(__file__)) + + for key in ['EBROOTGCC', 'EBROOTOPENMPI', 'EBROOTOPENBLAS']: + if key in os.environ: + del os.environ[key] + + # arguments can be passed in two ways: multiple arguments, or just 1 list argument + self.modtool.run_module('load', 'GCC/6.4.0-2.28') + self.assertEqual(os.environ['EBROOTGCC'], '/prefix/software/GCC/6.4.0-2.28') + + # restore original environment + modify_env(os.environ, self.orig_environ, verbose=False) + self.reset_modulepath([os.path.join(testdir, 'modules')]) + + self.assertFalse('EBROOTGCC' in os.environ) + self.modtool.run_module(['load', 'GCC/6.4.0-2.28']) + self.assertEqual(os.environ['EBROOTGCC'], '/prefix/software/GCC/6.4.0-2.28') + + # skip tests that rely on exit codes when using EnvironmentModulesTcl modules tool, + # because it doesn't use proper exit codes + if not isinstance(self.modtool, EnvironmentModulesTcl): + + # by default, exit code is checked and an error is raised if we run something that fails + error_pattern = "Module command '.*thisdoesnotmakesense' failed with exit code [1-9]" + self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.run_module, 'thisdoesnotmakesense') + + # we need to use a different error pattern here with EnvironmentModulesC, + # because a load of a non-existing module doesn't trigger a non-zero exit code...
+ # it will still fail though, just differently + if isinstance(self.modtool, EnvironmentModulesC): + error_pattern = "Unable to locate a modulefile for 'nosuchmodule/1.2.3'" + else: + error_pattern = "Module command '.*load nosuchmodule/1.2.3' failed with exit code [1-9]" + self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.run_module, 'load', 'nosuchmodule/1.2.3') + + # we can choose to blatantly ignore the exit code, + # and also disable the output check that serves as a fallback; + # we also enable return_output here, because trying to apply the environment changes produced + # by a faulty command is bound to cause trouble... + kwargs = { + 'check_exit_code': False, + 'check_output': False, + 'return_output': True, + } + self.modtool.run_module('thisdoesnotmakesense', **kwargs) + self.modtool.run_module('load', 'nosuchmodule/1.2.3', **kwargs) + + # by default, the output (stdout+stderr) produced by the command is processed; + # result is a list of useful info (module names in case of list/avail) + res = self.modtool.run_module('list') + self.assertEqual(res, [{'mod_name': 'GCC/6.4.0-2.28', 'default': None}]) + + res = self.modtool.run_module('avail', 'GCC/4.6.3') + self.assertTrue(isinstance(res, list)) + self.assertEqual(sorted([x['mod_name'] for x in res]), ['GCC/4.6.3']) + + # loading a module produces no output, so we get an empty list + res = self.modtool.run_module('load', 'OpenMPI/2.1.2-GCC-6.4.0-2.28') + self.assertEqual(res, []) + self.assertEqual(os.environ['EBROOTOPENMPI'], '/prefix/software/OpenMPI/2.1.2-GCC-6.4.0-2.28') + + # we can opt into getting back the raw output (stdout + stderr); + # in that case, the output includes Python statements to change the environment; + # the changes that would be made by the module command are *not* applied to the environment + out = self.modtool.run_module('load', 'OpenBLAS/0.2.20-GCC-6.4.0-2.28', return_output=True) + patterns = [ + r"^os.environ\[.EBROOTOPENBLAS.\]\s*=\s*./prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28.", + r"^os.environ\[.LOADEDMODULES.\]\s*=.*OpenBLAS/0.2.20-GCC-6.4.0-2.28", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # OpenBLAS module did *not* get loaded + self.assertFalse('EBROOTOPENBLAS' in os.environ) + res = self.modtool.list() + expected = ['GCC/6.4.0-2.28', 'OpenMPI/2.1.2-GCC-6.4.0-2.28', 'hwloc/1.11.8-GCC-6.4.0-2.28'] + self.assertEqual(sorted([x['mod_name'] for x in res]), expected) + + # we can also only obtain the stderr output (which contains the user-facing output), + # and just drop the stdout output (which contains the statements to change the environment) + out = self.modtool.run_module('show', 'OpenBLAS/0.2.20-GCC-6.4.0-2.28', return_stderr=True) + patterns = [ + r"test/framework/modules/OpenBLAS/0.2.20-GCC-6.4.0-2.28:\s*$", + r"setenv\W+EBROOTOPENBLAS.+/prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28", + r"prepend[_-]path\W+LD_LIBRARY_PATH.+/prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28/lib", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # show method only returns user-facing output (obtained via stderr), not changes to the environment + regex = re.compile(r'^os\.environ\[', re.M) + self.assertFalse(regex.search(out), "Pattern '%s' should not be found in: %s" % (regex.pattern, out)) + def test_avail(self): """Test if getting a
(restricted) list of available modules works.""" self.init_testmods() @@ -168,6 +267,7 @@ def test_exist(self): java_mod_dir = os.path.join(self.test_prefix, 'Java') write_file(os.path.join(java_mod_dir, '1.8.0_181'), '#%Module') + write_file(os.path.join(self.test_prefix, 'toy', '42.1337'), '#%Module') if self.modtool.__class__ == EnvironmentModulesC: modulerc_tcl_txt = '\n'.join([ @@ -175,11 +275,18 @@ def test_exist(self): 'if {"Java/1.8" eq [module-info version Java/1.8]} {', ' module-version Java/1.8.0_181 1.8', '}', + 'if {"Java/site_default" eq [module-info version Java/site_default]} {', + ' module-version Java/1.8.0_181 site_default', + '}', ]) else: modulerc_tcl_txt = '\n'.join([ '#%Module', 'module-version Java/1.8.0_181 1.8', + 'module-version Java/1.8.0_181 site_default', + 'module-alias Java/Alias toy/42.1337', + # 'module-alias Java/NonExist non_existant/1', # (only) LMod has this in module avail, disable for now + 'module-alias JavaAlias Java/1.8.0_181', # LMod 7+ only ]) write_file(os.path.join(java_mod_dir, '.modulerc'), modulerc_tcl_txt) @@ -188,8 +295,20 @@ def test_exist(self): self.assertTrue('Java/1.8.0_181' in avail_mods) if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.0'): self.assertTrue('Java/1.8' in avail_mods) + self.assertTrue('Java/site_default' in avail_mods) + self.assertTrue('JavaAlias' in avail_mods) + self.assertEqual(self.modtool.exist(['JavaAlias']), [True]) + self.assertEqual(self.modtool.exist(['Java/1.8', 'Java/1.8.0_181']), [True, True]) - self.assertEqual(self.modtool.module_wrapper_exists('Java/1.8'), 'Java/1.8.0_181') + + # module-version with different version suffix than the base module + self.assertEqual(self.modtool.exist(['Java/site_default']), [True]) + # Check for aliases: + # - completely different name + # - alias to non-existent module + # Skipped for EnvironmentModulesC, since module-alias does not work correctly there + if self.modtool.__class__ != EnvironmentModulesC: + self.assertEqual(self.modtool.exist(['Java/Alias', 'Java/NonExist']), [True, False]) reset_module_caches() @@ -199,8 +318,11 @@ def test_exist(self): self.assertTrue('Core/Java/1.8.0_181' in self.modtool.available()) self.assertEqual(self.modtool.exist(['Core/Java/1.8.0_181']), [True]) - self.assertEqual(self.modtool.exist(['Core/Java/1.8']), [True]) - self.assertEqual(self.modtool.module_wrapper_exists('Core/Java/1.8'), 'Core/Java/1.8.0_181') + # there's a workaround to ensure that module wrappers/aliases are recognized when they're + # being checked with the full module name (see https://github.com/TACC/Lmod/issues/446); + # that's necessary when using a hierarchical module naming scheme, + # see https://github.com/easybuilders/easybuild-framework/issues/3335 + self.assertEqual(self.modtool.exist(['Core/Java/1.8', 'Core/Java/site_default']), [True, True]) # also check with .modulerc.lua for Lmod 7.8 or newer if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.8'): @@ -208,13 +330,22 @@ def test_exist(self): reset_module_caches() remove_file(os.path.join(java_mod_dir, '.modulerc')) - write_file(os.path.join(java_mod_dir, '.modulerc.lua'), 'module_version("Java/1.8.0_181", "1.8")') + write_file(os.path.join(java_mod_dir, '.modulerc.lua'), + '\n'.join([ + 'module_version("Java/1.8.0_181", "1.8")', + 'module_version("Java/1.8.0_181", "site_default")', + 'module_alias("JavaAlias", "Java/1.8")', + ])) avail_mods = self.modtool.available() self.assertTrue('Java/1.8.0_181' in
avail_mods) self.assertTrue('Java/1.8' in avail_mods) self.assertEqual(self.modtool.exist(['Java/1.8', 'Java/1.8.0_181']), [True, True]) - self.assertEqual(self.modtool.module_wrapper_exists('Java/1.8'), 'Java/1.8.0_181') + + # check for an alias with a different version suffix than the base module + self.assertEqual(self.modtool.exist(['Java/site_default']), [True]) + # and an alias with a completely different name + self.assertEqual(self.modtool.exist(['JavaAlias']), [True]) reset_module_caches() @@ -223,7 +354,23 @@ def test_exist(self): self.assertTrue('Core/Java/1.8.0_181' in self.modtool.available()) self.assertEqual(self.modtool.exist(['Core/Java/1.8.0_181']), [True]) self.assertEqual(self.modtool.exist(['Core/Java/1.8']), [True]) - self.assertEqual(self.modtool.module_wrapper_exists('Core/Java/1.8'), 'Core/Java/1.8.0_181') + self.assertEqual(self.modtool.exist(['Core/Java/site_default']), [True]) + + # Test alias in home directory .modulerc + if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.0'): + # Required, otherwise the temporary HOME would already be in MODULEPATH + self.init_testmods() + # Sanity check: Module aliases don't exist yet + self.assertEqual(self.modtool.exist(['OpenMPI/99', 'OpenMPIAlias']), [False, False]) + # Use a temporary dir, not the user's HOME + os.environ['HOME'] = tempfile.mkdtemp() + reset_module_caches() + write_file(os.path.join(os.environ['HOME'], '.modulerc'), '\n'.join([ + '#%Module', + 'module-version OpenMPI/2.1.2-GCC-6.4.0-2.28 99', + 'module-alias OpenMPIAlias OpenMPI/2.1.2-GCC-6.4.0-2.28', + ])) + self.assertEqual(self.modtool.exist(['OpenMPI/99', 'OpenMPIAlias']), [True, True]) def test_load(self): """ test if we load one module it is in the loaded_modules """ @@ -284,6 +431,25 @@ def test_load(self): self.assertEqual(os.environ.get('EBROOTGCC'), None) self.assertFalse(loaded_modules[-1] == 'GCC/6.4.0-2.28') + def test_show(self): + """Test for ModulesTool.show method.""" + + out = self.modtool.show('GCC/7.3.0-2.30') + + patterns = [ + # full path to module is included in output of 'show' + r"test/framework/modules/GCC/7.3.0-2.30:\s*$", + r"setenv\W+EBROOTGCC.+prefix/software/GCC/7.3.0-2.30", + r"^prepend[_-]path\W+PATH.+/prefix/software/GCC/7.3.0-2.30/bin", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # show method only returns user-facing output (obtained via stderr), not changes to the environment + regex = re.compile(r'^os\.environ\[', re.M) + self.assertFalse(regex.search(out), "Pattern '%s' should not be found in: %s" % (regex.pattern, out)) + def test_curr_module_paths(self): """Test for curr_module_paths function.""" @@ -544,24 +710,6 @@ def test_modulefile_path(self): res = modtool.modulefile_path('bzip2/.1.0.6', strip_ext=True) self.assertTrue(res.endswith('test/framework/modules/bzip2/.1.0.6')) - # hack into 'module show GCC/6.4.0-2.28' cache and inject alternate output that modulecmd.tcl sometimes produces - # make sure we only extract the module file path, nothing else... - # cfr.
https://github.com/easybuilders/easybuild/issues/368 - modulepath = os.environ['MODULEPATH'].split(':') - mod_show_cache_key = modtool.mk_module_cache_key('GCC/6.4.0-2.28') - mod.MODULE_SHOW_CACHE[mod_show_cache_key] = '\n'.join([ - "import os", - "os.environ['MODULEPATH_modshare'] = '%s'" % ':'.join(m + ':1' for m in modulepath), - "os.environ['MODULEPATH'] = '%s'" % ':'.join(modulepath), - "------------------------------------------------------------------------------", - "%s:" % gcc_mod_file, - "------------------------------------------------------------------------------", - # remainder of output doesn't really matter in this context - "setenv EBROOTGCC /prefix/GCC/6.4.0-2.28" - ]) - res = modtool.modulefile_path('GCC/6.4.0-2.28') - self.assertTrue(os.path.samefile(res, os.path.join(test_dir, 'modules', 'GCC', '6.4.0-2.28'))) - reset_module_caches() def test_path_to_top_of_module_tree(self): @@ -891,7 +1039,7 @@ def test_modules_tool_stateless(self): # exact error message depends on Lmod version load_err_msg = '|'.join([ r'These[\s\sn]*module\(s\)[\s\sn]*exist[\s\sn]*but[\s\sn]*cannot[\s\sn]*be', - 'The[\s\sn]*following[\s\sn]*module\(s\)[\s\sn]*are[\s\sn]*unknown', + r'The[\s\sn]*following[\s\sn]*module\(s\)[\s\sn]*are[\s\sn]*unknown', ]) else: load_err_msg = "Unable to locate a modulefile" @@ -1060,7 +1208,7 @@ def test_load_in_hierarchy(self): def test_exit_code_check(self): """Verify that EasyBuild checks exit code of executed module commands""" if isinstance(self.modtool, Lmod): - error_pattern = "Module command 'module load nosuchmoduleavailableanywhere' failed with exit code" + error_pattern = "Module command '.*load nosuchmoduleavailableanywhere' failed with exit code" else: # Tcl implementations exit with 0 even when a non-existing module is loaded... 
error_pattern = "Unable to locate a modulefile for 'nosuchmoduleavailableanywhere'" @@ -1101,7 +1249,7 @@ def check_loaded_modules(): r"^\* GCC/6.4.0-2.28", r"^\* hwloc/1.11.8-GCC-6.4.0-2.28", r"^\* OpenMPI/2.1.2-GCC-6.4.0-2.28", - "This is not recommended since it may affect the installation procedure\(s\) performed by EasyBuild.", + r"This is not recommended since it may affect the installation procedure\(s\) performed by EasyBuild.", "To make EasyBuild allow particular loaded modules, use the --allow-loaded-modules configuration option.", "To specify action to take when loaded modules are detected, use " "--detect-loaded-modules={error,ignore,purge,unload,warn}", @@ -1119,7 +1267,7 @@ def check_loaded_modules(): # error mentioning 1 non-allowed module (OpenMPI), both GCC and hwloc loaded modules are allowed error_pattern = r"Found one or more non-allowed loaded .* module.*\n" - error_pattern += "\* OpenMPI/2.1.2-GCC-6.4.0-2.28\n\nThis is not" + error_pattern += r"\* OpenMPI/2.1.2-GCC-6.4.0-2.28\n\nThis is not" self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.check_loaded_modules) # check for warning message when purge is being run on loaded modules diff --git a/test/framework/options.py b/test/framework/options.py index a755b7d7c4..731dc8c214 100644 --- a/test/framework/options.py +++ b/test/framework/options.py @@ -517,6 +517,58 @@ def test__list_toolchains(self): if os.path.exists(dummylogfn): os.remove(dummylogfn) + def test_list_toolchains_rst(self): + """Test --list-toolchains --output-format=rst.""" + + args = [ + '--list-toolchains', + '--output-format=rst', + ] + self.mock_stderr(True) + self.mock_stdout(True) + self.eb_main(args, raise_error=True) + stderr, stdout = self.get_stderr(), self.get_stdout().strip() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + + title = "List of known toolchains" + + # separator line: starts/ends with sequence of '=', 4 spaces in between columns + sep_line = r'=(=+\s{4})+[=]+=' + + col_names = ['Name', r'Compiler\(s\)', 'MPI', 'Linear algebra', 'FFT'] + col_names_line = r'\s+'.join(col_names) + r'\s*' + + patterns = [ + # title + '^' + title + '\n' + '-' * len(title) + '\n', + # header + '\n' + '\n'.join([sep_line, col_names_line, sep_line]) + '\n', + # compiler-only GCC toolchain + r"\n\*\*GCC\*\*\s+GCC\s+\*\(none\)\*\s+\*\(none\)\*\s+\*\(none\)\*\s*\n", + # gompi compiler + MPI toolchain + r"\n\*\*gompi\*\*\s+GCC\s+OpenMPI\s+\*\(none\)\*\s+\*\(none\)\*\s*\n", + # full 'foss' toolchain + r"\*\*foss\*\*\s+GCC\s+OpenMPI\s+OpenBLAS,\s+ScaLAPACK\s+FFTW\s*\n", + # compiler-only iccifort toolchain + r"\*\*iccifort\*\*\s+icc,\s+ifort\s+\*\(none\)\*\s+\*\(none\)\*\s+\*\(none\)\*\s*\n", + # full 'intel' toolchain (imkl appears twice, in linalg + FFT columns) + r"\*\*intel\*\*\s+icc,\s+ifort\s+impi\s+imkl\s+imkl\s*\n", + # fosscuda toolchain, also lists CUDA in compilers column + r"\*\*fosscuda\*\*\s+GCC,\s+CUDA\s+OpenMPI\s+OpenBLAS,\s+ScaLAPACK\s+FFTW\s*\n", + # system toolchain: 'none' in every column + r"\*\*system\*\*\s+\*\(none\)\*\s+\*\(none\)\*\s+\*\(none\)\*\s+\*\(none\)\*\s*\n", + # Cray special case + r"\n\*\*CrayGNU\*\*\s+PrgEnv-gnu\s+cray-mpich\s+cray-libsci\s+\*\(none\)\*\s*\n", + # footer + '\n' + sep_line + '$', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + def test_avail_lists(self): """Test listing available values of certain types.""" @@ -1894,6 +1946,51 @@ def 
test_try(self): allargs = args + ['--software-version=1.2.3', '--toolchain=gompi,2018a'] self.assertErrorRegex(EasyBuildError, "version .* not available", self.eb_main, allargs, raise_error=True) + def test_try_with_copy(self): + """Test whether --try options are taken into account.""" + ecs_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + tweaked_toy_ec = os.path.join(self.test_buildpath, 'toy-0.0-tweaked.eb') + copy_file(os.path.join(ecs_path, 't', 'toy', 'toy-0.0.eb'), tweaked_toy_ec) + f = open(tweaked_toy_ec, 'a') + f.write("easyblock = 'ConfigureMake'") + f.close() + + args = [ + tweaked_toy_ec, + '--sourcepath=%s' % self.test_sourcepath, + '--buildpath=%s' % self.test_buildpath, + '--installpath=%s' % self.test_installpath, + '--dry-run', + '--robot=%s' % ecs_path, + '--copy-ec', + ] + self.mock_stdout(True) + self.mock_stderr(True) + copied_ec = os.path.join(self.test_buildpath, 'my_eb.eb') + self.eb_main(args + [copied_ec], verbose=True, raise_error=True) + outtxt = self.get_stdout() + errtxt = self.get_stderr() + self.assertTrue(r'toy-0.0-tweaked.eb copied to ' + copied_ec in outtxt) + self.assertFalse(errtxt) + self.mock_stdout(False) + self.mock_stderr(False) + self.assertTrue(os.path.exists(copied_ec)) + + self.mock_stdout(True) + self.mock_stderr(True) + tweaked_ecs_dir = os.path.join(self.test_buildpath, 'my_tweaked_ecs') + self.eb_main(args + ['--try-software=foo,1.2.3', '--try-toolchain=gompi,2018a', tweaked_ecs_dir], + verbose=True, raise_error=True) + outtxt = self.get_stdout() + errtxt = self.get_stderr() + self.assertTrue(r'1 file(s) copied to ' + tweaked_ecs_dir in outtxt) + self.assertFalse(errtxt) + self.mock_stdout(False) + self.mock_stderr(False) + self.assertTrue( + os.path.exists(os.path.join(self.test_buildpath, tweaked_ecs_dir, 'foo-1.2.3-GCC-6.4.0-2.28.eb')) + ) + def test_software_version_ordering(self): """Test whether software versions are correctly ordered when using --software.""" ecs_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') @@ -2750,7 +2847,7 @@ def test_xxx_include_easyblocks_from_pr(self): write_file(self.logfile, '') args = [ - '--from-pr=9979', # PR for CMake easyconfig + '--from-pr=10487', # PR for CMake easyconfig '--include-easyblocks-from-pr=1936', # PR for EB_CMake easyblock '--unittest-file=%s' % self.logfile, '--github-user=%s' % GITHUB_TEST_ACCOUNT, @@ -2760,8 +2857,8 @@ def test_xxx_include_easyblocks_from_pr(self): logtxt = read_file(self.logfile) # easyconfig from pr is found - ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr9979', 'c', 'CMake', - 'CMake-3.16.4-GCCcore-9.2.0.eb') + ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr10487', 'c', 'CMake', + 'CMake-3.16.4-GCCcore-9.3.0.eb') ec_regex = re.compile(r"Parsing easyconfig file %s" % ec_pattern, re.M) self.assertTrue(ec_regex.search(logtxt), "Pattern '%s' found in: %s" % (ec_regex.pattern, logtxt)) @@ -3308,6 +3405,7 @@ def test_new_pr_from_branch(self): '--new-pr-from-branch=%s' % test_branch, '--github-user=%s' % GITHUB_TEST_ACCOUNT, # used to get GitHub token '--github-org=boegel', # used to determine account to grab branch from + '--pr-descr="an easyconfig for toy"', '-D', ] txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) @@ -3326,6 +3424,7 @@ def test_new_pr_from_branch(self): r"\* target: easybuilders/easybuild-easyconfigs:develop$", r"^\* from: boegel/easybuild-easyconfigs:test_new_pr_from_branch_DO_NOT_REMOVE$", r'^\* title: 
"\{tools\}\[system/system\] toy v0\.0"$', + r'^"an easyconfig for toy"$', r"^ 1 file changed, 32 insertions\(\+\)$", r"^\* overview of changes:\n easybuild/easyconfigs/t/toy/toy-0\.0\.eb | 32", ] @@ -4263,54 +4362,128 @@ def test_list_prs(self): def test_list_software(self): """Test --list-software and --list-installed-software.""" - test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'v1.0') - args = [ - '--list-software', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) - expected = '\n'.join([ - "== Processed 5/5 easyconfigs...", - "== Found 2 different software packages", - '', - "* GCC", - "* gzip", - '', + + # copy selected test easyconfigs for testing --list-*software options with; + # full test is a nuisance, because all dependencies must be available and toolchains like intel must have + # all expected components when testing with HierarchicalMNS (which the test easyconfigs don't always have) + topdir = os.path.dirname(os.path.abspath(__file__)) + + cray_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'c', 'CrayCCE', 'CrayCCE-5.1.29.eb') + gcc_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'GCC', 'GCC-4.6.3.eb') + gzip_ec = os.path.join(topdir, 'easyconfigs', 'v1.0', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb') + gzip_system_ec = os.path.join(topdir, 'easyconfigs', 'v1.0', 'g', 'gzip', 'gzip-1.4.eb') + + test_ecs = os.path.join(self.test_prefix, 'test_ecs') + for ec in [cray_ec, gcc_ec, gzip_ec, gzip_system_ec]: + subdirs = os.path.dirname(ec).split(os.path.sep)[-2:] + target_dir = os.path.join(test_ecs, *subdirs) + mkdir(target_dir, parents=True) + copy_file(ec, target_dir) + + # add (fake) HPL easyconfig using CrayCCE toolchain + # (required to trigger bug reported in https://github.com/easybuilders/easybuild-framework/issues/3265) + hpl_cray_ec_txt = '\n'.join([ + 'easyblock = "ConfigureMake"', + 'name = "HPL"', + 'version = "2.3"', + "homepage = 'http://www.netlib.org/benchmark/hpl/'", + 'description = "HPL"', + 'toolchain = {"name": "CrayCCE", "version": "5.1.29"}', ]) - self.assertTrue(txt.endswith(expected)) + hpl_cray_ec = os.path.join(self.test_prefix, 'test_ecs', 'h', 'HPL', 'HPL-2.3-CrayCCE-5.1.29.eb') + write_file(hpl_cray_ec, hpl_cray_ec_txt) - args = [ - '--list-software=detailed', - '--output-format=rst', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False) - self.assertTrue(re.search(r'^\*GCC\*', txt, re.M)) - self.assertTrue(re.search(r'^``4.6.3``\s+``system``', txt, re.M)) - self.assertTrue(re.search(r'^\*gzip\*', txt, re.M)) - self.assertTrue(re.search(r'^``1.5``\s+``foss/2018a``,\s+``intel/2018a``', txt, re.M)) + # put dummy Core/GCC/4.6.3 in place + modpath = os.path.join(self.test_prefix, 'modules') + write_file(os.path.join(modpath, 'Core', 'GCC', '4.6.3'), '#%Module') + self.modtool.use(modpath) - args = [ - '--list-installed-software', - '--output-format=rst', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False, raise_error=True) - self.assertTrue(re.search(r'== Processed 5/5 easyconfigs...', txt, re.M)) - self.assertTrue(re.search(r'== Found 2 different software packages', txt, re.M)) - self.assertTrue(re.search(r'== Retained 1 installed software packages', txt, re.M)) - self.assertTrue(re.search(r'^\* GCC', txt, re.M)) - self.assertFalse(re.search(r'gzip', txt, re.M)) + # test with different module naming scheme active + # (see 
https://github.com/easybuilders/easybuild-framework/issues/3265) + for mns in ['EasyBuildMNS', 'HierarchicalMNS']: - args = [ - '--list-installed-software=detailed', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False) - self.assertTrue(re.search(r'^== Retained 1 installed software packages', txt, re.M)) - self.assertTrue(re.search(r'^\* GCC', txt, re.M)) - self.assertTrue(re.search(r'^\s+\* GCC v4.6.3: system', txt, re.M)) - self.assertFalse(re.search(r'gzip', txt, re.M)) + args = [ + '--list-software', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^\* CrayCCE", + r"^\* GCC", + r"^\* gzip", + r"^\* HPL", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + args = [ + '--list-software=detailed', + '--output-format=rst', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r'^\*CrayCCE\*', + r'^``5.1.29``\s+``system``', + r'^\*GCC\*', + r'^``4.6.3``\s+``system``', + r'^\*gzip\*', + r'^``1.4`` ``GCC/4.6.3``, ``system``', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + args = [ + '--list-installed-software', + '--output-format=rst', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^== Retained 1 installed software packages", + r'^\* GCC', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + self.assertFalse(re.search(r'gzip', txt, re.M)) + self.assertFalse(re.search(r'CrayCCE', txt, re.M)) + + args = [ + '--list-installed-software=detailed', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^== Retained 1 installed software packages", + r'^\* GCC', + r'^\s+\* GCC v4.6.3: system', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + self.assertFalse(re.search(r'gzip', txt, re.M)) + self.assertFalse(re.search(r'CrayCCE', txt, re.M)) def test_parse_optarch(self): """Test correct parsing of optarch option.""" diff --git a/test/framework/parallelbuild.py b/test/framework/parallelbuild.py index 9487b63e8d..79608c5b24 100644 --- a/test/framework/parallelbuild.py +++ b/test/framework/parallelbuild.py @@ -173,8 +173,10 @@ def test_build_easyconfigs_in_parallel_pbs_python(self): # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script) self.assertEqual(len(jobs[3].deps), 2) - regex = 
re.compile('toy-0.0-deps.eb\s* --hidden') - self.assertTrue(regex.search(jobs[3].deps[0].script)) + regex = re.compile(r'toy-0.0-deps\.eb.* --hidden') + script_txt = jobs[3].deps[0].script + fail_msg = "Pattern '%s' should be found in: %s" % (regex.pattern, script_txt) + self.assertTrue(regex.search(script_txt), fail_msg) self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script) # also test use of --pre-create-installdir @@ -290,6 +292,7 @@ def test_submit_jobs(self): '--try-toolchain=intel,2016a', # should be excluded in job script '--robot', self.test_prefix, # should be excluded in job script '--job', # should be excluded in job script + '--job-cores=3', ] eb_go = parse_options(args=args) cmd = submit_jobs([toy_ec], eb_go.generate_cmd_line(), testing=True) @@ -306,6 +309,7 @@ def test_submit_jobs(self): ' eb %\(spec\)s ', ' %\(add_opts\)s ', ' --testoutput=%\(output_dir\)s', + ' --disable-job ', ] for regex in regexs: regex = re.compile(regex) @@ -313,9 +317,9 @@ def test_submit_jobs(self): # these patterns should NOT be found, these options get filtered out # (self.test_prefix was argument to --robot) - for regex in ['--job', '--try-toolchain', '--robot=[ =]', self.test_prefix + ' ']: + for regex in ['--job', '--job-cores', '--try-toolchain', '--robot=[ =]', self.test_prefix + ' ']: regex = re.compile(regex) - self.assertFalse(regex.search(cmd), "Pattern '%s' *not* found in: %s" % (regex.pattern, cmd)) + self.assertFalse(regex.search(cmd), "Pattern '%s' should *not* be found in: %s" % (regex.pattern, cmd)) def test_build_easyconfigs_in_parallel_slurm(self): """Test build_easyconfigs_in_parallel(), using (mocked) Slurm as backend for --job.""" diff --git a/test/framework/run.py b/test/framework/run.py index e7d608c7b2..5150838d80 100644 --- a/test/framework/run.py +++ b/test/framework/run.py @@ -206,22 +206,27 @@ def test_run_cmd_negative_exit_code(self): def handler(signum, _): raise RuntimeError("Signal handler called with signal %s" % signum) - # set the signal handler and a 3-second alarm - signal.signal(signal.SIGALRM, handler) - signal.alarm(3) + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) - (_, ec) = run_cmd("kill -9 $$", log_ok=False) - self.assertEqual(ec, -9) + try: + # set the signal handler and a 3-second alarm + signal.signal(signal.SIGALRM, handler) + signal.alarm(3) - # reset the alarm - signal.alarm(0) - signal.alarm(3) + (_, ec) = run_cmd("kill -9 $$", log_ok=False) + self.assertEqual(ec, -9) - (_, ec) = run_cmd_qa("kill -9 $$", {}, log_ok=False) - self.assertEqual(ec, -9) + # reset the alarm + signal.alarm(0) + signal.alarm(3) - # disable the alarm - signal.alarm(0) + (_, ec) = run_cmd_qa("kill -9 $$", {}, log_ok=False) + self.assertEqual(ec, -9) + + finally: + # cleanup: disable the alarm + reset signal handler for SIGALRM + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) def test_run_cmd_bis(self): """More 'complex' test for run_cmd function.""" diff --git a/test/framework/sandbox/easybuild/easyblocks/t/toy.py b/test/framework/sandbox/easybuild/easyblocks/t/toy.py index 79f3d3d8fc..11b2dff542 100644 --- a/test/framework/sandbox/easybuild/easyblocks/t/toy.py +++ b/test/framework/sandbox/easybuild/easyblocks/t/toy.py @@ -46,7 +46,7 @@ class EB_toy(ExtensionEasyBlock): @staticmethod def extra_options(extra_vars=None): - """Custom easyconfig parameters for toytoy.""" + """Custom easyconfig parameters for toy.""" if extra_vars is None: extra_vars = {} diff --git a/test/framework/toolchain.py b/test/framework/toolchain.py index 
2b0fc84634..4ed54ead66 100644 --- a/test/framework/toolchain.py +++ b/test/framework/toolchain.py @@ -40,6 +40,7 @@ from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, find_full_path, init_config import easybuild.tools.modules as modules +import easybuild.tools.toolchain as toolchain import easybuild.tools.toolchain.compiler from easybuild.framework.easyconfig.easyconfig import EasyConfig, ActiveMNS from easybuild.toolchains.system import SystemToolchain @@ -49,6 +50,7 @@ from easybuild.tools.filetools import adjust_permissions, copy_dir, find_eb_script, mkdir, read_file, write_file, which from easybuild.tools.py2vs3 import string_type from easybuild.tools.run import run_cmd +from easybuild.tools.toolchain.mpi import get_mpi_cmd_template from easybuild.tools.toolchain.toolchain import env_vars_external_module from easybuild.tools.toolchain.utilities import get_toolchain, search_toolchain @@ -1027,6 +1029,40 @@ def test_mpi_cmd_for(self): error_pattern = "Failed to complete MPI cmd template .* with .*: KeyError 'foo'" self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1) + def test_get_mpi_cmd_template(self): + """Test get_mpi_cmd_template function.""" + + # search_toolchain needs to be called once to make sure constants like toolchain.OPENMPI are in place + search_toolchain('') + + input_params = {'nr_ranks': 123, 'cmd': 'this_is_just_a_test'} + + for mpi_fam in [toolchain.OPENMPI, toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]: + mpi_cmd_tmpl, params = get_mpi_cmd_template(mpi_fam, input_params) + self.assertEqual(mpi_cmd_tmpl, "mpirun -n %(nr_ranks)s %(cmd)s") + self.assertEqual(params, input_params) + + # Intel MPI is a special case, also requires MPI version to be known + impi = toolchain.INTELMPI + error_pattern = "Intel MPI version unknown, can't determine MPI command template!" 
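+ # (a minimal usage sketch of get_mpi_cmd_template, assuming the template is expanded with '%'-style
+ #  named formatting, as the assertions below suggest; './xhpl' is just an illustrative placeholder:
+ #      tmpl, params = get_mpi_cmd_template(toolchain.OPENMPI, {'nr_ranks': 4, 'cmd': './xhpl'})
+ #      mpi_cmd = tmpl % params  # -> "mpirun -n 4 ./xhpl"
+ # )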
+ self.assertErrorRegex(EasyBuildError, error_pattern, get_mpi_cmd_template, impi, {}) + + mpi_cmd_tmpl, params = get_mpi_cmd_template(toolchain.INTELMPI, input_params, mpi_version='1.0') + self.assertEqual(mpi_cmd_tmpl, "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s") + self.assertEqual(sorted(params.keys()), ['cmd', 'mpdbf', 'nodesfile', 'nr_ranks']) + self.assertEqual(params['cmd'], 'this_is_just_a_test') + self.assertEqual(params['nr_ranks'], 123) + + mpdbf = params['mpdbf'] + regex = re.compile('^--file=.*/mpdboot$') + self.assertTrue(regex.match(mpdbf), "'%s' should match pattern '%s'" % (mpdbf, regex.pattern)) + self.assertTrue(os.path.exists(mpdbf.split('=')[1])) + + nodesfile = params['nodesfile'] + regex = re.compile('^-machinefile /.*/nodes$') + self.assertTrue(regex.match(nodesfile), "'%s' should match pattern '%s'" % (nodesfile, regex.pattern)) + self.assertTrue(os.path.exists(nodesfile.split(' ')[1])) + def test_prepare_deps(self): """Test preparing for a toolchain when dependencies are involved.""" tc = self.get_toolchain('GCC', version='6.4.0-2.28') diff --git a/test/framework/toy_build.py b/test/framework/toy_build.py index 1570504205..5f519fe31b 100644 --- a/test/framework/toy_build.py +++ b/test/framework/toy_build.py @@ -1,4 +1,5 @@ -# # +# -*- coding: utf-8 -*- +## # Copyright 2013-2020 Ghent University # # This file is part of EasyBuild, @@ -50,10 +51,11 @@ from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import get_module_syntax, get_repositorypath from easybuild.tools.environment import modify_env -from easybuild.tools.filetools import adjust_permissions, mkdir, read_file, remove_dir, remove_file, which, write_file +from easybuild.tools.filetools import adjust_permissions, change_dir, mkdir, read_file, remove_dir, remove_file +from easybuild.tools.filetools import which, write_file from easybuild.tools.module_generator import ModuleGeneratorTcl from easybuild.tools.modules import Lmod -from easybuild.tools.py2vs3 import string_type +from easybuild.tools.py2vs3 import reload, string_type from easybuild.tools.run import run_cmd from easybuild.tools.version import VERSION as EASYBUILD_VERSION @@ -73,7 +75,31 @@ def setUp(self): def tearDown(self): """Cleanup.""" + + # kick out any paths for included easyblocks from sys.path, + # to avoid infecting any other tests + for path in sys.path[:]: + if '/included-easyblocks' in path: + sys.path.remove(path) + + # reload toy easyblock (and generic toy_extension easyblock that imports it) after cleaning up sys.path, + # to avoid trouble in other tests due to included toy easyblock that is cached somewhere + # (despite the cleanup in sys.modules); + # important for tests that include a customised copy of the toy easyblock + # (like test_toy_build_enhanced_sanity_check) + import easybuild.easyblocks.toy + reload(easybuild.easyblocks.toy) + import easybuild.easyblocks.toytoy + reload(easybuild.easyblocks.toytoy) + import easybuild.easyblocks.generic.toy_extension + reload(easybuild.easyblocks.generic.toy_extension) + + del sys.modules['easybuild.easyblocks.toy'] + del sys.modules['easybuild.easyblocks.toytoy'] + del sys.modules['easybuild.easyblocks.generic.toy_extension'] + super(ToyBuildTest, self).tearDown() + # remove logs if os.path.exists(self.dummylogfn): os.remove(self.dummylogfn) @@ -117,7 +143,8 @@ def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versio self.assertTrue(os.path.exists(devel_module_path)) def test_toy_build(self, extra_args=None,
ec_file=None, tmpdir=None, verify=True, fails=False, verbose=True, - raise_error=False, test_report=None, versionsuffix='', testing=True): + raise_error=False, test_report=None, versionsuffix='', testing=True, + raise_systemexit=False): """Perform a toy build.""" if extra_args is None: extra_args = [] @@ -144,7 +171,7 @@ def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True myerr = None try: outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=verbose, - raise_error=raise_error, testing=testing) + raise_error=raise_error, testing=testing, raise_systemexit=raise_systemexit) except Exception as err: myerr = err if raise_error: @@ -1337,7 +1364,7 @@ def test_external_dependencies(self): write_file(toy_ec, ectxt + extraectxt) if isinstance(self.modtool, Lmod): - err_msg = r"Module command \\'module load nosuchbuilddep/0.0.0\\' failed" + err_msg = r"Module command \\'.*load nosuchbuilddep/0.0.0\\' failed" else: err_msg = r"Unable to locate a modulefile for 'nosuchbuilddep/0.0.0'" @@ -1349,7 +1376,7 @@ def test_external_dependencies(self): write_file(toy_ec, ectxt + extraectxt) if isinstance(self.modtool, Lmod): - err_msg = r"Module command \\'module load nosuchmodule/1.2.3\\' failed" + err_msg = r"Module command \\'.*load nosuchmodule/1.2.3\\' failed" else: err_msg = r"Unable to locate a modulefile for 'nosuchmodule/1.2.3'" @@ -1887,6 +1914,169 @@ def test_sanity_check_paths_lib64(self): write_file(test_ec, ectxt) self.test_toy_build(ec_file=test_ec, raise_error=True) + def test_toy_build_enhanced_sanity_check(self): + """Test enhancing of sanity check.""" + + # if toy easyblock was imported, get rid of corresponding entry in sys.modules, + # to avoid that it messes up the use of --include-easyblocks=toy.py below... 
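+ # (background, stated as an assumption based on the '/included-easyblocks' cleanup in tearDown above:
+ #  --include-easyblocks copies the given easyblock into a temporary 'included-easyblocks' directory that is
+ #  added to sys.path, so a stale 'easybuild.easyblocks.toy' entry in sys.modules would shadow the included copy)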
+ if 'easybuild.easyblocks.toy' in sys.modules: + del sys.modules['easybuild.easyblocks.toy'] + + test_dir = os.path.join(os.path.abspath(os.path.dirname(__file__))) + toy_ec = os.path.join(test_dir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + toy_ec_txt = read_file(toy_ec) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + + # get rid of custom sanity check paths in test easyconfig + regex = re.compile(r'^sanity_check_paths\s*=\s*{[^}]+}', re.M) + test_ec_txt = regex.sub('', toy_ec_txt) + write_file(test_ec, test_ec_txt) + + self.assertFalse('sanity_check_' in test_ec_txt) + + # create custom easyblock for toy that has a custom sanity_check_step + toy_easyblock = os.path.join(test_dir, 'sandbox', 'easybuild', 'easyblocks', 't', 'toy.py') + + toy_easyblock_txt = read_file(toy_easyblock) + + toy_custom_sanity_check_step = '\n'.join([ + '', + " def sanity_check_step(self):", + " paths = {", + " 'files': ['bin/toy'],", + " 'dirs': [],", + " }", + " cmds = ['toy']", + " return super(EB_toy, self).sanity_check_step(custom_paths=paths, custom_commands=cmds)", + ]) + test_toy_easyblock = os.path.join(self.test_prefix, 'toy.py') + write_file(test_toy_easyblock, toy_easyblock_txt + toy_custom_sanity_check_step) + + eb_args = [ + '--extended-dry-run', + '--include-easyblocks=%s' % test_toy_easyblock, + ] + + # by default, sanity check commands & paths specified by easyblock are used + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* bin/toy", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\(none\)", + r"Sanity check commands", + r"\s*\* toy", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + # we need to manually wipe the entry for the included toy easyblock, + # to avoid trouble with subsequent EasyBuild sessions in this test + del sys.modules['easybuild.easyblocks.toy'] + + # easyconfig specifies custom sanity_check_paths & sanity_check_commands, + # the ones defined by the easyblock are skipped by default + test_ec_txt = test_ec_txt + '\n'.join([ + '', + "sanity_check_paths = {", + " 'files': ['README'],", + " 'dirs': ['bin/']", + "}", + "sanity_check_commands = ['ls %(installdir)s']", + ]) + write_file(test_ec, test_ec_txt) + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* README", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\* bin/", + r"Sanity check commands", + r"\s*\* ls .*/software/toy/0.0", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # if enhance_sanity_check is enabled, then sanity check paths/commands specified in easyconfigs + # are used in addition to those defined in easyblock + test_ec_txt = test_ec_txt + '\nenhance_sanity_check = True' + write_file(test_ec, test_ec_txt) + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + 
self.mock_stdout(False) + + # now 'bin/toy' file and 'toy' command should also be part of sanity check + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* README", + r"\s*\* bin/toy", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\* bin/", + r"Sanity check commands", + r"\s*\* ls .*/software/toy/0.0", + r"\s*\* toy", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # sanity_check_paths with only one key is allowed if enhance_sanity_check is enabled; + test_ec_txt = test_ec_txt + "\nsanity_check_paths = {'files': ['README']}" + write_file(test_ec, test_ec_txt) + + # we need to do a non-dry run here, to ensure the code we want to test is triggered + # (EasyConfig.dump called by 'reproduce_build' function from 'build_and_install_one') + eb_args = [ + '--include-easyblocks=%s' % test_toy_easyblock, + '--trace', + ] + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"^== sanity checking\.\.\.", + r" >> file 'bin/toy' found: OK", + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + # no directories are checked in sanity check now, only files (since dirs is an empty list) + regex = re.compile(r"directory .* found:", re.M) + self.assertFalse(regex.search(stdout), "Pattern '%s' should be not found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # if enhance_sanity_check is disabled, both files/dirs keys are strictly required in sanity_check_paths + test_ec_txt = test_ec_txt + '\nenhance_sanity_check = False' + write_file(test_ec, test_ec_txt) + + error_pattern = " Missing mandatory key 'dirs' in sanity_check_paths." 
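+ # (in other words: with enhance_sanity_check disabled, an easyconfig that only specifies
+ #      sanity_check_paths = {'files': ['README']}
+ #  is rejected, since the 'dirs' key is then mandatory as well)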
+ self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec, + extra_args=eb_args, raise_error=True, verbose=False) + + del sys.modules['easybuild.easyblocks.toy'] + def test_toy_dumped_easyconfig(self): """ Test dumping of file in eb_filerepo in both .eb and .yeb format """ filename = 'toy-0.0' @@ -2068,9 +2258,16 @@ def test_toy_modaltsoftname(self): def test_toy_build_trace(self): """Test use of --trace""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, read_file(toy_ec_file) + '\nsanity_check_commands = ["toy"]') + self.mock_stderr(True) self.mock_stdout(True) - self.test_toy_build(extra_args=['--trace', '--experimental'], verify=False, testing=False) + self.test_toy_build(ec_file=test_ec, extra_args=['--trace', '--experimental'], verify=False, testing=False) stderr = self.get_stderr() stdout = self.get_stdout() self.mock_stderr(False) @@ -2095,6 +2292,8 @@ def test_toy_build_trace(self): r"== sanity checking\.\.\.", r" >> file 'bin/yot' or 'bin/toy' found: OK", r" >> \(non-empty\) directory 'bin' found: OK", + r" >> running command 'toy' \.\.\.", + r" >> result for command 'toy': OK", ]) + r'$', r"^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", ] @@ -2545,8 +2744,10 @@ def test_toy_build_lock(self): # also test use of --ignore-locks self.test_toy_build(extra_args=extra_args + ['--ignore-locks'], verify=True, raise_error=True) + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) + # define a context manager that remove a lock after a while, so we can check the use of --wait-for-lock - class remove_lock_after: + class remove_lock_after(object): def __init__(self, seconds, lock_fp): self.seconds = seconds self.lock_fp = lock_fp @@ -2559,45 +2760,90 @@ def __enter__(self): signal.alarm(self.seconds) def __exit__(self, type, value, traceback): - pass + # clean up SIGALRM signal handler, and cancel scheduled alarm + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) - # wait for lock to be removed, with 1 second interval of checking - extra_args.append('--wait-on-lock=1') + # wait for lock to be removed, with 1 second interval of checking; + # check with both --wait-on-lock-interval and deprecated --wait-on-lock options wait_regex = re.compile("^== lock .*_software_toy_0.0.lock exists, waiting 1 seconds", re.M) ok_regex = re.compile("^== COMPLETED: Installation ended successfully", re.M) - self.assertTrue(os.path.exists(toy_lock_path)) + test_cases = [ + ['--wait-on-lock=1'], + ['--wait-on-lock=1', '--wait-on-lock-interval=60'], + ['--wait-on-lock=100', '--wait-on-lock-interval=1'], + ['--wait-on-lock-limit=100', '--wait-on-lock=1'], + ['--wait-on-lock-limit=100', '--wait-on-lock-interval=1'], + ['--wait-on-lock-limit=-1', '--wait-on-lock=1'], + ['--wait-on-lock-limit=-1', '--wait-on-lock-interval=1'], + ] - # use context manager to remove lock after 3 seconds - with remove_lock_after(3, toy_lock_path): - self.mock_stderr(True) - self.mock_stdout(True) - self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) - stderr, stdout = self.get_stderr(), self.get_stdout() - self.mock_stderr(False) - self.mock_stdout(False) + for opts in test_cases: - self.assertEqual(stderr, '') + if any('--wait-on-lock=' in x for x in opts): + self.allow_deprecated_behaviour() + else: + 
self.disallow_deprecated_behaviour() - wait_matches = wait_regex.findall(stdout) - # we can't rely on an exact number of 'waiting' messages, so let's go with a range... - self.assertTrue(len(wait_matches) in range(2, 5)) + if not os.path.exists(toy_lock_path): + mkdir(toy_lock_path) - self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + self.assertTrue(os.path.exists(toy_lock_path)) + + all_args = extra_args + opts + + # use context manager to remove lock after 3 seconds + with remove_lock_after(3, toy_lock_path): + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) - # when there is no lock in place, --wait-on-lock has no impact - self.assertFalse(os.path.exists(toy_lock_path)) + if any('--wait-on-lock=' in x for x in all_args): + self.assertTrue("Use of --wait-on-lock is deprecated" in stderr) + else: + self.assertEqual(stderr, '') + + wait_matches = wait_regex.findall(stdout) + # we can't rely on an exact number of 'waiting' messages, so let's go with a range... + self.assertTrue(len(wait_matches) in range(2, 5)) + + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + + # check use of --wait-on-lock-limit: if lock is never removed, we should give up when limit is reached + mkdir(toy_lock_path) + all_args = extra_args + ['--wait-on-lock-limit=3', '--wait-on-lock-interval=1'] self.mock_stderr(True) self.mock_stdout(True) - self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) + error_pattern = r"Maximum wait time for lock /.*toy_0.0.lock to be released reached: [0-9]+ sec >= 3 sec" + self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=all_args, + verify=False, raise_error=True, testing=False) stderr, stdout = self.get_stderr(), self.get_stdout() self.mock_stderr(False) self.mock_stdout(False) - self.assertEqual(stderr, '') - self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) - self.assertFalse(wait_regex.search(stdout), "Pattern '%s' not found in: %s" % (wait_regex.pattern, stdout)) + wait_matches = wait_regex.findall(stdout) + self.assertTrue(len(wait_matches) in range(2, 5)) + + # when there is no lock in place, --wait-on-lock* has no impact + remove_dir(toy_lock_path) + for opt in ['--wait-on-lock=1', '--wait-on-lock-limit=3', '--wait-on-lock-interval=1']: + all_args = extra_args + [opt] + self.assertFalse(os.path.exists(toy_lock_path)) + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + self.assertFalse(wait_regex.search(stdout), "Pattern '%s' not found in: %s" % (wait_regex.pattern, stdout)) # check for clean error on creation of lock extra_args = ['--locks-dir=/'] @@ -2606,6 +2852,91 @@ def __exit__(self, type, value, traceback): self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=extra_args, raise_error=True, verbose=False) + def test_toy_lock_cleanup_signals(self): + """Test cleanup of locks after EasyBuild session gets a 
cancellation signal.""" + + orig_wd = os.getcwd() + + locks_dir = os.path.join(self.test_installpath, 'software', '.locks') + self.assertFalse(os.path.exists(locks_dir)) + + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) + + # context manager which stops the function being called with the specified signal + class wait_and_signal(object): + def __init__(self, seconds, signum): + self.seconds = seconds + self.signum = signum + + def send_signal(self, *args): + os.kill(os.getpid(), self.signum) + + def __enter__(self): + signal.signal(signal.SIGALRM, self.send_signal) + signal.alarm(self.seconds) + + def __exit__(self, type, value, traceback): + # clean up SIGALRM signal handler, and cancel scheduled alarm + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) + + # add extra sleep command to ensure session takes long enough + test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + toy_ec_txt = read_file(os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb')) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, toy_ec_txt + '\npostinstallcmds = ["sleep 5"]') + + signums = [ + (signal.SIGABRT, SystemExit), + (signal.SIGINT, KeyboardInterrupt), + (signal.SIGTERM, SystemExit), + (signal.SIGQUIT, SystemExit), + ] + for (signum, exc) in signums: + + # avoid recycling stderr of previous test + stderr = '' + + with wait_and_signal(1, signum): + + # change back to original working directory before each test + change_dir(orig_wd) + + self.mock_stderr(True) + self.mock_stdout(True) + self.assertErrorRegex(exc, '.*', self.test_toy_build, ec_file=test_ec, verify=False, + raise_error=True, testing=False, raise_systemexit=True) + + stderr = self.get_stderr().strip() + self.mock_stderr(False) + self.mock_stdout(False) + + pattern = r"^WARNING: signal received \(%s\), " % int(signum) + pattern += r"cleaning up locks \(.*software_toy_0.0\)\.\.\." + regex = re.compile(pattern) + self.assertTrue(regex.search(stderr), "Pattern '%s' found in: %s" % (regex.pattern, stderr)) + + def test_toy_build_unicode_description(self): + """Test installation of easyconfig file that has non-ASCII characters in description.""" + # cfr. https://github.com/easybuilders/easybuild-framework/issues/3284 + + test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + toy_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb') + toy_ec_txt = read_file(toy_ec) + + # the tilde character included here is a Unicode tilde character, not a regular ASCII tilde (~) + descr = "This description includes a unicode tilde character: ∼, for your entertainment." 
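+ # (the tilde above is the Unicode TILDE OPERATOR, U+223C, rather than the ASCII '~', U+007E;
+ #  the check below guards against an ASCII tilde accidentally being used instead)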
+ self.assertFalse('~' in descr) + + regex = re.compile(r'^description\s*=.*', re.M) + test_ec_txt = regex.sub(r'description = "%s"' % descr, toy_ec_txt) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, test_ec_txt) + + self.test_toy_build(ec_file=test_ec, raise_error=True) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/tweak.py b/test/framework/tweak.py index 0797e76de5..16cd6ee31e 100644 --- a/test/framework/tweak.py +++ b/test/framework/tweak.py @@ -40,6 +40,7 @@ from easybuild.framework.easyconfig.tweak import get_matching_easyconfig_candidates, map_toolchain_hierarchies from easybuild.framework.easyconfig.tweak import find_potential_version_mappings from easybuild.framework.easyconfig.tweak import map_easyconfig_to_target_tc_hierarchy +from easybuild.framework.easyconfig.tweak import list_deps_versionsuffixes from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import module_classes from easybuild.tools.filetools import change_dir, write_file @@ -471,8 +472,7 @@ def test_map_easyconfig_to_target_tc_hierarchy(self): update_build_specs={'version': new_version}, update_dep_versions=False) tweaked_ec = process_easyconfig(tweaked_spec)[0] - tweaked_dict = tweaked_ec['ec'].asdict() - extensions = tweaked_dict['exts_list'] + extensions = tweaked_ec['ec']['exts_list'] # check one extension with the same name exists and that the version has been updated hit_extension = 0 for extension in extensions: @@ -483,6 +483,24 @@ def test_map_easyconfig_to_target_tc_hierarchy(self): hit_extension += 1 self.assertEqual(hit_extension, 1, "Should only have updated one extension") + def test_list_deps_versionsuffixes(self): + """Test listing of dependencies' version suffixes""" + test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + build_options = { + 'robot_path': [test_easyconfigs], + 'silent': True, + 'valid_module_classes': module_classes(), + } + init_config(build_options=build_options) + get_toolchain_hierarchy.clear() + + ec_spec = os.path.join(test_easyconfigs, 'g', 'golf', 'golf-2018a.eb') + self.assertEqual(list_deps_versionsuffixes(ec_spec), ['-serial']) + ec_spec = os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0-deps.eb') + self.assertEqual(list_deps_versionsuffixes(ec_spec), []) + ec_spec = os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb') + self.assertEqual(list_deps_versionsuffixes(ec_spec), ['-deps']) + def suite(): """ return all the tests in this file """ return TestLoaderFiltered().loadTestsFromTestCase(TweakTest, sys.argv[1:]) diff --git a/test/framework/utilities.py b/test/framework/utilities.py index 2c2bd73ffb..1d98dbcced 100644 --- a/test/framework/utilities.py +++ b/test/framework/utilities.py @@ -125,9 +125,8 @@ def setUp(self): os.environ['EASYBUILD_ROBOT_PATHS'] = os.path.join(testdir, 'easyconfigs', 'test_ecs') # make sure no deprecated behaviour is being triggered (unless intended by the test) - # trip *all* log.deprecated statements by setting deprecation version ridiculously high self.orig_current_version = eb_build_log.CURRENT_VERSION - os.environ['EASYBUILD_DEPRECATED'] = '10000000' + self.disallow_deprecated_behaviour() init_config() @@ -181,6 +180,11 @@ def setUp(self): self.reset_modulepath([os.path.join(testdir, 'modules')]) reset_module_caches() + def disallow_deprecated_behaviour(self): + """trip *all* log.deprecated statements by setting deprecation version ridiculously high""" + 
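+ # log.deprecated(msg, ver) raises an error (rather than just warning) once the running EasyBuild version
+ # reaches 'ver', so pretending to be version 10000000 turns every deprecation warning into a hard error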
os.environ['EASYBUILD_DEPRECATED'] = '10000000' + eb_build_log.CURRENT_VERSION = os.environ['EASYBUILD_DEPRECATED'] + def allow_deprecated_behaviour(self): """Restore EasyBuild version to what it was originally, to allow triggering deprecated behaviour.""" if 'EASYBUILD_DEPRECATED' in os.environ: