import abc
import os

import reframe.core.fields as fields
from reframe.core.exceptions import BuildSystemError


class BuildSystem(abc.ABC):
    """Abstract base class of all build systems.

    Concrete build systems inherit the compiler and flag fields defined here
    and must implement :func:`emit_build_commands`.

    FIX: the original declared ``@abc.abstractmethod`` on a plain class;
    without inheriting :class:`abc.ABC` the abstractness is not enforced and
    an incomplete build system could be instantiated silently.
    """

    # Compiler executables; when None, the value may be taken from the
    # programming environment (see `flags_from_environ`).
    cc = fields.StringField('cc', allow_none=True)
    cxx = fields.StringField('cxx', allow_none=True)
    ftn = fields.StringField('ftn', allow_none=True)
    nvcc = fields.StringField('nvcc', allow_none=True)

    # Compilation/link flags; each is a list of strings or None (= unset).
    cflags = fields.TypedListField('cflags', str, allow_none=True)
    cxxflags = fields.TypedListField('cxxflags', str, allow_none=True)
    cppflags = fields.TypedListField('cppflags', str, allow_none=True)
    fflags = fields.TypedListField('fflags', str, allow_none=True)
    ldflags = fields.TypedListField('ldflags', str, allow_none=True)

    # Set compiler and compiler flags from the programming environment
    #
    # :type: :class:`bool`
    # :default: :class:`True`
    flags_from_environ = fields.BooleanField('flags_from_environ')

    def __init__(self):
        self.cc = None
        self.cxx = None
        self.ftn = None
        self.nvcc = None
        self.cflags = None
        self.cxxflags = None
        self.cppflags = None
        self.fflags = None
        self.ldflags = None
        self.flags_from_environ = True

    @abc.abstractmethod
    def emit_build_commands(self, environ):
        """Return a list of commands needed for building using this build system.

        The build commands may always assume to be issued from the top-level
        directory of the code that is to be built.

        :arg environ: The programming environment for which to emit the build
            instructions.
        """

    def _resolve_flags(self, flags, environ, allow_none=True):
        # Return the locally-set value of attribute `flags` if set; otherwise
        # fall back to the programming environment when `flags_from_environ`
        # is enabled, else None.
        #
        # TODO(review): `allow_none` is accepted (callers pass False for the
        # compiler fields) but is currently unused — confirm whether a
        # BuildSystemError should be raised when the result is None and
        # allow_none is False.
        local_val = getattr(self, flags)
        if local_val is not None:
            return local_val

        if self.flags_from_environ:
            return getattr(environ, flags)

        return None

    def _fix_flags(self, flags):
        # FIXME: That's a necessary workaround to the fact the environment
        # defines flags as strings, but here as lists of strings. Should be
        # removed as soon as setting directly the flags in an environment will
        # be disabled.
        return flags.split() if isinstance(flags, str) else flags

    def _cc(self, environ):
        return self._resolve_flags('cc', environ, False)

    def _cxx(self, environ):
        return self._resolve_flags('cxx', environ, False)

    def _ftn(self, environ):
        return self._resolve_flags('ftn', environ, False)

    def _nvcc(self, environ):
        return self._resolve_flags('nvcc', environ, False)

    def _cppflags(self, environ):
        return self._fix_flags(self._resolve_flags('cppflags', environ))

    def _cflags(self, environ):
        return self._fix_flags(self._resolve_flags('cflags', environ))

    def _cxxflags(self, environ):
        return self._fix_flags(self._resolve_flags('cxxflags', environ))

    def _fflags(self, environ):
        return self._fix_flags(self._resolve_flags('fflags', environ))

    def _ldflags(self, environ):
        return self._fix_flags(self._resolve_flags('ldflags', environ))


class Make(BuildSystem):
    """Build system that delegates to ``make``.

    Emits a single ``make`` invocation, optionally selecting a makefile
    (``-f``), a source directory (``-C``) and a parallelism level (``-j``),
    and passing compilers/flags as make variables.
    """

    # Extra options appended verbatim to the make command line.
    options = fields.TypedListField('options', str)
    makefile = fields.StringField('makefile', allow_none=True)
    srcdir = fields.StringField('srcdir', allow_none=True)
    # None means unlimited parallelism (bare `-j`).
    max_concurrency = fields.IntegerField('max_concurrency', allow_none=True)

    def __init__(self):
        super().__init__()
        self.options = []
        self.makefile = None
        self.srcdir = None
        self.max_concurrency = None

    def emit_build_commands(self, environ):
        cmd_parts = ['make']
        if self.makefile:
            cmd_parts += ['-f %s' % self.makefile]

        if self.srcdir:
            cmd_parts += ['-C %s' % self.srcdir]

        # `-j` with no argument means unlimited jobs; the optional job count
        # is emitted as a separate argument.
        cmd_parts += ['-j']
        if self.max_concurrency is not None:
            cmd_parts += [str(self.max_concurrency)]

        cc = self._cc(environ)
        cxx = self._cxx(environ)
        ftn = self._ftn(environ)
        nvcc = self._nvcc(environ)
        cppflags = self._cppflags(environ)
        cflags = self._cflags(environ)
        cxxflags = self._cxxflags(environ)
        fflags = self._fflags(environ)
        ldflags = self._ldflags(environ)

        # Explicitly check against None: an empty list is a valid,
        # deliberately cleared set of flags and must still be propagated.
        if cc is not None:
            cmd_parts += ["CC='%s'" % cc]

        if cxx is not None:
            cmd_parts += ["CXX='%s'" % cxx]

        if ftn is not None:
            cmd_parts += ["FC='%s'" % ftn]

        if nvcc is not None:
            cmd_parts += ["NVCC='%s'" % nvcc]

        if cppflags is not None:
            cmd_parts += ["CPPFLAGS='%s'" % ' '.join(cppflags)]

        if cflags is not None:
            cmd_parts += ["CFLAGS='%s'" % ' '.join(cflags)]

        if cxxflags is not None:
            cmd_parts += ["CXXFLAGS='%s'" % ' '.join(cxxflags)]

        if fflags is not None:
            cmd_parts += ["FFLAGS='%s'" % ' '.join(fflags)]

        if ldflags is not None:
            cmd_parts += ["LDFLAGS='%s'" % ' '.join(ldflags)]

        if self.options:
            cmd_parts += self.options

        return [' '.join(cmd_parts)]
class SingleSource(BuildSystem):
    """Build system for compiling a single source file into an executable.

    The language is either set explicitly through :attr:`lang` or guessed
    from the source file's extension.
    """

    # The source file to compile; required.
    srcfile = fields.StringField('srcfile', allow_none=True)
    # Output executable name; defaults to `<srcfile>.exe`.
    executable = fields.StringField('executable', allow_none=True)
    # Extra include directories, each emitted as `-I <dir>`.
    include_path = fields.TypedListField('include_path', str)
    # One of 'C', 'C++', 'Fortran', 'CUDA' or None (= guess from extension).
    lang = fields.StringField('lang', allow_none=True)

    def __init__(self):
        super().__init__()
        self.srcfile = None
        self.executable = None
        self.include_path = []
        self.lang = None

    def _auto_exec_name(self):
        # Default executable name: the source file name with `.exe` appended
        # in place of its extension.
        return '%s.exe' % os.path.splitext(self.srcfile)[0]

    def emit_build_commands(self, environ):
        """Return the single compilation command for :attr:`srcfile`.

        :raises BuildSystemError: if no source file is set, if the required
            compiler is unknown, or if the language cannot be determined.
        """
        if not self.srcfile:
            raise BuildSystemError(
                'a source file is required when using the %s build system' %
                type(self).__name__)

        cc = self._cc(environ)
        cxx = self._cxx(environ)
        ftn = self._ftn(environ)
        nvcc = self._nvcc(environ)
        cppflags = self._cppflags(environ) or []
        cflags = self._cflags(environ) or []
        cxxflags = self._cxxflags(environ) or []
        fflags = self._fflags(environ) or []
        ldflags = self._ldflags(environ) or []

        # Adjust cppflags with the include directories.
        # NOTE: We do not use the += operator on purpose, because we don't
        # want to change the original list passed by the user.
        cppflags = cppflags + ['-I ' + d for d in self.include_path]

        # Generate the executable
        executable = self.executable or self._auto_exec_name()

        # Prepare the compilation command
        lang = self.lang or self._guess_language(self.srcfile)
        cmd_parts = []
        if lang == 'C':
            if cc is None:
                raise BuildSystemError('I do not know how to compile a '
                                       'C program')

            cmd_parts += [cc, *cppflags, *cflags, self.srcfile,
                          '-o', executable, *ldflags]
        elif lang == 'C++':
            if cxx is None:
                raise BuildSystemError('I do not know how to compile a '
                                       'C++ program')

            cmd_parts += [cxx, *cppflags, *cxxflags, self.srcfile,
                          '-o', executable, *ldflags]
        elif lang == 'Fortran':
            if ftn is None:
                raise BuildSystemError('I do not know how to compile a '
                                       'Fortran program')

            cmd_parts += [ftn, *cppflags, *fflags, self.srcfile,
                          '-o', executable, *ldflags]
        elif lang == 'CUDA':
            if nvcc is None:
                raise BuildSystemError('I do not know how to compile a '
                                       'CUDA program')

            cmd_parts += [nvcc, *cppflags, *cxxflags, self.srcfile,
                          '-o', executable, *ldflags]
        else:
            # FIX: the original constructed this exception but never raised
            # it, so an unknown language silently produced an empty command.
            raise BuildSystemError('could not guess language of file: %s' %
                                   self.srcfile)

        return [' '.join(cmd_parts)]

    def _guess_language(self, filename):
        # Map the file extension to a language; returns None for unknown
        # extensions, which emit_build_commands() turns into an error.
        _, ext = os.path.splitext(filename)
        if ext in ['.c']:
            return 'C'

        if ext in ['.cc', '.cp', '.cxx', '.cpp', '.CPP', '.c++', '.C']:
            return 'C++'

        if ext in ['.f', '.for', '.ftn', '.F', '.FOR', '.fpp',
                   '.FPP', '.FTN', '.f90', '.f95', '.f03', '.f08',
                   '.F90', '.F95', '.F03', '.F08']:
            return 'Fortran'

        if ext in ['.cu']:
            return 'CUDA'
+ + You may either assign an instance of :class:`BuildSystem` or a string + representing the name of the concrete class of a build system. + """ + + def __init__(self, fieldname, allow_none=False): + super().__init__(fieldname, BuildSystem, allow_none) + + def __set__(self, obj, value): + if isinstance(value, str): + try: + value = globals()[value]() + except KeyError: + raise ValueError('unknown build system: %s' % value) from None + + super().__set__(obj, value) diff --git a/reframe/core/environments.py b/reframe/core/environments.py index c3cc20ae60..bf0fae2add 100644 --- a/reframe/core/environments.py +++ b/reframe/core/environments.py @@ -1,11 +1,11 @@ +import collections import errno +import itertools import os -import reframe.core.debug as debug import reframe.core.fields as fields import reframe.utility.os_ext as os_ext -from reframe.core.exceptions import (EnvironError, SpawnedProcessError, - CompilationError) +from reframe.core.exceptions import EnvironError, SpawnedProcessError from reframe.core.runtime import runtime @@ -23,7 +23,8 @@ class Environment: def __init__(self, name, modules=[], variables={}, **kwargs): self._name = name self._modules = list(modules) - self._variables = dict(variables) + self._variables = collections.OrderedDict() + self._variables.update(variables) self._loaded = False self._saved_variables = {} self._conflicted = [] @@ -115,23 +116,36 @@ def unload(self): self._loaded = False - def emit_load_instructions(self, builder): - for stmt in self._load_stmts: - builder.verbatim(stmt) - - for k, v in self._variables.items(): - builder.set_variable(k, v, export=True) + def emit_load_commands(self): + rt = runtime() + if self.is_loaded: + ret = self._load_stmts + else: + ret = list( + itertools.chain(*(rt.modules_system.emit_load_commands(m) + for m in self.modules)) + ) - # FIXME: Does not correspond to the actual process in unload() - def emit_unload_instructions(self, builder): for k, v in self._variables.items(): - 
builder.unset_variable(k) + ret.append('export %s=%s' % (k, v)) - for m in self._modules: - builder.verbatim('module unload %s' % m) + return ret - for m in self._conflicted: - builder.verbatim('module load %s' % m) + def emit_unload_commands(self): + rt = runtime() + ret = [] + for var in self._variables.keys(): + ret.append('unset %s' % var) + + ret += list( + itertools.chain(*(rt.modules_system.emit_unload_commands(m) + for m in reversed(self._modules))) + ) + ret += list( + itertools.chain(*(rt.modules_system.emit_load_commands(m) + for m in self._conflicted)) + ) + return ret def __eq__(self, other): if not isinstance(other, type(self)): @@ -141,9 +155,6 @@ def __eq__(self, other): set(self._modules) == set(other._modules) and self._variables == other._variables) - def __repr__(self): - return debug.repr(self) - def __str__(self): ret = "{0}(name='{1}', modules={2}, variables={3})" return ret.format(type(self).__name__, self.name, @@ -210,54 +221,100 @@ class ProgEnvironment(Environment): #: The C compiler of this programming environment. #: #: :type: :class:`str` - cc = fields.StringField('cc') + cc = fields.DeprecatedField(fields.StringField('cc'), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _cc = fields.StringField('cc') #: The C++ compiler of this programming environment. #: #: :type: :class:`str` or :class:`None` - cxx = fields.StringField('cxx', allow_none=True) + cxx = fields.DeprecatedField(fields.StringField('cxx', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _cxx = fields.StringField('cxx', allow_none=True) #: The Fortran compiler of this programming environment. 
#: #: :type: :class:`str` or :class:`None` - ftn = fields.StringField('ftn', allow_none=True) + ftn = fields.DeprecatedField(fields.StringField('ftn', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _ftn = fields.StringField('ftn', allow_none=True) #: The preprocessor flags of this programming environment. #: #: :type: :class:`str` or :class:`None` - cppflags = fields.StringField('cppflags', allow_none=True) + cppflags = fields.DeprecatedField( + fields.StringField('cppflags', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _cppflags = fields.StringField('cppflags', allow_none=True) #: The C compiler flags of this programming environment. #: #: :type: :class:`str` or :class:`None` - cflags = fields.StringField('cflags', allow_none=True) + cflags = fields.DeprecatedField( + fields.StringField('cflags', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _cflags = fields.StringField('cflags', allow_none=True) #: The C++ compiler flags of this programming environment. #: #: :type: :class:`str` or :class:`None` - cxxflags = fields.StringField('cxxflags', allow_none=True) + cxxflags = fields.DeprecatedField( + fields.StringField('cxxflags', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _cxxflags = fields.StringField('cxxflags', allow_none=True) #: The Fortran compiler flags of this programming environment. 
#: #: :type: :class:`str` or :class:`None` - fflags = fields.StringField('fflags', allow_none=True) + fflags = fields.DeprecatedField( + fields.StringField('fflags', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _fflags = fields.StringField('fflags', allow_none=True) #: The linker flags of this programming environment. #: #: :type: :class:`str` or :class:`None` - ldflags = fields.StringField('ldflags', allow_none=True) + ldflags = fields.DeprecatedField( + fields.StringField('ldflags', allow_none=True), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _ldflags = fields.StringField('ldflags', allow_none=True) #: The include search path of this programming environment. #: #: :type: :class:`list` of :class:`str` #: :default: ``[]`` - include_search_path = fields.TypedListField('include_search_path', str) + include_search_path = fields.DeprecatedField( + fields.TypedListField('include_search_path', str), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _include_search_path = fields.TypedListField('include_search_path', str) #: Propagate the compilation flags to the ``make`` invocation. 
#: #: :type: :class:`bool` #: :default: :class:`True` - propagate = fields.BooleanField('propagate') + propagate = fields.DeprecatedField(fields.BooleanField('propagate'), + 'setting this field is deprecated; ' + 'please set it through a build system', + fields.DeprecatedField.OP_SET) + _propagate = fields.BooleanField('propagate') def __init__(self, name, @@ -266,6 +323,7 @@ def __init__(self, cc='cc', cxx='CC', ftn='ftn', + nvcc='nvcc', cppflags=None, cflags=None, cxxflags=None, @@ -273,126 +331,18 @@ def __init__(self, ldflags=None, **kwargs): super().__init__(name, modules, variables) - self.cc = cc - self.cxx = cxx - self.ftn = ftn - self.cppflags = cppflags - self.cflags = cflags - self.cxxflags = cxxflags - self.fflags = fflags - self.ldflags = ldflags - self.include_search_path = [] - self.propagate = True - - def guess_language(self, filename): - ext = filename.split('.')[-1] - if ext in ['c']: - return 'C' - - if ext in ['cc', 'cp', 'cxx', 'cpp', 'CPP', 'c++', 'C']: - return 'C++' - - if ext in ['f', 'for', 'ftn', 'F', 'FOR', 'fpp', 'FPP', 'FTN', - 'f90', 'f95', 'f03', 'f08', 'F90', 'F95', 'F03', 'F08']: - return 'Fortran' - - if ext in ['cu']: - return 'CUDA' - - def compile(self, sourcepath, makefile=None, executable=None, - lang=None, options=''): - - if not os.path.exists(sourcepath): - raise FileNotFoundError(errno.ENOENT, - os.strerror(errno.ENOENT), sourcepath) - - if os.path.isdir(sourcepath): - return self._compile_dir(sourcepath, makefile, options) - else: - return self._compile_file(sourcepath, executable, lang, options) - - def _compile_file(self, source_file, executable, lang, options): - if not executable: - # default executable, same as source_file without the extension - executable = os.path.join(os.path.dirname(source_file), - source_file.rsplit('.')[:-1][0]) - - if not lang: - lang = self.guess_language(source_file) - - # Replace None's with empty strings - cppflags = self.cppflags or '' - cflags = self.cflags or '' - cxxflags = 
self.cxxflags or '' - fflags = self.fflags or '' - ldflags = self.ldflags or '' - - flags = [cppflags] - if lang == 'C': - compiler = self.cc - flags.append(cflags) - elif lang == 'C++': - compiler = self.cxx - flags.append(cxxflags) - elif lang == 'Fortran': - compiler = self.ftn - flags.append(fflags) - elif lang == 'CUDA': - compiler = 'nvcc' - flags.append(cxxflags) - else: - raise EnvironError('Unknown language: %s' % lang) - - # Append include search path - flags += ['-I' + d for d in self.include_search_path] - cmd = ('%s %s %s -o %s %s %s' % (compiler, ' '.join(flags), - source_file, executable, - ldflags, options)) - try: - return os_ext.run_command(cmd, check=True) - except SpawnedProcessError as e: - # Re-raise as compilation error - raise CompilationError(command=e.command, - stdout=e.stdout, - stderr=e.stderr, - exitcode=e.exitcode) from None - - def _compile_dir(self, source_dir, makefile, options): - if makefile: - cmd = 'make -C %s -f %s %s ' % (source_dir, makefile, options) - else: - cmd = 'make -C %s %s ' % (source_dir, options) - - # Pass a set of predefined options to the Makefile - if self.propagate: - flags = ["CC='%s'" % self.cc, - "CXX='%s'" % self.cxx, - "FC='%s'" % self.ftn] - - # Explicitly check against None here; the user may explicitly want - # to clear the flags - if self.cppflags is not None: - flags.append("CPPFLAGS='%s'" % self.cppflags) - - if self.cflags is not None: - flags.append("CFLAGS='%s'" % self.cflags) - - if self.cxxflags is not None: - flags.append("CXXFLAGS='%s'" % self.cxxflags) + self._cc = cc + self._cxx = cxx + self._ftn = ftn + self._nvcc = nvcc + self._cppflags = cppflags + self._cflags = cflags + self._cxxflags = cxxflags + self._fflags = fflags + self._ldflags = ldflags + self._include_search_path = [] + self._propagate = True - if self.fflags is not None: - flags.append("FFLAGS='%s'" % self.fflags) - - if self.ldflags is not None: - flags.append("LDFLAGS='%s'" % self.ldflags) - - cmd += ' '.join(flags) - - 
try: - return os_ext.run_command(cmd, check=True) - except SpawnedProcessError as e: - # Re-raise as compilation error - raise CompilationError(command=e.command, - stdout=e.stdout, - stderr=e.stderr, - exitcode=e.exitcode) from None + @property + def nvcc(self): + return self._nvcc diff --git a/reframe/core/exceptions.py b/reframe/core/exceptions.py index b8eef2c638..b59f417042 100644 --- a/reframe/core/exceptions.py +++ b/reframe/core/exceptions.py @@ -94,6 +94,23 @@ class StatisticsError(ReframeError): """Raised to denote an error in dealing with statistics.""" +class BuildSystemError(ReframeError): + """Raised when a build system is not configured properly.""" + + +class BuildError(ReframeError): + """Raised when a build fails.""" + + def __init__(self, stdout, stderr): + self._stdout = stdout + self._stderr = stderr + + def __str__(self): + return ("standard error can be found in `%s', " + "standard output can be found in `%s'" % (self._stderr, + self._stdout)) + + class SpawnedProcessError(ReframeError): """Raised when a spawned OS command has failed.""" @@ -162,10 +179,6 @@ def timeout(self): return self._timeout -class CompilationError(SpawnedProcessError): - """Raised by compilation commands""" - - class JobError(ReframeError): """Job related errors.""" @@ -232,6 +245,9 @@ def format_user_frame(frame): if isinstance(exc_value, AbortTaskError): return 'aborted due to %s' % type(exc_value.__cause__).__name__ + if isinstance(exc_value, BuildError): + return 'build failure: %s' % exc_value + if isinstance(exc_value, ReframeError): return 'caught framework exception: %s' % exc_value diff --git a/reframe/core/fields.py b/reframe/core/fields.py index 41b72bd21b..07900846e8 100644 --- a/reframe/core/fields.py +++ b/reframe/core/fields.py @@ -285,6 +285,14 @@ def __init__(self, fieldname, allow_none=False): class TypedListField(AggregateTypeField): """Stores a list of objects of the same type""" + def __init__(self, fieldname, elemtype, allow_none=False): + 
class TypedListField(AggregateTypeField):
    """Stores a list of objects of the same type."""

    def __init__(self, fieldname, elemtype, allow_none=False):
        super().__init__(
            fieldname, (list, elemtype), allow_none)


class TypedSequenceField(AggregateTypeField):
    """Stores a sequence of objects of the same type.

    FIX: the original docstring was copy-pasted from :class:`TypedListField`
    and wrongly said "list"; unlike that class, this one accepts any
    :class:`collections.abc.Sequence`.

    NOTE(review): ``str`` is itself a Sequence — confirm that
    ``AggregateTypeField`` rejects a bare string being assigned here.
    """

    def __init__(self, fieldname, elemtype, allow_none=False):
        super().__init__(
            fieldname, (collections.abc.Sequence, elemtype), allow_none)


class DeprecatedField(Field):
    """Field wrapper for deprecating fields.

    :arg target_field: the wrapped field that actually stores the value.
    :arg message: the deprecation warning text.
    :arg op: bitmask of the operations that trigger the warning
        (:attr:`OP_SET`, :attr:`OP_GET` or :attr:`OP_ALL`).
    """

    OP_SET = 1
    OP_GET = 2
    OP_ALL = OP_SET | OP_GET

    def __init__(self, target_field, message, op=OP_ALL):
        self._target_field = target_field
        self._message = message
        self._op = op

    def __set__(self, obj, value):
        if self._op & DeprecatedField.OP_SET:
            user_deprecation_warning(self._message)

        self._target_field.__set__(obj, value)

    def __get__(self, obj, objtype):
        if self._op & DeprecatedField.OP_GET:
            user_deprecation_warning(self._message)

        return self._target_field.__get__(obj, objtype)
""" - def emit_run_command(self, job, builder): - return builder.verbatim( - ' '.join(self.command(job) + self.options + [job.command])) + def run_command(self, job): + return ' '.join(self.command(job) + self.options) class LauncherWrapper(JobLauncher): diff --git a/reframe/core/pipeline.py b/reframe/core/pipeline.py index 9ff6afd034..b43e5509c2 100644 --- a/reframe/core/pipeline.py +++ b/reframe/core/pipeline.py @@ -16,15 +16,17 @@ import reframe.core.fields as fields import reframe.core.logging as logging import reframe.core.runtime as rt +import reframe.core.shell as shell import reframe.utility as util import reframe.utility.os_ext as os_ext +from reframe.core.buildsystems import BuildSystem, BuildSystemField from reframe.core.deferrable import deferrable, _DeferredExpression, evaluate from reframe.core.environments import Environment -from reframe.core.exceptions import PipelineError, SanityError +from reframe.core.exceptions import (BuildError, PipelineError, SanityError, + user_deprecation_warning) from reframe.core.launchers.registry import getlauncher from reframe.core.schedulers import Job from reframe.core.schedulers.registry import getscheduler -from reframe.core.shell import BashScriptBuilder from reframe.core.systems import SystemPartition from reframe.utility.sanity import assert_reference @@ -123,6 +125,8 @@ class RegressionTest: #: Support for Git repositories was added. sourcesdir = fields.StringField('sourcesdir', allow_none=True) + build_system = BuildSystemField('build_system', allow_none=True) + #: List of shell commands to be executed before compiling. 
#: #: These commands are executed during the compilation phase and from @@ -468,6 +472,7 @@ class RegressionTest: _current_environ = fields.TypedField('_current_environ', Environment, allow_none=True) _job = fields.TypedField('_job', Job, allow_none=True) + _build_job = fields.TypedField('_build_job', Job, allow_none=True) def __new__(cls, *args, **kwargs): obj = super().__new__(cls) @@ -554,7 +559,9 @@ def __init__(self, name=None, prefix=None): self._stderr = None # Compilation process output + self._build_job = None self._compile_proc = None + self.build_system = None # Performance logging self._perf_logger = logging.null_logger @@ -651,7 +658,7 @@ def stdout(self): :type: :class:`str`. """ - return self._stdout + return self._job.stdout @property @deferrable @@ -665,7 +672,17 @@ def stderr(self): :type: :class:`str`. """ - return self._stderr + return self._job.stderr + + @property + @deferrable + def build_stdout(self): + return self._build_job.stdout + + @property + @deferrable + def build_stderr(self): + return self._build_job.stderr def __repr__(self): return debug.repr(self) @@ -756,20 +773,14 @@ def _setup_paths(self): self.logger.debug('setting up paths') try: self._stagedir = rt.runtime().resources.make_stagedir( - self._current_partition.name, - self.name, - self._current_environ.name) - + self.current_system.name, self._current_partition.name, + self._current_environ.name, self.name) self._outputdir = rt.runtime().resources.make_outputdir( - self._current_partition.name, - self.name, - self._current_environ.name) + self.current_system.name, self._current_partition.name, + self._current_environ.name, self.name) except OSError as e: raise PipelineError('failed to set up paths') from e - self._stdout = os.path.join(self._stagedir, '%s.out' % self.name) - self._stderr = os.path.join(self._stagedir, '%s.err' % self.name) - def _setup_job(self, **job_opts): """Setup the job related to this check.""" @@ -793,20 +804,9 @@ def _setup_job(self, **job_opts): 
scheduler_type = self._current_partition.scheduler launcher_type = self._current_partition.launcher - job_name = '%s_%s_%s_%s' % (self.name, - self.current_system.name, - self._current_partition.name, - self._current_environ.name) - job_script_filename = os.path.join(self._stagedir, job_name + '.sh') - self._job = scheduler_type( - name=job_name, - command=' '.join([self.executable] + self.executable_opts), + name='rfm_%s_job' % self.name, launcher=launcher_type(), - environs=[ - self._current_partition.local_env, - self._current_environ - ], workdir=self._stagedir, num_tasks=self.num_tasks, num_tasks_per_node=self.num_tasks_per_node, @@ -815,9 +815,6 @@ def _setup_job(self, **job_opts): num_cpus_per_task=self.num_cpus_per_task, use_smt=self.use_multithreading, time_limit=self.time_limit, - script_filename=job_script_filename, - stdout=self._stdout, - stderr=self._stderr, sched_exclusive_access=self.exclusive_access, **job_opts) @@ -867,16 +864,6 @@ def _clone_to_stagedir(self, url): (url, self._stagedir)) os_ext.git_clone(self.sourcesdir, self._stagedir) - def prebuild(self): - for cmd in self.prebuild_cmd: - self.logger.debug('executing prebuild commands') - os_ext.run_command(cmd, check=True, shell=True) - - def postbuild(self): - for cmd in self.postbuild_cmd: - self.logger.debug('executing postbuild commands') - os_ext.run_command(cmd, check=True, shell=True) - def compile(self, **compile_opts): """The compilation phase of the regression test pipeline. @@ -897,8 +884,8 @@ def compile(self, **compile_opts): if commonpath: self.logger.warn( - "sourcepath (`%s') seems to be a subdirectory of " - "sourcesdir (`%s'), but it will be interpreted " + "sourcepath `%s' seems to be a subdirectory of " + "sourcesdir `%s', but it will be interpreted " "as relative to it." 
% (self.sourcepath, self.sourcesdir)) if os_ext.is_url(self.sourcesdir): @@ -918,35 +905,65 @@ def compile(self, **compile_opts): staged_sourcepath = os.path.join(self._stagedir, self.sourcepath) self.logger.debug('Staged sourcepath: %s' % staged_sourcepath) + if os.path.isdir(staged_sourcepath): + if not self.build_system: + self.build_system = 'Make' - # Remove source and executable from compile_opts - compile_opts.pop('source', None) - compile_opts.pop('executable', None) + self.build_system.srcdir = self.sourcepath + else: + if not self.build_system: + self.build_system = 'SingleSource' + + self.build_system.srcfile = self.sourcepath + self.build_system.executable = self.executable + + if compile_opts: + user_deprecation_warning( + 'passing options to the compile() method is deprecated; ' + 'please use a build system.') + + # Remove source and executable from compile_opts + compile_opts.pop('source', None) + compile_opts.pop('executable', None) + try: + self.build_system.makefile = compile_opts['makefile'] + self.build_system.options = compile_opts['options'] + except KeyError: + pass + + # Prepare build job + build_commands = [ + *self.prebuild_cmd, + *self.build_system.emit_build_commands(self._current_environ), + *self.postbuild_cmd + ] + environs = [self._current_partition.local_env, self._current_environ] + self._build_job = getscheduler('local')( + name='rfm_%s_build' % self.name, + launcher=getlauncher('local')(), + workdir=self._stagedir) - # Change working dir to stagedir although absolute paths are used - # everywhere in the compilation process. 
This is done to ensure that - # any other files (besides the executable) generated during the the - # compilation will remain in the stage directory with os_ext.change_dir(self._stagedir): - self.prebuild() - if os.path.isdir(staged_sourcepath): - includedir = staged_sourcepath - else: - includedir = os.path.dirname(staged_sourcepath) - - self._current_environ.include_search_path.append(includedir) - self._compile_proc = self._current_environ.compile( - sourcepath=staged_sourcepath, - executable=os.path.join(self._stagedir, self.executable), - **compile_opts) - self.logger.debug('compilation stdout:\n%s' % - self._compile_proc.stdout) - self.logger.debug('compilation stderr:\n%s' % - self._compile_proc.stderr) - self.postbuild() + try: + self._build_job.prepare(build_commands, environs, + trap_errors=True) + except OSError as e: + raise PipelineError('failed to prepare build job') from e + + self._build_job.submit() + def compile_wait(self): + """Wait for compilation phase to finish. + + .. versionadded:: 2.13 + """ + self._build_job.wait() self.logger.debug('compilation finished') + # FIXME: this check is not reliable for certain scheduler backends + if self._build_job.exitcode != 0: + raise BuildError(self._build_job.stdout, self._build_job.stderr) + def run(self): """The run phase of the regression test pipeline. 
@@ -956,12 +973,13 @@ def run(self): if not self.current_system or not self._current_partition: raise PipelineError('no system or system partition is set') - # FIXME: Temporary fix to support multiple run steps - self._job._pre_run += self.pre_run - self._job._post_run += self.post_run + exec_cmd = [self.job.launcher.run_command(self.job), + self.executable, *self.executable_opts] + commands = [*self.pre_run, ' '.join(exec_cmd), *self.post_run] + environs = [self._current_partition.local_env, self._current_environ] with os_ext.change_dir(self._stagedir): try: - self._job.prepare(BashScriptBuilder(login=True)) + self._job.prepare(commands, environs, login=True) except OSError as e: raise PipelineError('failed to prepare job') from e @@ -1049,19 +1067,29 @@ def check_performance(self): refval, low_thres, high_thres = reference evaluate(assert_reference(val, refval, low_thres, high_thres)) + def _copy_job_files(self, job, dst): + if job is None: + return + + stdout = os.path.join(self._stagedir, job.stdout) + stderr = os.path.join(self._stagedir, job.stderr) + script = os.path.join(self._stagedir, job.script_filename) + shutil.copy(stdout, dst) + shutil.copy(stderr, dst) + shutil.copy(script, dst) + def _copy_to_outputdir(self): """Copy checks interesting files to the output directory.""" self.logger.debug('copying interesting files to output directory') - shutil.copy(self._stdout, self._outputdir) - shutil.copy(self._stderr, self._outputdir) - if self._job: - shutil.copy(self._job.script_filename, self._outputdir) + self._copy_job_files(self._job, self.outputdir) + self._copy_job_files(self._build_job, self.outputdir) # Copy files specified by the user for f in self.keep_files: if not os.path.isabs(f): f = os.path.join(self._stagedir, f) - shutil.copy(f, self._outputdir) + + shutil.copy(f, self.outputdir) def cleanup(self, remove_files=False, unload_env=True): """The cleanup phase of the regression test pipeline. 
@@ -1108,6 +1136,12 @@ def compile(self, **compile_opts): This is a no-op for this type of test. """ + def compile_wait(self): + """Wait for compilation phase to finish. + + This is a no-op for this type of test. + """ + def run(self): """The run phase of the regression test pipeline. @@ -1152,22 +1186,15 @@ def setup(self, partition, environ, **job_opts): self._setup_environ(environ) self._setup_paths() - def compile(self, **compile_opts): - """The compilation stage of the regression test pipeline. - - The standard output and standard error of this stage will be used as - the standard output and error of the test. - """ - super().compile(**compile_opts) - - try: - with open(self._stdout, 'w') as f: - f.write(self._compile_proc.stdout) + @property + @deferrable + def stdout(self): + return self._build_job.stdout - with open(self._stderr, 'w') as f: - f.write(self._compile_proc.stderr) - except OSError as e: - raise PipelineError('could not write stdout/stderr') from e + @property + @deferrable + def stderr(self): + return self._build_job.stderr def run(self): """The run stage of the regression test pipeline. 
diff --git a/reframe/core/runtime.py b/reframe/core/runtime.py index 4f36f52a40..449125d0ab 100644 --- a/reframe/core/runtime.py +++ b/reframe/core/runtime.py @@ -221,7 +221,6 @@ def modules_system(self): return self._modules_system - # Global resources for the current host _runtime_context = None diff --git a/reframe/core/schedulers/__init__.py b/reframe/core/schedulers/__init__.py index dd013086fd..badc68bbba 100644 --- a/reframe/core/schedulers/__init__.py +++ b/reframe/core/schedulers/__init__.py @@ -3,9 +3,11 @@ # import abc +import os import reframe.core.debug as debug import reframe.core.fields as fields +import reframe.core.shell as shell from reframe.core.exceptions import JobNotStartedError from reframe.core.launchers import JobLauncher @@ -54,9 +56,7 @@ class Job(abc.ABC): # The sched_* arguments are exposed also to the frontend def __init__(self, name, - command, launcher, - environs=[], workdir='.', num_tasks=1, num_tasks_per_node=None, @@ -80,15 +80,9 @@ def __init__(self, # Mutable fields self.options = list(sched_options) - - # Commands to be run before and after the job is launched - self._pre_run = list(pre_run) - self._post_run = list(post_run) self.launcher = launcher - self._name = 'rfm_' + name - self._command = command - self._environs = list(environs) + self._name = name self._workdir = workdir self._num_tasks = num_tasks self._num_tasks_per_node = num_tasks_per_node @@ -97,8 +91,8 @@ def __init__(self, self._num_cpus_per_task = num_cpus_per_task self._use_smt = use_smt self._script_filename = script_filename or '%s.sh' % name - self._stdout = stdout or '%s.out' % name - self._stderr = stderr or '%s.err' % name + self._stdout = stdout or os.path.join(workdir, '%s.out' % name) + self._stderr = stderr or os.path.join(workdir, '%s.err' % name) self._time_limit = time_limit # Backend scheduler related information @@ -135,18 +129,10 @@ def state(self): def name(self): return self._name - @property - def command(self): - return self._command - 
@property def workdir(self): return self._workdir - @property - def environs(self): - return self._environs - @property def num_tasks(self): return self._num_tasks @@ -211,29 +197,19 @@ def sched_account(self): def sched_exclusive_access(self): return self._sched_exclusive_access - def emit_environ(self, builder): - for e in self._environs: - e.emit_load_instructions(builder) - - def emit_pre_run(self, builder): - for c in self._pre_run: - builder.verbatim(c) - - def emit_post_run(self, script_builder): - for c in self._post_run: - script_builder.verbatim(c) + def prepare(self, commands, environs=None, **gen_opts): + environs = environs or [] + with shell.generate_script(self.script_filename, + **gen_opts) as builder: + builder.write_prolog(self.emit_preamble()) + for e in environs: + builder.write(e.emit_load_commands()) - def prepare(self, script_builder): - self.emit_preamble(script_builder) - self.emit_environ(script_builder) - self.emit_pre_run(script_builder) - self.launcher.emit_run_command(self, script_builder) - self.emit_post_run(script_builder) - with open(self.script_filename, 'w') as fp: - fp.write(script_builder.finalise()) + for c in commands: + builder.write_body(c) @abc.abstractmethod - def emit_preamble(self, builder): + def emit_preamble(self): pass @abc.abstractmethod diff --git a/reframe/core/schedulers/local.py b/reframe/core/schedulers/local.py index 730bebb334..28cb7f25e7 100644 --- a/reframe/core/schedulers/local.py +++ b/reframe/core/schedulers/local.py @@ -55,8 +55,8 @@ def submit(self): # Update job info self._jobid = self._proc.pid - def emit_preamble(self, builder): - pass + def emit_preamble(self): + return [] def _kill_all(self): """Send SIGKILL to all the processes of the spawned job.""" diff --git a/reframe/core/schedulers/pbs.py b/reframe/core/schedulers/pbs.py index 766d174f52..7b4fc12053 100644 --- a/reframe/core/schedulers/pbs.py +++ b/reframe/core/schedulers/pbs.py @@ -33,7 +33,7 @@ def __init__(self, *args, **kwargs): # 
Optional part of the job id refering to the PBS server self._pbs_server = None - def _emit_lselect_option(self, builder): + def _emit_lselect_option(self): num_tasks_per_node = self._num_tasks_per_node or 1 num_cpus_per_task = self._num_cpus_per_task or 1 num_nodes = self._num_tasks // num_tasks_per_node @@ -53,15 +53,12 @@ def _emit_lselect_option(self, builder): else: select_opt += ':' + opt - self._emit_job_option(select_opt, builder) - for opt in rem_opts: - self._emit_job_option(opt, builder) + return [self._format_option(select_opt), + *(self._format_option(opt) for opt in rem_opts), + *verb_opts] - for opt in verb_opts: - builder.verbatim(opt) - - def _emit_job_option(self, option, builder): - builder.verbatim(self._prefix + ' ' + option) + def _format_option(self, option): + return self._prefix + ' ' + option def _run_command(self, cmd, timeout=None): """Run command cmd and re-raise any exception as a JobError.""" @@ -70,19 +67,22 @@ def _run_command(self, cmd, timeout=None): except SpawnedProcessError as e: raise JobError(jobid=self._jobid) from e - def emit_preamble(self, builder): - self._emit_job_option('-N "%s"' % self.name, builder) - self._emit_lselect_option(builder) - self._emit_job_option('-l walltime=%d:%d:%d' % self.time_limit, - builder) + def emit_preamble(self): + preamble = [ + self._format_option('-N "%s"' % self.name), + self._format_option('-l walltime=%d:%d:%d' % self.time_limit), + self._format_option('-o %s' % self.stdout), + self._format_option('-e %s' % self.stderr), + ] if self.sched_partition: - self._emit_job_option('-q %s' % self.sched_partition, builder) + preamble.append( + self._format_option('-q %s' % self.sched_partition)) - self._emit_job_option('-o %s' % self.stdout, builder) - self._emit_job_option('-e %s' % self.stderr, builder) + preamble += self._emit_lselect_option() # PBS starts the job in the home directory by default - builder.verbatim('cd %s' % self.workdir) + preamble.append('cd %s' % self.workdir) + return 
preamble def submit(self): # `-o` and `-e` options are only recognized in command line by the PBS diff --git a/reframe/core/schedulers/slurm.py b/reframe/core/schedulers/slurm.py index 8e4ebb48e4..50ee6399fb 100644 --- a/reframe/core/schedulers/slurm.py +++ b/reframe/core/schedulers/slurm.py @@ -70,50 +70,43 @@ def __init__(self, *args, **kwargs): self._is_cancelling = False self._update_state_count = 0 - def _emit_job_option(self, var, option, builder): + def _format_option(self, var, option): if var is not None: - builder.verbatim(self._prefix + ' ' + option.format(var)) - - def _run_command(self, cmd, timeout=None): - """Run command cmd and re-raise any exception as a JobError.""" - try: - return os_ext.run_command(cmd, check=True, timeout=timeout) - except SpawnedProcessError as e: - raise JobError(jobid=self._jobid) from e + return self._prefix + ' ' + option.format(var) + else: + return '' + + def emit_preamble(self): + preamble = [ + self._format_option(self.name, '--job-name="{0}"'), + self._format_option('%d:%d:%d' % self.time_limit, '--time={0}'), + self._format_option(self.num_tasks, '--ntasks={0}'), + self._format_option(self.num_tasks_per_node, + '--ntasks-per-node={0}'), + self._format_option(self.num_tasks_per_core, + '--ntasks-per-core={0}'), + self._format_option(self.num_tasks_per_socket, + '--ntasks-per-socket={0}'), + self._format_option(self.num_cpus_per_task, '--cpus-per-task={0}'), + self._format_option(self.sched_partition, '--partition={0}'), + self._format_option(self.sched_account, '--account={0}'), + self._format_option(self.sched_nodelist, '--nodelist={0}'), + self._format_option(self.sched_exclude_nodelist, '--exclude={0}'), + self._format_option(self.sched_reservation, '--reservation={0}'), + self._format_option(self.stdout, '--output={0}'), + self._format_option(self.stderr, '--error={0}'), + ] - def emit_preamble(self, builder): - self._emit_job_option(self.name, '--job-name="{0}"', builder) - self._emit_job_option('%d:%d:%d' % 
self.time_limit, - '--time={0}', builder) - self._emit_job_option(self.num_tasks, '--ntasks={0}', builder) - self._emit_job_option(self.num_tasks_per_node, - '--ntasks-per-node={0}', builder) - self._emit_job_option(self.num_tasks_per_core, - '--ntasks-per-core={0}', builder) - self._emit_job_option(self.num_tasks_per_socket, - '--ntasks-per-socket={0}', builder) - self._emit_job_option(self.num_cpus_per_task, - '--cpus-per-task={0}', builder) - self._emit_job_option(self.sched_partition, '--partition={0}', builder) if self.sched_exclusive_access: - self._emit_job_option(self.sched_exclusive_access, - '--exclusive', builder) + preamble.append(self._format_option( + self.sched_exclusive_access, '--exclusive')) - self._emit_job_option(self.sched_account, '--account={0}', builder) - self._emit_job_option(self.sched_nodelist, '--nodelist={0}', builder) - self._emit_job_option(self.sched_exclude_nodelist, - '--exclude={0}', builder) if self.use_smt is None: hint = None else: hint = 'multithread' if self.use_smt else 'nomultithread' - self._emit_job_option(hint, '--hint={0}', builder) - self._emit_job_option(self.sched_reservation, - '--reservation={0}', builder) - self._emit_job_option(self.stdout, '--output={0}', builder) - self._emit_job_option(self.stderr, '--error={0}', builder) - + preamble.append(self._format_option(hint, '--hint={0}')) prefix_patt = re.compile(r'(#\w+)') for opt in self.options: if not prefix_patt.match(opt): @@ -123,11 +116,21 @@ def emit_preamble(self, builder): if (opt.startswith(('-p', '--partition')) and self.sched_partition): continue - builder.verbatim('%s %s' % (self._prefix, opt)) + + preamble.append('%s %s' % (self._prefix, opt)) else: - builder.verbatim(opt) + preamble.append(opt) + + return preamble + + def _run_command(self, cmd, timeout=None): + """Run command cmd and re-raise any exception as a JobError.""" + try: + return os_ext.run_command(cmd, check=True, timeout=timeout) + except SpawnedProcessError as e: + raise 
JobError(jobid=self._jobid) from e - def prepare(self, builder): + def prepare(self, *args, **kwargs): if self.num_tasks == 0: if self.sched_reservation: nodes = self._get_reservation_nodes() @@ -147,7 +150,7 @@ def prepare(self, builder): raise JobError('A reservation has to be specified ' 'when setting the num_tasks to 0.') - super().prepare(builder) + super().prepare(*args, **kwargs) def submit(self): cmd = 'sbatch %s' % self.script_filename diff --git a/reframe/core/shell.py b/reframe/core/shell.py index 96ea4ad3cf..8536edfca5 100644 --- a/reframe/core/shell.py +++ b/reframe/core/shell.py @@ -1,60 +1,121 @@ # # Shell script generators # +import abc -import reframe.core.debug as debug +_RFM_TRAP_ERROR = ''' +_onerror() +{ + exitcode=$? + echo "-reframe: command \`$BASH_COMMAND' failed (exit code: $exitcode)" + exit $exitcode +} -class ShellScriptBuilder: - def __init__(self, name='default', login=False): - self._name = name - if login: - self._header = '#!/bin/sh -l' - else: - self._header = '#!/bin/sh' +trap _onerror ERR +''' + + +_RFM_TRAP_EXIT = ''' +_onexit() +{ + exitcode=$? + echo "-reframe: script exiting with exit code: $exitcode" + exit $exitcode +} + +trap _onexit EXIT +''' + +_RFM_TRAP_SIGNALS = ''' +_onsignal() +{ + exitcode=$? 
+ ((signal = exitcode - 128)) + echo "-reframe: script caught signal: $signal" + exit $exitcode +} + +trap _onsignal $(seq 1 15) $(seq 24 27) +''' + + +class ShellScriptGenerator: + def __init__(self, login=False, trap_errors=False, + trap_exit=False, trap_signals=False): + self.login = login + self.trap_errors = trap_errors + self.trap_exit = trap_exit + self.trap_signals = trap_signals + self._prolog = [] + self._epilog = [] + self._body = [] + if self.trap_errors: + self._body.append(_RFM_TRAP_ERROR) + + if self.trap_exit: + self._body.append(_RFM_TRAP_EXIT) - self.statements = [] + if self.trap_signals: + self._body.append(_RFM_TRAP_SIGNALS) - def __repr__(self): - return debug.repr(self) + @property + def prolog(self): + return self._prolog @property - def name(): - return self._name + def epilog(self): + return self._epilog - def verbatim(self, stmt, suppress=False): - """Append statement stmt verbatim. + @property + def body(self): + return self._body - If suppress=True, stmt will not be in the generated script file but it - will be returned from this function. 
This feature is useful when you - want only the command that would be generated but you don't want it to - be actually generated in the scipt file.""" - if not suppress: - self.statements.append(stmt) + @property + def shebang(self): + ret = '#!/bin/bash' + if self.login: + ret += ' -l' - return stmt + return ret - def set_variable(self, name, value, export=False, suppress=False): - if export: - export_keyword = 'export ' + def write(self, s, where='body'): + section = getattr(self, '_' + where) + if isinstance(s, str): + section.append(s) + elif isinstance(s, list): + section += s else: - export_keyword = '' + section.append(str(s)) - return self.verbatim( - '%s%s=%s' % (export_keyword, name, value), suppress - ) + def write_prolog(self, s): + self.write(s, 'prolog') - def unset_variable(self, name, suppress=False): - return self.verbatim('unset %s' % name, suppress) + def write_epilog(self, s): + self.write(s, 'epilog') - def finalise(self): - return '%s\n' % self._header + '\n'.join(self.statements) + '\n' + def write_body(self, s): + self.write(s, 'body') + def finalize(self): + ret = '\n'.join([self.shebang, *self._prolog, + *self._body, *self._epilog]) -class BashScriptBuilder(ShellScriptBuilder): - def __init__(self, name='bash', login=False): - super().__init__(name, login) - if login: - self._header = '#!/bin/bash -l' - else: - self._header = '#!/bin/bash' + # end with a new line + if ret: + ret += '\n' + + return ret + + +class generate_script: + def __init__(self, filename, *args, **kwargs): + self._shgen = ShellScriptGenerator(*args, **kwargs) + self._file = open(filename, 'wt') + + def __enter__(self): + return self._shgen + + def __exit__(self, exc_type, exc_value, traceback): + self._file.write(self._shgen.finalize()) + self._file.close() diff --git a/reframe/frontend/executors/__init__.py b/reframe/frontend/executors/__init__.py index 4dc6f7671c..87629078b6 100644 --- a/reframe/frontend/executors/__init__.py +++ 
b/reframe/frontend/executors/__init__.py @@ -70,6 +70,9 @@ def setup(self, *args, **kwargs): def compile(self): self._safe_call(self._check.compile) + def compile_wait(self): + self._safe_call(self._check.compile_wait) + def run(self): self._safe_call(self._check.run) self._notify_listeners('on_task_run') diff --git a/reframe/frontend/executors/policies.py b/reframe/frontend/executors/policies.py index 0934d53aa4..9742dcbccb 100644 --- a/reframe/frontend/executors/policies.py +++ b/reframe/frontend/executors/policies.py @@ -33,6 +33,7 @@ def run_check(self, check, partition, environ): sched_options=self.sched_options) task.compile() + task.compile_wait() task.run() task.wait() if not self.skip_sanity_check: @@ -245,6 +246,7 @@ def _reschedule(self, task, load_env=True): task.resume() task.compile() + task.compile_wait() task.run() def _reschedule_all(self): diff --git a/reframe/settings.py b/reframe/settings.py index b0c335677e..1cfde61da8 100644 --- a/reframe/settings.py +++ b/reframe/settings.py @@ -4,7 +4,7 @@ class ReframeSettings: - reframe_module = 'reframe' + reframe_module = None job_poll_intervals = [1, 2, 3] job_submit_timeout = 60 checks_path = ['checks/'] diff --git a/unittests/resources/checks/hellocheck_make.py b/unittests/resources/checks/hellocheck_make.py index fb42fb5222..b67e3f3526 100644 --- a/unittests/resources/checks/hellocheck_make.py +++ b/unittests/resources/checks/hellocheck_make.py @@ -17,18 +17,16 @@ def __init__(self, **kwargs): # All available systems are supported self.valid_systems = ['*'] self.valid_prog_environs = ['*'] - self.sourcepath = '' + self.build_system = 'Make' + self.build_system.cflags = ['-O3'] + self.build_system.cxxflags = ['-O3'] + self.build_system.makefile = 'Makefile.nofort' self.executable = './hello_cpp' self.keep_files = ['hello_cpp'] self.tags = {'foo', 'bar'} self.sanity_patterns = sn.assert_found(r'Hello, World\!', self.stdout) self.maintainers = ['VK'] - def compile(self): - self.current_environ.cflags = 
'-O3' - self.current_environ.cxxflags = '-O3' - super().compile(makefile='Makefile.nofort') - def _get_checks(**kwargs): return [HelloMakeTest(**kwargs)] diff --git a/unittests/resources/settings.py b/unittests/resources/settings.py index e441d04eb7..a9c9ec7996 100644 --- a/unittests/resources/settings.py +++ b/unittests/resources/settings.py @@ -4,7 +4,7 @@ class ReframeSettings: - reframe_module = 'reframe' + reframe_module = None job_poll_intervals = [1, 2, 3] job_submit_timeout = 60 checks_path = ['checks/'] diff --git a/unittests/test_buildsystems.py b/unittests/test_buildsystems.py new file mode 100644 index 0000000000..474216a6bc --- /dev/null +++ b/unittests/test_buildsystems.py @@ -0,0 +1,153 @@ +import abc +import unittest + +import reframe.core.buildsystems as bs +from reframe.core.environments import ProgEnvironment +from reframe.core.exceptions import BuildSystemError + + +class _BuildSystemTest: + @abc.abstractmethod + def create_build_system(self): + pass + + def setUp(self): + self.environ = ProgEnvironment(name='test_env', + cc='gcc', + cxx='g++', + ftn='gfortran', + cppflags='-DNDEBUG', + cflags='-Wall -std=c99', + cxxflags='-Wall -std=c++11', + fflags='-Wall', + ldflags='-dynamic') + self.build_system = self.create_build_system() + + def setup_base_buildsystem(self): + self.build_system.cc = 'cc' + self.build_system.cxx = 'CC' + self.build_system.ftn = 'ftn' + self.build_system.nvcc = 'clang' + self.build_system.cppflags = ['-DFOO'] + self.build_system.cflags = ['-Wall', '-std=c99', '-O3'] + self.build_system.cxxflags = ['-Wall', '-std=c++11', '-O3'] + self.build_system.fflags = ['-Wall', '-O3'] + self.build_system.ldflags = ['-static'] + self.build_system.flags_from_environ = False + + +class TestMake(_BuildSystemTest, unittest.TestCase): + def create_build_system(self): + return bs.Make() + + def test_emit_from_env(self): + self.build_system.makefile = 'Makefile_foo' + self.build_system.srcdir = 'foodir' + self.build_system.options = ['FOO=1'] 
+ self.build_system.max_concurrency = 32 + expected = [ + "make -f Makefile_foo -C foodir -j 32 CC='gcc' CXX='g++' " + "FC='gfortran' NVCC='nvcc' CPPFLAGS='-DNDEBUG' " + "CFLAGS='-Wall -std=c99' CXXFLAGS='-Wall -std=c++11' " + "FFLAGS='-Wall' LDFLAGS='-dynamic' FOO=1" + ] + self.assertEqual(expected, + self.build_system.emit_build_commands(self.environ)) + + def test_emit_no_env(self): + super().setup_base_buildsystem() + self.build_system.makefile = 'Makefile_foo' + self.build_system.srcdir = 'foodir' + self.build_system.options = ['FOO=1'] + self.build_system.max_concurrency = None + expected = [ + "make -f Makefile_foo -C foodir -j CC='cc' CXX='CC' FC='ftn' " + "NVCC='clang' CPPFLAGS='-DFOO' CFLAGS='-Wall -std=c99 -O3' " + "CXXFLAGS='-Wall -std=c++11 -O3' FFLAGS='-Wall -O3' " + "LDFLAGS='-static' FOO=1" + ] + self.assertEqual(expected, + self.build_system.emit_build_commands(self.environ)) + + def test_emit_no_env_defaults(self): + self.build_system.flags_from_environ = False + self.assertEqual(['make -j'], + self.build_system.emit_build_commands(self.environ)) + + +class TestSingleSource(_BuildSystemTest, unittest.TestCase): + def create_build_system(self): + return bs.SingleSource() + + def setUp(self): + super().setUp() + self.compilers = { + 'C': 'gcc', + 'C++': 'g++', + 'Fortran': 'gfortran', + 'CUDA': 'nvcc', + } + + self.flags = { + 'C': '-Wall -std=c99', + 'C++': '-Wall -std=c++11', + 'Fortran': '-Wall', + } + + self.files = { + 'C': 'foo.c', + 'C++': 'foo.cpp', + 'Fortran': 'foo.f90', + 'CUDA': 'foo.cu', + } + + def test_emit_from_env(self): + self.build_system.include_path = ['foodir/include', 'bardir/include'] + self.build_system.executable = 'foo.e' + for lang, comp in self.compilers.items(): + self.build_system.srcfile = self.files[lang] + if lang == 'CUDA': + flags = self.flags['C++'] + else: + flags = self.flags[lang] + + ldflags = '-dynamic' + expected = [ + '%s -DNDEBUG -I foodir/include -I bardir/include %s %s ' + '-o foo.e %s' % (comp, flags, 
+ self.build_system.srcfile, ldflags) + ] + self.assertEqual(expected, + self.build_system.emit_build_commands(self.environ)) + + def test_emit_no_env(self): + super().setup_base_buildsystem() + self.compilers = { + 'C': 'cc', + 'C++': 'CC', + 'Fortran': 'ftn', + 'CUDA': 'clang', + } + + self.flags = { + 'C': '-Wall -std=c99 -O3', + 'C++': '-Wall -std=c++11 -O3', + 'Fortran': '-Wall -O3', + } + self.build_system.include_path = ['foodir/include', 'bardir/include'] + self.build_system.executable = 'foo.e' + for lang, comp in self.compilers.items(): + self.build_system.srcfile = self.files[lang] + if lang == 'CUDA': + flags = self.flags['C++'] + else: + flags = self.flags[lang] + + ldflags = '-static' + expected = [ + '%s -DFOO -I foodir/include -I bardir/include %s %s ' + '-o foo.e %s' % (comp, flags, + self.build_system.srcfile, ldflags) + ] + self.assertEqual(expected, + self.build_system.emit_build_commands(self.environ)) diff --git a/unittests/test_cli.py b/unittests/test_cli.py index d87710ebd4..e07a938070 100644 --- a/unittests/test_cli.py +++ b/unittests/test_cli.py @@ -102,10 +102,10 @@ def _run_reframe(self): return run_command_inline(self.argv, cli.main) def _stage_exists(self, check_name, partitions, environs): - stagedir = os.path.join(self.prefix, 'stage') + stagedir = os.path.join(self.prefix, 'stage', 'generic') for p in partitions: for e in environs: - path = os.path.join(stagedir, p, check_name, e) + path = os.path.join(stagedir, p, e, check_name) if not os.path.exists(path): return False diff --git a/unittests/test_environments.py b/unittests/test_environments.py index 2937370095..2f02681df3 100644 --- a/unittests/test_environments.py +++ b/unittests/test_environments.py @@ -5,7 +5,7 @@ import reframe.utility.os_ext as os_ext import unittests.fixtures as fixtures from reframe.core.runtime import runtime -from reframe.core.exceptions import CompilationError, EnvironError +from reframe.core.exceptions import EnvironError class 
TestEnvironment(unittest.TestCase): @@ -165,68 +165,76 @@ def test_swap(self): self.assertFalse(self.environ.is_loaded) self.assertTrue(self.environ_other.is_loaded) + @fixtures.switch_to_user_runtime + def test_emit_load_commands(self): + self.setup_modules_system() + rt = runtime() + expected_commands = [ + rt.modules_system.emit_load_commands('testmod_foo')[0], + 'export _fookey1=value3', + 'export _fookey2=value2', + 'export _fookey3b=$_fookey1b', + 'export _fookey4b=${_fookey2b}', + ] + self.assertEqual(expected_commands, self.environ.emit_load_commands()) -class TestProgEnvironment(unittest.TestCase): - def setUp(self): - self.environ_save = renv.EnvironmentSnapshot() - self.executable = os.path.join(fixtures.TEST_RESOURCES_CHECKS, 'hello') + @fixtures.switch_to_user_runtime + def test_emit_load_commands_with_confict(self): + self.setup_modules_system() - def tearDown(self): - # Remove generated executable ingoring file-not-found errors - os_ext.force_remove_file(self.executable) - self.environ_save.load() + # Load a conflicting module + self.modules_system.load_module('testmod_bar') + + # When the environment is not loaded, the conflicting module does not + # make a difference + self.test_emit_load_commands() + + self.environ.load() + rt = runtime() + expected_commands = [ + rt.modules_system.emit_unload_commands('testmod_bar')[0], + rt.modules_system.emit_load_commands('testmod_foo')[0], + 'export _fookey1=value3', + 'export _fookey2=value2', + 'export _fookey3b=$_fookey1b', + 'export _fookey4b=${_fookey2b}', + ] + self.assertEqual(expected_commands, self.environ.emit_load_commands()) + + @fixtures.switch_to_user_runtime + def test_emit_unload_commands(self): + self.setup_modules_system() + rt = runtime() + expected_commands = [ + 'unset _fookey1', + 'unset _fookey2', + 'unset _fookey3b', + 'unset _fookey4b', + rt.modules_system.emit_unload_commands('testmod_foo')[0], + ] + self.assertEqual(expected_commands, + self.environ.emit_unload_commands()) + + 
@fixtures.switch_to_user_runtime + def test_emit_unload_commands_with_confict(self): + self.setup_modules_system() + + # Load a conflicting module + self.modules_system.load_module('testmod_bar') - def assertHelloMessage(self, executable=None): - if not executable: - executable = self.executable - - self.assertTrue(os_ext.grep_command_output(cmd=executable, - pattern='Hello, World\!')) - os_ext.force_remove_file(executable) - - def compile_with_env(self, env, skip_fortran=False): - srcdir = os.path.join(fixtures.TEST_RESOURCES_CHECKS, 'src') - env.cxxflags = '-O2' - env.load() - env.compile(sourcepath=os.path.join(srcdir, 'hello.c'), - executable=self.executable) - self.assertHelloMessage() - - env.compile(sourcepath=os.path.join(srcdir, 'hello.cpp'), - executable=self.executable) - self.assertHelloMessage() - - if not skip_fortran: - env.compile(sourcepath=os.path.join(srcdir, 'hello.f90'), - executable=self.executable) - self.assertHelloMessage() - - env.unload() - - def compile_dir_with_env(self, env, skip_fortran=False): - srcdir = os.path.join(fixtures.TEST_RESOURCES_CHECKS, 'src') - env.cxxflags = '-O3' - env.load() - - executables = ['hello_c', 'hello_cpp'] - if skip_fortran: - env.compile(srcdir, makefile='Makefile.nofort') - else: - env.compile(srcdir) - executables.append('hello_fort') - - for e in executables: - self.assertHelloMessage(os.path.join(srcdir, e)) - - env.compile(sourcepath=srcdir, options='clean') - env.unload() - - def test_compile(self): - # Compile a 'Hello, World' with the builtin gcc/g++ - env = renv.ProgEnvironment(name='builtin-gcc', - cc='gcc', cxx='g++', ftn=None) - try: - self.compile_with_env(env, skip_fortran=True) - self.compile_dir_with_env(env, skip_fortran=True) - except CompilationError as e: - self.fail("Compilation failed\n") + # When the environment is not loaded, the conflicting module does not + # make a difference + self.test_emit_unload_commands() + + self.environ.load() + rt = runtime() + expected_commands = [ + 
'unset _fookey1', + 'unset _fookey2', + 'unset _fookey3b', + 'unset _fookey4b', + rt.modules_system.emit_unload_commands('testmod_foo')[0], + rt.modules_system.emit_load_commands('testmod_bar')[0], + ] + self.assertEqual(expected_commands, + self.environ.emit_unload_commands()) diff --git a/unittests/test_fields.py b/unittests/test_fields.py index 895e6a022e..17a308c859 100644 --- a/unittests/test_fields.py +++ b/unittests/test_fields.py @@ -272,16 +272,31 @@ def __init__(self, value): def test_typed_list_field(self): class FieldTester: - field = fields.TypedListField('field', int) + field = fields.TypedListField('field', str) def __init__(self, value): self.field = value - tester = FieldTester([1, 2, 3]) - self.assertEqual([1, 2, 3], tester.field) + tester = FieldTester(['a', 'b', 'c']) + self.assertEqual(['a', 'b', 'c'], tester.field) + self.assertRaises(TypeError, FieldTester, [1, 'foo']) + with self.assertRaises(TypeError): + tester.field = 'foo' + + def test_typed_sequence_field(self): + class FieldTester: + field = fields.TypedSequenceField('field', str) + + def __init__(self, value): + self.field = value + + tester = FieldTester(['a', 'b', 'c']) + self.assertEqual(['a', 'b', 'c'], tester.field) + + # strings are valid sequences + tester.field = 'foo' + self.assertEqual('foo', tester.field) self.assertRaises(TypeError, FieldTester, [1, 'foo']) - self.assertRaises(TypeError, exec, 'tester.field = 3', - globals(), locals()) def test_typed_set_field(self): class FieldTester: @@ -414,10 +429,45 @@ def test_deprecated_field(self): class FieldTester: value = fields.DeprecatedField(fields.IntegerField('value'), 'value field is deprecated') + _value = fields.IntegerField('value') + ro = fields.DeprecatedField(fields.IntegerField('ro'), + 'value field is deprecated', + fields.DeprecatedField.OP_SET) + _ro = fields.IntegerField('ro') + wo = fields.DeprecatedField(fields.IntegerField('wo'), + 'value field is deprecated', + fields.DeprecatedField.OP_GET) + + def 
__init__(self): + self._value = 1 + self._ro = 2 + self.wo = 3 tester = FieldTester() - self.assertWarns(ReframeDeprecationWarning, exec, 'tester.value = 2', - globals(), locals()) + + # Test set operation + with self.assertWarns(ReframeDeprecationWarning): + tester.value = 2 + + with self.assertWarns(ReframeDeprecationWarning): + tester.ro = 1 + + try: + tester.wo = 20 + except ReframeDeprecationWarning: + self.fail('deprecation warning not expected here') + + # Test get operation + try: + a = tester.ro + except ReframeDeprecationWarning: + self.fail('deprecation warning not expected here') + + with self.assertWarns(ReframeDeprecationWarning): + a = tester.value + + with self.assertWarns(ReframeDeprecationWarning): + a = tester.wo def test_absolute_path_field(self): class FieldTester: diff --git a/unittests/test_launchers.py b/unittests/test_launchers.py index 1e60a90975..b62e2592b4 100644 --- a/unittests/test_launchers.py +++ b/unittests/test_launchers.py @@ -4,7 +4,6 @@ import reframe.core.launchers as launchers from reframe.core.launchers.registry import getlauncher from reframe.core.schedulers import Job -from reframe.core.shell import BashScriptBuilder class FakeJob(Job): @@ -28,9 +27,7 @@ class _TestLauncher(abc.ABC): """Base class for launcher tests.""" def setUp(self): - self.builder = BashScriptBuilder() self.job = FakeJob(name='fake_job', - command='ls -l', launcher=self.launcher, num_tasks=4, num_tasks_per_node=2, @@ -50,9 +47,7 @@ def setUp(self): sched_exclusive_access='fake_exclude_access', sched_options=['--fake']) self.job.options += ['--gres=gpu:4', '#DW jobdw anything'] - self.minimal_job = FakeJob(name='fake_job', - command='ls -l', - launcher=self.launcher) + self.minimal_job = FakeJob(name='fake_job', launcher=self.launcher) @property @abc.abstractmethod @@ -69,14 +64,12 @@ def expected_command(self): def expected_minimal_command(self): """The command expected to be emitted by the launcher.""" - def test_emit_command(self): - emitted_command = 
self.launcher.emit_run_command(self.job, - self.builder) + def test_run_command(self): + emitted_command = self.launcher.run_command(self.job) self.assertEqual(self.expected_command, emitted_command) - def test_emit_minimal_command(self): - emitted_command = self.launcher.emit_run_command(self.minimal_job, - self.builder) + def test_run_minimal_command(self): + emitted_command = self.launcher.run_command(self.minimal_job) self.assertEqual(self.expected_minimal_command, emitted_command) @@ -87,11 +80,11 @@ def launcher(self): @property def expected_command(self): - return 'srun --foo ls -l' + return 'srun --foo' @property def expected_minimal_command(self): - return 'srun --foo ls -l' + return 'srun --foo' class TestSrunallocLauncher(_TestLauncher, unittest.TestCase): @@ -103,7 +96,7 @@ def launcher(self): @property def expected_command(self): return ('srun ' - '--job-name=rfm_fake_job ' + '--job-name=fake_job ' '--time=0:10:0 ' '--output=fake_stdout ' '--error=fake_stderr ' @@ -121,19 +114,17 @@ def expected_command(self): '--exclude=fake_exclude_nodelist ' '--fake ' '--gres=gpu:4 ' - '--foo ' - 'ls -l') + '--foo') @property def expected_minimal_command(self): return ('srun ' - '--job-name=rfm_fake_job ' + '--job-name=fake_job ' '--time=0:10:0 ' - '--output=fake_job.out ' - '--error=fake_job.err ' + '--output=./fake_job.out ' + '--error=./fake_job.err ' '--ntasks=1 ' - '--foo ' - 'ls -l') + '--foo') class TestAlpsLauncher(_TestLauncher, unittest.TestCase): @@ -143,11 +134,11 @@ def launcher(self): @property def expected_command(self): - return 'aprun -B --foo ls -l' + return 'aprun -B --foo' @property def expected_minimal_command(self): - return 'aprun -B --foo ls -l' + return 'aprun -B --foo' class TestMpirunLauncher(_TestLauncher, unittest.TestCase): @@ -157,11 +148,11 @@ def launcher(self): @property def expected_command(self): - return 'mpirun -np 4 --foo ls -l' + return 'mpirun -np 4 --foo' @property def expected_minimal_command(self): - return 'mpirun -np 1 
--foo ls -l' + return 'mpirun -np 1 --foo' class TestMpiexecLauncher(_TestLauncher, unittest.TestCase): @@ -171,11 +162,11 @@ def launcher(self): @property def expected_command(self): - return 'mpiexec -n 4 --foo ls -l' + return 'mpiexec -n 4 --foo' @property def expected_minimal_command(self): - return 'mpiexec -n 1 --foo ls -l' + return 'mpiexec -n 1 --foo' class TestLauncherWrapperAlps(_TestLauncher, unittest.TestCase): @@ -188,11 +179,11 @@ def launcher(self): @property def expected_command(self): - return 'ddt --offline aprun -B --foo ls -l' + return 'ddt --offline aprun -B --foo' @property def expected_minimal_command(self): - return 'ddt --offline aprun -B --foo ls -l' + return 'ddt --offline aprun -B --foo' class TestLocalLauncher(_TestLauncher, unittest.TestCase): @@ -202,8 +193,8 @@ def launcher(self): @property def expected_command(self): - return 'ls -l' + return '' @property def expected_minimal_command(self): - return 'ls -l' + return '' diff --git a/unittests/test_pipeline.py b/unittests/test_pipeline.py index 63aae7f073..7fc1647449 100644 --- a/unittests/test_pipeline.py +++ b/unittests/test_pipeline.py @@ -6,9 +6,8 @@ import reframe.core.runtime as rt import reframe.utility.sanity as sn import unittests.fixtures as fixtures -from reframe.core.exceptions import (ReframeError, ReframeSyntaxError, - PipelineError, SanityError, - CompilationError) +from reframe.core.exceptions import (BuildError, PipelineError, ReframeError, + ReframeSyntaxError, SanityError) from reframe.core.pipeline import (CompileOnlyRegressionTest, RegressionTest, RunOnlyRegressionTest) from reframe.frontend.loader import RegressionCheckLoader @@ -32,11 +31,13 @@ def setup_remote_execution(self): def setUp(self): self.setup_local_execution() - self.resourcesdir = tempfile.mkdtemp(dir='unittests') self.loader = RegressionCheckLoader(['unittests/resources/checks']) + # Set runtime prefix + rt.runtime().resources.prefix = tempfile.mkdtemp(dir='unittests') + def tearDown(self): - 
shutil.rmtree(self.resourcesdir, ignore_errors=True) + shutil.rmtree(rt.runtime().resources.prefix, ignore_errors=True) shutil.rmtree('.rfm_testing', ignore_errors=True) def replace_prefix(self, filename, new_prefix): @@ -80,6 +81,7 @@ def test_environ_setup(self): def _run_test(self, test, compile_only=False): test.setup(self.partition, self.progenv) test.compile() + test.compile_wait() test.run() test.wait() test.check_sanity() @@ -193,13 +195,15 @@ def test_compile_only_failure(self): test.valid_prog_environs = ['*'] test.valid_systems = ['*'] test.setup(self.partition, self.progenv) - self.assertRaises(CompilationError, test.compile) + test.compile() + self.assertRaises(BuildError, test.compile_wait) def test_compile_only_warning(self): test = CompileOnlyRegressionTest('compileonlycheckwarning', 'unittests/resources/checks') - test.sourcepath = 'compiler_warning.c' - self.progenv.cflags = '-Wall' + test.build_system = 'SingleSource' + test.build_system.srcfile = 'compiler_warning.c' + test.build_system.cflags = ['-Wall'] test.valid_prog_environs = ['*'] test.valid_systems = ['*'] test.sanity_patterns = sn.assert_found(r'warning', test.stderr) @@ -283,7 +287,7 @@ def test_sourcesdir_none_compile_only(self): test.sourcesdir = None test.valid_prog_environs = ['*'] test.valid_systems = ['*'] - self.assertRaises(CompilationError, self._run_test, test) + self.assertRaises(BuildError, self._run_test, test) def test_sourcesdir_none_run_only(self): test = RunOnlyRegressionTest('hellocheck', diff --git a/unittests/test_policies.py b/unittests/test_policies.py index 35c9ff242d..4f598c0d31 100644 --- a/unittests/test_policies.py +++ b/unittests/test_policies.py @@ -3,7 +3,7 @@ import tempfile import unittest -import reframe.core.runtime as runtime +import reframe.core.runtime as rt import reframe.frontend.executors as executors import reframe.frontend.executors.policies as policies from reframe.core.exceptions import JobNotStartedError @@ -16,7 +16,6 @@ class 
TestSerialExecutionPolicy(unittest.TestCase): def setUp(self): - self.resourcesdir = tempfile.mkdtemp(dir='unittests') self.loader = RegressionCheckLoader(['unittests/resources/checks'], ignore_conflicts=True) @@ -24,8 +23,11 @@ def setUp(self): self.runner = executors.Runner(policies.SerialExecutionPolicy()) self.checks = self.loader.load_all() + # Set runtime prefix + rt.runtime().resources.prefix = tempfile.mkdtemp(dir='unittests') + def tearDown(self): - shutil.rmtree(self.resourcesdir, ignore_errors=True) + shutil.rmtree(rt.runtime().resources.prefix, ignore_errors=True) def _num_failures_stage(self, stage): stats = self.runner.stats @@ -219,7 +221,7 @@ def setUp(self): self.runner.policy.task_listeners.append(self.monitor) def set_max_jobs(self, value): - for p in runtime.runtime().system.partitions: + for p in rt.runtime().system.partitions: p._max_jobs = value def read_timestamps(self, tasks): diff --git a/unittests/test_schedulers.py b/unittests/test_schedulers.py index 5e47c1ffbe..4d136679f1 100644 --- a/unittests/test_schedulers.py +++ b/unittests/test_schedulers.py @@ -16,7 +16,6 @@ from reframe.core.launchers.registry import getlauncher from reframe.core.schedulers.registry import getscheduler from reframe.core.schedulers.slurm import SlurmNode -from reframe.core.shell import BashScriptBuilder class _TestJob: @@ -24,22 +23,28 @@ def setUp(self): self.workdir = tempfile.mkdtemp(dir='unittests') self.testjob = self.job_type( name='testjob', - command='hostname', launcher=self.launcher, - environs=[Environment(name='foo', modules=['testmod_foo'])], workdir=self.workdir, script_filename=os_ext.mkstemp_path( dir=self.workdir, suffix='.sh'), stdout=os_ext.mkstemp_path(dir=self.workdir, suffix='.out'), stderr=os_ext.mkstemp_path(dir=self.workdir, suffix='.err'), - pre_run=['echo prerun'], - post_run=['echo postrun'] ) - self.builder = BashScriptBuilder() + self.environs = [Environment(name='foo', modules=['testmod_foo'])] + self.pre_run = ['echo prerun'] + 
self.post_run = ['echo postrun'] + self.parallel_cmd = 'hostname' def tearDown(self): shutil.rmtree(self.workdir) + @property + def commands(self): + runcmd = self.launcher.run_command(self.testjob) + return [*self.pre_run, + runcmd + ' ' + self.parallel_cmd, + *self.post_run] + @property def job_type(self): return getscheduler(self.sched_name) @@ -95,13 +100,13 @@ def setup_job(self): '#DW stage_in source=/foo'] def test_prepare(self): - self.testjob.prepare(self.builder) + self.testjob.prepare(self.commands, self.environs) self.assertScriptSanity(self.testjob.script_filename) @fixtures.switch_to_user_runtime def test_submit(self): self.setup_user() - self.testjob.prepare(self.builder) + self.testjob.prepare(self.commands, self.environs) self.testjob.submit() self.assertIsNotNone(self.testjob.jobid) self.testjob.wait() @@ -109,9 +114,9 @@ def test_submit(self): @fixtures.switch_to_user_runtime def test_submit_timelimit(self, check_elapsed_time=True): self.setup_user() - self.testjob._command = 'sleep 10' + self.parallel_cmd = 'sleep 10' self.testjob._time_limit = (0, 0, 2) - self.testjob.prepare(self.builder) + self.testjob.prepare(self.commands, self.environs) t_job = datetime.now() self.testjob.submit() self.assertIsNotNone(self.testjob.jobid) @@ -127,8 +132,8 @@ def test_submit_timelimit(self, check_elapsed_time=True): @fixtures.switch_to_user_runtime def test_cancel(self): self.setup_user() - self.testjob._command = 'sleep 30' - self.testjob.prepare(self.builder) + self.parallel_cmd = 'sleep 30' + self.testjob.prepare(self.commands, self.environs) t_job = datetime.now() self.testjob.submit() self.testjob.cancel() @@ -138,27 +143,27 @@ def test_cancel(self): self.assertLess(t_job.total_seconds(), 30) def test_cancel_before_submit(self): - self.testjob._command = 'sleep 3' - self.testjob.prepare(self.builder) + self.parallel_cmd = 'sleep 3' + self.testjob.prepare(self.commands, self.environs) self.assertRaises(JobNotStartedError, self.testjob.cancel) def 
test_wait_before_submit(self): - self.testjob._command = 'sleep 3' - self.testjob.prepare(self.builder) + self.parallel_cmd = 'sleep 3' + self.testjob.prepare(self.commands, self.environs) self.assertRaises(JobNotStartedError, self.testjob.wait) @fixtures.switch_to_user_runtime def test_poll(self): self.setup_user() - self.testjob._command = 'sleep 2' - self.testjob.prepare(self.builder) + self.parallel_cmd = 'sleep 2' + self.testjob.prepare(self.commands, self.environs) self.testjob.submit() self.assertFalse(self.testjob.finished()) self.testjob.wait() def test_poll_before_submit(self): - self.testjob._command = 'sleep 3' - self.testjob.prepare(self.builder) + self.parallel_cmd = 'sleep 3' + self.testjob.prepare(self.commands, self.environs) self.assertRaises(JobNotStartedError, self.testjob.finished) @@ -210,13 +215,13 @@ def test_cancel_with_grace(self): # We also check that the additional spawned process is also killed. from reframe.core.schedulers.local import LOCAL_JOB_TIMEOUT - self.testjob._command = 'sleep 5 &' + self.parallel_cmd = 'sleep 5 &' + self.pre_run = ['trap -- "" TERM'] + self.post_run = ['echo $!', 'wait'] self.testjob._time_limit = (0, 1, 0) self.testjob.cancel_grace_period = 2 - self.testjob._pre_run = ['trap -- "" TERM'] - self.testjob._post_run = ['echo $!', 'wait'] - self.testjob.prepare(self.builder) + self.testjob.prepare(self.commands, self.environs) self.testjob.submit() # Stall a bit here to let the the spawned process start and install its @@ -253,12 +258,12 @@ def test_cancel_term_ignore(self): # kills it. 
from reframe.core.schedulers.local import LOCAL_JOB_TIMEOUT - self.testjob._pre_run = [] - self.testjob._post_run = [] - self.testjob._command = os.path.join(fixtures.TEST_RESOURCES_CHECKS, - 'src', 'sleep_deeply.sh') + self.pre_run = [] + self.post_run = [] + self.parallel_cmd = os.path.join(fixtures.TEST_RESOURCES_CHECKS, + 'src', 'sleep_deeply.sh') self.testjob.cancel_grace_period = 2 - self.testjob.prepare(self.builder) + self.testjob.prepare(self.commands, self.environs) self.testjob.submit() # Stall a bit here to let the the spawned process start and install its @@ -301,7 +306,7 @@ def test_prepare(self): self.setup_job() super().test_prepare() expected_directives = set([ - '#SBATCH --job-name="rfm_testjob"', + '#SBATCH --job-name="testjob"', '#SBATCH --time=0:5:0', '#SBATCH --output=%s' % self.testjob.stdout, '#SBATCH --error=%s' % self.testjob.stderr, @@ -414,7 +419,7 @@ def test_prepare(self): num_cpus_per_node = (self.testjob.num_cpus_per_task * self.testjob.num_tasks_per_node) expected_directives = set([ - '#PBS -N "rfm_testjob"', + '#PBS -N "testjob"', '#PBS -l walltime=0:5:0', '#PBS -o %s' % self.testjob.stdout, '#PBS -e %s' % self.testjob.stderr, @@ -441,7 +446,7 @@ def test_prepare_no_cpus(self): num_nodes = self.testjob.num_tasks // self.testjob.num_tasks_per_node num_cpus_per_node = self.testjob.num_tasks_per_node expected_directives = set([ - '#PBS -N "rfm_testjob"', + '#PBS -N "testjob"', '#PBS -l walltime=0:5:0', '#PBS -o %s' % self.testjob.stdout, '#PBS -e %s' % self.testjob.stderr, @@ -525,15 +530,12 @@ def setUp(self): slurm_scheduler = getscheduler('slurm') self.testjob = slurm_scheduler( name='testjob', - command='hostname', launcher=getlauncher('local')(), - environs=[Environment(name='foo')], workdir=self.workdir, script_filename=os.path.join(self.workdir, 'testjob.sh'), stdout=os.path.join(self.workdir, 'testjob.out'), stderr=os.path.join(self.workdir, 'testjob.err') ) - self.builder = BashScriptBuilder() # monkey patch 
`_get_reservation_nodes` to simulate extraction of # slurm nodes through the use of `scontrol show` self.testjob._get_reservation_nodes = self.create_dummy_nodes @@ -593,7 +595,7 @@ def test_noreservation(self): self.assertRaises(JobError, self.prepare_job) def prepare_job(self): - self.testjob.prepare(self.builder) + self.testjob.prepare(['hostname']) class TestSlurmFlexibleNodeAllocationExclude(TestSlurmFlexibleNodeAllocation): diff --git a/unittests/test_script_builders.py b/unittests/test_script_builders.py deleted file mode 100644 index 9db8770a7f..0000000000 --- a/unittests/test_script_builders.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import stat -import tempfile -import unittest - -import reframe.core.shell as builders -import reframe.utility.os_ext as os_ext - - -class TestShellScriptBuilder(unittest.TestCase): - def setUp(self): - self.script_file = tempfile.NamedTemporaryFile(mode='w+', delete=False) - os.chmod(self.script_file.name, - os.stat(self.script_file.name).st_mode | stat.S_IEXEC) - - def tearDown(self): - os.remove(self.script_file.name) - - def test_bash_builder(self): - builder = builders.BashScriptBuilder() - builder.set_variable('var1', '13') - builder.set_variable('var2', '2') - builder.set_variable('foo', '33', suppress=True) - builder.verbatim('((var3 = var1 + var2)); echo hello $var3') - self.script_file.write(builder.finalise()) - self.script_file.close() - self.assertTrue( - os_ext.grep_command_output(self.script_file.name, 'hello 15')) - self.assertFalse( - os_ext.grep_command_output(self.script_file.name, 'foo')) diff --git a/unittests/test_shell.py b/unittests/test_shell.py new file mode 100644 index 0000000000..775deacf34 --- /dev/null +++ b/unittests/test_shell.py @@ -0,0 +1,126 @@ +import os +import stat +import tempfile +import time +import unittest + +import reframe.core.shell as shell +import reframe.utility.os_ext as os_ext +from reframe.core.exceptions import SpawnedProcessError + + +class 
TestShellScriptGenerator(unittest.TestCase): + def setUp(self): + self.script_file = tempfile.NamedTemporaryFile(mode='w+', delete=False) + self.script_file.close() + os.chmod(self.script_file.name, + os.stat(self.script_file.name).st_mode | stat.S_IEXEC) + + def tearDown(self): + os.remove(self.script_file.name) + + def test_generate(self): + with shell.generate_script(self.script_file.name) as gen: + gen.write_prolog('# this is a test script') + gen.write_prolog('# another comment') + gen.write('v1=10') + gen.write_prolog('export v2=20') + gen.write_body('((v3 = v1 + v2))') + gen.write_body('echo hello $v3') + gen.write('unset v2') + gen.write_epilog('echo foo') + gen.write_epilog('unset v1') + + expected_output = '''#!/bin/bash +# this is a test script +# another comment +export v2=20 +v1=10 +((v3 = v1 + v2)) +echo hello $v3 +unset v2 +echo foo +unset v1 +''' + with open(self.script_file.name) as fp: + self.assertEqual(expected_output, fp.read()) + + def test_generate_login(self): + with shell.generate_script(self.script_file.name, login=True) as gen: + gen.write('echo hello') + + expected_output = '''#!/bin/bash -l +echo hello +''' + with open(self.script_file.name) as fp: + self.assertEqual(expected_output, fp.read()) + + def test_write_types(self): + class C: + def __str__(self): + return 'echo "C()"' + + with shell.generate_script(self.script_file.name) as gen: + gen.write(['echo foo', 'echo hello']) + gen.write('echo bar') + gen.write(C()) + + expected_output = '''#!/bin/bash +echo foo +echo hello +echo bar +echo "C()" +''' + with open(self.script_file.name) as fp: + self.assertEqual(expected_output, fp.read()) + + def test_trap_error(self): + with shell.generate_script(self.script_file.name, + trap_errors=True) as gen: + gen.write('false') + gen.write('echo hello') + + with self.assertRaises(SpawnedProcessError) as cm: + os_ext.run_command(self.script_file.name, check=True) + + exc = cm.exception + self.assertNotIn('hello', exc.stdout) + 
self.assertEqual(1, exc.exitcode) + self.assertIn("-reframe: command `false' failed (exit code: 1)", + exc.stdout) + + def test_trap_exit(self): + with shell.generate_script(self.script_file.name, + trap_exit=True) as gen: + gen.write('echo hello') + + completed = os_ext.run_command(self.script_file.name, check=True) + self.assertIn('hello', completed.stdout) + self.assertEqual(0, completed.returncode) + self.assertIn("-reframe: script exiting with exit code: 0", + completed.stdout) + + def test_trap_signal(self): + with shell.generate_script(self.script_file.name, + trap_signals=True) as gen: + gen.write('sleep 10') + gen.write('echo hello') + + f_stdout = tempfile.NamedTemporaryFile(mode='w+', delete=False) + proc = os_ext.run_command_async(self.script_file.name, + stdout=f_stdout, + start_new_session=True) + + # Yield for some time to allow the script to start + time.sleep(1) + + # We kill the whole spawned process group (we are launching a shell) + os.killpg(proc.pid, 15) + proc.wait() + + f_stdout.flush() + f_stdout.seek(0) + stdout = f_stdout.read() + self.assertNotIn('hello', stdout) + self.assertEqual(143, proc.returncode) + self.assertIn("-reframe: script caught signal: 15", stdout)