Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 10 additions & 5 deletions reframe/core/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@
import traceback

import reframe.utility.osext as osext
from reframe.core.exceptions import ReframeSyntaxError, user_frame
from reframe.core.exceptions import (ReframeSyntaxError,
SkipTestError,
user_frame)
from reframe.core.logging import getlogger
from reframe.core.pipeline import RegressionTest
from reframe.utility.versioning import VersionValidator
Expand Down Expand Up @@ -54,12 +56,15 @@ def _instantiate_all():

try:
ret.append(_instantiate(cls, args))
except SkipTestError as e:
getlogger().warning(f'skipping test {cls.__name__!r}: {e}')
except Exception:
frame = user_frame(*sys.exc_info())
msg = "skipping test due to errors: %s: " % cls.__name__
msg += "use `-v' for more information\n"
msg += " FILE: %s:%s" % (frame.filename, frame.lineno)
getlogger().warning(msg)
getlogger().warning(
f"skipping test {cls.__name__!r} due to errors: "
f"use `-v' for more information\n"
f" FILE: {frame.filename}:{frame.lineno}"
)
getlogger().verbose(traceback.format_exc())

return ret
Expand Down
4 changes: 4 additions & 0 deletions reframe/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,10 @@ class DependencyError(ReframeError):
'''Raised when a dependency problem is encountered.'''


class SkipTestError(ReframeError):
    '''Raised to signal that a test must be skipped.'''


def user_frame(exc_type, exc_value, tb):
'''Return a user frame from the exception's traceback.

Expand Down
24 changes: 22 additions & 2 deletions reframe/core/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@
from reframe.core.containers import ContainerPlatformField
from reframe.core.deferrable import _DeferredExpression
from reframe.core.exceptions import (BuildError, DependencyError,
PipelineError, SanityError,
PerformanceError)
PerformanceError, PipelineError,
SanityError, SkipTestError)
from reframe.core.meta import RegressionTestMeta
from reframe.core.schedulers import Job
from reframe.core.warnings import user_deprecation_warning
Expand Down Expand Up @@ -1843,6 +1843,26 @@ def getdep(self, target, environ=None, part=None):
raise DependencyError(f'could not resolve dependency to ({target!r}, '
f'{part!r}, {environ!r})')

def skip(self, msg=None):
    '''Raise a :class:`SkipTestError`, causing this test to be skipped.

    :arg msg: A message explaining why the test was skipped.

    .. versionadded:: 3.5.1
    '''
    raise SkipTestError(msg)

def skip_if(self, cond, msg=None):
    '''Skip this test if ``cond`` evaluates to true.

    :arg cond: The condition to check for skipping the test.
    :arg msg: A message explaining why the test was skipped.

    .. versionadded:: 3.5.1
    '''
    if not cond:
        return

    self.skip(msg)

def __str__(self):
return "%s(name='%s', prefix='%s')" % (type(self).__name__,
self.name, self.prefix)
Expand Down
38 changes: 31 additions & 7 deletions reframe/frontend/executors/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
JobNotStartedError,
FailureLimitError,
ForceExitError,
SkipTestError,
TaskExit)
from reframe.core.schedulers.local import LocalJobScheduler
from reframe.frontend.printer import PrettyPrinter
Expand Down Expand Up @@ -131,6 +132,7 @@ def __init__(self, case, listeners=[]):
self._current_stage = 'startup'
self._exc_info = (None, None, None)
self._listeners = list(listeners)
self._skipped = False

# Reference count for dependent tests; safe to cleanup the test only
# if it is zero
Expand Down Expand Up @@ -212,7 +214,8 @@ def exc_info(self):

@property
def failed(self):
return self._failed_stage is not None and not self._aborted
return (self._failed_stage is not None and
not self._aborted and not self._skipped)

@property
def failed_stage(self):
Expand All @@ -230,6 +233,10 @@ def completed(self):
def aborted(self):
return self._aborted

@property
def skipped(self):
    # Whether this task has been marked as skipped
    return self._skipped

def _notify_listeners(self, callback_name):
for l in self._listeners:
callback = getattr(l, callback_name)
Expand Down Expand Up @@ -260,7 +267,12 @@ def __exit__(this, exc_type, exc_value, traceback):
logger.debug(f'Entering stage: {self._current_stage}')
with update_timestamps():
return fn(*args, **kwargs)

except SkipTestError as e:
if not self.succeeded:
# Only skip a test if it hasn't finished yet;
# This practically ignores skipping during the cleanup phase
self.skip()
raise TaskExit from e
except ABORT_REASONS:
self.fail()
raise
Expand Down Expand Up @@ -321,6 +333,12 @@ def fail(self, exc_info=None):
self._exc_info = exc_info or sys.exc_info()
self._notify_listeners('on_task_failure')

def skip(self, exc_info=None):
    # Mark this task as skipped at the current pipeline stage and
    # notify the registered listeners
    self._skipped = True
    self._failed_stage = self._current_stage
    self._exc_info = exc_info if exc_info else sys.exc_info()
    self._notify_listeners('on_task_skip')

def abort(self, cause=None):
if self.failed or self._aborted:
return
Expand Down Expand Up @@ -355,6 +373,10 @@ def on_task_run(self, task):
def on_task_exit(self, task):
'''Called whenever a RegressionTask finishes.'''

@abc.abstractmethod
def on_task_skip(self, task):
    '''Called whenever a RegressionTask gets skipped.'''

@abc.abstractmethod
def on_task_failure(self, task):
'''Called when a regression test has failed.'''
Expand Down Expand Up @@ -400,7 +422,6 @@ def stats(self):
return self._stats

def runall(self, testcases, restored_cases=None):
abort_reason = None
num_checks = len({tc.check.name for tc in testcases})
self._printer.separator('short double line',
'Running %d check(s)' % num_checks)
Expand All @@ -415,18 +436,21 @@ def runall(self, testcases, restored_cases=None):
# Print the summary line
num_failures = len(self._stats.failed())
num_completed = len(self._stats.completed())
num_skipped = len(self._stats.skipped())
num_tasks = len(self._stats.tasks())
if num_failures > 0 or num_completed < num_tasks:
if num_failures > 0 or num_completed + num_skipped < num_tasks:
status = 'FAILED'
else:
status = 'PASSED'

total_run = len(testcases) - num_tasks + num_completed
total_run = len(testcases)
total_completed = len(self._stats.completed(0))
total_skipped = len(self._stats.skipped(0))
self._printer.status(
status,
f'Ran {num_completed}/{total_run}'
f'Ran {total_completed}/{total_run}'
f' test case(s) from {num_checks} check(s) '
f'({num_failures} failure(s))',
f'({num_failures} failure(s), {total_skipped} skipped)',
just='center'
)
self._printer.timestamp('Finished on', 'short double line')
Expand Down
52 changes: 45 additions & 7 deletions reframe/frontend/executors/policies.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import time

from reframe.core.exceptions import (FailureLimitError,
SkipTestError,
TaskDependencyError,
TaskExit)
from reframe.core.logging import getlogger
Expand Down Expand Up @@ -100,6 +101,17 @@ def runcase(self, case):
for c in case.deps if c in self._task_index):
raise TaskDependencyError('dependencies failed')

if any(self._task_index[c].skipped
for c in case.deps if c in self._task_index):

# We raise the SkipTestError here and catch it immediately in
# order for `skip()` to get the correct exception context.
try:
raise SkipTestError('skipped due to skipped dependencies')
except SkipTestError as e:
task.skip()
raise TaskExit from e

partname = task.testcase.partition.fullname
task.setup(task.testcase.partition,
task.testcase.environ,
Expand Down Expand Up @@ -132,12 +144,10 @@ def runcase(self, case):

self._retired_tasks.append(task)
task.finalize()

except TaskExit:
return
except ABORT_REASONS as e:
task.abort(e)

raise
except BaseException:
task.fail(sys.exc_info())
Expand All @@ -151,6 +161,10 @@ def on_task_run(self, task):
def on_task_exit(self, task):
pass

def on_task_skip(self, task):
    # Report the skip reason attached to the task's exception
    self.printer.status('SKIP', str(task.exc_info[1]), just='right')

def on_task_failure(self, task):
self._num_failed_tasks += 1
timings = task.pipeline_timings(['compile_complete',
Expand Down Expand Up @@ -247,6 +261,8 @@ def _remove_from_running(self, task):
getlogger().debug2('Task was not running')
pass

# FIXME: The following functions are very similar and they are also reused
# in the serial policy; we should refactor them
def deps_failed(self, task):
# NOTE: Restored dependencies are not in the task_index
return any(self._task_index[c].failed
Expand All @@ -257,6 +273,11 @@ def deps_succeeded(self, task):
return all(self._task_index[c].succeeded
for c in task.testcase.deps if c in self._task_index)

def deps_skipped(self, task):
    # NOTE: Restored dependencies are not in the task_index
    deps = (self._task_index[c]
            for c in task.testcase.deps if c in self._task_index)
    return any(t.skipped for t in deps)

def on_task_setup(self, task):
partname = task.check.current_partition.fullname
self._ready_tasks[partname].append(task)
Expand All @@ -265,6 +286,17 @@ def on_task_run(self, task):
partname = task.check.current_partition.fullname
self._running_tasks[partname].append(task)

def on_task_skip(self, task):
    # A task skipped after its run phase must be dropped from the
    # running-task list of its partition
    part = task.check.current_partition
    if part and task.failed_stage in ('run_complete', 'run_wait'):
        self._running_tasks[part.fullname].remove(task)

    self.printer.status('SKIP', str(task.exc_info[1]), just='right')

def on_task_failure(self, task):
if task.aborted:
return
Expand Down Expand Up @@ -308,7 +340,13 @@ def on_task_exit(self, task):
self._completed_tasks.append(task)

def _setup_task(self, task):
if self.deps_succeeded(task):
if self.deps_skipped(task):
try:
raise SkipTestError('skipped due to skipped dependencies')
except SkipTestError as e:
task.skip()
return False
elif self.deps_succeeded(task):
try:
task.setup(task.testcase.partition,
task.testcase.environ,
Expand Down Expand Up @@ -346,7 +384,7 @@ def runcase(self, case):
try:
partname = partition.fullname
if not self._setup_task(task):
if not task.failed:
if not task.skipped and not task.failed:
self.printer.status(
'DEP', '%s on %s using %s' %
(check.name, partname, environ.name),
Expand All @@ -371,7 +409,7 @@ def runcase(self, case):
else:
self.printer.status('HOLD', task.check.info(), just='right')
except TaskExit:
if not task.failed:
if not task.failed or not task.skipped:
with contextlib.suppress(TaskExit):
self._reschedule(task)

Expand All @@ -380,7 +418,6 @@ def runcase(self, case):
# If abort was caused due to failure elsewhere, abort current
# task as well
task.abort(e)

self._failall(e)
raise

Expand Down Expand Up @@ -416,7 +453,8 @@ def split_jobs(tasks):
def _setup_all(self):
still_waiting = []
for task in self._waiting_tasks:
if not self._setup_task(task) and not task.failed:
if (not self._setup_task(task) and
not task.failed and not task.skipped):
still_waiting.append(task)

self._waiting_tasks[:] = still_waiting
Expand Down
8 changes: 8 additions & 0 deletions reframe/frontend/statistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ def tasks(self, run=-1):
def failed(self, run=-1):
return [t for t in self.tasks(run) if t.failed]

def skipped(self, run=-1):
    # Return the tasks of the given run that were skipped
    return [task for task in self.tasks(run) if task.skipped]

def aborted(self, run=-1):
return [t for t in self.tasks(run) if t.aborted]

Expand Down Expand Up @@ -83,6 +86,7 @@ def json(self, force=False):
testcases = []
num_failures = 0
num_aborted = 0
num_skipped = 0
for t in run:
check = t.check
partition = check.current_partition
Expand Down Expand Up @@ -158,6 +162,9 @@ def json(self, force=False):
'traceback': t.exc_info[2]
}
entry['fail_severe'] = errors.is_severe(*t.exc_info)
elif t.skipped:
entry['result'] = 'skipped'
num_skipped += 1
else:
entry['result'] = 'success'
entry['outputdir'] = check.outputdir
Expand All @@ -183,6 +190,7 @@ def json(self, force=False):
'num_cases': len(run),
'num_failures': num_failures,
'num_aborted': num_aborted,
'num_skipped': num_skipped,
'runid': runid,
'testcases': testcases
})
Expand Down
11 changes: 11 additions & 0 deletions unittests/resources/checks/hellocheck.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,14 @@ def __init__(self):
self.valid_prog_environs = ['*']
self.sourcepath = 'hello.c'
self.sanity_patterns = sn.assert_not_found(r'(?i)error', self.stdout)


@rfm.simple_test
class SkipTest(rfm.RunOnlyRegressionTest):
    '''A test that always skips itself.'''
    valid_prog_environs = ['*']
    valid_systems = ['*']
    sanity_patterns = sn.assert_true(1)

    def __init__(self):
        # Unconditionally skip this test
        self.skip('unsupported')
3 changes: 2 additions & 1 deletion unittests/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -734,7 +734,8 @@ def test_maxfail_option(run_reframe):
)
assert 'Traceback' not in stdout
assert 'Traceback' not in stderr
assert 'Ran 2/2 test case(s) from 2 check(s) (0 failure(s))' in stdout
assert ('Ran 2/2 test case(s) from 2 check(s) '
'(0 failure(s), 0 skipped)') in stdout
assert returncode == 0


Expand Down
Loading