Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions reframe/core/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,6 +566,7 @@ def __init__(self, name=None, prefix=None):
self.readonly_files = []
self.tags = set()
self.maintainers = []
self._perfvalues = {}

# Strict performance check, if applicable
self.strict_check = True
Expand Down Expand Up @@ -658,6 +659,10 @@ def current_system(self):
"""
return rt.runtime().system

@property
def perfvalues(self):
    """The performance values collected by the last performance check.

    Exposes the internal ``_perfvalues`` dict wrapped in
    :class:`util.MappingView` (presumably a read-only mapping view —
    confirm in ``reframe.utility``).  Keys are
    ``'<partition_fullname>:<tag>'`` strings and values are
    ``(value, *reference)`` tuples, as stored by ``check_performance()``.
    """
    return util.MappingView(self._perfvalues)

@property
def job(self):
"""The job descriptor associated with this test.
Expand Down Expand Up @@ -1110,7 +1115,6 @@ def check_performance(self):
# We first evaluate and log all performance values and then we
# check them against the reference. This way we always log them
even if they don't meet the reference.
perf_values = []
for tag, expr in self.perf_patterns.items():
value = evaluate(expr)
key = '%s:%s' % (self._current_partition.fullname, tag)
Expand All @@ -1119,11 +1123,11 @@ def check_performance(self):
"tag `%s' not resolved in references for `%s'" %
(tag, self._current_partition.fullname))

perf_values.append((value, self.reference[key]))
self._perfvalues[key] = (value, *self.reference[key])
self._perf_logger.log_performance(logging.INFO, tag, value,
*self.reference[key])

for val, reference in perf_values:
for val, *reference in self._perfvalues.values():
ref, low_thres, high_thres, *_ = reference
try:
evaluate(assert_reference(val, ref, low_thres, high_thres))
Expand Down
47 changes: 26 additions & 21 deletions reframe/frontend/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,45 +185,47 @@ def main():

# Miscellaneous options
misc_options.add_argument(
'-m', '--module', action='append', default=[],
metavar='MOD', dest='user_modules',
help='Load module MOD before running the regression')
'-C', '--config-file', action='store', dest='config_file',
metavar='FILE', default=os.path.join(reframe.INSTALL_PREFIX,
'reframe/settings.py'),
help='Specify a custom config-file for the machine. '
'(default: %s' % os.path.join(reframe.INSTALL_PREFIX,
'reframe/settings.py'))
misc_options.add_argument(
'-M', '--map-module', action='append', metavar='MAPPING',
dest='module_mappings', default=[],
help='Apply a single module mapping')
misc_options.add_argument(
'-m', '--module', action='append', default=[],
metavar='MOD', dest='user_modules',
help='Load module MOD before running the regression')
misc_options.add_argument(
'--module-mappings', action='store', metavar='FILE',
dest='module_map_file',
help='Apply module mappings defined in FILE')
misc_options.add_argument(
'--purge-env', action='store_true', dest='purge_env', default=False,
help='Purge modules environment before running any tests')
misc_options.add_argument(
'--nocolor', action='store_false', dest='colorize', default=True,
help='Disable coloring of output')
misc_options.add_argument('--performance-report', action='store_true',
help='Print the performance report')
misc_options.add_argument(
'--timestamp', action='store', nargs='?',
const='%FT%T', metavar='TIMEFMT',
help='Append a timestamp component to the regression directories'
'(default format "%%FT%%T")'
)
misc_options.add_argument(
'--system', action='store',
help='Load SYSTEM configuration explicitly')
misc_options.add_argument(
'-C', '--config-file', action='store', dest='config_file',
metavar='FILE', default=os.path.join(reframe.INSTALL_PREFIX,
'reframe/settings.py'),
help='Specify a custom config-file for the machine. '
'(default: %s' % os.path.join(reframe.INSTALL_PREFIX,
'reframe/settings.py'))
'--purge-env', action='store_true', dest='purge_env', default=False,
help='Purge modules environment before running any tests')
misc_options.add_argument(
'--show-config', action='store_true',
help='Print configuration of the current system and exit')
misc_options.add_argument(
'--show-config-env', action='store', metavar='ENV',
help='Print configuration of environment ENV and exit')
misc_options.add_argument(
'--system', action='store',
help='Load SYSTEM configuration explicitly')
misc_options.add_argument(
'--timestamp', action='store', nargs='?',
const='%FT%T', metavar='TIMEFMT',
help='Append a timestamp component to the regression directories'
'(default format "%%FT%%T")'
)
misc_options.add_argument('-V', '--version', action='version',
version=reframe.VERSION)
misc_options.add_argument('-v', '--verbose', action='count', default=0,
Expand Down Expand Up @@ -537,6 +539,9 @@ def main():
printer.info(runner.stats.failure_report())
success = False

if options.performance_report:
printer.info(runner.stats.performance_report())

else:
printer.info('No action specified. Exiting...')
printer.info("Try `%s -h' for a list of available actions." %
Expand Down
32 changes: 32 additions & 0 deletions reframe/frontend/statistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,3 +107,35 @@ def failure_report(self):

report.append(line_width * '-')
return '\n'.join(report)

def performance_report(self):
    """Build a plain-text performance report for the executed test cases.

    Walks every task returned by ``self.tasks()``; for each task whose
    check produced performance values, a section header is emitted per
    check name, per partition and per environment, followed by one line
    per performance variable of the form ``* <var>: <value> <unit>``.

    :returns: the report as a single newline-joined string.
    """
    width = 78
    lines = [width * '=', 'PERFORMANCE REPORT']
    last_name = ''
    last_part = ''
    for task in self.tasks():
        check = task.check
        if not check.perfvalues.keys():
            # Nothing was measured for this task; skip it entirely.
            continue

        if check.name != last_name:
            lines.append(width * '-')
            lines.append('%s' % check.name)
            last_name = check.name

        partname = check.current_partition.fullname
        if partname != last_part:
            lines.append('- %s' % partname)
            last_part = partname

        lines.append(' - %s' % check.current_environ)
        for key, ref in check.perfvalues.items():
            # Keys look like '<partition_fullname>:<tag>'; the trailing
            # component is the performance variable name.
            var = key.split(':')[-1]
            # ref is (value, *reference); a fifth element, when present,
            # is the measurement unit.
            unit = ref[4] if len(ref) > 4 else '(no unit specified)'
            lines.append(' * %s: %s %s' % (var, ref[0], unit))

    lines.append(width * '-')
    return '\n'.join(lines)