diff --git a/config/cscs-pbs.py b/config/cscs-pbs.py index ffbcd8bb03..01b2009596 100644 --- a/config/cscs-pbs.py +++ b/config/cscs-pbs.py @@ -4,12 +4,12 @@ class ReframeSettings: - _reframe_module = 'reframe' - _job_poll_intervals = [1, 2, 3] - _job_submit_timeout = 60 - _checks_path = ['checks/'] - _checks_path_recurse = True - _site_configuration = { + reframe_module = 'reframe' + job_poll_intervals = [1, 2, 3] + job_submit_timeout = 60 + checks_path = ['checks/'] + checks_path_recurse = True + site_configuration = { 'systems': { 'dom': { 'descr': 'Dom TDS', @@ -102,10 +102,12 @@ class ReframeSettings: }, } - _logging_config = { + logging_config = { 'level': 'DEBUG', - 'handlers': { - 'reframe.log': { + 'handlers': [ + { + 'type': 'file', + 'name': 'reframe.log', 'level': 'DEBUG', 'format': '[%(asctime)s] %(levelname)s: ' '%(check_info)s: %(message)s', @@ -113,49 +115,41 @@ class ReframeSettings: }, # Output handling - '&1': { + { + 'type': 'stream', + 'name': 'stdout', 'level': 'INFO', 'format': '%(message)s' }, - 'reframe.out': { + { + 'type': 'file', + 'name': 'reframe.out', 'level': 'INFO', 'format': '%(message)s', 'append': False, } - } + ] } - @property - def version(self): - return self._version - - @property - def reframe_module(self): - return self._reframe_module - - @property - def job_poll_intervals(self): - return self._job_poll_intervals - - @property - def job_submit_timeout(self): - return self._job_submit_timeout - - @property - def checks_path(self): - return self._checks_path - - @property - def checks_path_recurse(self): - return self._checks_path_recurse - - @property - def site_configuration(self): - return self._site_configuration - - @property - def logging_config(self): - return self._logging_config + perf_logging_config = { + 'level': 'DEBUG', + 'handlers': [ + { + 'type': 'filelog', + 'prefix': '%(check_system)s/%(check_partition)s', + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + 
'%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'append': True + } + ] + } settings = ReframeSettings() diff --git a/config/cscs.py b/config/cscs.py index 9f327ba392..7e1a7f477a 100644 --- a/config/cscs.py +++ b/config/cscs.py @@ -4,12 +4,12 @@ class ReframeSettings: - _reframe_module = 'reframe' - _job_poll_intervals = [1, 2, 3] - _job_submit_timeout = 60 - _checks_path = ['checks/'] - _checks_path_recurse = True - _site_configuration = { + reframe_module = 'reframe' + job_poll_intervals = [1, 2, 3] + job_submit_timeout = 60 + checks_path = ['checks/'] + checks_path_recurse = True + site_configuration = { 'systems': { 'daint': { 'descr': 'Piz Daint', @@ -324,7 +324,7 @@ class ReframeSettings: '--exec-policy=async', '--strict', '--output=$APPS/UES/$USER/regression/maintenance', - '--logdir=$APPS/UES/$USER/regression/maintenance/logs', + '--perflogdir=$APPS/UES/$USER/regression/maintenance/logs', '--stage=$SCRATCH/regression/maintenance/stage', '--reservation=maintenance', '--save-log-files', @@ -335,7 +335,7 @@ class ReframeSettings: '--exec-policy=async', '--strict', '--output=$APPS/UES/$USER/regression/production', - '--logdir=$APPS/UES/$USER/regression/production/logs', + '--perflogdir=$APPS/UES/$USER/regression/production/logs', '--stage=$SCRATCH/regression/production/stage', '--save-log-files', '--tag=production', @@ -345,10 +345,12 @@ class ReframeSettings: } } - _logging_config = { + logging_config = { 'level': 'DEBUG', - 'handlers': { - 'reframe.log': { + 'handlers': [ + { + 'type': 'file', + 'name': 'reframe.log', 'level': 'DEBUG', 'format': '[%(asctime)s] %(levelname)s: ' '%(check_info)s: %(message)s', @@ -356,49 +358,52 @@ class ReframeSettings: }, # Output handling - '&1': { + { + 'type': 'stream', + 'name': 'stdout', 'level': 'INFO', 'format': '%(message)s' }, - 'reframe.out': { + { + 'type': 'file', + 'name': 
'reframe.out', 'level': 'INFO', 'format': '%(message)s', 'append': False, } - } + ] } - @property - def version(self): - return self._version - - @property - def reframe_module(self): - return self._reframe_module - - @property - def job_poll_intervals(self): - return self._job_poll_intervals - - @property - def job_submit_timeout(self): - return self._job_submit_timeout - - @property - def checks_path(self): - return self._checks_path - - @property - def checks_path_recurse(self): - return self._checks_path_recurse - - @property - def site_configuration(self): - return self._site_configuration - - @property - def logging_config(self): - return self._logging_config + perf_logging_config = { + 'level': 'DEBUG', + 'handlers': [ + { + 'type': 'graylog', + 'host': 'your-server-here', + 'port': 12345, + 'level': 'INFO', + 'format': '%(message)s', + 'extras': { + 'facility': 'reframe', + 'data-version': '1.0', + } + }, + { + 'type': 'filelog', + 'prefix': '%(check_system)s/%(check_partition)s', + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + '%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'append': True + } + ] + } settings = ReframeSettings() diff --git a/docs/running.rst b/docs/running.rst index 702c29f82f..58508477bd 100644 --- a/docs/running.rst +++ b/docs/running.rst @@ -376,7 +376,7 @@ They are summarized below: Configuring ReFrame Directories ------------------------------- -ReFrame uses three basic directories during the execution of tests: +ReFrame uses two basic directories during the execution of tests: 1. The stage directory @@ -390,16 +390,11 @@ ReFrame uses three basic directories during the execution of tests: By default these are the standard output, standard error and the generated job script file. A regression test may also specify to keep additional files. -3. 
The log directory - - * This is where the performance log files of the individual performance tests are placed (see `Logging <#logging>`__ for more information) - -By default, all these directories are placed under a common prefix, which defaults to ``.``. +By default, these directories are placed under a common prefix, which defaults to ``.``. The rest of the directories are organized as follows: * Stage directory: ``${prefix}/stage/`` * Output directory: ``${prefix}/output/`` -* Performance log directory: ``${prefix}/logs`` You can optionally append a timestamp directory component to the above paths (except the logs directory), by using the ``--timestamp`` option. This options takes an optional argument to specify the timestamp format. @@ -410,7 +405,6 @@ You can override either the default global prefix or any of the default individu * ``--prefix DIR``: set prefix to ``DIR``. * ``--output DIR``: set output directory to ``DIR``. * ``--stage DIR``: set stage directory to ``DIR``. -* ``--logdir DIR``: set performance log directory to ``DIR``. The stage and output directories are created only when you run a regression test. However you can view the directories that will be created even when you do a listing of the available checks with the ``-l`` option. @@ -430,9 +424,9 @@ This is useful if you want to check the directories that ReFrame will create. ============= Check prefix : /path/to/reframe (R) Check search path : 'checks/' - Stage dir prefix : /foo/stage/ - Output dir prefix : /foo/output/ - Logging dir : /foo/logs + Stage dir prefix : /foo/stage/ + Output dir prefix : /foo/output/ + Perf. logging prefix : /Users/karakasv/Repositories/reframe/logs List of matched checks ====================== Found 0 check(s). @@ -506,104 +500,225 @@ Each line starts with a timestamp, the level of the message (``info``, ``debug`` Every time ReFrame is run, both ``reframe.out`` and ``reframe.log`` files will be rewritten. 
However, you can ask ReFrame to copy them to the output directory before exiting by passing it the ``--save-log-files`` option. -Configuring logging +Configuring Logging ^^^^^^^^^^^^^^^^^^^ You can configure several aspects of logging in ReFrame and even how the output will look like. ReFrame's logging mechanism is built upon Python's `logging `__ framework adding extra logging levels and more formatting capabilities. -Logging in ReFrame is configured by the ``_logging_config`` variable in the ``reframe/settings.py`` file. +Logging in ReFrame is configured by the ``logging_config`` variable in the ``reframe/settings.py`` file. The default configuration looks as follows: -.. code-block:: python - - _logging_config = { - 'level': 'DEBUG', - 'handlers': { - 'reframe.log' : { - 'level' : 'DEBUG', - 'format' : '[%(asctime)s] %(levelname)s: ' - '%(check_info)s: %(message)s', - 'append' : False, - }, - - # Output handling - '&1': { - 'level' : 'INFO', - 'format' : '%(message)s' - }, - 'reframe.out' : { - 'level' : 'INFO', - 'format' : '%(message)s', - 'append' : False, - } - } - } +.. literalinclude:: ../reframe/settings.py + :lines: 51-78 + :dedent: 4 Note that this configuration dictionary is not the same as the one used by Python's logging framework. It is a simplified version adapted to the needs of ReFrame. -The ``_logging_config`` dictionary has two main key entries: +The ``logging_config`` dictionary has two main key entries: -* ``level`` (default: ``'INFO'``): This is the lowest level of messages that will be passed down to the different log record handlers. +* ``level`` (default: ``'INFO'``): + This is the lowest level of messages that will be passed down to the different log record handlers. Any message with a lower level than that, it will be filtered out immediately and will not be passed to any handler. ReFrame defines the following logging levels with a decreasing severity: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``VERBOSE`` and ``DEBUG``. 
Note that the level name is *not* case sensitive in ReFrame. -* ``handlers``: A dictionary defining the properties of the handlers that are attached to ReFrame's logging mechanism. - The key is either a filename or a special character combination denoting standard output (``&1``) or standard error (``&2``). +* ``handlers``: + A list of log record handlers that are attached to ReFrame's logging mechanism. You can attach as many handlers as you like. - The value of each handler key is another dictionary that holds the properties of the corresponding handler as key/value pairs. + For example, by default ReFrame uses three handlers: (a) a handler that logs debug information into ``reframe.log``, (b) a handler that controls the actual output of the framework to the standard output, which does not print any debug messages, and (c) a handler that writes the same output to a file ``reframe.out``. + +Each handler is configured by another dictionary that holds its properties as string key/value pairs. +For standard ReFrame logging there are currently two types of handlers, which recognize different properties. + +.. note:: + New syntax for handlers is introduced. + The old syntax is still valid, but users are advised to update their logging configuration to the new syntax. + + .. versionchanged:: 2.13 + + +Common Log Handler Attributes +""""""""""""""""""""""""""""" + +All handlers accept the following set of attributes (keys) in their configuration: -The configurable properties of a log record handler are the following: +* ``type``: (required) the type of the handler. + There are two types of handlers used for standard logging in ReFrame -* ``level`` (default: ``'debug'``): The lowest level of log records that this handler can process. + 1. ``file``: a handler that writes log records in a file. + 2. ``stream``: a handler that writes log records in a file stream. + + +* ``level``: (default: ``DEBUG``) The lowest level of log records that this handler can process. 
* ``format`` (default: ``'%(message)s'``): Format string for the printout of the log record. ReFrame supports all the `format strings `__ from Python's logging library and provides the following additional ones: - * ``check_name``: Prints the name of the regression test on behalf of which ReFrame is currently executing. - If ReFrame is not in the context of regression test, ``reframe`` will be printed. - * ``check_jobid``: Prints the job or process id of the job or process associated with currently executing regression test. - If a job or process is not yet created, ``-1`` will be printed. - * ``check_info``: Print live information of the currently executing check. + - ``check_environ``: The programming environment a test is currently executing for. + - ``check_info``: Print live information of the currently executing check. By default this field has the form `` on using ``. - It can be configured on a per test basis by overriding the :func:`info ` method in your regression test. + It can be configured on a per test basis by overriding the :func:`info ` method of a specific regression test. + - ``check_jobid``: Prints the job or process id of the job or process associated with the currently executing regression test. + If a job or process is not yet created, ``-1`` will be printed. + - ``check_name``: Prints the name of the regression test on behalf of which ReFrame is currently executing. + If ReFrame is not in the context of regression test, ``reframe`` will be printed. + - ``check_outputdir``: The output directory associated with the currently executing test. + - ``check_partition``: The system partition where this test is currently executing. + - ``check_stagedir``: The stage directory associated with the currently executing test. + - ``check_system``: The host system where this test is currently executing. + - ``check_tags``: The tags associated with this test. + - ``version``: The ReFrame version. 
* ``datefmt`` (default: ``'%FT%T'``) The format that will be used for outputting timestamps (i.e., the ``%(asctime)s`` field). Acceptable formats must conform to standard library's `time.strftime() `__ function. -* ``append`` (default: :class:`False`) Controls whether ReFrame should append to this file or not. - This is ignored for the standard output/error handlers. -* ``timestamp`` (default: :class:`None`): Append a timestamp to this log filename. - This property may accept any date format as the ``datefmt`` property. - If set for a ``filename.log`` handler entry, the resulting log file name will be ``filename_.log``. - This property is ignored for the standard output/error handlers. .. caution:: - The ``testcase_name`` logging attribute was replaced with the ``check_info``, which is now also configurable + The ``testcase_name`` logging attribute is replaced with the ``check_info``, which is now also configurable .. versionchanged:: 2.10 + +File log handlers +""""""""""""""""" + +In addition to the common log handler attributes, file log handlers accept the following: + +* ``name``: (required) The name of the file where log records will be written. +* ``append`` (default: :class:`False`) Controls whether ReFrame should append to this file or not. +* ``timestamp`` (default: :class:`None`): Append a timestamp to this log filename. + This property may accept any date format that is accepted also by the ``datefmt`` property. + If the name of the file is ``filename.log`` and this attribute is set to ``True``, the resulting log file name will be ``filename_.log``. + + + +Stream log handlers +""""""""""""""""""" + +In addition to the common log handler attributes, file log handlers accept the following: + +* ``name``: (default ``stdout``) The symbolic name of the log stream to use. + Available values: ``stdout`` for standard output and ``stderr`` for standard error. 
+ + + Performance Logging ^^^^^^^^^^^^^^^^^^^ -ReFrame supports additional logging for performance tests specifically, in order to record historical performance data. -For each performance test, a log file of the form ``.log`` is created under the ReFrame's `log directory <#configuring-reframe-directories>`__ where the test's performance is recorded. -The default format used for this file is ``'[%(asctime)s] %(check_info)s (jobid=%(check_jobid)s): %(message)s'`` and ReFrame always appends to this file. -Currently, it is not possible for users to configure performance logging. - -The resulting log file looks like the following: +ReFrame supports an additional logging facility for recording performance values, in order to be able to keep historical performance data. +This is configured by the ``perf_logging_config`` variables, whose syntax is the same as for the ``logging_config``: + +.. literalinclude:: ../reframe/settings.py + :lines: 80-98 + :dedent: 4 + +Performance logging introduces two new log record handlers, specifically designed for this purpose. + +File-based Performance Logging +"""""""""""""""""""""""""""""" + +The type of this handler is ``filelog`` and logs the performance of a regression test in one or more files. +The attributes of this handler are the following: + +* ``prefix``: This is the directory prefix (usually dynamic) where the performance logs of a test will be stored. + This attribute accepts any of the check-specific formatting placeholders described `above <#common-log-handler-attributes>`__. + This allows you to create dynamic paths based on the current system, partition and/or programming environment a test executes. + This dynamic prefix is appended to the "global" performance log directory prefix, configurable through the ``--perflogdir`` option. + The default configuration of ReFrame for performance logging (shown in the previous listing) generates the following files: + + .. 
code-block:: none + + {PERFLOG_PREFIX}/ + system1/ + partition1/ + test_name.log + partition2/ + test_name.log + ... + system2/ + ... + + A log file, named after the test's name, is generated in different directories, which are themselves named after the system and partition names that this test has run on. + The ``PERFLOG_PREFIX`` will have the value of ``--perflogdir`` option, if specified, otherwise it will default to ``{REFRAME_PREFIX}/perflogs``. + You can always check its value by looking into the paths printed by ReFrame at the beginning of its output: + + .. code-block:: none + + Command line: ./reframe.py --prefix=/foo --system=generic -l + Reframe version: 2.13-dev0 + Launched by user: USER + Launched on host: HOSTNAME + Reframe paths + ============= + Check prefix : /Users/karakasv/Repositories/reframe + (R) Check search path : 'checks/' + Stage dir prefix : /foo/stage/ + Output dir prefix : /foo/output/ + Perf. logging prefix : /foo/perflogs + List of matched checks + ====================== + Found 0 check(s). + +* ``format``: The syntax of this attribute is the same as of the standard logging facility, except that it adds a couple more performance-specific formatting placeholders: + + - ``check_perf_lower_thres``: The lower threshold of the difference from the reference value expressed as a fraction of the reference. + - ``check_perf_upper_thres``: The upper threshold of the difference from the reference value expressed as a fraction of the reference. + - ``check_perf_ref``: The reference performance value of a certain performance variable. + - ``check_perf_value``: The performance value obtained by this test for a certain performance variable. + - ``check_perf_var``: The name of the `performance variable `__, whose value is logged. + +Using the default performance log format, the resulting log entries look like the following: .. 
code-block:: none - [2018-05-26T00:30:47] reframe 2.13-dev0: Example7Test on daint:gpu using PrgEnv-cray (jobid=746641): value: 49.246694, reference: (50.0, -0.1, 0.1) - [2018-05-26T00:30:54] reframe 2.13-dev0: Example7Test on daint:gpu using PrgEnv-gnu (jobid=746642): value: 48.781683, reference: (50.0, -0.1, 0.1) - [2018-05-26T00:31:02] reframe 2.13-dev0: Example7Test on daint:gpu using PrgEnv-pgi (jobid=746643): value: 49.139091, reference: (50.0, -0.1, 0.1) + 2018-05-30T00:14:53|reframe 2.13-dev0|Example7Test on daint:gpu using PrgEnv-gnu|jobid=749667|perf=49.152408|ref=50.0 (l=-0.1, u=0.1) + 2018-05-30T00:14:53|reframe 2.13-dev0|Example7Test on daint:gpu using PrgEnv-pgi|jobid=749668|perf=48.930356|ref=50.0 (l=-0.1, u=0.1) + 2018-05-30T00:14:53|reframe 2.13-dev0|Example7Test on daint:gpu using PrgEnv-cray|jobid=749666|perf=48.914735|ref=50.0 (l=-0.1, u=0.1) The interpretation of the performance values depends on the individual tests. The above output is from the CUDA performance test we presented in the `tutorial `__, so the value refers to the achieved Gflop/s. -The reference value is a three-element tuple of the form ``(, , )``, where the ``lower-threshold`` and ``upper-threshold`` are the acceptable tolerance thresholds expressed in percentages. -For example, the performance check shown above has a reference value of 50 Gflop/s ± 10%. + + +Performance Logging Using Graylog +""""""""""""""""""""""""""""""""" + +The type of this handler is ``graylog`` and it logs performance data to a `Graylog `__ server. +Graylog is a distributed enterprise log management service. +An example configuration of such a handler is the following: + +.. 
code-block:: python + + { + 'type': 'graylog', + 'host': 'my.graylog.server', + 'port': 12345, + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + '%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'extras': { + 'facility': 'reframe', + } + }, + +This handler introduces three new attributes: + +* ``host``: (required) The Graylog server that accepts the log messages. +* ``port``: (required) The port where the Graylog server accepts connections. +* ``extras``: (optional) A set of optional user attributes to be passed with each log record to the server. + These may depend on the server configuration. + +This log handler uses internally `pygelf `__, so this Python module must be available, otherwise this log handler will be ignored. +`GELF `__ is a format specification for log messages that are sent over the network. +The ReFrame's ``graylog`` handler sends log messages in JSON format using an HTTP POST request to the specified host and port. 
+More details on this log format may be found `here `__ + Asynchronous Execution of Regression Checks ------------------------------------------- diff --git a/reframe/core/exceptions.py b/reframe/core/exceptions.py index aec55cdbc6..b8eef2c638 100644 --- a/reframe/core/exceptions.py +++ b/reframe/core/exceptions.py @@ -72,6 +72,10 @@ class SystemAutodetectionError(UnknownSystemError): """Raised when the host system cannot be auto-detected""" +class LoggingError(ReframeError): + """Raised when an error related to logging has occurred.""" + + class EnvironError(ReframeError): """Raised when an error related to an environment occurs.""" diff --git a/reframe/core/logging.py b/reframe/core/logging.py index 5c756545f1..cc165d0924 100644 --- a/reframe/core/logging.py +++ b/reframe/core/logging.py @@ -4,12 +4,22 @@ import logging.handlers import numbers import os +import pprint import shutil import sys +import warnings from datetime import datetime import reframe import reframe.core.debug as debug +from reframe.core.exceptions import ConfigError, LoggingError + + +# Global configuration options for logging +LOG_CONFIG_OPTS = { + 'handlers.filelog.prefix': './logs/' +} + # Reframe's log levels CRITICAL = 50 @@ -83,50 +93,183 @@ def set_handler_level(hdlr, level): logging.Handler.setLevel = set_handler_level +class MultiFileHandler(logging.FileHandler): + """A file handler that allows writing on different log files based on + information from the log record. 
+ """ + + def __init__(self, prefix, mode='a', encoding=None): + super().__init__(prefix, mode, encoding, delay=True) + + # Reset FileHandler's filename + self.baseFilename = None + self._prefix = prefix + + # Associates filenames with open streams + self._streams = {} + + def emit(self, record): + try: + dirname = self._prefix % record.__dict__ + os.makedirs(dirname, exist_ok=True) + except KeyError as e: + raise LoggingError('logging failed: unknown placeholder in ' + 'filename pattern: %s' % e) from None + except OSError as e: + raise LoggingError('logging failed') from e + + self.baseFilename = os.path.join(dirname, record.check_name + '.log') + self.stream = self._streams.get(self.baseFilename, None) + super().emit(record) + self._streams[self.baseFilename] = self.stream + + def close(self): + # Close all open streams + for s in self._streams.values(): + self.stream = s + super().close() + + def load_from_dict(logging_config): if not isinstance(logging_config, collections.abc.Mapping): raise TypeError('logging configuration is not a dict') level = logging_config.get('level', 'info').lower() - handlers_dict = logging_config.get('handlers', None) + handlers_descr = logging_config.get('handlers', None) logger = Logger('reframe') logger.setLevel(_log_level_values[level]) - - for handler in _extract_handlers(handlers_dict): + for handler in _extract_handlers(handlers_descr): logger.addHandler(handler) return logger -def _extract_handlers(handlers_dict): +def _convert_handler_syntax(handler_dict): + handler_list = [] + for filename, handler_config in handler_dict.items(): + descr = handler_config + new_keys = {} + if filename == '&1': + new_keys['type'] = 'stream' + new_keys['name'] = 'stdout' + elif filename == '&2': + new_keys['type'] = 'stream' + new_keys['name'] = 'stderr' + else: + new_keys['type'] = 'file' + new_keys['name'] = filename + + descr.update(new_keys) + handler_list.append(descr) + + return handler_list + + +def _create_file_handler(handler_config): 
+ try: + filename = handler_config['name'] + except KeyError: + raise ConfigError('no file specified for file handler:\n%s' % + pprint.pformat(handler_config)) from None + + timestamp = handler_config.get('timestamp', None) + if timestamp: + basename, ext = os.path.splitext(filename) + filename = '%s_%s%s' % (basename, + datetime.now().strftime(timestamp), ext) + + append = handler_config.get('append', False) + return logging.handlers.RotatingFileHandler(filename, + mode='a+' if append else 'w+') + + +def _create_filelog_handler(handler_config): + logdir = os.path.abspath(LOG_CONFIG_OPTS['handlers.filelog.prefix']) + try: + filename_patt = os.path.join(logdir, handler_config['prefix']) + except KeyError: + raise ConfigError('no prefix specified for filelog handler:\n%s' % + pprint.pformat(handler_config)) from None + + append = handler_config.get('append', False) + return MultiFileHandler(filename_patt, mode='a+' if append else 'w+') + + +def _create_stream_handler(handler_config): + stream = handler_config.get('name', 'stdout') + if stream == 'stdout': + return logging.StreamHandler(stream=sys.stdout) + elif stream == 'stderr': + return logging.StreamHandler(stream=sys.stderr) + else: + raise ConfigError('unknown stream: %s' % stream) + + +def _create_graylog_handler(handler_config): + try: + import pygelf + except ImportError: + return None + + host = handler_config.get('host', None) + port = handler_config.get('port', None) + extras = handler_config.get('extras', None) + if host is None: + raise ConfigError('graylog handler: no host specified') + + if port is None: + raise ConfigError('graylog handler: no port specified') + + if extras is not None and not isinstance(extras, collections.abc.Mapping): + raise ConfigError('graylog handler: extras must be a mapping type') + + return pygelf.GelfHttpHandler(host=host, port=port, debug=True, + static_fields=extras, + include_extra_fields=True) + + +def _extract_handlers(handlers_list): + # Check if we are using the old 
syntax + if isinstance(handlers_list, collections.abc.Mapping): + handlers_list = _convert_handler_syntax(handlers_list) + sys.stderr.write( + 'WARNING: looks like you are using an old syntax for the ' + 'logging configuration; please update your syntax as follows:\n' + '\nhandlers: %s\n' % pprint.pformat(handlers_list, indent=1)) + handlers = [] - if not handlers_dict: - raise ValueError('no handlers are defined for logger') + if not handlers_list: + raise ValueError('no handlers are defined for logging') - for filename, handler_config in handlers_dict.items(): + for i, handler_config in enumerate(handlers_list): if not isinstance(handler_config, collections.abc.Mapping): - raise TypeError('handler %s is not a dictionary' % filename) + raise TypeError('handler config at position %s ' + 'is not a dictionary' % i) - level = handler_config.get('level', 'debug').lower() - fmt = handler_config.get('format', '%(message)s') - datefmt = handler_config.get('datefmt', '%FT%T') - append = handler_config.get('append', False) - timestamp = handler_config.get('timestamp', None) - - if filename == '&1': - hdlr = logging.StreamHandler(stream=sys.stdout) - elif filename == '&2': - hdlr = logging.StreamHandler(stream=sys.stderr) + try: + handler_type = handler_config['type'] + except KeyError: + raise ConfigError('no type specified for ' + 'handler at position %s' % i) from None + + if handler_type == 'file': + hdlr = _create_file_handler(handler_config) + elif handler_type == 'filelog': + hdlr = _create_filelog_handler(handler_config) + elif handler_type == 'stream': + hdlr = _create_stream_handler(handler_config) + elif handler_type == 'graylog': + hdlr = _create_graylog_handler(handler_config) + if hdlr is None: + sys.stderr.write('WARNING: could not initialize the ' + 'graylog handler; ignoring...\n') + continue else: - if timestamp: - basename, ext = os.path.splitext(filename) - filename = '%s_%s%s' % ( - basename, datetime.now().strftime(timestamp), ext - ) - - hdlr = 
logging.handlers.RotatingFileHandler( - filename, mode='a+' if append else 'w+') + raise ConfigError('unknown handler type: %s' % handler_type) + level = handler_config.get('level', 'debug').lower() + fmt = handler_config.get('format', '%(message)s') + datefmt = handler_config.get('datefmt', '%FT%T') hdlr.setFormatter(logging.Formatter(fmt=fmt, datefmt=datefmt)) hdlr.setLevel(_check_level(level)) handlers.append(hdlr) @@ -187,9 +330,20 @@ def __init__(self, logger=None, check=None): super().__init__( logger, { - 'check_name': check.name if check else 'reframe', + 'check_name': 'reframe', 'check_jobid': '-1', - 'check_info': check.info() if check else 'reframe', + 'check_info': 'reframe', + 'check_system': None, + 'check_partition': None, + 'check_environ': None, + 'check_outputdir': None, + 'check_stagedir': None, + 'check_perf_var': None, + 'check_perf_value': None, + 'check_perf_ref': None, + 'check_perf_lower_thres': None, + 'check_perf_upper_thres': None, + 'check_tags': None, 'version': reframe.VERSION, } ) @@ -202,13 +356,45 @@ def setLevel(self, level): if self.logger: super().setLevel(level) + def _update_check_extras(self): + """Return a dictionary with all the check-specific information.""" + if self.check is None: + return + + self.extra['check_name'] = self.check.name + self.extra['check_info'] = self.check.info() + self.extra['check_outputdir'] = self.check.outputdir + self.extra['check_stagedir'] = self.check.stagedir + self.extra['check_tags'] = ','.join(self.check.tags) + if self.check.current_system: + self.extra['check_system'] = self.check.current_system.name + + if self.check.current_partition: + self.extra['check_partition'] = self.check.current_partition.name + + if self.check.current_environ: + self.extra['check_environ'] = self.check.current_environ.name + + if self.check.job: + self.extra['check_jobid'] = self.check.job.jobid + + def log_performance(self, level, tag, value, ref, + low_thres, upper_thres, msg=None): + + # Update the 
performance-relevant extras and log the message + self.extra['check_perf_var'] = tag + self.extra['check_perf_value'] = value + self.extra['check_perf_ref'] = ref + self.extra['check_perf_lower_thres'] = low_thres + self.extra['check_perf_upper_thres'] = upper_thres + if msg is None: + msg = 'sent by ' + os.environ['USER'] + + self.log(level, msg) + def process(self, msg, kwargs): # Setup dynamic fields of the check - if self.check: - self.extra['check_info'] = self.check.info() - if self.check.job: - self.extra['check_jobid'] = self.check.job.jobid - + self._update_check_extras() try: self.extra.update(kwargs['extra']) except KeyError: @@ -229,6 +415,7 @@ def verbose(self, message, *args, **kwargs): null_logger = LoggerAdapter() _logger = None +_perf_logger = None _context_logger = null_logger @@ -257,19 +444,24 @@ def __exit__(self, exc_type, exc_value, traceback): _context_logger = self._orig_logger -def configure_logging(config): - global _logger - global _context_logger +def configure_logging(logging_config): + global _logger, _context_logger - if config is None: + if logging_config is None: _logger = None _context_logger = null_logger return - _logger = load_from_dict(config) + _logger = load_from_dict(logging_config) _context_logger = LoggerAdapter(_logger) +def configure_perflogging(perf_logging_config): + global _perf_logger + + _perf_logger = load_from_dict(perf_logging_config) + + def save_log_files(dest): os.makedirs(dest, exist_ok=True) for hdlr in _logger.handlers: @@ -279,3 +471,7 @@ def getlogger(): return _context_logger + + +def getperflogger(check): + return LoggerAdapter(_perf_logger, check) diff --git a/reframe/core/pipeline.py b/reframe/core/pipeline.py index 30710ce750..01fa2b1be5 100644 --- a/reframe/core/pipeline.py +++ b/reframe/core/pipeline.py @@ -463,7 +463,6 @@ class RegressionTest: _stagedir = fields.StringField('_stagedir', allow_none=True) _stdout = fields.StringField('_stdout', allow_none=True) _stderr = 
fields.StringField('_stderr', allow_none=True) - _perf_logfile = fields.StringField('_perf_logfile', allow_none=True) _current_partition = fields.TypedField('_current_partition', SystemPartition, allow_none=True) _current_environ = fields.TypedField('_current_environ', Environment, @@ -550,6 +549,7 @@ def __init__(self, name=None, prefix=None): # Dynamic paths of the regression check; will be set in setup() self._stagedir = None + self._outputdir = None self._stdout = None self._stderr = None @@ -558,7 +558,6 @@ def __init__(self, name=None, prefix=None): # Performance logging self._perf_logger = logging.null_logger - self._perf_logfile = None # Export read-only views to interesting fields @property @@ -628,6 +627,18 @@ def stagedir(self): """ return self._stagedir + @property + def outputdir(self): + """The output directory of the test. + + This is set during the :func:`setup` phase. + + .. versionadded:: 2.13 + + :type: :class:`str`. + """ + return self._outputdir + @property @deferrable def stdout(self): @@ -749,7 +760,7 @@ def _setup_paths(self): self.name, self._current_environ.name) - self.outputdir = rt.runtime().resources.make_outputdir( + self._outputdir = rt.runtime().resources.make_outputdir( self._current_partition.name, self.name, self._current_environ.name) @@ -821,32 +832,9 @@ def _setup_job(self, **job_opts): self._job.options = (self._current_partition.access + resources_opts + self._job.options) - # FIXME: This is a temporary solution to address issue #157 def _setup_perf_logging(self): self.logger.debug('setting up performance logging') - self._perf_logfile = os.path.join( - rt.runtime().resources.make_perflogdir( - self._current_partition.name), - self.name + '.log' - ) - - perf_logging_config = { - 'level': 'INFO', - 'handlers': { - self._perf_logfile: { - 'level': 'DEBUG', - 'format': '[%(asctime)s] reframe %(version)s: ' - '%(check_info)s ' - '(jobid=%(check_jobid)s): %(message)s', - 'append': True, - } - } - } - - self._perf_logger = 
logging.LoggerAdapter( - logger=logging.load_from_dict(perf_logging_config), - check=self - ) + self._perf_logger = logging.getperflogger(self) def setup(self, partition, environ, **job_opts): """The setup phase of the regression test pipeline. @@ -1044,30 +1032,28 @@ def check_performance(self): key = '%s:%s' % (self._current_partition.fullname, tag) try: ref, low_thres, high_thres = self.reference[key] - self._perf_logger.info( - 'value: %s, reference: %s' % - (value, self.reference[key]) - ) except KeyError: raise SanityError( "tag `%s' not resolved in references for `%s'" % - (tag, self._current_partition.fullname) - ) + (tag, self._current_partition.fullname)) + + self._perf_logger.log_performance(logging.INFO, tag, value, + ref, low_thres, high_thres) evaluate(assert_reference(value, ref, low_thres, high_thres)) def _copy_to_outputdir(self): """Copy checks interesting files to the output directory.""" self.logger.debug('copying interesting files to output directory') - shutil.copy(self._stdout, self.outputdir) - shutil.copy(self._stderr, self.outputdir) + shutil.copy(self._stdout, self._outputdir) + shutil.copy(self._stderr, self._outputdir) if self._job: - shutil.copy(self._job.script_filename, self.outputdir) + shutil.copy(self._job.script_filename, self._outputdir) # Copy files specified by the user for f in self.keep_files: if not os.path.isabs(f): f = os.path.join(self._stagedir, f) - shutil.copy(f, self.outputdir) + shutil.copy(f, self._outputdir) def cleanup(self, remove_files=False, unload_env=True): """The cleanup phase of the regression test pipeline. @@ -1077,7 +1063,7 @@ def cleanup(self, remove_files=False, unload_env=True): :arg unload_env: If :class:`True`, the environment that was used to run this test will be unloaded. 
""" - aliased = os.path.samefile(self._stagedir, self.outputdir) + aliased = os.path.samefile(self._stagedir, self._outputdir) if aliased: self.logger.debug('skipping copy to output dir ' 'since they alias each other') @@ -1096,9 +1082,9 @@ def cleanup(self, remove_files=False, unload_env=True): def __str__(self): return ('%s (found in %s)\n' ' descr: %s\n' - ' tags: %s, maintainers: %s' % + ' tags: {%s}, maintainers: %s' % (self.name, inspect.getfile(type(self)), - self.descr, self.tags, self.maintainers)) + self.descr, ','.join(self.tags), self.maintainers)) class RunOnlyRegressionTest(RegressionTest): diff --git a/reframe/core/runtime.py b/reframe/core/runtime.py index d5a1b9d72f..4f36f52a40 100644 --- a/reframe/core/runtime.py +++ b/reframe/core/runtime.py @@ -95,14 +95,12 @@ class HostResources: prefix = fields.AbsolutePathField('prefix') outputdir = fields.AbsolutePathField('outputdir', allow_none=True) stagedir = fields.AbsolutePathField('stagedir', allow_none=True) - perflogdir = fields.AbsolutePathField('perflogdir', allow_none=True) def __init__(self, prefix=None, stagedir=None, - outputdir=None, perflogdir=None, timefmt=None): + outputdir=None, timefmt=None): self.prefix = prefix or '.' 
self.stagedir = stagedir self.outputdir = outputdir - self.perflogdir = perflogdir self._timestamp = datetime.now() self.timefmt = timefmt @@ -134,23 +132,12 @@ def stage_prefix(self): else: return os.path.join(self.stagedir, self.timestamp) - @property - def perflog_prefix(self): - """The prefix directory of the performance logs of ReFrame.""" - if self.perflogdir is None: - return os.path.join(self.prefix, 'logs') - else: - return self.perflogdir - def make_stagedir(self, *dirs, wipeout=True): return self._makedir(self.stage_prefix, *dirs, wipeout=wipeout) def make_outputdir(self, *dirs, wipeout=True): return self._makedir(self.output_prefix, *dirs, wipeout=wipeout) - def make_perflogdir(self, *dirs, wipeout=False): - return self._makedir(self.perflog_prefix, *dirs, wipeout=wipeout) - class RuntimeContext: """The runtime context of the framework. diff --git a/reframe/frontend/cli.py b/reframe/frontend/cli.py index c6224f7836..b70659f4e7 100644 --- a/reframe/frontend/cli.py +++ b/reframe/frontend/cli.py @@ -61,7 +61,12 @@ def main(): help='Set regression stage directory to DIR') output_options.add_argument( '--logdir', action='store', metavar='DIR', - help='Set regression log directory to DIR') + help='(deprecated) Use --perflogdir instead.') + output_options.add_argument( + '--perflogdir', action='store', metavar='DIR', + help='Set directory prefix for the performance logs ' + '(default: ${prefix}/perflogs, ' + 'relevant only if the filelog backend is used)') output_options.add_argument( '--keep-stage-files', action='store_true', help='Keep stage directory even if check is successful') @@ -201,6 +206,12 @@ def main(): # Parse command line options = argparser.parse_args() + if options.logdir: + sys.stderr.write('WARNING: --logdir option is deprecated; ' + 'please use --perflogdir instead.\n') + if not options.perflogdir: + options.perflogdir = options.logdir + # Load configuration try: settings = config.load_settings_from_file(options.config_file) @@ -211,7 +222,7 
@@ def main():
 
     # Configure logging
     try:
-        logging.configure_logging(settings.logging_config)
+        logging.configure_logging(settings.logging_config)
     except (OSError, ConfigError) as e:
         sys.stderr.write('could not configure logging: %s\n' % e)
         sys.exit(1)
@@ -261,7 +272,6 @@ def main():
     rt.resources.prefix = os.path.expandvars(options.prefix)
     rt.resources.outputdir = None
     rt.resources.stagedir = None
-    rt.resources.perflogdir = None
 
     if options.output:
         rt.resources.outputdir = os.path.expandvars(options.output)
@@ -269,9 +279,6 @@ def main():
     if options.stage:
         rt.resources.stagedir = os.path.expandvars(options.stage)
 
-    if options.logdir:
-        rt.resources.perflogdir = os.path.expandvars(options.logdir)
-
     if (os_ext.samefile(rt.resources.stage_prefix,
                         rt.resources.output_prefix) and
             not options.keep_stage_files):
@@ -283,6 +290,26 @@ def main():
     if options.timestamp:
         rt.resources.timefmt = options.timestamp
 
+    # Configure performance logging
+    # NOTE: we need resources to be configured in order to set the global
+    # perf. 
logging prefix correctly + if options.perflogdir: + logging.LOG_CONFIG_OPTS['handlers.filelog.prefix'] = ( + os.path.expandvars(options.perflogdir)) + else: + logging.LOG_CONFIG_OPTS['handlers.filelog.prefix'] = ( + os.path.join(rt.resources.prefix, 'perflogs')) + + if hasattr(settings, 'perf_logging_config'): + try: + logging.configure_perflogging(settings.perf_logging_config) + except (OSError, ConfigError) as e: + printer.error('could not configure performance logging: %s\n' % e) + sys.exit(1) + else: + printer.warning('no performance logging is configured; ' + 'please check documentation') + # Setup the check loader if options.checkpath: load_path = [] @@ -319,9 +346,11 @@ def main(): printer.info('%03s Check search path : %s' % ('(R)' if loader.recurse else '', "'%s'" % ':'.join(loader.load_path))) - printer.info(' Stage dir prefix : %s' % rt.resources.stage_prefix) - printer.info(' Output dir prefix : %s' % rt.resources.output_prefix) - printer.info(' Logging dir : %s' % rt.resources.perflog_prefix) + printer.info(' Stage dir prefix : %s' % rt.resources.stage_prefix) + printer.info(' Output dir prefix : %s' % rt.resources.output_prefix) + printer.info( + ' Perf. 
logging prefix : %s' %
+        os.path.abspath(logging.LOG_CONFIG_OPTS['handlers.filelog.prefix']))
     try:
         # Locate and load checks
         try:
diff --git a/reframe/frontend/printer.py b/reframe/frontend/printer.py
index c499bc8692..442644fa45 100644
--- a/reframe/frontend/printer.py
+++ b/reframe/frontend/printer.py
@@ -107,12 +107,19 @@ def timestamp(self, msg='', separator=None):
         else:
             self.info(msg)
 
-    def error(self, msg):
-        self._logger.error('%s: %s' % (sys.argv[0], msg))
-
     def info(self, msg=''):
         self._logger.info(msg)
 
+    def warning(self, msg):
+        msg = AnsiColorizer.colorize('%s: %s' % (sys.argv[0], msg),
+                                     AnsiColorizer.yellow)
+        self._logger.warning(msg)
+
+    def error(self, msg):
+        msg = AnsiColorizer.colorize('%s: %s' % (sys.argv[0], msg),
+                                     AnsiColorizer.red)
+        self._logger.error(msg)
+
     def log_config(self, options):
         opt_list = [' %s=%s' % (attr, val)
                     for attr, val in sorted(options.__dict__.items())]
diff --git a/reframe/settings.py b/reframe/settings.py
index 6d2d1072a2..b0c335677e 100644
--- a/reframe/settings.py
+++ b/reframe/settings.py
@@ -4,12 +4,12 @@
 
 
 class ReframeSettings:
-    _reframe_module = 'reframe'
-    _job_poll_intervals = [1, 2, 3]
-    _job_submit_timeout = 60
-    _checks_path = ['checks/']
-    _checks_path_recurse = True
-    _site_configuration = {
+    reframe_module = 'reframe'
+    job_poll_intervals = [1, 2, 3]
+    job_submit_timeout = 60
+    checks_path = ['checks/']
+    checks_path_recurse = True
+    site_configuration = {
         'systems': {
             # Generic system used also in unit tests
             'generic': {
@@ -48,10 +48,12 @@ class ReframeSettings:
         }
     }
 
-    _logging_config = {
+    logging_config = {
         'level': 'DEBUG',
-        'handlers': {
-            'reframe.log': {
+        'handlers': [
+            {
+                'type': 'file',
+                'name': 'reframe.log',
                 'level': 'DEBUG',
                 'format': '[%(asctime)s] %(levelname)s: '
                           '%(check_info)s: %(message)s',
@@ -59,49 +61,41 @@ class ReframeSettings:
             },
 
             # Output handling
-            
'reframe.out': { + { + 'type': 'file', + 'name': 'reframe.out', 'level': 'INFO', 'format': '%(message)s', 'append': False, } - } + ] } - @property - def version(self): - return self._version - - @property - def reframe_module(self): - return self._reframe_module - - @property - def job_poll_intervals(self): - return self._job_poll_intervals - - @property - def job_submit_timeout(self): - return self._job_submit_timeout - - @property - def checks_path(self): - return self._checks_path - - @property - def checks_path_recurse(self): - return self._checks_path_recurse - - @property - def site_configuration(self): - return self._site_configuration - - @property - def logging_config(self): - return self._logging_config + perf_logging_config = { + 'level': 'DEBUG', + 'handlers': [ + { + 'type': 'filelog', + 'prefix': '%(check_system)s/%(check_partition)s', + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + '%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'append': True + } + ] + } settings = ReframeSettings() diff --git a/tutorial/config/settings.py b/tutorial/config/settings.py index 4fd2d0f6fa..07286791c2 100644 --- a/tutorial/config/settings.py +++ b/tutorial/config/settings.py @@ -4,12 +4,12 @@ class ReframeSettings: - _reframe_module = 'reframe' - _job_poll_intervals = [1, 2, 3] - _job_submit_timeout = 60 - _checks_path = ['checks/'] - _checks_path_recurse = True - _site_configuration = { + reframe_module = 'reframe' + job_poll_intervals = [1, 2, 3] + job_submit_timeout = 60 + checks_path = ['checks/'] + checks_path_recurse = True + site_configuration = { 'systems': { 'daint': { 'descr': 'Piz Daint', @@ -55,7 +55,6 @@ class ReframeSettings: 'type': 'ProgEnvironment', 'modules': ['PrgEnv-cray'], }, - 'PrgEnv-gnu': { 'type': 'ProgEnvironment', 'modules': ['PrgEnv-gnu'], @@ -74,10 +73,12 @@ class 
ReframeSettings: } } - _logging_config = { + logging_config = { 'level': 'DEBUG', - 'handlers': { - 'reframe.log': { + 'handlers': [ + { + 'type': 'file', + 'name': 'reframe.log', 'level': 'DEBUG', 'format': '[%(asctime)s] %(levelname)s: ' '%(check_name)s: %(message)s', @@ -85,49 +86,41 @@ class ReframeSettings: }, # Output handling - '&1': { + { + 'type': 'stream', + 'name': 'stdout', 'level': 'INFO', 'format': '%(message)s' }, - 'reframe.out': { + { + 'type': 'file', + 'name': 'reframe.out', 'level': 'INFO', 'format': '%(message)s', 'append': False, } - } + ] } - @property - def version(self): - return self._version - - @property - def reframe_module(self): - return self._reframe_module - - @property - def job_poll_intervals(self): - return self._job_poll_intervals - - @property - def job_submit_timeout(self): - return self._job_submit_timeout - - @property - def checks_path(self): - return self._checks_path - - @property - def checks_path_recurse(self): - return self._checks_path_recurse - - @property - def site_configuration(self): - return self._site_configuration - - @property - def logging_config(self): - return self._logging_config + perf_logging_config = { + 'level': 'DEBUG', + 'handlers': [ + { + 'type': 'filelog', + 'prefix': '%(check_system)s/%(check_partition)s', + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + '%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'append': True + } + ] + } settings = ReframeSettings() diff --git a/unittests/resources/settings.py b/unittests/resources/settings.py index 4740b52b7b..e441d04eb7 100644 --- a/unittests/resources/settings.py +++ b/unittests/resources/settings.py @@ -4,12 +4,12 @@ class ReframeSettings: - _reframe_module = 'reframe' - _job_poll_intervals = [1, 2, 3] - _job_submit_timeout = 60 - _checks_path = ['checks/'] - _checks_path_recurse = True - 
_site_configuration = { + reframe_module = 'reframe' + job_poll_intervals = [1, 2, 3] + job_submit_timeout = 60 + checks_path = ['checks/'] + checks_path_recurse = True + site_configuration = { 'systems': { # Generic system configuration that allows to run ReFrame locally # on any system. @@ -103,54 +103,46 @@ class ReframeSettings: } } - _logging_config = { + logging_config = { 'level': 'DEBUG', - 'handlers': { - '.reframe_unittest.log': { + 'handlers': [ + { + 'type': 'file', + 'name': '.rfm_unittest.log', 'level': 'DEBUG', 'format': ('[%(asctime)s] %(levelname)s: ' '%(check_name)s: %(message)s'), 'datefmt': '%FT%T', 'append': False, }, - '&1': { + { + 'type': 'stream', + 'name': 'stdout', 'level': 'INFO', 'format': '%(message)s' }, - } + ] } - @property - def version(self): - return self._version - - @property - def reframe_module(self): - return self._reframe_module - - @property - def job_poll_intervals(self): - return self._job_poll_intervals - - @property - def job_submit_timeout(self): - return self._job_submit_timeout - - @property - def checks_path(self): - return self._checks_path - - @property - def checks_path_recurse(self): - return self._checks_path_recurse - - @property - def site_configuration(self): - return self._site_configuration - - @property - def logging_config(self): - return self._logging_config + perf_logging_config = { + 'level': 'DEBUG', + 'handlers': [ + { + 'type': 'filelog', + 'prefix': '%(check_system)s/%(check_partition)s', + 'level': 'INFO', + 'format': ( + '%(asctime)s|reframe %(version)s|' + '%(check_info)s|jobid=%(check_jobid)s|' + '%(check_perf_var)s=%(check_perf_value)s|' + 'ref=%(check_perf_ref)s ' + '(l=%(check_perf_lower_thres)s, ' + 'u=%(check_perf_upper_thres)s)' + ), + 'append': True + } + ] + } settings = ReframeSettings() diff --git a/unittests/test_cli.py b/unittests/test_cli.py index d40206ca7c..d87710ebd4 100644 --- a/unittests/test_cli.py +++ b/unittests/test_cli.py @@ -25,6 +25,7 @@ def run_command_inline(argv, 
funct, *args, **kwargs): captured_stdout = StringIO() captured_stderr = StringIO() + print(*sys.argv) with redirect_stdout(captured_stdout): with redirect_stderr(captured_stderr): try: @@ -71,6 +72,9 @@ def argv(self): if self.ignore_check_conflicts: ret += ['--ignore-check-conflicts'] + if self.perflogdir: + ret += ['--perflogdir', self.perflogdir] + ret += self.more_options return ret @@ -84,11 +88,13 @@ def setUp(self): self.more_options = [] self.mode = None self.config_file = 'unittests/resources/settings.py' - self.logfile = '.reframe_unittest.log' + self.logfile = '.rfm_unittest.log' self.ignore_check_conflicts = True + self.perflogdir = '.rfm-perflogs' def tearDown(self): shutil.rmtree(self.prefix) + shutil.rmtree(self.perflogdir, ignore_errors=True) os_ext.force_remove_file(self.logfile) def _run_reframe(self): @@ -105,14 +111,9 @@ def _stage_exists(self, check_name, partitions, environs): return True - def _perflog_exists(self, check_name, partitions): - logdir = os.path.join(self.prefix, 'logs') - for p in partitions: - logfile = os.path.join(logdir, p, check_name + '.log') - if not os.path.exists(logfile): - return False - - return True + def _perflog_exists(self, check_name): + logfile = '.rfm-perflogs/generic/login/%s.log' % check_name + return os.path.exists(logfile) def assert_log_file_is_saved(self): outputdir = os.path.join(self.prefix, 'output') @@ -120,6 +121,13 @@ def assert_log_file_is_saved(self): self.assertTrue(os.path.exists( os.path.join(outputdir, os.path.basename(self.logfile)))) + def test_default_settings(self): + # Simply make sure that a default settings file exists + try: + import reframe.settings as settings + except ImportError: + self.fail('default settings file find could not be found') + def test_check_success(self): self.more_options = ['--save-log-files'] returncode, stdout, _ = self._run_reframe() @@ -206,8 +214,7 @@ def test_performance_check_failure(self): self.assertNotEqual(0, returncode) 
self.assertTrue(self._stage_exists('PerformanceFailureCheck', ['login'], self.environs)) - self.assertTrue(self._perflog_exists('PerformanceFailureCheck', - ['login'])) + self.assertTrue(self._perflog_exists('PerformanceFailureCheck')) def test_skip_system_check_option(self): self.checkpath = ['unittests/resources/checks/frontend_checks.py'] diff --git a/unittests/test_fields.py b/unittests/test_fields.py index 1739ec4469..895e6a022e 100644 --- a/unittests/test_fields.py +++ b/unittests/test_fields.py @@ -408,12 +408,6 @@ def __init__(self, value): self.assertRaises(TypeError, exec, 'tester.value = {1, 2}', globals(), locals()) - def test_settings(self): - self.assertRaises(AttributeError, exec, - "from reframe.settings import settings\n" - "settings.checks_path = ['/foo']", - globals(), locals()) - def test_deprecated_field(self): from reframe.core.exceptions import ReframeDeprecationWarning diff --git a/unittests/test_logging.py b/unittests/test_logging.py index 3c03064416..3ef799a011 100644 --- a/unittests/test_logging.py +++ b/unittests/test_logging.py @@ -7,7 +7,7 @@ from datetime import datetime import reframe.core.logging as rlog -from reframe.core.exceptions import ReframeError +from reframe.core.exceptions import ConfigError, ReframeError from reframe.core.pipeline import RegressionTest @@ -95,21 +95,23 @@ def test_logger_levels(self): self.assertTrue(self.found_in_logfile('foo')) -class TestLoggerConfiguration(unittest.TestCase): +class TestLoggingConfiguration(unittest.TestCase): def setUp(self): tmpfd, self.logfile = tempfile.mkstemp(dir='.') os.close(tmpfd) self.logging_config = { 'level': 'INFO', - 'handlers': { - self.logfile: { + 'handlers': [ + { + 'type': 'file', + 'name': self.logfile, 'level': 'WARNING', 'format': '[%(asctime)s] %(levelname)s: ' '%(check_name)s: %(message)s', 'datefmt': '%F', 'append': True, } - } + ] } self.check = RegressionTest('random_check', '.') @@ -146,7 +148,7 @@ def test_no_handlers(self): self.logging_config) def 
test_empty_handlers(self): - self.logging_config['handlers'] = {} + self.logging_config['handlers'] = [] self.assertRaises(ValueError, rlog.configure_logging, self.logging_config) @@ -173,14 +175,16 @@ def test_handler_append(self): def test_handler_noappend(self): self.logging_config = { 'level': 'INFO', - 'handlers': { - self.logfile: { + 'handlers': [ + { + 'type': 'file', + 'name': self.logfile, 'level': 'WARNING', 'format': '[%(asctime)s] %(levelname)s: %(message)s', 'datefmt': '%F', 'append': False, } - } + ] } rlog.configure_logging(self.logging_config) @@ -194,18 +198,76 @@ def test_handler_noappend(self): self.assertFalse(self.found_in_logfile('foo')) self.assertTrue(self.found_in_logfile('bar')) - # FIXME: this test is not so robust def test_date_format(self): rlog.configure_logging(self.logging_config) rlog.getlogger().warning('foo') self.assertTrue(self.found_in_logfile(datetime.now().strftime('%F'))) - def test_stream_handler_stdout(self): + def test_unknown_handler(self): self.logging_config = { 'level': 'INFO', - 'handlers': { - '&1': {}, + 'handlers': [ + {'type': 'stream', 'name': 'stderr'}, + {'type': 'foo'} + ], + } + self.assertRaises(ConfigError, rlog.configure_logging, + self.logging_config) + + def test_handler_syntax_no_type(self): + self.logging_config = { + 'level': 'INFO', + 'handlers': [{'name': 'stderr'}] + } + self.assertRaises(ConfigError, rlog.configure_logging, + self.logging_config) + + def test_handler_convert_syntax(self): + old_syntax = { + self.logfile: { + 'level': 'INFO', + 'format': '%(message)s', + 'append': False, + }, + '&1': { + 'level': 'INFO', + 'format': '%(message)s' + }, + '&2': { + 'level': 'ERROR', + 'format': '%(message)s' + } + } + + new_syntax = [ + { + 'type': 'file', + 'name': self.logfile, + 'level': 'INFO', + 'format': '%(message)s', + 'append': False + }, + { + 'type': 'stream', + 'name': 'stdout', + 'level': 'INFO', + 'format': '%(message)s' + }, + { + 'type': 'stream', + 'name': 'stderr', + 'level': 
'ERROR', + 'format': '%(message)s' } + ] + + self.assertCountEqual(new_syntax, + rlog._convert_handler_syntax(old_syntax)) + + def test_stream_handler_stdout(self): + self.logging_config = { + 'level': 'INFO', + 'handlers': [{'type': 'stream', 'name': 'stdout'}], } rlog.configure_logging(self.logging_config) raw_logger = rlog.getlogger().logger @@ -218,9 +280,7 @@ def test_stream_handler_stdout(self): def test_stream_handler_stderr(self): self.logging_config = { 'level': 'INFO', - 'handlers': { - '&2': {}, - } + 'handlers': [{'type': 'stream', 'name': 'stderr'}], } rlog.configure_logging(self.logging_config) @@ -234,14 +294,42 @@ def test_stream_handler_stderr(self): def test_multiple_handlers(self): self.logging_config = { 'level': 'INFO', - 'handlers': { - '&1': {}, - self.logfile: {}, - } + 'handlers': [ + {'type': 'stream', 'name': 'stderr'}, + {'type': 'file', 'name': self.logfile} + ], } rlog.configure_logging(self.logging_config) self.assertEqual(len(rlog.getlogger().logger.handlers), 2) + def test_file_handler_timestamp(self): + self.logging_config['handlers'][0]['timestamp'] = '%F' + rlog.configure_logging(self.logging_config) + rlog.getlogger().warning('foo') + logfile = '%s_%s' % (self.logfile, datetime.now().strftime('%F')) + self.assertTrue(os.path.exists(logfile)) + os.remove(logfile) + + def test_file_handler_syntax_no_name(self): + self.logging_config = { + 'level': 'INFO', + 'handlers': [ + {'type': 'file'} + ], + } + self.assertRaises(ConfigError, rlog.configure_logging, + self.logging_config) + + def test_stream_handler_unknown_stream(self): + self.logging_config = { + 'level': 'INFO', + 'handlers': [ + {'type': 'stream', 'name': 'foo'}, + ], + } + self.assertRaises(ConfigError, rlog.configure_logging, + self.logging_config) + def test_global_noconfig(self): # This is to test the case when no configuration is set, but since the # order the unit tests are invoked is arbitrary, we emulate the