Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions docs/configuration.rst
Original file line number Diff line number Diff line change
Expand Up @@ -843,6 +843,16 @@ where specified.

*Introduced*: 3.0a7

``stdout_syslog``

If true, stdout output will also be directed to syslog, with each line prefixed by the process name.

*Default*: False

*Required*: No.

*Introduced*: 3.0b3

``stderr_logfile``

Put process stderr output in this file unless ``redirect_stderr`` is
Expand Down Expand Up @@ -910,6 +920,16 @@ where specified.

*Introduced*: 3.0a7

``stderr_syslog``

If true, stderr output will also be directed to syslog, with each line prefixed by the process name.

*Default*: False

*Required*: No.

*Introduced*: 3.0b3

``environment``

A list of key/value pairs in the form ``KEY=val,KEY2=val2`` that
Expand Down
9 changes: 3 additions & 6 deletions docs/logging.rst
Original file line number Diff line number Diff line change
Expand Up @@ -134,13 +134,10 @@ The configuration keys that influence child process logging in
``[program:x]`` and ``[fcgi-program:x]`` sections are these:

``redirect_stderr``, ``stdout_logfile``, ``stdout_logfile_maxbytes``,
``stdout_logfile_backups``, ``stdout_capture_maxbytes``,
``stdout_logfile_backups``, ``stdout_capture_maxbytes``, ``stdout_syslog``,
``stderr_logfile``, ``stderr_logfile_maxbytes``,
``stderr_logfile_backups`` and ``stderr_capture_maxbytes``.

One may set ``stdout_logfile`` or ``stderr_logfile`` to the
special string "syslog". In this case, logs will be routed to the
syslog service instead of being saved to files.
``stderr_logfile_backups``, ``stderr_capture_maxbytes``, and
``stderr_syslog``.

``[eventlistener:x]`` sections may not specify
``stdout_capture_maxbytes`` or ``stderr_capture_maxbytes``,
Expand Down
81 changes: 54 additions & 27 deletions supervisor/dispatchers.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import warnings
import errno
from supervisor.medusa.asyncore_25 import compact_traceback

Expand Down Expand Up @@ -61,9 +62,15 @@ def flush(self):
pass

class POutputDispatcher(PDispatcher):
""" Output (stdout/stderr) dispatcher, capture output sent within
<!--XSUPERVISOR:BEGIN--><!--XSUPERVISOR:END--> tags and notify
with a ProcessCommunicationEvent """
"""
A Process Output (stdout/stderr) dispatcher. Serves several purposes:

- capture output sent within <!--XSUPERVISOR:BEGIN--> and
<!--XSUPERVISOR:END--> tags and signal a ProcessCommunicationEvent
by calling notify(event).
- route the output to the appropriate log handlers as specified in the
config.
"""

process = None # process which "owns" this dispatcher
channel = None # 'stderr' or 'stdout'
Expand All @@ -74,35 +81,25 @@ class POutputDispatcher(PDispatcher):
output_buffer = '' # data waiting to be logged

def __init__(self, process, event_type, fd):
"""
Initialize the dispatcher.

`event_type` should be one of ProcessLogStdoutEvent or
ProcessLogStderrEvent
"""
self.process = process
self.event_type = event_type
self.fd = fd
self.channel = channel = self.event_type.channel

logfile = getattr(process.config, '%s_logfile' % channel)
self._setup_logging(process.config, channel)

capture_maxbytes = getattr(process.config,
'%s_capture_maxbytes' % channel)

if logfile:
maxbytes = getattr(process.config, '%s_logfile_maxbytes' % channel)
backups = getattr(process.config, '%s_logfile_backups' % channel)
fmt = '%(message)s'
if logfile == 'syslog':
fmt = ' '.join((process.config.name, fmt))
self.mainlog = process.config.options.getLogger(
logfile,
loggers.LevelsByName.INFO,
fmt=fmt,
rotating=not not maxbytes, # optimization
maxbytes=maxbytes,
backups=backups)

if capture_maxbytes:
self.capturelog = self.process.config.options.getLogger(
None, # BoundIO
loggers.LevelsByName.INFO,
'%(message)s',
rotating=False,
self.capturelog = loggers.handle_boundIO(
self.process.config.options.getLogger(),
fmt='%(message)s',
maxbytes=capture_maxbytes,
)

Expand All @@ -119,6 +116,35 @@ def __init__(self, process, event_type, fd):
self.stdout_events_enabled = config.stdout_events_enabled
self.stderr_events_enabled = config.stderr_events_enabled

def _setup_logging(self, config, channel):
"""
Configure the main log according to the process' configuration and
channel. Sets `mainlog` on self. Returns nothing.
"""

logfile = getattr(config, '%s_logfile' % channel)
if not logfile:
return

maxbytes = getattr(config, '%s_logfile_maxbytes' % channel)
backups = getattr(config, '%s_logfile_backups' % channel)
fmt = '%(message)s'
if logfile == 'syslog':
warnings.warn("Specifying 'syslog' for filename is deprecated. "
"Use %s_syslog instead." % channel, DeprecationWarning)
fmt = ' '.join((config.name, fmt))
self.mainlog = loggers.handle_file(
config.options.getLogger(),
filename=logfile,
fmt=fmt,
rotating=not not maxbytes, # optimization
maxbytes=maxbytes,
backups=backups)

if getattr(config, '%s_syslog' % channel, False):
fmt = config.name + ' %(message)s'
loggers.handle_syslog(self.mainlog, fmt)

def removelogs(self):
for log in (self.mainlog, self.capturelog):
if log is not None:
Expand Down Expand Up @@ -264,13 +290,14 @@ def __init__(self, process, channel, fd):
if logfile:
maxbytes = getattr(process.config, '%s_logfile_maxbytes' % channel)
backups = getattr(process.config, '%s_logfile_backups' % channel)
self.childlog = process.config.options.getLogger(
self.childlog = loggers.handle_file(
process.config.options.getLogger(),
logfile,
loggers.LevelsByName.INFO,
'%(message)s',
rotating=not not maxbytes, # optimization
maxbytes=maxbytes,
backups=backups)
backups=backups,
)

def removelogs(self):
if self.childlog is not None:
Expand Down
53 changes: 31 additions & 22 deletions supervisor/loggers.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,36 +320,45 @@ def emit(self, record):
except:
self.handleError(record)

def getLogger(filename, level, fmt, rotating=False, maxbytes=0, backups=0,
stdout=False):
def getLogger(level=None):
return Logger(level)

handlers = []
_2MB = 1<<21

logger = Logger(level)
def handle_boundIO(logger, fmt, maxbytes=_2MB):
io = BoundIO(maxbytes)
handler = StreamHandler(io)
handler.setLevel(logger.level)
handler.setFormat(fmt)
logger.addHandler(handler)
logger.getvalue = io.getvalue

if filename is None:
if not maxbytes:
maxbytes = 1<<21 #2MB
io = BoundIO(maxbytes)
handlers.append(StreamHandler(io))
logger.getvalue = io.getvalue
return logger

def handle_stdout(logger, fmt):
handler = StreamHandler(sys.stdout)
handler.setFormat(fmt)
handler.setLevel(logger.level)
logger.addHandler(handler)

def handle_syslog(logger, fmt):
handler = SyslogHandler()
handler.setFormat(fmt)
handler.setLevel(logger.level)
logger.addHandler(handler)

elif filename == 'syslog':
handlers.append(SyslogHandler())
def handle_file(logger, filename, fmt, rotating=False, maxbytes=0, backups=0):
if filename == 'syslog':
handler = SyslogHandler()

else:
if rotating is False:
handlers.append(FileHandler(filename))
handler = FileHandler(filename)
else:
handlers.append(RotatingFileHandler(filename,'a',maxbytes,backups))
handler = RotatingFileHandler(filename, 'a', maxbytes, backups)

if stdout:
handlers.append(StreamHandler(sys.stdout))

for handler in handlers:
handler.setFormat(fmt)
handler.setLevel(level)
logger.addHandler(handler)
handler.setFormat(fmt)
handler.setLevel(logger.level)
logger.addHandler(handler)

return logger

38 changes: 24 additions & 14 deletions supervisor/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,7 +305,7 @@ def realize(self, args=None, doc=None,

def process_config(self, do_usage=True):
"""Process configuration data structure.

This includes reading config file if necessary, setting defaults etc.
"""
if self.configfile:
Expand Down Expand Up @@ -438,10 +438,12 @@ def version(self, dummy):
self.stdout.write('%s\n' % VERSION)
self.exit(0)

def getLogger(self, filename, level, fmt, rotating=False, maxbytes=0,
backups=0, stdout=False):
return loggers.getLogger(filename, level, fmt, rotating, maxbytes,
backups, stdout)
def getLogger(self, *args, **kwargs):
"""
A proxy to loggers.getLogger so the options might customize log setup.
Used by tests to mock log setup.
"""
return loggers.getLogger(*args, **kwargs)

def realize(self, *arg, **kw):
Options.realize(self, *arg, **kw)
Expand Down Expand Up @@ -811,7 +813,7 @@ def processes_from_section(self, parser, section, group_name,
raise ValueError(
'%(process_num) must be present within process_name when '
'numprocs > 1')

if stopasgroup and not killasgroup:
raise ValueError("Cannot set stopasgroup=true and killasgroup=false")

Expand Down Expand Up @@ -848,6 +850,10 @@ def processes_from_section(self, parser, section, group_name,
maxbytes = byte_size(get(section, mb_key, '50MB'))
logfiles[mb_key] = maxbytes

sy_key = '%s_syslog' % k
syslog = boolean(get(section, sy_key, False))
logfiles[sy_key] = syslog

if lf_val is Automatic and not maxbytes:
self.parse_warnings.append(
'For [%s], AUTO logging used for %s without '
Expand All @@ -871,11 +877,13 @@ def processes_from_section(self, parser, section, group_name,
stdout_events_enabled = stdout_events,
stdout_logfile_backups=logfiles['stdout_logfile_backups'],
stdout_logfile_maxbytes=logfiles['stdout_logfile_maxbytes'],
stdout_syslog=logfiles['stdout_syslog'],
stderr_logfile=logfiles['stderr_logfile'],
stderr_capture_maxbytes = stderr_cmaxbytes,
stderr_events_enabled = stderr_events,
stderr_logfile_backups=logfiles['stderr_logfile_backups'],
stderr_logfile_maxbytes=logfiles['stderr_logfile_maxbytes'],
stderr_syslog=logfiles['stderr_syslog'],
stopsignal=stopsignal,
stopwaitsecs=stopwaitsecs,
stopasgroup=stopasgroup,
Expand Down Expand Up @@ -1183,7 +1191,7 @@ def dropPrivileges(self, user):

# always put our primary gid first in this list, otherwise we can
# lose group info since sometimes the first group in the setgroups
# list gets overwritten on the subsequent setgid call (at least on
# list gets overwritten on the subsequent setgid call (at least on
# freebsd 9 with python 2.7 - this will be safe though for all unix
# /python version combos)
groups.insert(0, gid)
Expand Down Expand Up @@ -1276,16 +1284,18 @@ def set_rlimits(self):

def make_logger(self, critical_messages, warn_messages, info_messages):
# must be called after realize() and after supervisor does setuid()
format = '%(asctime)s %(levelname)s %(message)s\n'
self.logger = loggers.getLogger(
format = '%(asctime)s %(levelname)s %(message)s\n'
self.logger = loggers.getLogger(self.loglevel)
if self.nodaemon:
loggers.handle_stdout(self.logger, format)
loggers.handle_file(
self.logger,
self.logfile,
self.loglevel,
format,
rotating=True,
maxbytes=self.logfile_maxbytes,
backups=self.logfile_backups,
stdout = self.nodaemon,
)
)
for msg in critical_messages:
self.logger.critical(msg)
for msg in warn_messages:
Expand Down Expand Up @@ -1594,11 +1604,11 @@ class ProcessConfig(Config):
'name', 'uid', 'command', 'directory', 'umask', 'priority',
'autostart', 'autorestart', 'startsecs', 'startretries',
'stdout_logfile', 'stdout_capture_maxbytes',
'stdout_events_enabled',
'stdout_events_enabled', 'stdout_syslog',
'stdout_logfile_backups', 'stdout_logfile_maxbytes',
'stderr_logfile', 'stderr_capture_maxbytes',
'stderr_logfile_backups', 'stderr_logfile_maxbytes',
'stderr_events_enabled',
'stderr_events_enabled', 'stderr_syslog',
'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup',
'exitcodes', 'redirect_stderr' ]
optional_param_names = [ 'environment', 'serverurl' ]
Expand Down
Loading