Clean up logging situation

1 parent 47e5c92 · commit 8d92314b94af9e4a263572f1fa5be80d8e45d4a4 · @oldpatricka committed Jan 19, 2012
Showing with 55 additions and 39 deletions.
  1. +9 −4 README.md
  2. +1 −1 epuharness/config/epuharness.yml
  3. +3 −1 epuharness/deployment.py
  4. +41 −33 epuharness/harness.py
  5. +1 −0 setup.py
README.md
@@ -16,6 +16,7 @@ configuration. The default configuration is as follows:
process-dispatchers:
  pd_0:
+   logfile: /tmp/pd_0.log
    engines:
      default:
        deployable_type: eeagent
@@ -24,15 +25,16 @@ configuration. The default configuration is as follows:
nodes:
  nodeone:
    dt: eeagent
+   process-dispatcher: pd_0
    eeagents:
      eeagent_nodeone:
-       process-dispatcher: pd_0
+       logfile: /tmp/eeagent_nodeone.log
If you want two nodes, for example, your configuration file would look like:
-
process-dispatchers:
  pd_0:
+   logfile: /tmp/pd_0.log
    engines:
      default:
        deployable_type: eeagent
@@ -41,14 +43,17 @@ If you want two nodes, for example, your configuration file would look like:
nodes:
  nodeone:
    dt: eeagent
+   process-dispatcher: pd_0
    eeagents:
      eeagent_nodeone:
-       process-dispatcher: pd_0
+       logfile: /tmp/eeagent_nodeone.log
  nodetwo:
    dt: eeagent
+   process-dispatcher: pd_0
    eeagents:
      eeagent_nodetwo:
-       process-dispatcher: pd_0
+       logfile: /tmp/eeagent_nodetwo.log
+
To use the profile, save it to a yml file, and launch it like so:
epuharness/config/epuharness.yml
@@ -35,7 +35,7 @@ logging:
      formatter: detailed
      level: DEBUG
      filename: logs/logfile.txt
-     maxBytes: 1024
+     maxBytes: 1048576
      backupCount: 3
    syslog:
      class: logging.handlers.SysLogHandler
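
Note: the single change above raises the rotation threshold from 1024 bytes (1 KiB, which would roll the log almost immediately) to 1048576 bytes (1 MiB). As a rough sketch of what that handler section means when loaded into Python's logging machinery, here is an equivalent logging.config.dictConfig call; the 'detailed' format string and the handler/root wiring are illustrative assumptions, not copied from epuharness.yml:

import logging
import logging.config
import os

# Ensure the log directory from the config exists before the handler opens it.
if not os.path.isdir('logs'):
    os.makedirs('logs')

logging.config.dictConfig({
    'version': 1,
    'formatters': {
        'detailed': {
            'format': '%(asctime)s %(name)s %(levelname)s %(message)s',
        },
    },
    'handlers': {
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'detailed',
            'level': 'DEBUG',
            'filename': 'logs/logfile.txt',
            'maxBytes': 1048576,   # 1 MiB per file before rollover (1024 was only 1 KiB)
            'backupCount': 3,      # keep logfile.txt.1 through logfile.txt.3
        },
    },
    'root': {
        'handlers': ['file'],
        'level': 'DEBUG',
    },
})

logging.getLogger('epuharness').debug('rotating file logging configured')

With backupCount: 3 the handler keeps the current file plus three rotated copies, so the retained log data is capped at roughly 4 MiB per service.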
epuharness/deployment.py
@@ -5,6 +5,7 @@
DEFAULT_DEPLOYMENT = """---
process-dispatchers:
  pd_0:
+   logfile: /tmp/pd_0.log
    engines:
      default:
        deployable_type: eeagent
@@ -13,9 +14,10 @@
nodes:
  nodeone:
    dt: eeagent
+   process-dispatcher: pd_0
    eeagents:
      eeagent_nodeone:
-       process-dispatcher: pd_0
+       logfile: /tmp/eeagent_nodeone.log
"""
epuharness/harness.py
@@ -72,18 +72,22 @@ def start(self, deployment_file=None):
        self.process_dispatchers = deployment.get('process-dispatchers', {})
        for pd_name, pd in self.process_dispatchers.iteritems():
-           self._start_process_dispatcher(pd_name, pd.get('engines', {}))
+           self._start_process_dispatcher(pd_name, pd.get('engines', {}),
+                   logfile=pd.get('logfile'))
        nodes = deployment.get('nodes', {})
        for node_name, node in nodes.iteritems():
-           self.announce_node(node_name, node.get('dt', ''))
+           self.announce_node(node_name, node.get('dt', ''),
+                   node.get('process-dispatcher', ''))
            for eeagent_name, eeagent in node.get('eeagents', {}).iteritems():
-               self._start_eeagent(eeagent_name, eeagent['process-dispatcher'])
+               dispatcher = eeagent.get('process-dispatcher') or \
+                       node.get('process-dispatcher', '')
+               self._start_eeagent(eeagent_name, dispatcher, eeagent.get('logfile'))
-   def _start_process_dispatcher(self, name, engines,
+   def _start_process_dispatcher(self, name, engines, logfile=None,
            exe_name="epu-processdispatcher-service"):
        """Starts a process dispatcher with SupervisorD
@@ -96,7 +100,7 @@ def _start_process_dispatcher(self, name, engines,
log.info("Starting Process Dispatcher '%s'" % name)
config_file = self._build_process_dispatcher_config(self.exchange,
- name, engines)
+ name, engines, logfile=logfile)
cmd = "%s %s" % (exe_name, config_file)
log.debug("Running command '%s'" % cmd)
@@ -106,18 +110,18 @@ def _start_process_dispatcher(self, name, engines,
    def _build_process_dispatcher_config(self, exchange, name, engines,
-           log_file=None):
+           logfile=None):
        """Builds a yaml config file to feed to the process dispatcher
        @param exchange: the AMQP exchange the service should be on
        @param name: name of the process dispatcher, used as the topic to be
            addressed on AMQP
        @param engines: a dictionary of eeagent configs. Same format as the
            Process Dispatcher config file
-       @param log_file: the log file for the Process Dispatcher
+       @param logfile: the log file for the Process Dispatcher
        """
-       if not log_file:
-           log_file = "/tmp/pd.log"
+       if not logfile:
+           logfile = "/dev/null"
        config = {
            'dashi': {
@@ -135,7 +139,7 @@ def _build_process_dispatcher_config(self, exchange, name, engines,
                },
                'handlers': {
                    'file': {
-                       'filename': log_file,
+                       'filename': logfile,
                    }
                },
                'root': {
@@ -152,34 +156,37 @@ def _build_process_dispatcher_config(self, exchange, name, engines,
        return config_filename
-   def _start_eeagent(self, name, process_dispatcher, exe_name="eeagent"):
+   def _start_eeagent(self, name, process_dispatcher, logfile=None,
+           exe_name="eeagent"):
        """Starts an eeagent with SupervisorD
        @param name: Name of process dispatcher to start
        @param process_dispatcher: The name of the parent Process Dispatcher to
            connect to
+       @param logfile: the log file for the eeagent
        @param exe_name: the name of the eeagent executable
        """
        log.info("Starting EEAgent '%s'" % name)
-       config_file = self._build_eeagent_config(self.exchange, name, process_dispatcher)
+       config_file = self._build_eeagent_config(self.exchange, name,
+               process_dispatcher, logfile=logfile)
        cmd = "%s %s" % (exe_name, config_file)
        pid = self.factory.get_pidantic(command=cmd, process_name=name,
                directory=self.pidantic_dir)
        pid.start()
-   def _build_eeagent_config(self, exchange, name, process_dispatcher, log_file=None):
+   def _build_eeagent_config(self, exchange, name, process_dispatcher, logfile=None):
        """Builds a yaml config file to feed to the eeagent
        @param exchange: the AMQP exchange the service should be on
        @param name: name of the eeagent, used as the topic to be addressed
            on AMQP
        @param process_dispatcher: the name of the parent Process Dispatcher to
            connect to
-       @param log_file: the log file for the eeagent
+       @param logfile: the log file for the eeagent
        """
-       if not log_file:
-           log_file="/tmp/pd.log"
+       if not logfile:
+           logfile="/dev/null"
        config = {
            'dashi': {
@@ -199,7 +206,7 @@ def _build_eeagent_config(self, exchange, name, process_dispatcher, log_file=Non
                },
                'handlers': {
                    'file': {
-                       'filename': log_file,
+                       'filename': logfile,
                    }
                }
            }
@@ -214,26 +221,27 @@ def _build_eeagent_config(self, exchange, name, process_dispatcher, log_file=Non
        return config_filename
-   def announce_node(self, node_name, deployable_type):
+   def announce_node(self, node_name, deployable_type, process_dispatcher,
+           state=None):
        """Announce a node to each process dispatcher.
-       TODO: This should only announce to one PD
        @param node_name: the name of the node to advertise
        @param deployable_type: the deployable type of the node
+       @param process_dispatcher: the pd to announce to
+       @param state: the state to advertise to the pd
        """
-       state = InstanceState.RUNNING
+       if not state:
+           state = InstanceState.RUNNING
-       log.info("Announcing node '%s'" % node_name)
-
-       for pd_name, pd in self.process_dispatchers.iteritems():
-           pd_client = ProcessDispatcherClient(self.dashi, pd_name)
-           log.debug("Announcing %s of type %s is '%s' to %s" % (node_name, deployable_type, state, pd_name))
+       pd_client = ProcessDispatcherClient(self.dashi, process_dispatcher)
+       log.info("Announcing %s of type %s is '%s' to %s" % (node_name,
+               deployable_type, state, process_dispatcher))
-           for i in range(1, ADVERTISE_RETRIES):
-               try:
-                   pd_client.dt_state(node_name, deployable_type, state)
-                   break
-               except timeout:
-                   wait_time = i*i # Exponentially increasing wait
-                   log.warning("PD not available yet. Waiting %ss" % wait_time)
-                   time.sleep(i*i)
+       for i in range(1, ADVERTISE_RETRIES):
+           try:
+               pd_client.dt_state(node_name, deployable_type, state)
+               break
+           except timeout:
+               wait_time = i*i # Exponentially increasing wait
+               log.warning("PD not available yet. Waiting %ss" % wait_time)
+               time.sleep(i*i)
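
Note: two behaviors change in harness.py. Each eeagent now takes its process dispatcher from its own entry or, failing that, from the node-level process-dispatcher key, and announce_node() announces to that single dispatcher instead of looping over every known PD. The retry loop around dt_state() is kept; a standalone sketch of that pattern follows, with stand-in names for the pieces not shown in the diff (the timeout class and the client call). The i*i wait grows quadratically (1, 4, 9, ... seconds), even though the inline comment calls it exponential.

import logging
import time

log = logging.getLogger(__name__)

ADVERTISE_RETRIES = 10   # illustrative; the real constant lives in harness.py


class AdvertiseTimeout(Exception):
    """Stand-in for the timeout exception caught by announce_node()."""


def announce_with_retry(dt_state, node_name, deployable_type, state):
    """Retry a dt_state announcement, sleeping i*i seconds between attempts.

    dt_state stands in for ProcessDispatcherClient.dt_state.
    """
    for i in range(1, ADVERTISE_RETRIES):
        try:
            dt_state(node_name, deployable_type, state)
            break
        except AdvertiseTimeout:
            wait_time = i * i
            log.warning("PD not available yet. Waiting %ss" % wait_time)
            time.sleep(wait_time)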
setup.py
@@ -49,6 +49,7 @@
setupdict['install_requires'] += ['pyyaml',
                                  'dashi==0.1',
                                  'gevent==0.13.6',
+                                 'pidantic',
                                  'nose',
                                 ]
setupdict['tests_require'] = ['nose']
