Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Add bootstrapping code

  • Loading branch information...
commit 6a56d178c619e5d0a9537b34c90459d67c6d8b26 1 parent 47eb2a7
@oldpatricka oldpatricka authored
View
21 config/logging.local.yml
@@ -0,0 +1,21 @@
+# Section for defining logger log levels. Loggers
+# are defined using dot notation.
+#
+# Examples:
+# Enable DEBUG level for the pyon.datastore package
+# and all classes subordinate to that package.
+#
+# pyon.datastore:
+# handlers: [console, file]
+# level: DEBUG
+#
+# Enable INFO level for the module
+# pyon.datastore.couchdb.couchdb_datastore
+#
+# pyon.datastore.couchdb.couchdb_datastore:
+# handlers: [console, file]
+# level: INFO
+loggers:
+ TestService:
+ handlers: [console]
+ level: INFO
View
55 config/logging.yml
@@ -0,0 +1,55 @@
+# Configuration file for setting log levels.
+
+# Required. Do not touch or alter.
+version: 1
+
+# Definition of the output format for the handlers.
+#
+# Users should generally not touch this section.
+formatters:
+ brief:
+ format: '%(message)s'
+ default:
+ format: '%(asctime)s %(levelname)-8s %(name)-15s %(message)s'
+ datefmt: '%Y-%m-%d %H:%M:%S'
+
+# Definition of the output handlers. In this case, we've defined
+# two handlers. One to display basic output to stdout. Another
+# to display more detailed info in a log file.
+#
+# Users should generally not touch this section.
+handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: brief
+ level: DEBUG
+ stream: ext://sys.stdout
+ file:
+ class: logging.handlers.RotatingFileHandler
+ formatter: default
+ level: DEBUG
+ filename: logs/logfile.txt
+ maxBytes: 1024
+ backupCount: 3
+
+# Section for defining logger log levels. Loggers
+# are defined using dot notation.
+#
+# Examples:
+# Enable DEBUG level for the pyon.datastore package
+# and all classes subordinate to that package.
+#
+# pyon.datastore:
+# handlers: [console, file]
+# level: DEBUG
+#
+# Enable INFO level for the module
+# pyon.datastore.couchdb.couchdb_datastore
+#
+# pyon.datastore.couchdb.couchdb_datastore:
+# handlers: [console, file]
+# level: INFO
+loggers:
+ pyon:
+ handlers: [console, file]
+ level: DEBUG
View
14 config/service.yml
@@ -0,0 +1,14 @@
+server:
+ amqp:
+ host: localhost
+ port: 5672
+ username: guest
+ password: guest
+ vhost: /
+ heartbeat: 30
+
+system:
+ # sysname - by default it is generated from your system's uname.
+ # uncomment and set here if you want to lock it to something.
+ #name: r2demo
+ name:
View
88 dashi/bootstrap/__init__.py
@@ -0,0 +1,88 @@
+import os
+import sys
+import logging
+import logging.config
+
+from copy import copy
+
+from config import Config
+
+DEFAULT_CONFIG_FILES = [
+ 'config/service.yml',
+ 'config/service.local.yml',
+ ]
+LOGGING_CONFIG_FILES = [
+ 'config/logging.yml',
+ 'config/logging.local.yml',
+ ]
+
class Service(object):
    """Base class for services. Meant to be subclassed.

    Loads the service configuration and the logging configuration from
    ordered lists of YAML files (later files deep-merge over earlier ones)
    and stores the parsed command line under CFG['cli_args'].
    """

    def __init__(self, config_files=None, logging_config_files=None):
        """
        @param config_files: ordered list of YAML config paths;
            defaults to DEFAULT_CONFIG_FILES.
        @param logging_config_files: ordered list of YAML logging config
            paths; defaults to LOGGING_CONFIG_FILES.
        """
        # None sentinels so the shared module-level lists are resolved at
        # call time and never mutated through a default argument.
        if config_files is None:
            config_files = DEFAULT_CONFIG_FILES
        if logging_config_files is None:
            logging_config_files = LOGGING_CONFIG_FILES

        self.CFG = Config(config_files).data
        self.LOGGING_CFG = Config(logging_config_files).data

        self.CFG['cli_args'] = self._parse_argv()

    def get_logger(self, name=None):
        """Set up logging for a service using the py 2.7 dictConfig.

        @param name: logger name; defaults to the concrete class name.
        @return: a configured logging.Logger instance.
        @raise AttributeError: if this Python lacks logging.config.dictConfig.
        """
        if not name:
            name = self.__class__.__name__

        logger = logging.getLogger(name)

        # Make log directories if they don't exist yet.
        # .values() instead of Py2-only .itervalues()
        for handler in self.LOGGING_CFG.get('handlers', {}).values():
            if 'filename' in handler:
                log_dir = os.path.dirname(handler['filename'])
                # dirname is '' for a bare filename; only create real dirs
                # (os.makedirs('') raises OSError)
                if log_dir and not os.path.exists(log_dir):
                    os.makedirs(log_dir)
        try:
            #TODO: This requires python 2.7
            logging.config.dictConfig(self.LOGGING_CFG)
        except AttributeError:
            # sys.stderr.write works on both Py2 and Py3 (print >> is Py2-only)
            sys.stderr.write('"logging.config.dictConfig" doesn\'t seem to be supported in your python\n')
            raise

        return logger

    def _parse_argv(self, argv=None):
        """Return argv as a parsed dictionary, looks like the following:

        app --option1 likethis --option2 likethat --flag

        ->

        {'option1': 'likethis', 'option2': 'likethat', 'flag': True}

        @param argv: argument list to parse; defaults to a copy of sys.argv.
        """
        # Copy at CALL time. The original default `copy(sys.argv)` was
        # evaluated once at class-definition time, freezing whatever
        # sys.argv held at import and sharing one list across all calls.
        if argv is None:
            argv = list(sys.argv)

        cli_args = {}
        while argv:
            arg = argv[0]
            try:
                maybe_val = argv[1]
            except IndexError:
                maybe_val = None

            # startswith is safe on an empty string (arg[0] would raise)
            if arg.startswith('-'):
                key = arg.lstrip('-')
                if not maybe_val or maybe_val.startswith('-'):
                    # No value follows: treat as a boolean flag
                    val = True
                    argv = argv[1:]
                else:
                    val = maybe_val
                    argv = argv[2:]
                cli_args[key] = val
            else:
                #skip arguments that aren't preceded with -
                argv = argv[1:]

        return cli_args
View
51 dashi/bootstrap/config.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+__author__ = 'Adam R. Smith'
+__license__ = 'Apache 2.0'
+
+import yaml
+
+from containers import DotDict, dict_merge
+
class Config(object):
    """
    YAML-based config loader that supports multiple paths.
    Later paths get deep-merged over earlier ones.
    """

    def __init__(self, paths=(), dict_class=DotDict):
        """
        @param paths: iterable of YAML file paths, merged in order.
        @param dict_class: mapping type for the merged data
            (defaults to DotDict for dot-notation access).
        """
        self.paths = list(paths)
        self.paths_loaded = set()
        self.dict_class = dict_class
        self.data = self.dict_class()

        if paths:
            self.load()

    def add_path(self, path):
        """ Add this path at the end of the list and load/merge its contents. """
        self.paths.append(path)
        self.load()

    def load(self):
        """ Load each path in order. Remember paths already loaded and only load new ones. """
        # Merge new paths over the data loaded so far. (Starting from an
        # empty dict here, while skipping already-loaded paths, made
        # add_path() silently discard everything loaded earlier.)
        data = self.data

        for path in self.paths:
            if path in self.paths_loaded:
                continue

            try:
                with open(path, 'r') as config_file:
                    # safe_load: a config file must not be able to construct
                    # arbitrary Python objects (yaml.load can)
                    path_data = yaml.safe_load(config_file)
                # An empty YAML file parses to None; nothing to merge then.
                if path_data is not None:
                    data = dict_merge(data, path_data)
                self.paths_loaded.add(path)
            except IOError:
                # TODO: Log this correctly once logging is implemented
                # Missing .local.yml overrides are expected; stay quiet.
                if not path.endswith('.local.yml'):
                    print('CONFIG NOT FOUND: %s' % (path,))

        self.data = data

    def reload(self):
        """ Forget all loaded paths and re-read everything from scratch. """
        self.paths_loaded.clear()
        # Reset data too, since load() now merges into the existing data.
        self.data = self.dict_class()
        self.load()
+
View
180 dashi/bootstrap/containers.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python
+
+__author__ = 'Adam R. Smith'
+__license__ = 'Apache 2.0'
+
+import collections
+
class DotNotationGetItem(object):
    """ Drive the behavior for DotList and DotDict lookups by dot notation, JSON-style. """

    def _convert(self, val):
        """ Convert the type if necessary and return if a conversion happened. """
        # Wrap plain containers so nested lookups also get dot notation.
        if isinstance(val, dict) and not isinstance(val, DotDict):
            return DotDict(val), True
        if isinstance(val, list) and not isinstance(val, DotList):
            return DotList(val), True
        return val, False

    def __getitem__(self, key):
        """ Look up *key*, wrapping (and caching) the value on first access. """
        raw = super(DotNotationGetItem, self).__getitem__(key)
        wrapped, was_converted = self._convert(raw)
        if was_converted:
            # Store the wrapped value back so conversion happens only once.
            self[key] = wrapped
        return wrapped
+
class DotList(DotNotationGetItem, list):
    """ Partner class for DotDict; see that for docs. Both are needed to fully support JSON/YAML blocks. """

    def __iter__(self):
        """ Yield each element, wrapping nested containers in Dot* variants on the fly. """
        # NOTE: converts on every pass, so iterating is slower than a plain list.
        for raw in super(DotList, self).__iter__():
            wrapped, _ = self._convert(raw)
            yield wrapped
+
class DotDict(DotNotationGetItem, dict):
    """
    Subclass of dict that will recursively look up attributes with dot notation.
    This is primarily for working with JSON-style data in a cleaner way like javascript.
    Note that this will instantiate a number of child DotDicts when you first access attributes;
    do not use in performance-critical parts of your code.
    """

    def __getattr__(self, key):
        """ Make attempts to lookup by nonexistent attributes also attempt key lookups. """
        try:
            return self.__getitem__(key)
        except KeyError:
            # The attribute protocol expects AttributeError, not KeyError.
            raise AttributeError(key)

    def copy(self):
        """ Return a shallow copy that is itself a DotDict. """
        return DotDict(dict.copy(self))

    @classmethod
    def fromkeys(cls, seq, value=None):
        """ Like dict.fromkeys, but the result is a DotDict. """
        return DotDict(dict.fromkeys(seq, value))
+
class DictModifier(DotDict):
    """
    Subclass of DotDict that allows the sparse overriding of dict values.

    Values set on this object shadow those of the wrapped base DotDict;
    lookups that miss here fall through to the base.
    """
    def __init__(self, base, data=None):
        """
        @param base: DotDict to delegate missed lookups to.
        @param data: optional initial overrides.
        @raise TypeError: if base is not a DotDict.
        """
        # base should be a DotDict, raise TypeError exception if not
        if not isinstance(base, DotDict):
            raise TypeError("Base must be of type DotDict")
        self.base = base

        if data is not None:
            self.update(data)

    def __getattr__(self, key):
        try:
            return DotDict.__getattr__(self, key)
        except AttributeError:
            # Not overridden here -- delegate to base
            return getattr(self.base, key)

    def __getitem__(self, key):
        try:
            return DotDict.__getitem__(self, key)
        except KeyError:
            # Delegate via MAPPING lookup, not getattr: getattr raised
            # AttributeError where dict callers expect KeyError, and for
            # keys colliding with dict method names (e.g. 'copy', 'keys')
            # it returned the bound method instead of the stored value.
            return self.base[key]
+
# dict_merge from: http://appdelegateinc.com/blog/2011/01/12/merge-deeply-nested-dicts-in-python/

try:
    # collections.Mapping was removed from the collections module in
    # Python 3.10; collections.abc has it from 3.3 onward.
    from collections.abc import Mapping as _Mapping
except ImportError:  # Python 2
    from collections import Mapping as _Mapping

def quacks_like_dict(object):
    """Check if object is dict-like"""
    return isinstance(object, _Mapping)

def dict_merge(a, b):
    """Merge two deep dicts non-destructively

    Uses a stack to avoid maximum recursion depth exceptions

    NOTE(review): the top-level copy is shallow, so nested dicts of `a`
    reached during the merge are modified in place.

    >>> a = {'a': 1, 'b': {1: 1, 2: 2}, 'd': 6}
    >>> b = {'c': 3, 'b': {2: 7}, 'd': {'z': [1, 2, 3]}}
    >>> c = dict_merge(a, b)
    >>> from pprint import pprint; pprint(c)
    {'a': 1, 'b': {1: 1, 2: 7}, 'c': 3, 'd': {'z': [1, 2, 3]}}
    """
    # Validate BOTH arguments. The original
    #   assert quacks_like_dict(a), quacks_like_dict(b)
    # used the second check as the assert *message*, so b was never tested.
    assert quacks_like_dict(a) and quacks_like_dict(b)
    dst = a.copy()

    stack = [(dst, b)]
    while stack:
        current_dst, current_src = stack.pop()
        for key in current_src:
            if key not in current_dst:
                current_dst[key] = current_src[key]
            else:
                if quacks_like_dict(current_src[key]) and quacks_like_dict(current_dst[key]):
                    # Both sides are mappings: descend and merge key-by-key
                    stack.append((current_dst[key], current_src[key]))
                else:
                    # Scalar/type mismatch: the source value wins
                    current_dst[key] = current_src[key]
    return dst
+
def named_any(name):
    """
    Retrieve a Python object by its fully qualified name from the global Python
    module namespace. The first part of the name, that describes a module,
    will be discovered and imported. Each subsequent part of the name is
    treated as the name of an attribute of the object specified by all of the
    name which came before it.
    @param name: The name of the object to return.
    @return: the Python object identified by 'name'.
    @raise Exception: if no importable module prefix of 'name' exists.
    """
    assert name, 'Empty module name'
    names = name.split('.')

    # Find the longest importable prefix of the dotted name.
    topLevelPackage = None
    moduleNames = names[:]
    while not topLevelPackage:
        if moduleNames:
            trialname = '.'.join(moduleNames)
            try:
                topLevelPackage = __import__(trialname)
            # `except Exception:` with `as` (the Py2-only comma syntax bound
            # an unused name); broad on purpose: any import failure means
            # "try a shorter prefix".
            except Exception:
                moduleNames.pop()
        else:
            if len(names) == 1:
                raise Exception("No module named %r" % (name,))
            else:
                raise Exception('%r does not name an object' % (name,))

    # Walk the remaining components as attributes, top-level package down.
    obj = topLevelPackage
    for n in names[1:]:
        obj = getattr(obj, n)

    return obj
+
def for_name(modpath, classname):
    """Import module *modpath* and return a new, no-argument instance of its
    class *classname*.
    """
    mod = __import__(modpath, fromlist=[classname])
    cls = getattr(mod, classname)
    return cls()
+
+
if __name__ == '__main__':
    # Smoke-test demo: exercise dot-notation access on nested structures.
    # Format-string print(...) calls produce identical output under both
    # Python 2 (where `print x, y` was a statement) and Python 3.
    dd = DotDict({'a': {'b': {'c': 1, 'd': 2}}})
    print('%s %s' % (dd.a.b.c, dd.a.b.d))
    print(dd.a.b)

    print(dict.fromkeys(('a', 'b', 'c'), 'foo'))
    print(DotDict.fromkeys(('a', 'b', 'c'), 'foo').a)

    dl = DotList([1, {'a': {'b': {'c': 1, 'd': 2}}}])
    print(dl[1].a.b.c)
+
View
13 examples/testservice.py
@@ -0,0 +1,13 @@
+from dashi.bootstrap import Service
+
class TestService(Service):
    """Minimal example service: boots, configures logging, logs its config."""

    def __init__(self, *args, **kwargs):
        super(TestService, self).__init__(*args, **kwargs)
        self.log = self.get_logger()

        # Lazy %-style logger args: formatting is deferred until (and
        # unless) the record is actually emitted.
        self.log.info("%s started", self.__class__.__name__)
        self.log.info("config: %s", self.CFG)

if __name__ == "__main__":
    TestService()
View
2  setup.py
@@ -22,7 +22,7 @@
from setuptools import setup, find_packages
setupdict['packages'] = find_packages()
-setupdict['install_requires'] = ['kombu']
+setupdict['install_requires'] = ['kombu', 'pyyaml']
setupdict['tests_require'] = ['nose']
setupdict['test_suite'] = 'nose.collector'
Please sign in to comment.
Something went wrong with that request. Please try again.