Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

initially adding the files

  • Loading branch information...
commit 24780e06f2ee6522355a4f2ec10d0c12e7dec3c3 0 parents
BuzzTroll authored
1  cloudyvents/.gitignore
@@ -0,0 +1 @@
+*.pyc
0  cloudyvents/__init__.py
No changes.
161 cloudyvents/cei_events.py
@@ -0,0 +1,161 @@
+import datetime
+import uuid
+import simplejson as json
+
# Anything after this token in a log line is considered a parsable event
CEI_EVENT_SEPARATOR = "CEI_EVENT_JSON:"

# Top-level JSON dictionary keys for a serialized event.
KEY_SOURCE = "eventsource"
KEY_NAME = "eventname"
KEY_UNIQUEKEY = "uniquekey"
KEY_EXTRA = "extra"
KEY_STAMP = 'timestamp'
# Keys of the nested timestamp dictionary produced by _timestamp_to_dict.
KEY_STAMP_YEAR = 'year'
KEY_STAMP_MONTH = 'month'
KEY_STAMP_DAY = 'day'
KEY_STAMP_HOUR = 'hour'
KEY_STAMP_MINUTE = 'minute'
KEY_STAMP_SECOND = 'second'
KEY_STAMP_MICROSECOND = 'microsecond'
+
+# ------------------ EVENT CREATION --------------------------------
+
def event(source, name, logger, extra=None):
    """Record one event line through the given logger for later harvesting.

    @param source The event source; useful for grouping related events.
    @param name The event name.
    @param logger A logger object; must be supplied by the caller.
    @param extra Optional opaque dict you will consult after parsing.
    """
    if logger:
        txt = event_logtxt(source, name, extra=extra)
        logger.warning(txt)
    else:
        raise Exception("logger is required")
+
def event_logtxt(source, name, extra=None):
    """Same as the 'event' function, but you control where text is recorded.

    @return A log-ready line: the separator token followed by the JSON event.
    """
    # Named json_txt (not 'json') so we don't shadow the imported json module.
    json_txt = event_json(source, name, extra=extra)
    return "%s %s" % (CEI_EVENT_SEPARATOR, json_txt)
+
def event_json(source, name, extra=None):
    """Return the raw JSON text of an event, without any keyword to help
    a parser find it: you are on your own.
    """
    ev = _event_dict(source, name, extra=extra)
    return json.dumps(ev)
+
def _event_dict(source, name, extra=None):
    """Validate inputs and build the serializable dict for one event."""
    if not source:
        raise Exception("event source is required")
    if not name:
        raise Exception("event name is required")
    for text in (source, name):
        _valid(text)
    _valid_dict(extra)

    return {
        KEY_SOURCE: source,
        KEY_NAME: name,
        KEY_UNIQUEKEY: str(uuid.uuid4()),
        KEY_STAMP: _timestamp_to_dict(_timestamp_now()),
        KEY_EXTRA: extra,
    }
+
+def _valid(string):
+ if not string:
+ return
+ if string.rfind("\n") >= 0:
+ raise Exception("Cannot contain newline: %s" % string)
+
+def _valid_dict(adict):
+ if not adict:
+ return
+ # usually frowned upon, but feel strictness will do more good than harm here
+ if not isinstance(adict, dict):
+ raise Exception("the extra portion of an event needs to be a dict")
+
+ # only checks the first level for now..
+ for k in adict.keys():
+ if isinstance(k, int):
+ raise Exception("the json module won't support integer keys")
+
+
+# ------------------ EVENT HARVESTING --------------------------------
+
class CEIEvent:
    """Convenience class for a parsed event."""

    def __init__(self, source, name, key, timestamp, extra):
        # event source string; used for grouping
        self.source = source
        # event name string
        self.name = name
        # unique key assigned when the event was recorded
        self.key = key
        # datetime reconstructed from the log entry
        self.timestamp = timestamp
        # opaque dict supplied by the event producer (may be None)
        self.extra = extra

    def __repr__(self):
        # debugging aid only; no caller depends on this format
        return "CEIEvent(source=%r, name=%r, key=%r)" % (
            self.source, self.name, self.key)
+
def events_from_file(path, sourcefilter=None, namefilter=None):
    """Return list of CEIEvent instances found in a file.

    @param path Path of the log file to scan.
    @param sourcefilter scope list to events with source having this prefix
    @param namefilter scope list to events with name having this prefix
    """
    events = []
    # 'with' guarantees the file handle is closed even if parsing raises
    # (the original left it to the garbage collector)
    with open(path) as logfile:
        for line in logfile:
            ev = _event_from_logline(line)
            if not ev:
                continue
            if sourcefilter and not ev.source.startswith(sourcefilter):
                continue
            if namefilter and not ev.name.startswith(namefilter):
                continue
            events.append(ev)
    return events
+
def _event_from_logline(log_line):
    """Parse one log line into a CEIEvent, or return None.

    The line must contain exactly one separator token; the JSON payload
    follows it.
    """
    if not log_line:
        return None
    # split alone decides both presence and uniqueness of the separator;
    # the former rfind() pre-check was a redundant second scan of the line
    parts = log_line.split(CEI_EVENT_SEPARATOR)
    if len(parts) != 2:
        return None
    return _event_from_json(parts[1])
+
def _event_from_json(json_string):
    """Deserialize one event's JSON payload into a CEIEvent instance."""
    fields = json.loads(json_string)
    source = fields[KEY_SOURCE]
    name = fields[KEY_NAME]
    key = fields[KEY_UNIQUEKEY]
    extra = fields[KEY_EXTRA]
    when = _dict_to_timestamp(fields[KEY_STAMP])
    return CEIEvent(source, name, key, when, extra)
+
+
+# ------------------ TIMESTAMP --------------------------------
+
+# measurements that happen around a leap second could be unusable
+def _timestamp_now():
+ return datetime.datetime.utcnow()
+
def _timestamp_to_dict(dt_inst):
    """Flatten a datetime into a JSON-friendly dict of integer fields."""
    return {KEY_STAMP_YEAR: dt_inst.year,
            KEY_STAMP_MONTH: dt_inst.month,
            KEY_STAMP_DAY: dt_inst.day,
            KEY_STAMP_HOUR: dt_inst.hour,
            KEY_STAMP_MINUTE: dt_inst.minute,
            KEY_STAMP_SECOND: dt_inst.second,
            KEY_STAMP_MICROSECOND: dt_inst.microsecond}
+
def _dict_to_timestamp(jsondict):
    """Rebuild a naive datetime from the dict made by _timestamp_to_dict."""
    field_order = (KEY_STAMP_YEAR, KEY_STAMP_MONTH, KEY_STAMP_DAY,
                   KEY_STAMP_HOUR, KEY_STAMP_MINUTE, KEY_STAMP_SECOND,
                   KEY_STAMP_MICROSECOND)
    return datetime.datetime(*(jsondict[key] for key in field_order))
1  cloudyvents/tests/__init__.py
@@ -0,0 +1 @@
+from cloudyvents.tests.test_cei_events import *
250 cloudyvents/tests/test_cei_events.py
@@ -0,0 +1,250 @@
+import unittest
+import datetime
+import logging
+import os
+import shutil
+import tempfile
+import time
+
+import cloudyvents.cei_events as cei_events
+
+# Set this to False to look at generated log files afterwards. There will be
+# many directories like /tmp/ceitestlog*
+DESTROY_LOGDIR = True
+
+logger = logging.getLogger(__name__)
+
class CEIEventsTestCase(unittest.TestCase):
    """Exercises event recording and harvesting through a real log file."""

    def setUp(self):
        if not self._is_setup():
            self._configure()
        logger.debug("test suite set up")

    def tearDown(self):
        if not DESTROY_LOGDIR:
            logger.debug("logdir destruction disabled")
            return
        if not self._is_setup():
            raise Exception("tear down called without setup")
        shutil.rmtree(self.logdirpath)

    def _configure(self):
        """Create a temp log dir/file and attach a file handler to logger."""
        tmpdir = tempfile.mkdtemp(prefix="ceitestlog")
        logfilepath = os.path.join(tmpdir, str(int(time.time())))
        # open() + 'with' replaces the Python-2-only file() builtin and
        # still guarantees the handle is closed
        with open(logfilepath, 'w') as f:
            f.write("\n## auto-generated @ %s\n\n" % time.ctime())

        logfilehandler = logging.FileHandler(logfilepath)
        logfilehandler.setLevel(logging.DEBUG)
        formatstring = "%(asctime)s %(levelname)s @%(lineno)d: %(message)s"
        logfilehandler.setFormatter(logging.Formatter(formatstring))
        logger.addHandler(logfilehandler)

        self.logfilepath = logfilepath
        self.logdirpath = tmpdir

    def _is_setup(self):
        # the path attributes only exist once _configure has run
        try:
            if self.logfilepath and self.logdirpath:
                return True
        except AttributeError:
            pass
        return False

    # -----------------------------------------------------------------------

    def test_event_write(self):
        logger.debug("something")
        cei_events.event("unittest", "TRIAL1", logger)
        logger.debug("something-else")
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1
        assert events[0].source == "unittest"
        assert events[0].name == "TRIAL1"

    def test_manual_event_write(self):
        cruft = "some cruft %s" % cei_events.event_logtxt("unittest", "TRIAL1")
        logger.warning(cruft)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1

        cei_events.event("unittest", "TRIAL2", logger)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 2

        cruft = "cruft2 %s" % cei_events.event_logtxt("unittest", "TRIAL3")
        logger.warning(cruft)

        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 3

        found = {"TRIAL1": False, "TRIAL2": False, "TRIAL3": False}
        for ev in events:
            # 'in' replaces dict.has_key, which was removed in Python 3
            if ev.name in found:
                found[ev.name] = True
        for val in found.values():
            assert val

    def test_timestamp(self):
        utc_now = datetime.datetime.utcnow()
        cei_events.event("unittest", "TRIAL1", logger)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1
        ts = events[0].timestamp

        # It is possible that any of these values could have rolled over
        # between acquiring utc_now and recording the event. But this is
        # unlikely enough that we'll keep this important UTC sanity check:
        assert ts.year == utc_now.year
        assert ts.month == utc_now.month
        assert ts.day == utc_now.day
        assert ts.hour == utc_now.hour

    def test_unique_keys(self):
        for _ in range(7):
            cei_events.event("unittest", "NAME", logger)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 7
        uniqs = {}
        for ev in events:
            # the unique *key* is what must differ per event; the original
            # mistakenly collected timestamps here
            uniqs[ev.key] = None
        assert len(uniqs) == 7

    def test_extra(self):
        adict = {"hello1": "hello2"}
        cei_events.event("unittest", "TRIAL1", logger, extra=adict)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1
        assert events[0].extra["hello1"] == "hello2"

    def test_bad_extra(self):
        self.assertRaises(Exception, cei_events.event,
                          "unittest", "TRIAL1", logger, extra="astring")

    def test_extra_integer_values(self):
        adict = {"hello1": 34}
        cei_events.event("unittest", "TRIAL1", logger, extra=adict)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1
        assert events[0].extra["hello1"] == 34

    def test_extra_integer_keys(self):
        # This does not serialize as an integer, fails. Added rule
        # to events recorder to not allow integer keys.
        pass
        #adict = {23:"something"}
        #cei_events.event("unittest", "TRIAL1", logger, extra=adict)
        #events = cei_events.events_from_file(self.logfilepath)
        #assert len(events) == 1
        #assert events[0].extra[23] == "something"

    def test_extra_hierarchy(self):
        # note the conflicting "hello3" key in higher level:
        innerdict = {"hello3": "hello4"}
        adict = {"hello1": "hello2", "hello5": innerdict, "hello3": "hello6"}

        cei_events.event("unittest", "TRIAL1", logger, extra=adict)
        events = cei_events.events_from_file(self.logfilepath)
        assert len(events) == 1
        assert events[0].extra["hello1"] == "hello2"
        assert events[0].extra["hello3"] == "hello6"

        innerdict = events[0].extra["hello5"]
        assert isinstance(innerdict, dict)
        assert innerdict["hello3"] == "hello4"

    def test_newline_rules(self):
        self.assertRaises(Exception, cei_events.event,
                          "unit\ntest", "TRIAL", logger)
        self.assertRaises(Exception, cei_events.event,
                          "unittest", "TRIAL\nA", logger)
        self.assertRaises(Exception, cei_events.event,
                          "unittest", "TRIAL", logger, extra="some\nthing")
        self.assertRaises(Exception, cei_events.event,
                          "unittest\n", "TRIAL", logger)
        self.assertRaises(Exception, cei_events.event,
                          "\nunittest", "TRIAL", logger)
        self.assertRaises(Exception, cei_events.event,
                          "\n", "TRIAL", logger)

    def test_missing_rules(self):
        self.assertRaises(Exception, cei_events.event,
                          None, "TRIAL", logger)
        self.assertRaises(Exception, cei_events.event,
                          "unittest", None, logger)

    def test_event_namefilter(self):
        cei_events.event("unittest", "NM1", logger)
        cei_events.event("unittest", "NM2", logger)
        cei_events.event("unittest", "NM3", logger)
        logger.debug("something not an event")
        cei_events.event("unittest", "NM4", logger)
        cei_events.event("unittest", "NM5", logger)
        logger.debug("something not an event")
        cei_events.event("unittest", "NM6", logger)
        path = self.logfilepath
        events = cei_events.events_from_file(path, namefilter="NM")
        assert len(events) == 6

    def test_event_namefilter2(self):
        cei_events.event("unittest", "NM1", logger)
        logger.debug("something not an event")
        cei_events.event("unittest", "XX2", logger)
        cei_events.event("unittest", "NM3", logger)
        logger.debug("something not an event")
        cei_events.event("unittest", "XX4", logger)
        cei_events.event("unittest", "NM5", logger)
        cei_events.event("unittest", "XX6", logger)
        path = self.logfilepath
        events = cei_events.events_from_file(path, namefilter="NM")
        assert len(events) == 3

    def test_event_sourcefilter(self):
        cei_events.event("SRC1", "NM1", logger)
        logger.debug("something not an event")
        cei_events.event("SRC2", "NM2", logger)
        cei_events.event("SRC3", "NM3", logger)
        logger.debug("something not an event")
        cei_events.event("SRC4", "NM4", logger)
        cei_events.event("SRC5", "NM5", logger)
        cei_events.event("SRC6", "NM6", logger)
        path = self.logfilepath
        events = cei_events.events_from_file(path, sourcefilter="SRC")
        assert len(events) == 6

    def test_event_sourcefilter2(self):
        cei_events.event("SRC1", "NM1", logger)
        logger.debug("something not an event")
        cei_events.event("SRX2", "NM2", logger)
        cei_events.event("SRC3", "NM3", logger)
        logger.debug("something not an event")
        cei_events.event("SRX4", "NM4", logger)
        cei_events.event("SRC5", "NM5", logger)
        cei_events.event("SRC6", "NM6", logger)
        path = self.logfilepath
        events = cei_events.events_from_file(path, sourcefilter="SRC")
        assert len(events) == 4

    def test_event_nameandsourcefilter(self):
        cei_events.event("SRC1", "NX1", logger)
        logger.debug("something not an event")
        cei_events.event("SRX2", "NM2", logger)
        cei_events.event("SRC3", "XX3", logger)
        cei_events.event("SRX4", "XX4", logger)
        cei_events.event("SRC5", "NM5", logger)
        logger.debug("something not an event")
        cei_events.event("SRC6", "NM6", logger)
        path = self.logfilepath
        events = cei_events.events_from_file(path, sourcefilter="SRC", namefilter="NM")
        assert len(events) == 2
29 setup.py
@@ -0,0 +1,29 @@
#!/usr/bin/env python
"""Distutils/setuptools installer for the cloudyvents package."""

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import sys

# Minimum supported interpreter version, compared as a tuple against
# sys.version_info (float comparison breaks at e.g. 2.10 vs 2.9)
PY_VERSION_REQUIRED = (2, 5)
VERSION = '0.1'

if sys.version_info[:2] < PY_VERSION_REQUIRED:
    sys.stderr.write("Your Python version %d.%d.%d is not supported.\n"
                     % sys.version_info[:3])
    sys.stderr.write("cloudyvents requires Python %d.%d or newer.\n"
                     % PY_VERSION_REQUIRED)
    sys.exit(1)

setup(name='cloudyvents',
      version=VERSION,
      description='An event management library for the EPU services.',
      author='Nimbus Team',
      author_email='nimbusteam@mcs.anl.gov',
      url='http://www.nimbusproject.org/',
      # must match the actual package directory name; the original listed
      # the nonexistent 'pyceievents' packages, so nothing was installed
      packages=['cloudyvents', 'cloudyvents.tests'],
      long_description="""
""",
      license="Apache2",
      install_requires=["nose"],
      )
Please sign in to comment.
Something went wrong with that request. Please try again.