Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.
Download ZIP
Browse files

Flesh out the log_events tests. Now at 66% coverage.

  • Loading branch information...
commit 2240b68b7542032e34afcac7d65dbc96ac96d59c 1 parent 6b7484c
@oldpatricka oldpatricka authored
View
62 src/python/epumgmt/defaults/log_events.py
@@ -65,26 +65,26 @@ def get_event_datetimes_list(self, orig_event):
filenames = self.controllerlog_filenames
event = orig_event
- eventTimes = []
+ event_times = []
if filenames:
for filename in filenames:
try:
- eventFile = open(filename, 'r')
+ event_file = open(filename, 'r')
try:
- for line in eventFile:
+ for line in event_file:
if event in line:
splitline = line.split()
lineevent = splitline[0]
date_str = splitline[1].strip()
time_str = splitline[2].strip()
- eventTime = self._create_datetime(date_str, time_str)
- eventTimes.append(eventTime)
+ event_time = self._create_datetime(date_str, time_str)
+ event_times.append(event_time)
finally:
- eventFile.close()
+ event_file.close()
except IOError:
self.c.log.error('Failed to open and read from file: ' + \
'%s' % filename)
- return eventTimes
+ return event_times
# Events:
# job_sent: Job Queued
@@ -155,28 +155,29 @@ def get_event_datetimes_dict(self, orig_event):
self.c.log.error("Unrecognized event: %s" % event)
return {}
- eventTimes = {}
+ event_times = {}
if filenames:
for filename in filenames:
try:
- eventFile = open(filename, 'r')
+ event_file = open(filename, 'r')
try:
- for line in eventFile:
+ for line in event_file:
if event in line:
splitline = line.split()
splitinfo = splitline[1].split(';')
date_str = splitline[0].strip()
time_str = splitinfo[0].strip()
- eventTime = self._create_datetime(date_str, time_str)
- k = int(splitinfo[4].strip().split('.')[0].strip())
- eventTimes[k] = eventTime
+ event_time = self._create_datetime(date_str, time_str)
+ job_id = int(splitinfo[4].strip().split('.')[0].strip())
+ print splitinfo
+ event_times[job_id] = event_time
finally:
- eventFile.close()
+ event_file.close()
except IOError:
self.c.log.error('Failed to open and read from file: ' + \
'%s' % filename)
- self.c.log.debug("Event %s times: %s" % (orig_event, eventTimes))
- return eventTimes
+ self.c.log.debug("Event %s times: %s" % (orig_event, event_times))
+ return event_times
# Events:
# fetch_killed: time VM killed
@@ -221,6 +222,7 @@ def _set_vmkilllog_filenames(self):
if not os.path.isabs(baseDir):
baseDir = self.c.resolve_var_dir(baseDir)
for root, dirs, files in os.walk(baseDir):
+ print files
for fileName in files:
if logName in fileName:
filenames.append(os.path.join(root, fileName))
@@ -264,13 +266,13 @@ def get_event_datetimes_dict(self, event):
self.c.log.error("Unrecognized event: %s" % event)
return {}
- eventTimes = {}
+ event_times = {}
if filenames:
for filename in filenames:
try:
- eventFile = open(filename, 'r')
+ event_file = open(filename, 'r')
try:
- for line in eventFile:
+ for line in event_file:
if event in line:
if not jsonid:
if 'iaas_id' in line:
@@ -286,18 +288,18 @@ def get_event_datetimes_dict(self, event):
self.c.log.exception(emsg % splitline)
continue
timestamp = jsonEvent['timestamp']
- eventTime = self._create_datetime(timestamp)
+ event_time = self._create_datetime(timestamp)
if event == 'launch_ctx_done':
k = jsonEvent['extra'][jsonid][0]
else:
k = jsonEvent['extra'][jsonid]
- eventTimes[k] = eventTime
+ event_times[k] = event_time
finally:
- eventFile.close()
+ event_file.close()
except IOError:
self.c.log.error('Failed to open and read from file: ' + \
'%s' % filename)
- return eventTimes
+ return event_times
# Events:
# job_sent: time job sent from amqp server to worker
@@ -377,25 +379,25 @@ def get_event_datetimes_dict(self, event):
self.c.log.error("Unrecognized event: %s" % event)
return {}
- eventTimes = {}
+ event_times = {}
if filenames:
for filename in filenames:
try:
- eventFile = open(filename, 'r')
+ event_file = open(filename, 'r')
try:
- for line in eventFile:
+ for line in event_file:
if event in line:
splitline = line.rpartition('JSON:')[2]
splitline.strip()
jsonEvent = json.loads(splitline)
timestamp = jsonEvent['timestamp']
- eventTime = self._create_datetime(timestamp)
+ event_time = self._create_datetime(timestamp)
k = jsonEvent['extra'][jsonid]
- eventTimes[k] = eventTime
+ event_times[k] = event_time
finally:
- eventFile.close()
+ event_file.close()
except IOError:
self.c.log.error('Failed to open and read from file: ' + \
'%s' % filename)
- return eventTimes
+ return event_times
View
219 src/python/tests/test_epumgmt_defaults_log_events.py
@@ -134,6 +134,17 @@ def test_get_event_datetimes_dict_badfile(self):
assert len(failed_to_open) == 1
os.chmod(self.producer_ioncontainer_log, old_mode)
+ def test_get_event_count(self):
+
+ job_sent_id = 424244
+ job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id
+
+ with open(self.producer_ioncontainer_log, "w") as container:
+ container.write(job_sent_event)
+
+ count = self.amqp_events.get_event_count("job_sent")
+ assert count == 1
+
class TestControllerEvents:
@@ -142,11 +153,13 @@ def setup(self):
self.runlogdir = "runlogs"
self.vmlogdir = "vmlogs"
+ self.test_event = "testevent 4-5-6 12:12:12.12"
+
controller_dir = os.path.join(self.vardir, self.runlogdir, "epucontrollerkill_logs")
os.makedirs(controller_dir)
self.controller_ioncontainer_log = os.path.join(controller_dir, "ioncontainer.log")
with open(self.controller_ioncontainer_log, "w") as container_file:
- container_file.write("contents!")
+ container_file.write(self.test_event)
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
@@ -190,3 +203,207 @@ def test_create_datetime(self):
datetime = self.controller_events._create_datetime(date_string, time_string)
assert datetime.month == month
+
+
+ def test_get_event_datetimes_list(self):
+
+ event = "testevent"
+ event_list = self.controller_events.get_event_datetimes_list(event)
+ assert len(event_list) == 1
+
+
+class TestTorqueEvents:
+
+ def setup(self):
+ self.vardir = tempfile.mkdtemp()
+ self.runlogdir = "runlogs"
+ self.vmlogdir = "vmlogs"
+ self.job_id = 5
+ self.torque_event = "05/25/2011 15:57:42;0008;PBS_Server;Job;%s.ip-10-203-66-146.ec2.internal;Job Queued at request of ubuntu@ip-10-203-66-146.ec2.internal, owner = ubuntu@ip-10-203-66-146.ec2.internal, job name = tmp5TEZaU, queue = default" % self.job_id
+ torque_dir = os.path.join(self.vardir, self.runlogdir, "torque-server_logs")
+ os.makedirs(torque_dir)
+ self.torque_log = os.path.join(torque_dir, "torque.log")
+ with open(self.torque_log, "w") as torque_file:
+ torque_file.write(self.torque_event)
+
+ self.config = ConfigParser.RawConfigParser()
+ self.config.add_section("events")
+ self.config.set("events", "runlogdir", self.runlogdir)
+ self.config.add_section("ecdirs")
+ self.config.set("ecdirs", "var", self.vardir)
+
+
+ self.p = DefaultParameters(self.config, None)
+ self.c = FakeCommon(self.p)
+ self.torque_events = epumgmt.defaults.log_events.TorqueEvents(self.p, self.c, None, "")
+
+
+ def test_set_serverlog_filenames(self):
+
+ self.torque_events._set_serverlog_filenames()
+ assert self.torque_log in self.torque_events.serverlog_filenames
+
+ def test_update_log_filenames(self):
+
+ self.torque_events._update_log_filenames()
+ assert self.torque_log in self.torque_events.serverlog_filenames
+
+ def test_create_datetime(self):
+
+ year = 2011
+ month = 4
+ day = 5
+ hour = 4
+ minute = 3
+ second = 7
+ microsecond = 6
+ date = "%s/%s/%s" % (month, day, year)
+ time = "%s:%s:%s" % (hour, minute, second)
+
+ got_datetime = self.torque_events._create_datetime(date, time)
+
+ print dir(got_datetime)
+
+ assert got_datetime.year == year
+ assert got_datetime.minute == minute
+ assert got_datetime.day == day
+ assert got_datetime.hour - epumgmt.defaults.log_events.UTC_OFFSET == hour
+
+
+ def test_get_event_datetimes_dict(self):
+
+ # Test behaviour with bad event type
+ event = "non-existent"
+ event_times = self.torque_events.get_event_datetimes_dict(event)
+ assert event_times == {}
+
+ # Test correct parsing behaviour
+ event = "job_sent"
+ event_times = self.torque_events.get_event_datetimes_dict(event)
+ assert event_times.has_key(self.job_id)
+
+ # Test handling of non-readable file
+ self.c.log.transcript = []
+ os.chmod(self.torque_log, 0)
+ event = "job_sent"
+ event_times = self.torque_events.get_event_datetimes_dict(event)
+ errors = [message for (level, message)
+ in self.c.log.transcript
+ if level == "ERROR"]
+ print errors
+ assert "Failed to open and read from file" in errors[0]
+
+class TestNodeEvents:
+
+ def setup(self):
+ self.vardir = tempfile.mkdtemp()
+ self.runlogdir = "runlogs"
+ self.logfiledir = "logs"
+ self.run_name = "test-run"
+ self.launch_ctx_id = "imauuidhonest"
+ self.launch_ctx_done = "2011-06-14 09:33:08,268 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {\"eventname\": \"launch_ctx_done\", \"timestamp\": {\"hour\": 16, \"month\": 6, \"second\": 8, \"microsecond\": 268628, \"year\": 2011, \"day\": 14, \"minute\": 33}, \"uniquekey\": \"8311960b-2802-4976-ae4d-1c4e7e7b9ee5\", \"eventsource\": \"provisioner\", \"extra\": {\"launch_id\": \"e62df223-0d7d-4882-8583-98de1c14f5c8\", \"node_ids\": [\"%s\"]}}" % self.launch_ctx_id
+ self.vmkill_event_id = "arealid"
+ self.vmkill_event = "2011-06-14 09:33:08,268 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {\"eventname\": \"fetch_killed\", \"timestamp\": {\"hour\": 16, \"month\": 6, \"second\": 8, \"microsecond\": 268628, \"year\": 2011, \"day\": 14, \"minute\": 33}, \"uniquekey\": \"8311960b-2802-4976-ae4d-1c4e7e7b9ee5\", \"eventsource\": \"provisioner\", \"extra\": {\"launch_id\": \"e62df223-0d7d-4882-8583-98de1c14f5c8\", \"iaas_id\": \"%s\"}}" % self.vmkill_event_id
+ provisioner_dir = os.path.join(self.vardir, self.runlogdir, self.run_name, "provisioner")
+ os.makedirs(provisioner_dir)
+ vmkill_dir = os.path.join(self.vardir, self.logfiledir)
+ os.makedirs(vmkill_dir)
+ self.provisioner_log = os.path.join(provisioner_dir, "ioncontainer.log")
+ with open(self.provisioner_log, "w") as provisioner_file:
+ provisioner_file.write(self.launch_ctx_done)
+ self.vmkill_log = os.path.join(vmkill_dir, "--%s-fetchkill-" % self.run_name)
+ with open(self.vmkill_log, "w") as vmkill_file:
+ vmkill_file.write(self.vmkill_event)
+
+ self.config = ConfigParser.RawConfigParser()
+ self.config.add_section("events")
+ self.config.set("events", "runlogdir", self.runlogdir)
+ self.config.add_section("logging")
+ self.config.set("logging", "logfiledir", self.logfiledir)
+ self.config.add_section("ecdirs")
+ self.config.set("ecdirs", "var", self.vardir)
+
+
+ self.p = DefaultParameters(self.config, None)
+ self.c = FakeCommon(self.p)
+ self.node_events = epumgmt.defaults.log_events.NodeEvents(self.p, self.c, None, self.run_name)
+
+ def teardown(self):
+ shutil.rmtree(self.vardir)
+
+ def test_create_datetime(self):
+
+ year = 2011
+ month = 4
+ day = 5
+ hour = 4
+ minute = 3
+ second = 7
+ microsecond = 6
+ timestamp = { "year": year, "month": month, "day": day,
+ "hour": hour, "minute": minute, "second": second,
+ "microsecond": microsecond }
+
+ got_datetime = self.node_events._create_datetime(timestamp)
+
+ print dir(got_datetime)
+
+ assert got_datetime.year == year
+ assert got_datetime.minute == minute
+ assert got_datetime.day == day
+
+ def test_set_provisionerlog_filenames(self):
+
+ self.node_events._set_provisionerlog_filenames()
+ assert self.provisioner_log in self.node_events.provisionerlog_filenames
+
+ def test_set_vmkilllog_filenames(self):
+
+ self.node_events._set_vmkilllog_filenames()
+ assert self.vmkill_log in self.node_events.vmkilllog_filenames
+
+ def test_update_log_filenames(self):
+
+ self.node_events._update_log_filenames()
+ assert self.vmkill_log in self.node_events.vmkilllog_filenames
+ assert self.provisioner_log in self.node_events.provisionerlog_filenames
+
+
+ def test_get_event_datetimes_dict(self):
+
+ event = "fake-event"
+ event_times = self.node_events.get_event_datetimes_dict(event)
+ assert event_times == {}
+
+ event = "launch_ctx_done"
+ event_times = self.node_events.get_event_datetimes_dict(event)
+ print event_times
+ assert event_times.has_key(self.launch_ctx_id)
+
+ event = "fetch_killed"
+ event_times = self.node_events.get_event_datetimes_dict(event)
+ print event_times
+ assert event_times.has_key(self.vmkill_event_id)
+
+
+ # test when we have an unreadable file
+ self.c.log.transcript = []
+ os.chmod(self.provisioner_log, 0)
+ event = "launch_ctx_done"
+ event_times = self.node_events.get_event_datetimes_dict(event)
+ print event_times
+ failed_to_open = [message for (level, message)
+ in self.c.log.transcript
+ if level == "ERROR"
+ and "Failed to open and read" in message]
+
+ assert len(failed_to_open) == 1
+
+
+
+ def test_get_event_count(self):
+
+ event_count = self.node_events.get_event_count("launch_ctx_done")
+ print event_count
+ assert event_count == 1
+
View
3  src/python/tests/test_epumgmt_main_em_core_workloadtest.py
@@ -306,6 +306,3 @@ def test_submit_not_ok(self):
print self.c.log.transcript
message_type = self.c.log.transcript[-1][0]
assert message_type == "ERROR"
-
-
-
Please sign in to comment.
Something went wrong with that request. Please try again.