Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Add even more tests.

  • Loading branch information...
commit b22809c6b3023142f7e7347ccfd3ca6c0953f583 1 parent a4d3370
@oldpatricka oldpatricka authored
View
9 src/python/tests/mocks/event.py
@@ -4,7 +4,8 @@ class Event:
"""
def __init__(self, name="", timestamp="", state=None, source="",
- last_queuelen_size=None, de_state=None):
+ last_queuelen_size=None, de_state=None, iaas_id=None,
+ node_id=None, public_ip=None):
self.name = name
self.timestamp = timestamp
self.source = source
@@ -15,3 +16,9 @@ def __init__(self, name="", timestamp="", state=None, source="",
self.extra["last_queuelen_size"] = last_queuelen_size
if de_state:
self.extra["de_state"] = de_state
+ if public_ip:
+ self.extra["public_ip"] = public_ip
+ if iaas_id:
+ self.extra["iaas_id"] = iaas_id
+ if node_id:
+ self.extra["node_id"] = node_id
View
9 src/python/tests/mocks/runlogs.py
@@ -0,0 +1,9 @@
+
class FakeRunlogs:
    """Minimal stand-in for the runlogs module used by tests.

    It only records the VMs it is told about so assertions can inspect them.
    """

    def __init__(self):
        # VMs registered via new_vm(), in call order.
        self.vms = []

    def new_vm(self, vm):
        """Record *vm* as a newly seen VM."""
        self.vms.append(vm)
View
3  src/python/tests/test_epumgmt_default_common.py
@@ -100,7 +100,4 @@ def test_close_logfile():
logging_common.close_logfile()
assert not logging_common.logfilehandler
- #logging_common.reopen_logfile()
- #assert logging_common.logfilehandler
-
shutil.rmtree(logdir)
View
135 src/python/tests/test_epumgmt_defaults_log_events.py
@@ -0,0 +1,135 @@
+import os
+import shutil
+import tempfile
+import ConfigParser
+
+import epumgmt.defaults.log_events
+from epumgmt.defaults import DefaultParameters
+from mocks.common import FakeCommon
+
class TestAmqpEvents:
    """Tests for epumgmt.defaults.log_events.AmqpEvents.

    setup() builds a temporary runlog directory containing one work-producer
    and one work-consumer container log, plus an AmqpEvents instance
    configured (via DefaultParameters) to read from those directories.
    """

    def setup(self):
        # Fresh temp dirs per test so tests can't see each other's logs.
        self.runlogdir = tempfile.mkdtemp()
        self.vmlogdir = tempfile.mkdtemp()

        # Producer-side container log: directory name contains "producer".
        producer_dir = os.path.join(self.runlogdir, "producer1-container")
        os.mkdir(producer_dir)
        self.producer_ioncontainer_log = os.path.join(producer_dir, "ioncontainer.log")
        with open(self.producer_ioncontainer_log, "w") as container_file:
            container_file.write("contents!")

        # Consumer-side container log: directory name contains "epuworker".
        consumer_dir = os.path.join(self.runlogdir, "epuworker_container")
        os.mkdir(consumer_dir)
        self.consumer_ioncontainer_log = os.path.join(consumer_dir, "ioncontainer.log")
        with open(self.consumer_ioncontainer_log, "w") as container_file:
            container_file.write("contents!")

        self.config = ConfigParser.RawConfigParser()
        self.config.add_section("events")
        self.config.set("events", "runlogdir", self.runlogdir)
        self.config.set("events", "vmlogdir", self.vmlogdir)

        self.c = FakeCommon()
        self.p = DefaultParameters(self.config, None)
        self.amqp_events = epumgmt.defaults.log_events.AmqpEvents(self.p, self.c, None, "")

    def teardown(self):
        # Remove everything setup() created on disk.
        shutil.rmtree(self.runlogdir)
        shutil.rmtree(self.vmlogdir)

    def test_create_datetime(self):
        """_create_datetime() should map a timestamp dict onto datetime fields."""
        year = 2011
        month = 4
        day = 5
        hour = 4
        minute = 3
        second = 7
        microsecond = 6
        timestamp = { "year": year, "month": month, "day": day,
                      "hour": hour, "minute": minute, "second": second,
                      "microsecond": microsecond }

        got_datetime = self.amqp_events._create_datetime(timestamp)

        # Fixed: removed leftover debug statement `print dir(got_datetime)`.
        assert got_datetime.year == year
        assert got_datetime.minute == minute
        assert got_datetime.day == day

    def test_set_workproducerlog_filenames(self):
        """Producer container logs should be discovered under runlogdir."""
        self.amqp_events._set_workproducerlog_filenames()
        assert self.producer_ioncontainer_log in self.amqp_events.workproducerlog_filenames

    def test_set_workconsumerlog_filenames(self):
        """Consumer container logs should be discovered under runlogdir."""
        self.amqp_events._set_workconsumerlog_filenames()
        assert self.consumer_ioncontainer_log in self.amqp_events.workconsumerlog_filenames

    def test_update_log_filenames(self):
        """_update_log_filenames() should refresh both filename lists."""
        self.amqp_events._update_log_filenames()
        assert self.consumer_ioncontainer_log in self.amqp_events.workconsumerlog_filenames
        assert self.producer_ioncontainer_log in self.amqp_events.workproducerlog_filenames

    def test_get_event_datetimes_dict(self):
        """Known event names should be parsed out of the container logs."""
        # Unknown event name: nothing parsed, empty dict back.
        got_datetimes = self.amqp_events.get_event_datetimes_dict("fake event")
        assert got_datetimes == {}

        # job_begin/job_end events live in the *consumer* container log.
        job_begin_id = 545454
        job_begin_event = '2011-07-07 11:03:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_begin", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_begin_id
        job_end_id = 424242
        job_end_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_end", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_end_id

        with open(self.consumer_ioncontainer_log, "w") as container:
            container.write(job_begin_event + job_end_event)

        # job_sent events live in the *producer* container log.
        job_sent_id = 424244
        job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id

        with open(self.producer_ioncontainer_log, "w") as container:
            container.write(job_sent_event)

        # Fixed: replaced deprecated dict.has_key() with the `in` operator
        # (same behavior on Python 2, and has_key() no longer exists on 3).
        got_datetimes = self.amqp_events.get_event_datetimes_dict("job_end")
        assert job_end_id in got_datetimes

        got_datetimes = self.amqp_events.get_event_datetimes_dict("job_begin")
        assert job_begin_id in got_datetimes

        got_datetimes = self.amqp_events.get_event_datetimes_dict("job_sent")
        assert job_sent_id in got_datetimes

    def test_get_event_datetimes_dict_badfile(self):
        """An unreadable log file should be logged as an ERROR, not raised."""
        job_sent_id = 424244
        job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id

        with open(self.producer_ioncontainer_log, "w") as container:
            container.write(job_sent_event)

        # Make the log unreadable; restore the mode afterward so teardown's
        # rmtree can remove it.
        # NOTE(review): chmod(0) does not block reads when running as root,
        # so this test would spuriously fail under root — confirm CI user.
        old_mode = os.stat(self.producer_ioncontainer_log).st_mode
        os.chmod(self.producer_ioncontainer_log, 0)
        got_datetimes = self.amqp_events.get_event_datetimes_dict("job_sent")

        failed_to_open = [message for (level, message)
                          in self.c.log.transcript
                          if level == "ERROR"
                          and "Failed to open and read" in message]

        assert len(failed_to_open) == 1
        os.chmod(self.producer_ioncontainer_log, old_mode)
View
126 src/python/tests/test_epumgmt_main_em_core_findworkers.py
@@ -0,0 +1,126 @@
+import epumgmt.main.em_core_findworkers
+import epumgmt.api
+
+from epumgmt.api.exceptions import IncompatibleEnvironment
+from epumgmt.defaults.runlogs import DefaultRunlogs
+import epumgmt.main.em_args as em_args
+from epumgmt.defaults.parameters import DefaultParameters
+
+from mocks.common import FakeCommon
+from mocks.modules import FakeModules
+from mocks.event import Event
+from mocks.runlogs import FakeRunlogs
+
def test_get_provisioner():
    """_get_provisioner() raises IncompatibleEnvironment until a VM whose
    service_type is "provisioner" is stored for the run, then returns it.
    """
    from epumgmt.main.em_core_findworkers import _get_provisioner

    modules = FakeModules()
    run_name = "TESTRUN"

    def lookup_raises():
        # True iff _get_provisioner raises IncompatibleEnvironment right now.
        try:
            _get_provisioner(modules, run_name)
        except IncompatibleEnvironment:
            return True
        return False

    # No VMs stored at all: nothing to find.
    modules.persistence.store_run_vms(run_name, [])
    assert lookup_raises()

    # Only a non-provisioner VM stored: still nothing to find.
    other_vm = epumgmt.api.RunVM()
    other_vm.service_type = "something"
    modules.persistence.store_run_vms(run_name, [other_vm])
    assert lookup_raises()

    # A provisioner VM is present among others: it should be returned.
    test_service_name = "provisioner"
    test_provisioner = epumgmt.api.RunVM()
    test_provisioner.service_type = test_service_name
    test_provisioner_instanceid = "i-TEST"
    test_provisioner.instanceid = test_provisioner_instanceid
    modules.persistence.store_run_vms(run_name, [other_vm, test_provisioner])

    assert _get_provisioner(modules, run_name) == test_provisioner
+
+
def test_vms_launched():
    """vms_launched() returns VMs built from matching provisioner events
    ("new_node"/"node_started") and raises IncompatibleEnvironment for an
    unrecognized event name.
    """
    from epumgmt.main.em_core_findworkers import vms_launched

    run_name = "TESTRUN"

    c = FakeCommon()
    m = FakeModules()

    p = DefaultParameters(None, None)
    p.optdict = {em_args.NAME.name: run_name}

    m.runlogs = FakeRunlogs()

    # Store a single provisioner VM whose events drive vms_launched().
    test_provisioner = epumgmt.api.RunVM()
    test_provisioner.service_type = "provisioner"
    test_provisioner.instanceid = "i-TEST"
    m.persistence.store_run_vms(run_name, [test_provisioner])

    # No events recorded yet: nothing launched.
    assert vms_launched(m, run_name, "new_node") == []

    vm_0_id = "i-apple"
    vm_0_nodeid = "applenodeid"
    vm_0_publicip = "8.8.8.8"

    def make_event(event_name):
        # All events describe the same fake VM; only the name differs.
        return Event(name=event_name, iaas_id=vm_0_id,
                     node_id=vm_0_nodeid, public_ip=vm_0_publicip)

    # One "new_node" event: that VM appears in the result.
    test_provisioner.events.append(make_event("new_node"))
    got_vms = vms_launched(m, run_name, "new_node")
    assert got_vms[0].instanceid == vm_0_id

    # Clearing the events empties the result again.
    test_provisioner.events = []
    assert vms_launched(m, run_name, "new_node") == []

    # "node_started" events are recognized the same way.
    test_provisioner.events.append(make_event("node_started"))
    got_vms = vms_launched(m, run_name, "node_started")
    assert got_vms[0].instanceid == vm_0_id

    test_provisioner.events = []
    assert vms_launched(m, run_name, "new_node") == []

    # An unrecognized event name must raise IncompatibleEnvironment.
    test_provisioner.events.append(make_event("bad"))
    try:
        vms_launched(m, run_name, "bad")
        raised_incompatible_env = False
    except IncompatibleEnvironment:
        raised_incompatible_env = True
    assert raised_incompatible_env
+
Please sign in to comment.
Something went wrong with that request. Please try again.