This repository has been archived by the owner on Jan 31, 2020. It is now read-only.

Commit 44686cc
Merge 91ce48e into 9774f92
mark-burnett committed Jun 11, 2014
2 parents 9774f92 + 91ce48e
Showing 12 changed files with 191 additions and 135 deletions.
37 changes: 12 additions & 25 deletions .travis.yml
@@ -1,42 +1,29 @@
language: python
python: "2.7"

services:
- rabbitmq
- redis

env:
global:
- PTERO_PETRI_HOST=localhost
- PTERO_PETRI_PORT=5000
- PTERO_FORK_HOST=localhost
- PTERO_FORK_PORT=6000
- PTERO_PETRI_HOME=ptero-petri
- PTERO_FORK_HOME=ptero-shell-command-fork

install: pip install tox gunicorn git+https://github.com/nickstenning/honcho.git
install: pip install tox

before_script:
- sudo rabbitmq-plugins disable rabbitmq_management
- git clone --depth 1 https://github.com/mark-burnett/ptero-petri.git
- pushd ptero-petri
- git --no-pager show -s
- pip install -r requirements.txt
- python setup.py install
- PTERO_CONFIG_PATH=config ptero configure-rabbitmq
- honcho start --port 5000 > /dev/null &
- popd

- git clone --depth 1 https://github.com/mark-burnett/ptero-shell-command-fork.git
- pushd ptero-shell-command-fork
- git --no-pager show -s
- pip install -r requirements.txt
- python setup.py install
- honcho start --port 6000 > /dev/null &
- popd

- sleep 3

script:
- tox

after_success:
- pip install coveralls
- coveralls

after_script:
- ps -efl > var/log/ps.out
- bash -c 'for f in var/log/*; do echo; echo "============================================"; echo $f; echo "============================================"; cat $f; done'

branches:
only:
- master
3 changes: 2 additions & 1 deletion ptero_workflow/implementation/backend.py
@@ -72,7 +72,8 @@ def get_workflow(self, workflow_id):
return self.session.query(models.Workflow).get(workflow_id).as_dict

def event(self, operation_id, event_type, body_data, query_string_data):
operation = self.session.query(models.Operation).get(operation_id)
operation = self.session.query(models.Operation
).filter_by(id=operation_id).one()
operation.handle_event(event_type, body_data, query_string_data)

def cleanup(self):
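For context on the backend.py change above: SQLAlchemy's `Query.get()` returns `None` for a missing primary key (and can serve the object straight from the identity map without a fresh SELECT), while `filter_by(...).one()` always queries and raises `NoResultFound` or `MultipleResultsFound`. A minimal, self-contained sketch of that difference; the in-memory SQLite model here is only a stand-in for `models.Operation`:

```python
# Sketch only: a throwaway model standing in for models.Operation.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class Operation(Base):
    __tablename__ = 'operation'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# Old style: .get() returns None for a missing id, so a bad operation_id
# only fails later, as an AttributeError on the None result.
assert session.query(Operation).get(42) is None

# New style: .filter_by(...).one() always issues a SELECT and raises
# NoResultFound immediately, surfacing the bad id at the lookup itself.
try:
    session.query(Operation).filter_by(id=42).one()
except NoResultFound:
    print('no operation with id 42')
```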
1 change: 1 addition & 0 deletions test-requirements.txt
@@ -1,3 +1,4 @@
honcho
nose >= 1.3.0
nose-cov
psutil
86 changes: 86 additions & 0 deletions tests/__init__.py
@@ -0,0 +1,86 @@
import errno
import os
import psutil
import signal
import sys
import time


instance = None


def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise


def wait_time():
if os.environ.get('TRAVIS'):
return 15
else:
return 3

def this_dir():
return os.path.dirname(__file__)

def procfile_path():
return os.path.join(this_dir(), 'scripts', 'Procfile')

def service_command_line():
return ['honcho', '-f', procfile_path(), 'start']


def setUp():
global instance

logdir = 'var/log'
mkdir_p(logdir)
outlog = open(os.path.join(logdir, 'honcho.out'), 'w')
errlog = open(os.path.join(logdir, 'honcho.err'), 'w')

if not os.environ.get('SKIP_PROCFILE'):
instance = psutil.Popen(service_command_line(),
shell=False, stdout=outlog, stderr=errlog)
time.sleep(wait_time())
if instance.poll() is not None:
raise RuntimeError("honcho instance terminated prematurely")

def signal_processes(processes, sig):
signaled_someone = False
for p in processes:
try:
p.send_signal(sig)
signaled_someone = True
except psutil.NoSuchProcess:
pass

return signaled_someone

def get_descendents():
return psutil.Process(instance.pid).get_children(recursive=True)

def cleanup():
descendents = get_descendents()

instance.send_signal(signal.SIGINT)
try:
instance.wait(timeout=10)
except psutil.TimeoutExpired:
pass

if not signal_processes(descendents, signal.SIGINT):
return

time.sleep(3)
signal_processes(descendents, signal.SIGKILL)


# NOTE If this doesn't run then honcho will be orphaned...
def tearDown():
if not os.environ.get('SKIP_PROCFILE'):
cleanup()
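For readers unfamiliar with the fixture mechanism the new `tests/__init__.py` relies on: nose treats a `setUp()`/`tearDown()` pair defined in a test package's `__init__.py` as package-level fixtures, run once before the first test in the package and once after the last, so the honcho-managed services live for the whole run. A minimal sketch of the same pattern, with a hypothetical `sleep 60` child standing in for the Procfile stack:

```python
# Sketch of a nose package-level fixture; 'sleep 60' is a placeholder for
# `honcho -f tests/scripts/Procfile start`.
import signal
import time

import psutil

_proc = None

def setUp():
    global _proc
    _proc = psutil.Popen(['sleep', '60'])   # placeholder long-running service
    time.sleep(1)                           # crude readiness wait
    if _proc.poll() is not None:
        raise RuntimeError('service exited during startup')

def tearDown():
    # SIGINT first for a clean shutdown, then make sure nothing survives.
    _proc.send_signal(signal.SIGINT)
    try:
        _proc.wait(timeout=10)
    except psutil.TimeoutExpired:
        _proc.kill()
```

(`get_children(recursive=True)` in the code above is the pre-2.0 psutil spelling; later releases rename it to `children()`.)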
34 changes: 18 additions & 16 deletions tests/api/base.py
@@ -1,4 +1,5 @@
from ptero_workflow.api import application
import requests
import simplejson
import os
import unittest
@@ -7,32 +8,33 @@


class BaseAPITest(unittest.TestCase):
def create_wsgi_app(self):
return application.create_app(purge=True)

def setUp(self):
self.app = self.create_wsgi_app()
self.client = self.app.test_client()
self.api_host = os.environ['PTERO_WORKFLOW_HOST']
self.api_port = int(os.environ['PTERO_WORKFLOW_PORT'])

@property
def post_url(self):
return 'http://%s:%s/v1/workflows' % (self.api_host, self.api_port)

def get(self, url, **kwargs):
return _deserialize_response(self.client.get(url, query_string=kwargs))
return _deserialize_response(requests.get(url, params=kwargs))

def patch(self, url, data):
return _deserialize_response(self.client.patch(url,
content_type='application/json', data=simplejson.dumps(data)))
return _deserialize_response(requests.patch(url,
headers={'content-type': 'application/json'},
data=simplejson.dumps(data)))

def post(self, url, data):
return _deserialize_response(self.client.post(url,
content_type='application/json', data=simplejson.dumps(data)))
return _deserialize_response(requests.post(url,
headers={'content-type': 'application/json'},
data=simplejson.dumps(data)))

def put(self, url, data):
return _deserialize_response(self.client.put(url,
content_type='application/json', data=simplejson.dumps(data)))
return _deserialize_response(requests.put(url,
headers={'content-type': 'application/json'},
data=simplejson.dumps(data)))


def _deserialize_response(response):
try:
response.DATA = simplejson.loads(response.data)
except simplejson.JSONDecodeError:
pass
response.DATA = response.json()
return response
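The base-class change above swaps Flask's in-process `test_client()` for real HTTP calls with `requests`, so the tests now exercise whichever server the package fixture started on `PTERO_WORKFLOW_HOST`/`PTERO_WORKFLOW_PORT`. A minimal sketch of the new call path; the fallback host, port, and request body are placeholders, not values taken from this diff:

```python
# Sketch of the out-of-process call pattern used by BaseAPITest.post().
import os

import requests
import simplejson

host = os.environ.get('PTERO_WORKFLOW_HOST', 'localhost')   # placeholder default
port = int(os.environ.get('PTERO_WORKFLOW_PORT', 5000))     # placeholder default
url = 'http://%s:%s/v1/workflows' % (host, port)

response = requests.post(url,
        headers={'content-type': 'application/json'},
        data=simplejson.dumps({'tasks': {}}))               # placeholder payload

print(response.status_code)
print(response.json())
```

This is also why `_deserialize_response` shrinks to a single `response.json()` call: a `requests` response parses its own body, whereas the Flask test client only exposed raw `response.data`.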
81 changes: 6 additions & 75 deletions tests/api/v1/generator/base_case.py
@@ -25,9 +25,9 @@
class TestCaseMixin(object):
__metaclass__ = abc.ABCMeta

@abc.abstractproperty
@property
def api_port(self):
pass
return int(os.environ['PTERO_WORKFLOW_PORT'])

@abc.abstractproperty
def directory(self):
@@ -38,15 +38,6 @@ def test_name(self):
pass


def setUp(self):
super(TestCaseMixin, self).setUp()
self._start_devserver()

def tearDown(self):
super(TestCaseMixin, self).tearDown()
self._stop_devserver()


def test_got_expected_result(self):
workflow_url = self._submit_workflow()
self._wait_for_completion(workflow_url)
@@ -66,6 +57,10 @@ def _wait_for_completion(self, workflow_url):
return
time.sleep(_POLLING_DELAY)

@property
def _max_wait_time(self):
return 20

def _verify_result(self, workflow_url):
actual_result = self._get_actual_result(workflow_url)
expected_result = self._expected_result
@@ -112,47 +107,6 @@ def _get_actual_result(self, workflow_url):
return response.json()


def _start_devserver(self):
cmd = [
self._devserver_path,
'--max-run-time', str(2 * self._max_wait_time),
'--port', str(self.api_port),
'--logdir', str(self._logdir),
'--cover',
]
if int(os.environ.get('PTERO_TEST_WEBSERVER_DEBUG', 0)) == 1:
cmd.append('--debug')

env = self._setup_database_environment()
self._devserver = subprocess.Popen(cmd, close_fds=True, env=env)
self._wait_for_devserver()

def _setup_database_environment(self):
self._remove_existing_database_file()

env = os.environ.data
env['PTERO_WORKFLOW_DB_STRING'] = self._db_string
env['PTERO_WORKFLOW_HOST'] = 'localhost'
env['PTERO_WORKFLOW_PORT'] = str(self.api_port)
return env

def _remove_existing_database_file(self):
try:
os.remove(self._db_path)
except OSError as e:
if e.errno != errno.ENOENT:
raise

def _wait_for_devserver(self):
time.sleep(5)

def _stop_devserver(self):
_stop_subprocess(self._devserver)

@property
def _devserver_path(self):
return os.path.join(self._repository_root_path, 'devserver')

@property
def _logdir(self):
return os.path.join(self._repository_root_path, 'logs', self.test_name)
@@ -162,29 +116,6 @@ def _repository_root_path(self):
return os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..', '..'))

@property
def _max_wait_time(self):
return 20

@property
def _db_string(self):
return 'sqlite:///%s' % self._db_path

@property
def _db_path(self):
return os.path.abspath(os.path.join(self._logdir, 'db.sqlite'))


def _stop_subprocess(process):
try:
process.send_signal(signal.SIGINT)
time.sleep(_TERMINATE_WAIT_TIME)
process.kill()
time.sleep(_TERMINATE_WAIT_TIME)
except OSError as e:
if e.errno != errno.ESRCH: # ESRCH: no such pid
raise


def _retry(func, *args, **kwargs):
for attempt in xrange(_MAX_RETRIES):
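With the per-test devserver start/stop plumbing deleted, the generated cases assume the service stack from `tests/__init__.py` is already listening and simply poll the workflow URL until it settles, bounded by `_max_wait_time`. A rough sketch of that loop; the `status` field and its values are assumptions, not taken from this diff:

```python
# Sketch of the polling pattern behind _wait_for_completion.
import time

import requests

POLLING_DELAY = 0.5   # seconds; placeholder for the module's _POLLING_DELAY
MAX_WAIT_TIME = 20    # matches the _max_wait_time property above

def wait_for_completion(workflow_url):
    deadline = time.time() + MAX_WAIT_TIME
    while time.time() < deadline:
        status = requests.get(workflow_url).json().get('status')
        if status not in (None, 'new', 'running'):   # hypothetical states
            return status
        time.sleep(POLLING_DELAY)
    raise AssertionError('workflow did not finish within %ss' % MAX_WAIT_TIME)
```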
10 changes: 0 additions & 10 deletions tests/api/v1/generator/func.py
@@ -25,7 +25,6 @@ def _create_and_attach_test_case(target_module, test_case_directory,

def _create_test_case(test_case_directory, test_case_name):
class_dict = {
'api_port': _get_available_port(),
'directory': os.path.join(test_case_directory, test_case_name),
'test_name': test_case_name,
}
@@ -35,12 +34,3 @@ def _create_test_case(test_case_directory, test_case_name):

def _attach_test_case(target_module, test_case):
setattr(target_module, test_case.__name__, test_case)


def _get_available_port():
s = socket.socket()
s.bind(('127.0.0.1', 0))
port = s.getsockname()[1]
s.close()

return port
6 changes: 2 additions & 4 deletions tests/api/v1/test_post_workflow_failure.py
@@ -2,9 +2,6 @@
import abc


URL = '/v1/workflows'


class PostWorkflowFailure(object):
__metaclass__ = abc.ABCMeta

@@ -14,7 +11,8 @@ def post_data(self):

def setUp(self):
super(PostWorkflowFailure, self).setUp()
self.response = self.post(URL, self.post_data)
self.response = self.post(self.post_url, self.post_data)


def test_should_return_400(self):
self.assertEqual(400, self.response.status_code)
5 changes: 1 addition & 4 deletions tests/api/v1/test_roundtrip_success.py
@@ -2,9 +2,6 @@
import abc


URL = '/v1/workflows'


class RoundTripSuccess(object):
__metaclass__ = abc.ABCMeta

@@ -14,7 +11,7 @@ def post_data(self):

def setUp(self):
super(RoundTripSuccess, self).setUp()
self.response = self.post(URL, self.post_data)
self.response = self.post(self.post_url, self.post_data)

def test_should_return_201(self):
self.assertEqual(201, self.response.status_code)
3 changes: 3 additions & 0 deletions tests/scripts/Procfile
@@ -0,0 +1,3 @@
fork: cd $PTERO_FORK_HOME; honcho start -f tests/scripts/Procfile -c worker=4
petri: cd $PTERO_PETRI_HOME; honcho start -f tests/scripts/Procfile -c orchestrator=2
web: PTERO_WORKFLOW_DB_STRING=sqlite:///var/workflow.sqlite tests/scripts/sigterm_wrapper coverage run ptero_workflow/api/wsgi.py --port $PTERO_WORKFLOW_PORT
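This Procfile is what the package fixture launches: honcho reads each `name: command` line and supervises the three processes (the fork and petri services via their own Procfiles, plus the workflow web service under `coverage`). A hand-run equivalent is sketched below; the checkout paths and port are example values only, to be adjusted for a local setup:

```python
# Sketch: run the same stack outside the test suite.
import os
import subprocess

env = dict(os.environ,
        PTERO_PETRI_HOME='ptero-petri',                 # example path
        PTERO_FORK_HOME='ptero-shell-command-fork',     # example path
        PTERO_WORKFLOW_HOST='localhost',
        PTERO_WORKFLOW_PORT='5000')                     # example port

# Same invocation tests/__init__.py uses; blocks until interrupted.
subprocess.call(['honcho', '-f', 'tests/scripts/Procfile', 'start'], env=env)
```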
