diff --git a/.travis.yml b/.travis.yml
index 0885e6f..4fc7e76 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,17 +1,23 @@
 notifications:
   email: false
 language: python
+services:
+  - postgresql
+env:
+  global:
+    POSTGIS_DB="postgresql://postgres@localhost/slingshot_test"
 matrix:
   include:
-    - env: TOX_ENV=py27
+    - env: TOX_ENV=py27-integration
     - python: 3.5
-      env: TOX_ENV=py35
-    - python: 3.5
-      env: TOX_ENV=setup
+      env: TOX_ENV=py35-integration
     - python: 3.5
       env: TOX_ENV=flake8
     - python: 3.5
       env: TOX_ENV=coveralls
+before_script:
+  - psql -U postgres -c "CREATE DATABASE slingshot_test;"
+  - psql -U postgres -d slingshot_test -c "CREATE EXTENSION postgis;"
 install:
   - pip install tox
 script:
diff --git a/slingshot/app.py b/slingshot/app.py
index 97eb9fd..f55980a 100644
--- a/slingshot/app.py
+++ b/slingshot/app.py
@@ -67,7 +67,7 @@ def make_uuid(value, namespace='mit.edu'):
     try:
         ns = uuid.uuid5(uuid.NAMESPACE_DNS, namespace)
         uid = uuid.uuid5(ns, value)
-    except UnicodeDecodeError:
+    except UnicodeDecodeError:  # pragma: no cover
         # Python 2 requires a byte string for the second argument.
         # Python 3 requires a unicode string for the second argument.
         value, namespace = [bytearray(s, 'utf-8') for s in (value, namespace)]
diff --git a/slingshot/cli.py b/slingshot/cli.py
index 5ecc981..b57450b 100644
--- a/slingshot/cli.py
+++ b/slingshot/cli.py
@@ -16,7 +16,7 @@
     unpack_zip,
 )
 from slingshot.app import load_layer
-from slingshot.db import engine, metadata
+from slingshot.db import engine


 @click.group()
@@ -52,7 +52,6 @@ def bag(layers, bags, db_uri, workspace, public, secure):
     PostGIS. If a Bag already exists the layer will be skipped.
     """
     engine.configure(db_uri)
-    metadata.bind = engine()
     if os.path.isdir(layers):
         zips = [os.path.join(layers, l) for l in os.listdir(layers)
                 if l.endswith('.zip')]
diff --git a/slingshot/db.py b/slingshot/db.py
index 6c08f93..f6cbe1c 100644
--- a/slingshot/db.py
+++ b/slingshot/db.py
@@ -21,6 +21,7 @@ def __call__(self):

     def configure(self, url):
         self._engine = self._engine or create_engine(url)
+        metadata.bind = self._engine


 engine = Engine()
diff --git a/tests/integration/test_command_line.py b/tests/integration/test_command_line.py
new file mode 100644
index 0000000..86f5d5b
--- /dev/null
+++ b/tests/integration/test_command_line.py
@@ -0,0 +1,81 @@
+import os
+import shutil
+import tempfile
+try:
+    from unittest.mock import patch
+except ImportError:
+    from mock import patch
+
+from click.testing import CliRunner
+from dotenv import load_dotenv, find_dotenv
+import pytest
+
+from slingshot.cli import main
+from slingshot.db import engine, metadata
+
+
+@pytest.fixture
+def runner():
+    return CliRunner()
+
+
+@pytest.fixture
+def db():
+    load_dotenv(find_dotenv())
+    uri = os.environ.get('POSTGIS_DB')
+    engine.configure(uri)
+    return engine
+
+
+@pytest.fixture(autouse=True)
+def db_setup(db):
+    metadata.drop_all()
+    metadata.clear()
+
+
+def test_bag_loads_shapefile(db, runner, shapefile):
+    store = tempfile.mkdtemp()
+    layers = tempfile.mkdtemp()
+    shutil.copy2(shapefile, layers)
+    uri = os.environ.get('POSTGIS_DB')
+    res = runner.invoke(main, ['bag', '--db-uri', uri, '--public',
+                               'mock://example.com', '--secure',
+                               'mock://example.com', layers, store])
+    assert res.exit_code == 0
+    with db().connect() as conn:
+        r = conn.execute('SELECT COUNT(*) FROM bermuda').scalar()
+        assert r == 713
+
+
+def test_bag_creates_bag(runner, shapefile):
+    store = tempfile.mkdtemp()
+    layers = tempfile.mkdtemp()
+    shutil.copy2(shapefile, layers)
+    uri = os.environ.get('POSTGIS_DB')
+    res = runner.invoke(main, ['bag', '--db-uri', uri, '--public',
+                               'mock://example.com', '--secure',
+                               'mock://example.com', layers, store])
+    assert res.exit_code == 0
+    assert 'Loaded layer bermuda' in res.output
+
+
+def test_bag_skips_existing_layers(runner, shapefile, bags_dir):
+    uri = os.environ.get('POSTGIS_DB')
+    res = runner.invoke(main, ['bag', '--db-uri', uri, '--public',
+                               'mock://example.com', '--secure',
+                               'mock://example.com', shapefile, bags_dir])
+    assert res.exit_code == 0
+    assert 'Skipping existing layer bermuda' in res.output
+
+
+def test_bag_removes_failed_bag(runner, shapefile):
+    store = tempfile.mkdtemp()
+    uri = os.environ.get('POSTGIS_DB')
+    with patch('slingshot.cli.load_layer') as m:
+        m.side_effect = Exception
+        res = runner.invoke(main, ['bag', '--db-uri', uri, '--public',
+                                   'mock://example.com', '--secure',
+                                   'mock://example.com', shapefile, store])
+    assert res.exit_code == 0
+    assert 'Failed creating bag bermuda' in res.output
+    assert not os.listdir(store)
diff --git a/tests/test_cli.py b/tests/test_cli.py
deleted file mode 100644
index 76f2329..0000000
--- a/tests/test_cli.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import os
-import shutil
-import tempfile
-try:
-    from unittest.mock import patch
-except ImportError:
-    from mock import patch
-
-from click.testing import CliRunner
-import pytest
-import requests_mock
-
-from slingshot.cli import main
-
-
-@pytest.fixture
-def runner():
-    return CliRunner()
-
-
-def test_bag_creates_bag(runner, shapefile):
-    store = tempfile.mkdtemp()
-    layers = tempfile.mkdtemp()
-    shutil.copy2(shapefile, layers)
-    with patch('slingshot.cli.load_layer'):
-        res = runner.invoke(main, ['bag', '--db-uri', 'sqlite://', '--public',
-                                   'mock://example.com', '--secure',
-                                   'mock://example.com', layers, store])
-    assert res.exit_code == 0
-    assert 'Loaded layer bermuda' in res.output
-
-
-def test_bag_skips_existing_layers(runner, shapefile, bags_dir):
-    with patch('slingshot.cli.load_layer'):
-        res = runner.invoke(main, ['bag', '--db-uri', 'sqlite://', '--public',
-                                   'mock://example.com', '--secure',
-                                   'mock://example.com', shapefile, bags_dir])
-    assert res.exit_code == 0
-    assert 'Skipping existing layer bermuda' in res.output
-
-
-def test_bag_removes_failed_bag(runner, shapefile):
-    store = tempfile.mkdtemp()
-    with patch('slingshot.cli.load_layer') as m:
-        m.side_effect = Exception
-        res = runner.invoke(main, ['bag', '--db-uri', 'sqlite://', '--public',
-                                   'mock://example.com', '--secure',
-                                   'mock://example.com', shapefile, store])
-    assert res.exit_code == 0
-    assert 'Failed creating bag bermuda' in res.output
-    assert not os.listdir(store)
-
-
-def test_publish_publishes_layer(runner, bags_dir):
-    with requests_mock.Mocker() as m:
-        m.post('mock://example.com/public/rest/workspaces/mit/datastores'
-               '/data/featuretypes')
-        m.post('mock://example.com/solr/update')
-        m.post('mock://example.com/solr/update/json/docs')
-        res = runner.invoke(main, ['publish', '--public',
-                                   'mock://example.com/public', '--secure',
-                                   'mock://example.com/secure', '--solr',
-                                   'mock://example.com/solr', bags_dir])
-    assert res.exit_code == 0
-    assert 'Loaded bermuda' in res.output
-
-
-def test_reindex_deletes_and_reloads(runner, bags_dir):
-    with requests_mock.Mocker() as m:
-        m.post('mock://example.com/solr/update')
-        m.post('mock://example.com/solr/update/json/docs')
-        res = runner.invoke(main, ['reindex', '--solr',
-                                   'mock://example.com/solr', bags_dir])
-    assert res.exit_code == 0
-    assert m.request_history[0].json() == \
-        {'delete': {'query':
-            'dct_provenance_s:MIT AND dc_format_s:Shapefile'}}
-    assert 'Indexed bermuda' in res.output
-    assert m.request_history[2].json() == {'commit': {}}
diff --git a/tests/test_app.py b/tests/unit/test_app.py
similarity index 100%
rename from tests/test_app.py
rename to tests/unit/test_app.py
diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py
new file mode 100644
index 0000000..9a99a18
--- /dev/null
+++ b/tests/unit/test_cli.py
@@ -0,0 +1,38 @@
+from click.testing import CliRunner
+import pytest
+import requests_mock
+
+from slingshot.cli import main
+
+
+@pytest.fixture
+def runner():
+    return CliRunner()
+
+
+def test_publish_publishes_layer(runner, bags_dir):
+    with requests_mock.Mocker() as m:
+        m.post('mock://example.com/public/rest/workspaces/mit/datastores'
+               '/data/featuretypes')
+        m.post('mock://example.com/solr/update')
+        m.post('mock://example.com/solr/update/json/docs')
+        res = runner.invoke(main, ['publish', '--public',
+                                   'mock://example.com/public', '--secure',
+                                   'mock://example.com/secure', '--solr',
+                                   'mock://example.com/solr', bags_dir])
+    assert res.exit_code == 0
+    assert 'Loaded bermuda' in res.output
+
+
+def test_reindex_deletes_and_reloads(runner, bags_dir):
+    with requests_mock.Mocker() as m:
+        m.post('mock://example.com/solr/update')
+        m.post('mock://example.com/solr/update/json/docs')
+        res = runner.invoke(main, ['reindex', '--solr',
+                                   'mock://example.com/solr', bags_dir])
+    assert res.exit_code == 0
+    assert m.request_history[0].json() == \
+        {'delete': {'query':
+            'dct_provenance_s:MIT AND dc_format_s:Shapefile'}}
+    assert 'Indexed bermuda' in res.output
+    assert m.request_history[2].json() == {'commit': {}}
diff --git a/tests/test_db.py b/tests/unit/test_db.py
similarity index 67%
rename from tests/test_db.py
rename to tests/unit/test_db.py
index b1006f4..de03817 100644
--- a/tests/test_db.py
+++ b/tests/unit/test_db.py
@@ -1,9 +1,9 @@
 import re

 import pytest
-from shapefile import Reader
 from sqlalchemy import Boolean, Date, Float, Integer, Text

+from slingshot.app import ShapeReader
 from slingshot.db import (
     metadata,
     multiply,
@@ -94,46 +94,46 @@ def test_multiply_does_not_modify_points():


 def test_pg_reader_read_returns_size(shapefile_unpacked):
-    shp = Reader(shapefile_unpacked + '/bermuda.shp')
-    pg = PGShapeReader(shp, 4326)
-    assert pg.read(17) == '1\t45683\t58443\t32.'
+    with ShapeReader(shapefile_unpacked + '/bermuda.shp') as shp:
+        pg = PGShapeReader(shp, 4326)
+        assert pg.read(17) == '1\t45683\t58443\t32.'

 def test_pg_reader_reads_to_end(shapefile_unpacked):
-    shp = Reader(shapefile_unpacked + '/bermuda.shp')
-    pg = PGShapeReader(shp, 4326)
-    buf = ''
-    while True:
-        chunk = pg.read(1024)
-        if not chunk:
-            break
-        buf += chunk
-    assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
-                     '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)
+    with ShapeReader(shapefile_unpacked + '/bermuda.shp') as shp:
+        pg = PGShapeReader(shp, 4326)
+        buf = ''
+        while True:
+            chunk = pg.read(1024)
+            if not chunk:
+                break
+            buf += chunk
+        assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
+                         '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)


 def test_pg_reader_reads_all(shapefile_unpacked):
-    shp = Reader(shapefile_unpacked + '/bermuda.shp')
-    pg = PGShapeReader(shp, 4326)
-    buf = pg.read()
-    assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
-                     '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)
+    with ShapeReader(shapefile_unpacked + '/bermuda.shp') as shp:
+        pg = PGShapeReader(shp, 4326)
+        buf = pg.read()
+        assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
+                         '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)


 def test_pg_reader_reads_line(shapefile_unpacked):
-    shp = Reader(shapefile_unpacked + '/bermuda.shp')
-    pg = PGShapeReader(shp, 4326)
-    assert pg.readline().startswith('1\t45683\t58443')
+    with ShapeReader(shapefile_unpacked + '/bermuda.shp') as shp:
+        pg = PGShapeReader(shp, 4326)
+        assert pg.readline().startswith('1\t45683\t58443')


 def test_pg_reader_readline_reads_to_end(shapefile_unpacked):
-    shp = Reader(shapefile_unpacked + '/bermuda.shp')
-    pg = PGShapeReader(shp, 4326)
-    buf = ''
-    while True:
-        line = pg.readline()
-        if not line:
-            break
-        buf += line
-    assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
-                     '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)
+    with ShapeReader(shapefile_unpacked + '/bermuda.shp') as shp:
+        pg = PGShapeReader(shp, 4326)
+        buf = ''
+        while True:
+            line = pg.readline()
+            if not line:
+                break
+            buf += line
+        assert re.search('Zeta Island\t1995-08-16\tSRID=4326;POINT '
+                         '\(-64\.[0-9]+ 32\.[0-9]+\)\n$', buf)
diff --git a/tests/test_proj.py b/tests/unit/test_proj.py
similarity index 100%
rename from tests/test_proj.py
rename to tests/unit/test_proj.py
diff --git a/tests/test_record.py b/tests/unit/test_record.py
similarity index 100%
rename from tests/test_record.py
rename to tests/unit/test_record.py
diff --git a/tox.ini b/tox.ini
index 9f32020..4aad070 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,27 +1,20 @@
 [tox]
-envlist = py27,py35,setup,coverage
+envlist = py27,py35,py{27,35}-integration,coverage,flake8

 [testenv]
-passenv = HOME
-deps = pipenv
-commands =
-    pipenv install --dev --system
-    py.test tests {posargs:--tb=short}
-
-[testenv:setup]
-basepython = python3.5
+passenv = HOME POSTGIS_DB
 deps =
-commands = python setup.py test
-
-[testenv:coverage]
-basepython = python3.5
+    pipenv
+    python-dotenv
+    py27: mock
+    {coverage,coveralls}: pytest-cov
+    coveralls: coveralls
+setenv =
+    {integration,coverage}: PYTEST_ARGS=tests/integration
+    coverage: PYTEST_COV="--cov=slingshot"
 commands =
     pipenv install --dev --system
-    py.test --cov=slingshot {posargs}
-deps =
-    mock
-    pytest-cov
-    {[testenv]deps}
+    py.test tests/unit {env:PYTEST_ARGS:} {env:PYTEST_COV:} {posargs:--tb=short}

 [testenv:flake8]
 basepython = python3.5
@@ -29,12 +22,9 @@ deps = flake8
 commands = flake8

 [testenv:coveralls]
-passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH
+passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH POSTGIS_DB
 basepython = python3.5
-deps =
-    coveralls
-    {[testenv:coverage]deps}
 commands =
     pipenv install --dev --system
-    py.test --cov=slingshot
+    py.test tests --cov=slingshot
     coveralls