Skip to content

Commit

Permalink
Merge pull request #560 from okfn/560-datastore-tests
Browse files Browse the repository at this point in the history
Datastore tests aren't respecting the legacy_mode flag, and index creation does not work on PG 8.4
  • Loading branch information
vitorbaptista committed Mar 7, 2013
2 parents deed24f + ea9a201 commit c8c36d3
Show file tree
Hide file tree
Showing 5 changed files with 110 additions and 36 deletions.
45 changes: 33 additions & 12 deletions ckanext/datastore/db.py
Expand Up @@ -5,6 +5,9 @@
import urllib
import urllib2
import urlparse
import random
import string
import distutils.version
import logging
import pprint
import sqlalchemy
Expand Down Expand Up @@ -55,7 +58,7 @@ def _pluck(field, arr):

def _get_list(input, strip=True):
"""Transforms a string or list to a list"""
if input == None:
if input is None:
return
if input == '':
return []
Expand Down Expand Up @@ -123,12 +126,7 @@ def _cache_types(context):
_pg_types[result[0]] = result[1]
_type_names.add(result[1])
if 'nested' not in _type_names:
native_json = False
try:
version = connection.execute('select version();').fetchone()
native_json = map(int, version[0].split()[1].split(".")[:2]) >= [9, 2]
except Exception:
pass
native_json = _pg_version_is_at_least(connection, '9.2')

connection.execute('CREATE TYPE "nested" AS (json {0}, extra text)'
.format('json' if native_json else 'text'))
Expand All @@ -142,6 +140,17 @@ def _cache_types(context):
psycopg2.extras.register_composite('nested', connection.connection, True)


def _pg_version_is_at_least(connection, version):
try:
v = distutils.version.LooseVersion(version)
pg_version = connection.execute('select version();').fetchone()
pg_version_number = pg_version[0].split()[1]
pv = distutils.version.LooseVersion(pg_version_number)
return v <= pv
except ValueError:
return False


def _is_valid_pg_type(context, type_name):
if type_name in _type_names:
return True
Expand Down Expand Up @@ -372,10 +381,10 @@ def create_indexes(context, data_dict):

# index and primary key could be [],
# which means that indexes should be deleted
if indexes == None and primary_key == None:
if indexes is None and primary_key is None:
return

sql_index_skeletton = u'CREATE {unique} INDEX ON "{res_id}"'
sql_index_skeletton = u'CREATE {unique} INDEX {name} ON "{res_id}"'
sql_index_string_method = sql_index_skeletton + u' USING {method}({fields})'
sql_index_string = sql_index_skeletton + u' ({fields})'
sql_index_strings = []
Expand All @@ -384,17 +393,28 @@ def create_indexes(context, data_dict):
field_ids = _pluck('id', fields)
json_fields = [x['id'] for x in fields if x['type'] == 'nested']

if indexes != None:
def generate_index_name():
    # Build the optional index-name part of a CREATE INDEX statement.
    # PostgreSQL 9.0+ can auto-generate the index name (CREATE INDEX ON ...),
    # so an empty string is returned there; older servers (e.g. 8.4)
    # require an explicit name, so a random 'idx_' + 10 alphanumeric
    # characters identifier is generated instead.
    # NOTE: closes over `context` from the enclosing create_indexes().
    if _pg_version_is_at_least(context['connection'], '9.0'):
        return ''
    else:
        source = string.ascii_letters + string.digits
        random_string = ''.join([random.choice(source) for n in xrange(10)])
        return 'idx_' + random_string

if indexes is not None:
_drop_indexes(context, data_dict, False)

# create index for faster full text search (indexes: gin or gist)
sql_index_strings.append(sql_index_string_method.format(
res_id=data_dict['resource_id'], unique='',
res_id=data_dict['resource_id'],
unique='',
name=generate_index_name(),
method='gist', fields='_full_text'))
else:
indexes = []

if primary_key != None:
if primary_key is not None:
_drop_indexes(context, data_dict, True)
indexes.append(primary_key)

Expand All @@ -417,6 +437,7 @@ def create_indexes(context, data_dict):
sql_index_strings.append(sql_index_string.format(
res_id=data_dict['resource_id'],
unique='unique' if index == primary_key else '',
name=generate_index_name(),
fields=fields_string))

map(context['connection'].execute, sql_index_strings)
Expand Down
22 changes: 11 additions & 11 deletions ckanext/datastore/plugin.py
Expand Up @@ -44,7 +44,7 @@ def configure(self, config):
import sys
if sys.argv[0].split('/')[-1] == 'paster' and 'datastore' in sys.argv[1:]:
log.warn('Omitting permission checks because you are '
'running paster commands.')
'running paster commands.')
return

self.ckan_url = self.config['sqlalchemy.url']
Expand Down Expand Up @@ -74,8 +74,8 @@ def configure(self, config):
"The DataStore will NOT work and datastore tests will be skipped.")

## Do light wrapping around action function to add datastore_active
## to resource dict. Not using IAction extension as this prevents other plugins
## from having a custom resource_read.
## to resource dict. Not using IAction extension as this prevents
## other plugins from having a custom resource_read.

# Make sure actions are cached
resource_show = p.toolkit.get_action('resource_show')
Expand Down Expand Up @@ -107,7 +107,6 @@ def new_resource_show(context, data_dict):
logic._actions['resource_show'] = new_resource_show

def _is_read_only_database(self):
read_only_database = True
for url in [self.ckan_url, self.write_url, self.read_url]:
connection = db._get_engine(None,
{'connection_url': url}).connect()
Expand All @@ -121,19 +120,20 @@ def _is_read_only_database(self):
else:
raise
else:
read_only_database = False
return False
finally:
trans.rollback()
return read_only_database
return True

def _check_separate_db(self):
'''
Make sure the datastore is on a separate db. Otherwise one could access
all internal tables via the api.
'''

if self.write_url == self.read_url:
raise Exception("The write and read-only database connection url are the same.")
if not self.legacy_mode:
if self.write_url == self.read_url:
raise Exception("The write and read-only database connection url are the same.")

if self._get_db_from_url(self.ckan_url) == self._get_db_from_url(self.read_url):
raise Exception("The CKAN and datastore database are the same.")
Expand Down Expand Up @@ -207,9 +207,9 @@ def _create_alias_table(self):

def get_actions(self):
actions = {'datastore_create': action.datastore_create,
'datastore_upsert': action.datastore_upsert,
'datastore_delete': action.datastore_delete,
'datastore_search': action.datastore_search}
'datastore_upsert': action.datastore_upsert,
'datastore_delete': action.datastore_delete,
'datastore_search': action.datastore_search}
if not self.legacy_mode:
actions['datastore_search_sql'] = action.datastore_search_sql
return actions
Expand Down
39 changes: 39 additions & 0 deletions ckanext/datastore/tests/test_configure.py
@@ -0,0 +1,39 @@
import unittest

import ckanext.datastore.plugin as plugin


class TestTypeGetters(unittest.TestCase):
    # NOTE(review): class name looks like a copy-paste from test_unit.py;
    # these tests exercise DatastorePlugin configuration, not type getters.
    def setUp(self):
        # Fresh, unconfigured plugin instance for every test.
        self.p = plugin.DatastorePlugin()

    def test_legacy_mode_default(self):
        """A freshly-created plugin must not be in legacy mode."""
        assert not self.p.legacy_mode

    def test_set_legacy_mode(self):
        """Configuring with a write URL but no read URL enables legacy mode.

        In legacy mode the read URL falls back to the write URL.
        """
        assert not self.p.legacy_mode
        c = {
            'sqlalchemy.url': 'bar',
            'ckan.datastore.write_url': 'foo'
        }
        try:
            self.p.configure(c)
        except Exception:
            # configure() may fail later (e.g. on database access with these
            # fake URLs); only the attributes set before that point matter.
            pass
        assert self.p.legacy_mode
        assert self.p.write_url == 'foo'
        assert self.p.read_url == 'foo'

    def test_check_separate_db(self):
        """_check_separate_db only enforces separate URLs outside legacy mode."""
        # Identical read/write URLs: allowed in legacy mode, rejected otherwise.
        self.p.write_url = 'postgresql://u:pass@localhost/dt'
        self.p.read_url = 'postgresql://u:pass@localhost/dt'
        self.p.ckan_url = 'postgresql://u:pass@localhost/ckan'

        self.p.legacy_mode = True
        try:
            self.p._check_separate_db()
        except Exception:
            self.fail("_check_separate_db raise Exception unexpectedly!")

        self.p.legacy_mode = False
        self.assertRaises(Exception, self.p._check_separate_db)
4 changes: 3 additions & 1 deletion ckanext/datastore/tests/test_search.py
Expand Up @@ -406,7 +406,9 @@ class TestDatastoreSQL(tests.WsgiAppCase):
def setup_class(cls):
if not tests.is_datastore_supported():
raise nose.SkipTest("Datastore not supported")
p.load('datastore')
plugin = p.load('datastore')
if plugin.legacy_mode:
raise nose.SkipTest("SQL tests are not supported in legacy mode")
ctd.CreateTestData.create()
cls.sysadmin_user = model.User.get('testsysadmin')
cls.normal_user = model.User.get('annafan')
Expand Down
36 changes: 24 additions & 12 deletions ckanext/datastore/tests/test_unit.py
@@ -1,11 +1,14 @@
import unittest
import pylons
import nose

import ckan.tests as tests
import ckanext.datastore.db as db


class TestTypeGetters(unittest.TestCase):
def test_list(self):
assert db._get_list(None) == None
assert db._get_list(None) is None
assert db._get_list([]) == []
assert db._get_list('') == []
assert db._get_list('foo') == ['foo']
Expand All @@ -18,17 +21,17 @@ def test_list(self):
assert db._get_list(['foo', ['bar', 'baz']]) == ['foo', ['bar', 'baz']]

def test_bool(self):
assert db._get_bool(None) == False
assert db._get_bool(False) == False
assert db._get_bool(True) == True
assert db._get_bool('', True) == True
assert db._get_bool('', False) == False
assert db._get_bool('True') == True
assert db._get_bool('False') == False
assert db._get_bool('1') == True
assert db._get_bool('0') == False
assert db._get_bool('on') == True
assert db._get_bool('off') == False
assert db._get_bool(None) is False
assert db._get_bool(False) is False
assert db._get_bool(True) is True
assert db._get_bool('', True) is True
assert db._get_bool('', False) is False
assert db._get_bool('True') is True
assert db._get_bool('False') is False
assert db._get_bool('1') is True
assert db._get_bool('0') is False
assert db._get_bool('on') is True
assert db._get_bool('off') is False

def test_is_valid_field_name(self):
assert db._is_valid_field_name("foo")
Expand All @@ -49,3 +52,12 @@ def test_is_valid_table_name(self):
assert db._is_valid_table_name("'")
assert not db._is_valid_table_name("")
assert not db._is_valid_table_name("foo%bar")

def test_pg_version_check(self):
    """Integration check of db._pg_version_is_at_least against a live server.

    Requires a reachable datastore database; skipped otherwise.
    """
    if not tests.is_datastore_supported():
        raise nose.SkipTest("Datastore not supported")
    engine = db._get_engine(None,
        {'connection_url': pylons.config['sqlalchemy.url']})
    connection = engine.connect()
    # Any supported server is at least 8.0 and (at the time of writing)
    # below 10.0, so these bounds hold regardless of the exact version.
    assert db._pg_version_is_at_least(connection, '8.0')
    assert not db._pg_version_is_at_least(connection, '10.0')

0 comments on commit c8c36d3

Please sign in to comment.