Do not use the write_url for datastore_search
This commit removes the use of the write_url in the datastore_search
action, as permissions are already checked by the
auth.datastore_search function (which delegates to resource_show).
Leaving the write_url in the action method appears to have been an
accidental oversight.

Tests (including the privacy test TestDatastoreSearch.test_search_private_dataset)
continue to pass.
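
As a rough illustration of the call path this relies on, a minimal
sketch (assuming a running CKAN with the datastore plugin; the user
name and resource id are placeholders):

    import ckan.plugins.toolkit as toolkit

    # Any user who passes resource_show for a resource can search it;
    # the action itself now opens only the read-only connection.
    context = {'user': 'some-user'}        # placeholder user
    result = toolkit.get_action('datastore_search')(context, {
        'resource_id': 'RESOURCE-ID',      # placeholder resource id
        'limit': 5,
    })
    print(result['records'])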

Allows datastore_search_sql on private datasets

Moves the auth check for datastore_search_sql to the db module,
where a list of the resource tables involved in the query is
available. This should allow users to query across any resources
they have access to, whether public or private.
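
A hedged sketch of the behaviour this enables (the resource ids are
placeholders, not real tables): one statement may now join a public
and a private resource, with db.search_sql checking access on each
table it finds in the query.

    import ckan.plugins.toolkit as toolkit

    # One statement over two datastore resources; every table name
    # found in the query is checked against datastore_search_sql auth.
    sql = ('SELECT a."name", b."value" '
           'FROM "PUBLIC-RESOURCE-ID" a '
           'JOIN "PRIVATE-RESOURCE-ID" b ON b."name" = a."name"')
    result = toolkit.get_action('datastore_search_sql')(
        {'user': 'some-user'}, {'sql': sql})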

PEP8 cleanup and skipping tests on Postgres 8.4

As Postgres 8.4 is now legacy, we skip tests that rely on features it does not support.
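
The skip pattern applied throughout the test modules below reduces
to the following sketch (the test class is hypothetical; the helper
is the one added to tests/helpers.py in this commit):

    import nose

    from ckanext.datastore.tests.helpers import should_skip_test_for_legacy

    class TestNeedsFullSql(object):  # hypothetical test class
        @classmethod
        def setup_class(cls):
            # legacy mode (e.g. Postgres 8.4, no separate read url)
            # lacks the features these tests exercise, so bail early
            if should_skip_test_for_legacy():
                raise nose.SkipTest(
                    'SQL tests are not supported in legacy mode')
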
rossjones committed Sep 11, 2015
1 parent 63e3bc9 commit 184c8b6
Showing 8 changed files with 61 additions and 22 deletions.
30 changes: 19 additions & 11 deletions ckanext/datastore/db.py
@@ -86,8 +86,8 @@ def _is_valid_field_name(name):
* can't contain double quote (")
* can't be empty
'''
return (name and name == name.strip() and not name.startswith('_')
and not '"' in name)
return (name and name == name.strip() and not name.startswith('_') and
'"' not in name)


def _is_valid_table_name(name):
@@ -135,7 +135,7 @@ def _cache_types(context):
'json' if native_json else 'text'))
_pg_types.clear()

## redo cache types with json now available.
# redo cache types with json now available.
return _cache_types(context)

psycopg2.extras.register_composite('nested',
@@ -215,7 +215,7 @@ def _guess_type(field):
elif float in data_types:
return 'numeric'

##try iso dates
# try iso dates
for format in _DATE_FORMATS:
try:
datetime.datetime.strptime(field, format)
@@ -324,7 +324,7 @@ def create_table(context, data_dict):
})
supplied_field_ids = records[0].keys()
for field_id in supplied_field_ids:
if not field_id in field_ids:
if field_id not in field_ids:
extra_fields.append({
'id': field_id,
'type': _guess_type(records[0][field_id])
@@ -565,7 +565,7 @@ def alter_table(context, data_dict):
'present or in wrong order').format(
field['id'])]
})
## no need to check type as field already defined.
# no need to check type as field already defined.
continue

if 'type' not in field:
@@ -589,7 +589,7 @@ def alter_table(context, data_dict):
})
supplied_field_ids = records[0].keys()
for field_id in supplied_field_ids:
if not field_id in field_ids:
if field_id not in field_ids:
new_fields.append({
'id': field_id,
'type': _guess_type(records[0][field_id])
@@ -636,7 +636,7 @@ def upsert_data(context, data_dict):
for field in fields:
value = record.get(field['id'])
if value and field['type'].lower() == 'nested':
## a tuple with an empty second value
# a tuple with an empty second value
value = (json.dumps(value), '')
row.append(value)
row.append(_to_full_text(fields, record))
@@ -678,7 +678,7 @@ def upsert_data(context, data_dict):
for field in fields:
value = record.get(field['id'])
if value is not None and field['type'].lower() == 'nested':
## a tuple with an empty second value
# a tuple with an empty second value
record[field['id']] = (json.dumps(value), '')

non_existing_filed_names = [field for field in record
@@ -778,7 +778,7 @@ def _validate_record(record, num, field_names):
raise ValidationError({
'records': [u'row "{0}" is not a json object'.format(num)]
})
## check for extra fields in data
# check for extra fields in data
extra_keys = set(record.keys()) - set(field_names)

if extra_keys:
@@ -1159,7 +1159,7 @@ def delete(context, data_dict):
trans = context['connection'].begin()
try:
# check if table exists
if not 'filters' in data_dict:
if 'filters' not in data_dict:
context['connection'].execute(
u'DROP TABLE "{0}" CASCADE'.format(data_dict['resource_id'])
)
@@ -1224,6 +1224,14 @@ def search_sql(context, data_dict):
'permissions': ['Not authorized to access system tables']
})

# Check the user has the correct permissions for this resource. This
# would normally be done in the action function
# (action.datastore_search_sql) but it is only at this point that we
# know which tables are being queried.
for resource_table in table_names:
data_dict['resource_id'] = resource_table
p.toolkit.check_access('datastore_search_sql', context, data_dict)

results = context['connection'].execute(sql)

return format_results(context, results, data_dict)
4 changes: 1 addition & 3 deletions ckanext/datastore/logic/action.py
@@ -399,7 +399,7 @@ def datastore_search(context, data_dict):
raise p.toolkit.ValidationError(errors)

res_id = data_dict['resource_id']
data_dict['connection_url'] = pylons.config['ckan.datastore.write_url']
data_dict['connection_url'] = pylons.config['ckan.datastore.read_url']

resources_sql = sqlalchemy.text(u'''SELECT alias_of FROM "_table_metadata"
WHERE name = :id''')
@@ -461,8 +461,6 @@ def datastore_search_sql(context, data_dict):
'query': ['Query is not a single statement.']
})

p.toolkit.check_access('datastore_search_sql', context, data_dict)

data_dict['connection_url'] = pylons.config['ckan.datastore.read_url']

result = db.search_sql(context, data_dict)
6 changes: 3 additions & 3 deletions ckanext/datastore/logic/auth.py
@@ -2,7 +2,7 @@


def datastore_auth(context, data_dict, privilege='resource_update'):
if not 'id' in data_dict:
if 'id' not in data_dict:
data_dict['id'] = data_dict.get('resource_id')

user = context.get('user')
@@ -13,7 +13,7 @@ def datastore_auth(context, data_dict, privilege='resource_update'):
return {
'success': False,
'msg': p.toolkit._('User {0} not authorized to update resource {1}'
.format(str(user), data_dict['id']))
.format(str(user), data_dict['id']))
}
else:
return {'success': True}
@@ -50,7 +50,7 @@ def datastore_search(context, data_dict):

@p.toolkit.auth_allow_anonymous_access
def datastore_search_sql(context, data_dict):
return {'success': True}
return datastore_auth(context, data_dict, 'resource_show')


def datastore_change_permissions(context, data_dict):
7 changes: 7 additions & 0 deletions ckanext/datastore/tests/helpers.py
@@ -8,6 +8,13 @@ def extract(d, keys):
return dict((k, d[k]) for k in keys if k in d)


def should_skip_test_for_legacy():
legacy = False
with p.use_plugin('datastore') as the_plugin:
legacy = the_plugin.legacy_mode
return legacy


def clear_db(Session):
drop_tables = u'''select 'drop table "' || tablename || '" cascade;'
from pg_tables where schemaname = 'public' '''
9 changes: 6 additions & 3 deletions ckanext/datastore/tests/test_dump.py
@@ -21,6 +21,9 @@ class TestDatastoreDump(object):

@classmethod
def setup_class(cls):
if helpers.should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

wsgiapp = middleware.make_app(config['global_conf'], **config)
cls.app = paste.fixture.TestApp(wsgiapp)
if not tests.is_datastore_supported():
@@ -39,9 +42,9 @@ def setup_class(cls):
{'id': 'published'},
{'id': u'characters', u'type': u'_text'}],
'records': [{u'b\xfck': 'annakarenina',
'author': 'tolstoy',
'published': '2005-03-01',
'nested': ['b', {'moo': 'moo'}],
'author': 'tolstoy',
'published': '2005-03-01',
'nested': ['b', {'moo': 'moo'}],
u'characters': [u'Princess Anna', u'Sergius']},
{u'b\xfck': 'warandpeace', 'author': 'tolstoy',
'nested': {'a': 'b'}}]
4 changes: 4 additions & 0 deletions ckanext/datastore/tests/test_interface.py
@@ -2,6 +2,7 @@

import ckan.plugins as p
import ckan.tests.helpers as helpers
import ckanext.datastore.tests.helpers as datastore_helpers
import ckan.tests.factories as factories

assert_equals = nose.tools.assert_equals
@@ -11,6 +12,9 @@
class TestInterfaces(object):
@classmethod
def setup_class(cls):
if datastore_helpers.should_skip_test_for_legacy():
raise nose.SkipTest("These tests are not supported in legacy mode")

p.load('datastore')
p.load('sample_datastore_plugin')

8 changes: 7 additions & 1 deletion ckanext/datastore/tests/test_plugin.py
@@ -2,10 +2,12 @@
import mock

import ckan.tests.helpers as helpers
import ckanext.datastore.tests.helpers as datastore_helpers
import ckan.plugins as p
import ckanext.datastore.interfaces as interfaces
import ckanext.datastore.plugin as plugin

from pylons import config

DatastorePlugin = plugin.DatastorePlugin
assert_equal = nose.tools.assert_equal
@@ -53,6 +55,9 @@ def test_loading_datastore_last_doesnt_work(self):
class TestPluginDatastoreSearch(object):
@classmethod
def setup_class(cls):
if datastore_helpers.should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

p.load('datastore')

@classmethod
@@ -94,7 +99,8 @@ def test_lang_parameter_overwrites_default_fts_lang(self):
assert_equal(result['ts_query'], expected_ts_query)

def test_fts_rank_column_uses_lang_when_casting_to_tsvector(self):
expected_select_content = u'to_tsvector(\'french\', cast("country" as text))'
expected_select_content = \
u'to_tsvector(\'french\', cast("country" as text))'
data_dict = {
'q': {'country': 'Brazil'},
'lang': 'french',
15 changes: 14 additions & 1 deletion ckanext/datastore/tests/test_search.py
@@ -11,7 +11,8 @@
import ckan.tests.legacy as tests

import ckanext.datastore.db as db
from ckanext.datastore.tests.helpers import extract, rebuild_all_dbs
from ckanext.datastore.tests.helpers import (extract, rebuild_all_dbs,
should_skip_test_for_legacy)

import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
@@ -23,6 +24,9 @@
class TestDatastoreSearchNewTest(object):
@classmethod
def setup_class(cls):
if should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

p.load('datastore')

@classmethod
@@ -110,6 +114,9 @@ class TestDatastoreSearch(tests.WsgiAppCase):

@classmethod
def setup_class(cls):
if should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

if not tests.is_datastore_supported():
raise nose.SkipTest("Datastore not supported")
p.load('datastore')
@@ -649,6 +656,9 @@ def test_search_is_unsuccessful_when_called_with_invalid_fields(self):
class TestDatastoreFullTextSearch(tests.WsgiAppCase):
@classmethod
def setup_class(cls):
if should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

if not tests.is_datastore_supported():
raise nose.SkipTest("Datastore not supported")
p.load('datastore')
@@ -779,6 +789,9 @@ class TestDatastoreSQL(tests.WsgiAppCase):

@classmethod
def setup_class(cls):
if should_skip_test_for_legacy():
raise nose.SkipTest("SQL tests are not supported in legacy mode")

if not tests.is_datastore_supported():
raise nose.SkipTest("Datastore not supported")
plugin = p.load('datastore')
