Skip to content

Commit

Permalink
Better error reporting and duplicate alias testing.
Browse files Browse the repository at this point in the history
  • Loading branch information
domoritz authored and amercader committed Oct 12, 2012
1 parent 5a768d5 commit 65b499f
Show file tree
Hide file tree
Showing 2 changed files with 110 additions and 28 deletions.
55 changes: 42 additions & 13 deletions ckanext/datastore/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,11 +244,11 @@ def check_fields(context, fields):
for field in fields:
if field.get('type') and not _is_valid_pg_type(context, field['type']):
raise ValidationError({
'fields': ['{0} is not a valid field type'.format(field['type'])]
'fields': ['"{0}" is not a valid field type'.format(field['type'])]
})
elif not _is_valid_field_name(field['id']):
raise ValidationError({
'fields': ['{0} is not a valid field name'.format(field['id'])]
'fields': ['"{0}" is not a valid field name'.format(field['id'])]
})


Expand Down Expand Up @@ -288,7 +288,7 @@ def create_table(context, data_dict):
if 'type' not in field:
if not records or field['id'] not in records[0]:
raise ValidationError({
'fields': ['{0} type not guessable'.format(field['id'])]
'fields': ['"{0}" type not guessable'.format(field['id'])]
})
field['type'] = _guess_type(records[0][field['id']])

Expand Down Expand Up @@ -319,12 +319,24 @@ def create_table(context, data_dict):


def _get_aliases(context, data_dict):
    '''Get a list of aliases for a resource.

    Looks up the "_table_metadata" view for every row whose ``alias_of``
    matches the resource id in ``data_dict``.

    :param context: action context; ``context['connection']`` must be an
        open SQLAlchemy connection.
    :param data_dict: must contain ``resource_id``.
    :returns: list of alias names (possibly empty).
    '''
    res_id = data_dict['resource_id']
    # Bound parameter (:id) rather than string interpolation keeps the
    # query safe against injection.
    alias_sql = sqlalchemy.text(
        u'SELECT name FROM "_table_metadata" WHERE alias_of = :id')
    results = context['connection'].execute(alias_sql, id=res_id).fetchall()
    return [x[0] for x in results]


def _get_resources(context, alias):
    '''Get the resource ids that a given alias points at.

    More than one alias can exist per resource, so the reverse lookup is
    done by alias name; rows without an ``alias_of`` value are excluded.
    '''
    lookup = sqlalchemy.text(
        u'''SELECT alias_of FROM "_table_metadata"
        WHERE name = :alias AND alias_of IS NOT NULL''')
    rows = context['connection'].execute(lookup, alias=alias).fetchall()
    return [row[0] for row in rows]


def create_alias(context, data_dict):
aliases = _get_list(data_dict.get('aliases'))
if aliases != None:
Expand All @@ -339,6 +351,13 @@ def create_alias(context, data_dict):
alias=alias,
main=data_dict['resource_id']
)

res_ids = _get_resources(context, alias)
if res_ids:
raise ValidationError({
'alias': [(u'The alias "{0}" already exists.').format(alias)]
})

context['connection'].execute(sql_alias_string)


Expand Down Expand Up @@ -384,7 +403,7 @@ def create_indexes(context, data_dict):
for field in index_fields:
if field not in field_ids:
raise ValidationError({
'index': [('The field {0} is not a valid column name.').format(
'index': [('The field "{0}" is not a valid column name.').format(
index)]
})
fields_string = u', '.join([
Expand Down Expand Up @@ -450,7 +469,7 @@ def alter_table(context, data_dict):
if 'type' not in field:
if not records or field['id'] not in records[0]:
raise ValidationError({
'fields': ['{0} type not guessable'.format(field['id'])]
'fields': ['"{0}" type not guessable'.format(field['id'])]
})
field['type'] = _guess_type(records[0][field['id']])
new_fields.append(field)
Expand Down Expand Up @@ -491,7 +510,7 @@ def upsert_data(context, data_dict):

if method not in _methods:
raise ValidationError({
'method': [u'{0} is not defined'.format(method)]
'method': [u'"{0}" is not defined'.format(method)]
})

fields = _get_fields(context, data_dict)
Expand Down Expand Up @@ -631,14 +650,14 @@ def _validate_record(record, num, field_names):
# check record for sanity
if not isinstance(record, dict):
raise ValidationError({
'records': [u'row {0} is not a json object'.format(num)]
'records': [u'row "{0}" is not a json object'.format(num)]
})
## check for extra fields in data
extra_keys = set(record.keys()) - set(field_names)

if extra_keys:
raise ValidationError({
'records': [u'row {0} has extra keys "{1}"'.format(
'records': [u'row "{0}" has extra keys "{1}"'.format(
num + 1,
', '.join(list(extra_keys))
)]
Expand Down Expand Up @@ -727,7 +746,7 @@ def _sort(context, data_dict, field_ids):

if field not in field_ids:
raise ValidationError({
'sort': [u'field {0} not it table'.format(
'sort': [u'field "{0}" not in table'.format(
unicode(field, 'utf-8'))]
})
if sort.lower() not in ('asc', 'desc'):
Expand Down Expand Up @@ -923,7 +942,7 @@ def create(context, data_dict):
if ('duplicate key value violates unique constraint' in str(e)
or 'could not create unique index' in str(e)):
raise ValidationError({
'constraints': ['Cannot insert records because of uniqueness constraint'],
'constraints': ['Cannot insert records or create index because of uniqueness constraint'],
'info': {
'details': str(e)
}
Expand Down Expand Up @@ -961,6 +980,16 @@ def upsert(context, data_dict):
upsert_data(context, data_dict)
trans.commit()
return _unrename_json_field(data_dict)
except IntegrityError, e:
if 'duplicate key value violates unique constraint' in str(e):
raise ValidationError({
'constraints': ['Cannot insert records because of uniqueness constraint'],
'info': {
'details': str(e)
}
})
else:
raise
except Exception, e:
trans.rollback()
if 'due to statement timeout' in str(e):
Expand All @@ -986,7 +1015,7 @@ def delete(context, data_dict):
).fetchone()
if not result:
raise ValidationError({
'resource_id': [u'table for resource {0} does not exist'.format(
'resource_id': [u'table for resource "{0}" does not exist'.format(
data_dict['resource_id'])]
})
if not 'filters' in data_dict:
Expand Down Expand Up @@ -1022,7 +1051,7 @@ def search(context, data_dict):
).fetchone()
if not result:
raise ValidationError({
'resource_id': [u'table for resource {0} does not exist'.format(
'resource_id': [u'table for resource "{0}" does not exist'.format(
data_dict['resource_id'])]
})
return search_data(context, data_dict)
Expand Down
83 changes: 68 additions & 15 deletions ckanext/datastore/tests/test_datastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,34 @@ def test_create_invalid_unique_index(self):
res_dict = json.loads(res.body)
assert res_dict['success'] is False

def test_create_alias_twice(self):
    '''A second resource may not claim an alias that is already taken.

    The first datastore_create succeeds; the second, reusing the same
    alias for a different resource, must fail with HTTP 409.
    '''
    first = model.Package.get('annakarenina').resources[1]
    payload = {
        'resource_id': first.id,
        'aliases': 'new_alias',
        'fields': [{'id': 'book', 'type': 'text'},
                   {'id': 'author', 'type': 'text'}]
    }
    params = '%s=1' % json.dumps(payload)
    headers = {'Authorization': str(self.sysadmin_user.apikey)}
    response = self.app.post('/api/action/datastore_create', params=params,
                             extra_environ=headers)
    body = json.loads(response.body)
    assert body['success'] is True, body

    second = model.Package.get('annakarenina').resources[0]
    payload = {
        'resource_id': second.id,
        'aliases': 'new_alias',
        'fields': [{'id': 'more books', 'type': 'text'}]
    }
    params = '%s=1' % json.dumps(payload)
    headers = {'Authorization': str(self.sysadmin_user.apikey)}
    response = self.app.post('/api/action/datastore_create', params=params,
                             extra_environ=headers, status=409)
    body = json.loads(response.body)
    assert body['success'] is False, body

def test_create_basic(self):
resource = model.Package.get('annakarenina').resources[0]
aliases = [u'great_list_of_books', u'another_list_of_b\xfcks']
Expand Down Expand Up @@ -487,6 +515,16 @@ def test_create_basic(self):

def test_guess_types(self):
resource = model.Package.get('annakarenina').resources[1]

data = {
'resource_id': resource.id
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth, status="*") # ignore status
res_dict = json.loads(res.body)

data = {
'resource_id': resource.id,
'fields': [{'id': 'author', 'type': 'json'},
Expand Down Expand Up @@ -853,6 +891,36 @@ def setup_class(cls):
def teardown_class(cls):
rebuild_all_dbs(cls.Session)

def test_insert_non_existing_field(self):
    '''Inserting a record with a key that is not a table column fails.

    datastore_upsert must reject the extra key with HTTP 409.
    '''
    payload = {
        'resource_id': self.data['resource_id'],
        'method': 'insert',
        'records': [{u'b\xfck': 'the hobbit', 'dummy': 'tolkien'}]
    }
    params = '%s=1' % json.dumps(payload)
    headers = {'Authorization': str(self.sysadmin_user.apikey)}
    response = self.app.post('/api/action/datastore_upsert', params=params,
                             extra_environ=headers, status=409)
    body = json.loads(response.body)
    assert body['success'] is False

def test_insert_with_index_violation(self):
    '''Inserting a duplicate of an existing unique-indexed value fails.

    The fixture data already contains 'annakarenina', so inserting it
    again must trip the unique index and return HTTP 409.
    '''
    payload = {
        'resource_id': self.data['resource_id'],
        'method': 'insert',
        'records': [{u'b\xfck': 'annakarenina'}]
    }
    params = '%s=1' % json.dumps(payload)
    headers = {'Authorization': str(self.sysadmin_user.apikey)}
    response = self.app.post('/api/action/datastore_upsert', params=params,
                             extra_environ=headers, status=409)
    body = json.loads(response.body)
    assert body['success'] is False

def test_insert_basic(self):
hhguide = u"hitchhiker's guide to the galaxy"
data = {
Expand All @@ -879,21 +947,6 @@ def test_insert_basic(self):

assert results.rowcount == 3

def test_insert_non_existing_field(self):
    '''Inserting a record with a key that is not a table column fails.

    datastore_upsert must reject the extra 'dummy' key with HTTP 409.
    '''
    payload = {
        'resource_id': self.data['resource_id'],
        'method': 'insert',
        'records': [{u'b\xfck': 'annakarenina', 'dummy': 'tolkien'}]
    }
    params = '%s=1' % json.dumps(payload)
    headers = {'Authorization': str(self.sysadmin_user.apikey)}
    response = self.app.post('/api/action/datastore_upsert', params=params,
                             extra_environ=headers, status=409)
    body = json.loads(response.body)
    assert body['success'] is False


class TestDatastoreUpdate(tests.WsgiAppCase):
sysadmin_user = None
Expand Down

0 comments on commit 65b499f

Please sign in to comment.