
Fix problems with % in column names and table names.
domoritz authored and tobes committed Sep 19, 2012
1 parent 6ea18bb commit 295410f
Showing 2 changed files with 36 additions and 20 deletions.
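Background on why the escaping is needed: the datastore builds its SQL as plain strings and runs them through the SQLAlchemy connection's execute() (visible in the hunks below). With the psycopg2 driver and its pyformat paramstyle, a bare '%' in the statement text can be read as the start of a parameter placeholder and the query fails with a format error. The commit therefore doubles literal percent signs to '%%' wherever a field name is interpolated into SQL, and rejects '%' in table names and aliases outright. A minimal sketch of the escaping idea, with a hypothetical helper and column name (not code from the commit):

    # Sketch: double any literal '%' so the DB-API driver does not treat it as
    # the start of a parameter placeholder when the statement is executed.
    def escape_percent(sql_fragment):
        return sql_fragment.replace('%', '%%')

    column = 'boo%k'  # hypothetical column name containing a percent sign
    sql = u'ALTER TABLE "some_resource" ADD "{0}" text'.format(escape_percent(column))
    # the text passed to execute() contains "boo%%k"; the server sees "boo%k"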
ckanext/datastore/db.py (17 changes: 10 additions & 7 deletions)
@@ -78,7 +78,10 @@ def _is_valid_field_name(name):
     return True


-_is_valid_table_name = _is_valid_field_name
+def _is_valid_table_name(name):
+    if '%' in name:
+        return False
+    return _is_valid_field_name(name)


 def _validate_int(i, field_name):
@@ -278,7 +281,7 @@ def create_table(context, data_dict):
                 })

     fields = datastore_fields + supplied_fields + extra_fields
-    sql_fields = u", ".join([u'"{0}" {1}'.format(f['id'], f['type'])
+    sql_fields = u", ".join([u'"{0}" {1}'.format(f['id'].replace('%', '%%'), f['type'])
         for f in fields])

     sql_string = u'CREATE TABLE "{0}" ({1});'.format(
@@ -341,7 +344,7 @@ def create_indexes(context, data_dict):
                     'index': [('The field {0} is not a valid column name.').format(
                         index)]
                 })
-        fields_string = u','.join(['"%s"' % field for field in fields])
+        fields_string = u','.join(['"%s"' % field.replace('%', '%%') for field in fields])
         sql_index_strings.append(sql_index_string.format(
             res_id=data_dict['resource_id'], unique='',
             fields=fields_string))
@@ -363,7 +366,7 @@ def create_indexes(context, data_dict):
     if primary_key:
         sql_index_strings.append(sql_index_string.format(
             res_id=data_dict['resource_id'], unique='unique',
-            fields=u','.join(['"%s"' % field for field in primary_key])))
+            fields=u','.join(['"%s"' % field.replace('%', '%%') for field in primary_key])))

     map(context['connection'].execute, sql_index_strings)

@@ -388,7 +391,7 @@ def _drop_indexes(context, data_dict, unique=False):
     indexes_to_drop = context['connection'].execute(
         sql_get_index_string, data_dict['resource_id']).fetchall()
     for index in indexes_to_drop:
-        context['connection'].execute(sql_drop_index.format(index[0]))
+        context['connection'].execute(sql_drop_index.format(index[0]).replace('%', '%%'))


 def alter_table(context, data_dict):
@@ -440,7 +443,7 @@ def alter_table(context, data_dict):
     for field in new_fields:
         sql = 'ALTER TABLE "{0}" ADD "{1}" {2}'.format(
             data_dict['resource_id'],
-            field['id'],
+            field['id'].replace('%', '%%'),
             field['type'])
         context['connection'].execute(sql)

@@ -465,7 +468,7 @@ def upsert_data(context, data_dict):
     fields = _get_fields(context, data_dict)
     field_names = _pluck('id', fields)
     records = data_dict['records']
-    sql_columns = ", ".join(['"%s"' % name for name in field_names]
+    sql_columns = ", ".join(['"%s"' % name.replace('%', '%%') for name in field_names]
                             + ['"_full_text"'])

     if method == INSERT:
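Taken together, the db.py changes split the handling in two: table names and aliases containing '%' are rejected up front by the new _is_valid_table_name(), while column names may still contain '%' and are escaped to '%%' at each point where they are interpolated into SQL text (CREATE TABLE, CREATE INDEX, ALTER TABLE and the upsert column list). A rough illustration of the expected behaviour, assuming the validators shown above:

    assert _is_valid_table_name('books') is True
    assert _is_valid_table_name('boo%k') is False   # '%' is not allowed in table or alias names
    # a column id such as 'boo%k' remains acceptable; it is written into the
    # SQL text as "boo%%k" so the driver passes a single '%' through to the server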
ckanext/datastore/tests/test_datastore.py (39 changes: 26 additions & 13 deletions)
@@ -80,7 +80,20 @@ def test_create_invalid_alias_name(self):
         resource = model.Package.get('annakarenina').resources[0]
         data = {
             'resource_id': resource.id,
-            'aliases': 'foo"bar',
+            'aliases': u'foo"bar',
             'fields': [{'id': 'book', 'type': 'text'},
                        {'id': 'author', 'type': 'text'}]
         }
+        postparams = '%s=1' % json.dumps(data)
+        auth = {'Authorization': str(self.sysadmin_user.apikey)}
+        res = self.app.post('/api/action/datastore_create', params=postparams,
+                            extra_environ=auth, status=409)
+        res_dict = json.loads(res.body)
+        assert res_dict['success'] is False
+
+        data = {
+            'resource_id': resource.id,
+            'aliases': u'fo%25bar',
+            'fields': [{'id': 'book', 'type': 'text'},
+                       {'id': 'author', 'type': 'text'}]
+        }
@@ -215,13 +228,13 @@ def test_create_basic(self):
         data = {
             'resource_id': resource.id,
             'aliases': aliases,
-            'fields': [{'id': 'book', 'type': 'text'},
+            'fields': [{'id': 'boo%k', 'type': 'text'},
                        {'id': 'author', 'type': 'json'}],
-            'indexes': [['book', 'author'], 'book'],
+            'indexes': [['boo%k', 'author'], 'boo%k'],
             'records': [
-                {'book': 'crime', 'author': ['tolstoy', 'dostoevsky']},
-                {'book': 'annakarenina', 'author': ['tolstoy', 'putin']},
-                {'book': 'warandpeace'}] # treat author as null
+                {'boo%k': 'crime', 'author': ['tolstoy', 'dostoevsky']},
+                {'boo%k': 'annakarenina', 'author': ['tolstoy', 'putin']},
+                {'boo%k': 'warandpeace'}] # treat author as null
         }
         ### Firstly test to see if resource things it has datastore table
         postparams = '%s=1' % json.dumps({'id': resource.id})
@@ -248,7 +261,7 @@ def test_create_basic(self):

         assert results.rowcount == 3
         for i, row in enumerate(results):
-            assert data['records'][i].get('book') == row['book']
+            assert data['records'][i].get('boo%k') == row['boo%k']
             assert data['records'][i].get('author') == (
                 json.loads(row['author'][0]) if row['author'] else None)

@@ -288,7 +301,7 @@ def test_create_basic(self):
         ####### insert again simple
         data2 = {
             'resource_id': resource.id,
-            'records': [{'book': 'hagji murat', 'author': ['tolstoy']}]
+            'records': [{'boo%k': 'hagji murat', 'author': ['tolstoy']}]
         }

         postparams = '%s=1' % json.dumps(data2)
@@ -306,7 +319,7 @@ def test_create_basic(self):

         all_data = data['records'] + data2['records']
         for i, row in enumerate(results):
-            assert all_data[i].get('book') == row['book']
+            assert all_data[i].get('boo%k') == row['boo%k']
             assert all_data[i].get('author') == (
                 json.loads(row['author'][0]) if row['author'] else None)

@@ -319,7 +332,7 @@ def test_create_basic(self):
         ####### insert again extra field
         data3 = {
             'resource_id': resource.id,
-            'records': [{'book': 'crime and punsihment',
+            'records': [{'boo%k': 'crime and punsihment',
                          'author': ['dostoevsky'], 'rating': 'good'}],
             'indexes': ['rating']
         }
@@ -339,7 +352,7 @@ def test_create_basic(self):

         all_data = data['records'] + data2['records'] + data3['records']
         for i, row in enumerate(results):
-            assert all_data[i].get('book') == row['book'], (i, all_data[i].get('book'), row['book'])
+            assert all_data[i].get('boo%k') == row['boo%k'], (i, all_data[i].get('boo%k'), row['boo%k'])
             assert all_data[i].get('author') == (json.loads(row['author'][0]) if row['author'] else None)

         results = c.execute('''select * from "{0}" where _full_text @@ to_tsquery('dostoevsky') '''.format(resource.id))
@@ -350,8 +363,8 @@ def test_create_basic(self):
         ####### insert again which will fail because of unique book name
         data4 = {
             'resource_id': resource.id,
-            'records': [{'book': 'warandpeace'}],
-            'primary_key': 'book'
+            'records': [{'boo%k': 'warandpeace'}],
+            'primary_key': 'boo%k'
         }

         postparams = '%s=1' % json.dumps(data4)
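The test changes exercise both sides of the fix: test_create_basic now uses a column literally named 'boo%k' through create, upsert and search, while test_create_invalid_alias_name adds an alias containing a percent sign (u'fo%25bar') and expects the request to be rejected with a 409. For reference, a small worked example of the escaped column list that the create_table code above produces for such fields (values are illustrative, and the automatic _id/_full_text columns are omitted):

    fields = [{'id': 'boo%k', 'type': 'text'}, {'id': 'author', 'type': 'json'}]
    sql_fields = u", ".join([u'"{0}" {1}'.format(f['id'].replace('%', '%%'), f['type'])
                             for f in fields])
    # sql_fields == u'"boo%%k" text, "author" json'; once the driver has consumed
    # the doubled percent sign, the table ends up with a column called boo%k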
