4 changes: 3 additions & 1 deletion CHANGELOG.rst
@@ -5,7 +5,9 @@ ChangeLog
master (unreleased)
===================

- Nothing here yet
* Enforce unicity of keys in NestedListSerializers (#202)
* Define __unicode__ and __str__ on models (#200)
* Fix regression on presets_lists endpoint (#199)

Release 0.8.1 (2017-03-07)
==========================
20 changes: 20 additions & 0 deletions demo/tests/tests_integration.py
@@ -86,6 +86,15 @@ def test_simple(self):
field.accesses.filter(access_id=access, level=level).exists()
)

def test_fields_slug(self):
data = deepcopy(form_data)
# duplicate field
data['fields'] *= 2
res = self.client.post(
reverse('formidable:form_create'), data, format='json'
)
self.assertEquals(res.status_code, 400)

def test_with_items_in_fields(self):
initial_count = Formidable.objects.count()
res = self.client.post(
@@ -179,6 +188,17 @@ def test_create_field_on_update(self):
self.assertEquals(form.pk, self.form.pk)
self.assertEquals(form.fields.count(), 2)

def test_duplicate_items_update(self):
# create a form with items
data = deepcopy(form_data_items)
res = self.client.put(self.edit_url, data, format='json')
self.assertEquals(res.status_code, 200)
# update items with duplicate entries
data['fields'] *= 2
res = self.client.put(self.edit_url, data, format='json')
# expect validation error
self.assertEquals(res.status_code, 400)

def test_delete_field_on_update(self):
self.form.fields.create(
type_id='text', slug='textslug', label='mytext',
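A note on the two new tests: form_data and form_data_items are fixtures defined elsewhere in the demo suite, so the diff does not show their shape. What matters is that data['fields'] *= 2 repeats every field entry, so at least two entries share the same key, and the endpoint now answers with HTTP 400 instead of silently accepting the payload. A minimal sketch of the same scenario, with an illustrative payload (the form attributes shown and the omission of accesses are assumptions, not the real fixture):

# Sketch only -- illustrative payload, not the real form_data fixture.
from django.urls import reverse  # django.core.urlresolvers on older Django
from rest_framework.test import APITestCase


class DuplicateKeySketch(APITestCase):

    def test_duplicate_field_slugs_rejected(self):
        payload = {
            'label': 'sketch form',              # form attributes are assumed
            'description': 'duplicate-slug demonstration',
            'fields': [
                {'slug': 'first-name', 'type_id': 'text', 'label': 'First name'},
            ],
        }
        payload['fields'] *= 2  # two field entries with the same slug
        res = self.client.post(
            reverse('formidable:form_create'), payload, format='json'
        )
        self.assertEqual(res.status_code, 400)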
1 change: 1 addition & 0 deletions formidable/serializers/access.py
@@ -36,6 +36,7 @@ class AccessListSerializer(NestedListSerializer):
parent_name = 'field_id'

def validate(self, data):
data = super(AccessListSerializer, self).validate(data)
accesses_id = [accesses['access_id'] for accesses in data]

for access in get_accesses():
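The one-line change above is what makes the new base-class check reach AccessListSerializer: validate() now delegates to NestedListSerializer.validate(), which rejects duplicate values of the serializer's field_id (presumably 'access_id' here, given the comprehension that follows), before the existing per-access checks run. Schematically, with the tail of the method elided just as it is in the diff:

# Control-flow sketch; the rest of the method is cut off above and only assumed here.
def validate(self, data):
    # New in this PR: duplicate access_id entries are rejected by the base class.
    data = super(AccessListSerializer, self).validate(data)
    accesses_id = [accesses['access_id'] for accesses in data]
    for access in get_accesses():
        ...  # existing per-access validation, unchanged by this PR
    return data  # assumed: DRF validate() methods must return the validated value

Any subclass that overrides validate() without calling super() would silently skip the unicity check, which is why this hunk and the fields.py hunk below both add the super() call.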
2 changes: 2 additions & 0 deletions formidable/serializers/fields.py
@@ -41,6 +41,8 @@ def validate(self, validated_data):
order before the update/create method sorts the validated data
by id.
"""
validated_data = super(FieldListSerializer, self).validate(
validated_data)
for index, data in enumerate(validated_data):
data['order'] = index

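Same pattern in FieldListSerializer: the base-class check runs first (the key is presumably the field slug, which is what the new test_fields_slug test exercises), and only afterwards is the payload order stamped onto each entry. A sketch of the whole method after this change, with the docstring opening and the trailing return reconstructed by assumption since they sit outside the hunk:

def validate(self, validated_data):
    """
    Keep the position of each field in the payload as its explicit
    order before the update/create method sorts the validated data
    by id.
    """
    # New in this PR: reject payloads where two fields share the same key.
    validated_data = super(FieldListSerializer, self).validate(
        validated_data)
    for index, data in enumerate(validated_data):
        data['order'] = index
    return validated_data  # assumed, outside the hunk shown above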
16 changes: 16 additions & 0 deletions formidable/serializers/list.py
@@ -4,6 +4,7 @@

import logging

from rest_framework.exceptions import ValidationError
from rest_framework.serializers import ListSerializer

logger = logging.getLogger(__name__)
@@ -50,6 +51,21 @@ def _extract_id(self, qs, validated_data):
deleted_ids = db_ids - validated_ids
return created_ids, updated_ids, deleted_ids

def validate(self, data):
"""
ensure that field_id is unique among children
"""
data = super(NestedListSerializer, self).validate(data)

if self.field_id:
if len(data) != len(set(f[self.field_id] for f in data)):
msg = 'The fields {field_id} must make a unique set.'.format(
field_id=self.field_id
)
raise ValidationError(msg, code='unique')

return data


class NestedListSerializerDummyUpdate(NestedListSerializer):

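The heart of the PR is the validate() method added to NestedListSerializer above: when a subclass declares a field_id, the list is rejected as soon as two children share the same value for that key, via a plain length-versus-set comparison, and the error is raised with code='unique' and a message modelled on DRF's UniqueTogetherValidator ("The fields ... must make a unique set."). The rule in isolation, stripped of the DRF machinery (names below are illustrative):

# Standalone illustration of the unicity rule; not part of the diff.
def has_duplicate_keys(children, key):
    """Return True when at least two entries share the same value for key."""
    return len(children) != len(set(child[key] for child in children))


fields = [
    {'slug': 'first-name', 'label': 'First name'},
    {'slug': 'first-name', 'label': 'First name (copy)'},
]
assert has_duplicate_keys(fields, 'slug')          # duplicated slug -> HTTP 400
assert not has_duplicate_keys(fields[:1], 'slug')  # a single entry passes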