Merge pull request #2489 from loic/related_transactions

Fixed transaction handling for a number of operations on related objects.
commit 2f3e1fe3234f5ebaca7635b0a080c2a751c3c758 (2 parents: f356b6e + bc9be72)
@aaugustin authored
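
The commit applies one pattern throughout: resolve the write database via the router, then run every multi-query operation on related objects inside a single atomic block. A minimal sketch of that pattern, using a hypothetical set_related() helper rather than the actual descriptor code in the hunks below:

    from django.db import router, transaction

    def set_related(manager, value):
        # Hypothetical helper mirroring the descriptors' __set__ methods below.
        db = router.db_for_write(manager.model, instance=manager.instance)
        with transaction.atomic(using=db, savepoint=False):
            manager.clear()           # one or more DELETE/UPDATE queries
            for obj in value:
                manager.add(obj)      # one or more INSERT/UPDATE queries
        # If any add() fails, the clear() above is rolled back as well, so the
        # relation is never left half-updated (on transactional backends).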
25 django/contrib/contenttypes/fields.py
@@ -394,9 +394,12 @@ def __get__(self, instance, instance_type=None):
def __set__(self, instance, value):
manager = self.__get__(instance)
- manager.clear()
- for obj in value:
- manager.add(obj)
+
+ db = router.db_for_write(manager.model, instance=manager.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ manager.clear()
+ for obj in value:
+ manager.add(obj)
def create_generic_related_manager(superclass):
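
A note on the savepoint=False argument used in every new atomic block in this commit: these blocks routinely nest (the __set__ above opens one, then clear() and add() open their own), and savepoint=False makes a nested block join the outermost transaction instead of issuing SAVEPOINT/RELEASE queries for every call. A minimal sketch of that nesting (the 'default' alias is just an assumption):

    from django.db import transaction

    with transaction.atomic(using='default'):                        # outermost: BEGIN ... COMMIT
        with transaction.atomic(using='default', savepoint=False):   # nested: no SAVEPOINT issued
            pass  # runs inside the same transaction as the outer block
    # Trade-off: an exception leaving a savepoint=False block marks the whole
    # outer transaction for rollback, which is why some tests in this commit add
    # an extra plain atomic() around calls that are expected to fail.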
@@ -474,12 +477,14 @@ def get_prefetch_queryset(self, instances, queryset=None):
self.prefetch_cache_name)
def add(self, *objs):
- for obj in objs:
- if not isinstance(obj, self.model):
- raise TypeError("'%s' instance expected" % self.model._meta.object_name)
- setattr(obj, self.content_type_field_name, self.content_type)
- setattr(obj, self.object_id_field_name, self.pk_val)
- obj.save()
+ db = router.db_for_write(self.model, instance=self.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ for obj in objs:
+ if not isinstance(obj, self.model):
+ raise TypeError("'%s' instance expected" % self.model._meta.object_name)
+ setattr(obj, self.content_type_field_name, self.content_type)
+ setattr(obj, self.object_id_field_name, self.pk_val)
+ obj.save()
add.alters_data = True
def remove(self, *objs, **kwargs):
@@ -498,6 +503,8 @@ def _clear(self, queryset, bulk):
db = router.db_for_write(self.model, instance=self.instance)
queryset = queryset.using(db)
if bulk:
+ # `QuerySet.delete()` creates its own atomic block which
+ # contains the `pre_delete` and `post_delete` signal handlers.
queryset.delete()
else:
with transaction.atomic(using=db, savepoint=False):
146 django/db/models/fields/related.py
@@ -735,6 +735,7 @@ def _clear(self, queryset, bulk):
db = router.db_for_write(self.model, instance=self.instance)
queryset = queryset.using(db)
if bulk:
+ # `QuerySet.update()` is intrinsically atomic.
queryset.update(**{rel_field.name: None})
else:
with transaction.atomic(using=db, savepoint=False):
@@ -763,11 +764,14 @@ def __get__(self, instance, instance_type=None):
def __set__(self, instance, value):
manager = self.__get__(instance)
- # If the foreign key can support nulls, then completely clear the related set.
- # Otherwise, just move the named objects into the set.
- if self.related.field.null:
- manager.clear()
- manager.add(*value)
+
+ db = router.db_for_write(manager.model, instance=manager.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ # If the foreign key can support nulls, then completely clear the related set.
+ # Otherwise, just move the named objects into the set.
+ if self.related.field.null:
+ manager.clear()
+ manager.add(*value)
@cached_property
def related_manager_cls(self):
@@ -901,11 +905,14 @@ def add(self, *objs):
"Cannot use add() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
- self._add_items(self.source_field_name, self.target_field_name, *objs)
- # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
- if self.symmetrical:
- self._add_items(self.target_field_name, self.source_field_name, *objs)
+ db = router.db_for_write(self.through, instance=self.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ self._add_items(self.source_field_name, self.target_field_name, *objs)
+
+ # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
+ if self.symmetrical:
+ self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
@@ -920,17 +927,17 @@ def remove(self, *objs):
def clear(self):
db = router.db_for_write(self.through, instance=self.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ signals.m2m_changed.send(sender=self.through, action="pre_clear",
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=None, using=db)
- signals.m2m_changed.send(sender=self.through, action="pre_clear",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=None, using=db)
-
- filters = self._build_remove_filters(super(ManyRelatedManager, self).get_queryset().using(db))
- self.through._default_manager.using(db).filter(filters).delete()
+ filters = self._build_remove_filters(super(ManyRelatedManager, self).get_queryset().using(db))
+ self.through._default_manager.using(db).filter(filters).delete()
- signals.m2m_changed.send(sender=self.through, action="post_clear",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=None, using=db)
+ signals.m2m_changed.send(sender=self.through, action="post_clear",
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=None, using=db)
clear.alters_data = True
def create(self, **kwargs):
@@ -990,35 +997,39 @@ def _add_items(self, source_field_name, target_field_name, *objs):
)
else:
new_ids.add(obj)
+
db = router.db_for_write(self.through, instance=self.instance)
- vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
- vals = vals.filter(**{
- source_field_name: self.related_val[0],
- '%s__in' % target_field_name: new_ids,
- })
+ vals = (self.through._default_manager.using(db)
+ .values_list(target_field_name, flat=True)
+ .filter(**{
+ source_field_name: self.related_val[0],
+ '%s__in' % target_field_name: new_ids,
+ }))
new_ids = new_ids - set(vals)
- if self.reverse or source_field_name == self.source_field_name:
- # Don't send the signal when we are inserting the
- # duplicate data row for symmetrical reverse entries.
- signals.m2m_changed.send(sender=self.through, action='pre_add',
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=new_ids, using=db)
- # Add the ones that aren't there already
- self.through._default_manager.using(db).bulk_create([
- self.through(**{
- '%s_id' % source_field_name: self.related_val[0],
- '%s_id' % target_field_name: obj_id,
- })
- for obj_id in new_ids
- ])
-
- if self.reverse or source_field_name == self.source_field_name:
- # Don't send the signal when we are inserting the
- # duplicate data row for symmetrical reverse entries.
- signals.m2m_changed.send(sender=self.through, action='post_add',
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=new_ids, using=db)
+ with transaction.atomic(using=db, savepoint=False):
+ if self.reverse or source_field_name == self.source_field_name:
+ # Don't send the signal when we are inserting the
+ # duplicate data row for symmetrical reverse entries.
+ signals.m2m_changed.send(sender=self.through, action='pre_add',
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=new_ids, using=db)
+
+ # Add the ones that aren't there already
+ self.through._default_manager.using(db).bulk_create([
+ self.through(**{
+ '%s_id' % source_field_name: self.related_val[0],
+ '%s_id' % target_field_name: obj_id,
+ })
+ for obj_id in new_ids
+ ])
+
+ if self.reverse or source_field_name == self.source_field_name:
+ # Don't send the signal when we are inserting the
+ # duplicate data row for symmetrical reverse entries.
+ signals.m2m_changed.send(sender=self.through, action='post_add',
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK colname in join table for the source object
@@ -1037,23 +1048,23 @@ def _remove_items(self, source_field_name, target_field_name, *objs):
old_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ # Send a signal to the other end if need be.
+ signals.m2m_changed.send(sender=self.through, action="pre_remove",
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=old_ids, using=db)
+ target_model_qs = super(ManyRelatedManager, self).get_queryset()
+ if target_model_qs._has_filters():
+ old_vals = target_model_qs.using(db).filter(**{
+ '%s__in' % self.target_field.related_field.attname: old_ids})
+ else:
+ old_vals = old_ids
+ filters = self._build_remove_filters(old_vals)
+ self.through._default_manager.using(db).filter(filters).delete()
- # Send a signal to the other end if need be.
- signals.m2m_changed.send(sender=self.through, action="pre_remove",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=old_ids, using=db)
- target_model_qs = super(ManyRelatedManager, self).get_queryset()
- if target_model_qs._has_filters():
- old_vals = target_model_qs.using(db).filter(**{
- '%s__in' % self.target_field.related_field.attname: old_ids})
- else:
- old_vals = old_ids
- filters = self._build_remove_filters(old_vals)
- self.through._default_manager.using(db).filter(filters).delete()
-
- signals.m2m_changed.send(sender=self.through, action="post_remove",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=old_ids, using=db)
+ signals.m2m_changed.send(sender=self.through, action="post_remove",
+ instance=self.instance, reverse=self.reverse,
+ model=self.model, pk_set=old_ids, using=db)
return ManyRelatedManager
@@ -1103,8 +1114,11 @@ def __set__(self, instance, value):
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
- manager.clear()
- manager.add(*value)
+
+ db = router.db_for_write(manager.through, instance=manager.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ manager.clear()
+ manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
@@ -1157,11 +1171,15 @@ def __set__(self, instance, value):
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
+
# clear() can change expected output of 'value' queryset, we force evaluation
# of queryset before clear; ticket #19816
value = tuple(value)
- manager.clear()
- manager.add(*value)
+
+ db = router.db_for_write(manager.through, instance=manager.instance)
+ with transaction.atomic(using=db, savepoint=False):
+ manager.clear()
+ manager.add(*value)
class ForeignObjectRel(object):
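
Every hunk above opens its atomic block on the connection returned by router.db_for_write(). A minimal sketch of a router that would answer those calls, assuming a hypothetical 'default'/'other' two-alias setup like the one in the multiple_database tests below:

    class TwoDatabaseRouter(object):
        # Hypothetical router, shown only to make the repeated
        # router.db_for_write(...) calls above concrete: the new atomic blocks
        # are opened on whichever alias the installed routers pick for writes.
        def db_for_read(self, model, **hints):
            return 'other'

        def db_for_write(self, model, **hints):
            # hints may include instance=<the related manager's instance>
            return 'default'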
14 docs/releases/1.8.txt
@@ -168,7 +168,19 @@ Backwards incompatible changes in 1.8
deprecation timeline for a given feature, its removal may appear as a
backwards incompatible change.
-...
+* Some operations on related objects such as
+ :meth:`~django.db.models.fields.related.RelatedManager.add()` or
+ :ref:`direct assignment<direct-assignment>` ran multiple data-modifying
+ queries without wrapping them in transactions. To reduce the risk of data
+ corruption, all data-modifying methods that affect multiple related objects
+ (i.e. ``add()``, ``remove()``, ``clear()``, and
+ :ref:`direct assignment<direct-assignment>`) now perform their data-modifying
+ queries from within a transaction, provided your database supports
+ transactions.
+
+ This has one backwards incompatible side effect: signal handlers triggered
+ from these methods are now executed within the method's transaction, and
+ any exception raised in a signal handler will abort the whole operation.
Miscellaneous
~~~~~~~~~~~~~
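
A minimal sketch of the backwards incompatible side effect described in the release-note hunk above, using a hypothetical m2m_changed handler and models (none of this is part of the commit):

    from django.db.models.signals import m2m_changed
    from django.dispatch import receiver

    @receiver(m2m_changed)  # assumes e.g. an Article.publications ManyToManyField
    def reject_large_sets(sender, action, pk_set, **kwargs):
        # Hypothetical handler, not part of this commit.
        if action == 'pre_add' and pk_set and len(pk_set) > 10:
            raise ValueError("refusing to add more than 10 related objects")

    # article.publications = many_publications
    # The ValueError now propagates out of the assignment's atomic block, so the
    # clear() that ran before this handler is rolled back and the article keeps
    # its original publications; previously the relation could be left emptied.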
5 tests/many_to_many/tests.py
@@ -1,5 +1,6 @@
from __future__ import unicode_literals
+from django.db import transaction
from django.test import TestCase
from django.utils import six
@@ -54,7 +55,9 @@ def test_add(self):
# Adding an object of the wrong type raises TypeError
with six.assertRaisesRegex(self, TypeError, "'Publication' instance expected, got <Article.*"):
- a6.publications.add(a5)
+ with transaction.atomic():
+ a6.publications.add(a5)
+
# Add a Publication directly via publications.add by using keyword arguments.
a6.publications.create(title='Highlights for Adults')
self.assertQuerysetEqual(a6.publications.all(),
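
The extra transaction.atomic() around the failing add() above is needed because add() now opens an atomic(savepoint=False) block: the TypeError raised inside it marks the test's enclosing transaction (TestCase wraps each test in one) as needing rollback. Catching the error inside a plain nested atomic() rolls back only to a savepoint and keeps the rest of the test usable. A rough sketch of the same pattern outside the test suite, with hypothetical names:

    from django.db import transaction

    try:
        with transaction.atomic():                       # provides a savepoint to recover to
            article.publications.add(not_a_publication)  # hypothetical names; raises TypeError
    except TypeError:
        pass  # the enclosing transaction remains usable afterwards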
86 tests/multiple_database/tests.py
@@ -289,39 +289,29 @@ def test_m2m_cross_database_protection(self):
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key set with an object from a different database
- try:
- marty.book_set = [pro, dive]
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='default'):
+ marty.edited = [pro, dive]
# Add to an m2m with an object from a different database
- try:
- marty.book_set.add(dive)
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='default'):
+ marty.book_set.add(dive)
# Set a m2m with an object from a different database
- try:
- marty.book_set = [pro, dive]
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='default'):
+ marty.book_set = [pro, dive]
# Add to a reverse m2m with an object from a different database
- try:
- dive.authors.add(marty)
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='other'):
+ dive.authors.add(marty)
# Set a reverse m2m with an object from a different database
- try:
- dive.authors = [mark, marty]
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='other'):
+ dive.authors = [mark, marty]
def test_m2m_deletion(self):
"Cascaded deletions of m2m relations issue queries on the right database"
@@ -489,28 +479,18 @@ def test_foreign_key_cross_database_protection(self):
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key with an object from a different database
- try:
- with transaction.atomic(using='default'):
- dive.editor = marty
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ dive.editor = marty
# Set a foreign key set with an object from a different database
- try:
+ with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.edited = [pro, dive]
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
# Add to a foreign key set with an object from a different database
- try:
+ with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.edited.add(dive)
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
@@ -634,11 +614,8 @@ def test_o2o_cross_database_protection(self):
# Set a one-to-one relation with an object from a different database
alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
- try:
+ with self.assertRaises(ValueError):
bob.userprofile = alice_profile
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
@@ -784,18 +761,13 @@ def test_generic_key_cross_database_protection(self):
Review.objects.using('other').create(source="Python Weekly", content_object=dive)
# Set a foreign key with an object from a different database
- try:
+ with self.assertRaises(ValueError):
review1.content_object = dive
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
# Add to a foreign key set with an object from a different database
- try:
- dive.reviews.add(review1)
- self.fail("Shouldn't be able to assign across databases")
- except ValueError:
- pass
+ with self.assertRaises(ValueError):
+ with transaction.atomic(using='other'):
+ dive.reviews.add(review1)
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
@@ -892,12 +864,9 @@ def test_subquery(self):
self.assertRaises(ValueError, str, qs.query)
# Evaluating the query shouldn't work, either
- try:
+ with self.assertRaises(ValueError):
for obj in qs:
pass
- self.fail('Iterating over query should raise ValueError')
- except ValueError:
- pass
def test_related_manager(self):
"Related managers return managers, not querysets"
@@ -1041,12 +1010,9 @@ def test_database_routing(self):
# An update query will be routed to the default database
Book.objects.filter(title='Pro Django').update(pages=200)
- try:
+ with self.assertRaises(Book.DoesNotExist):
# By default, the get query will be directed to 'other'
Book.objects.get(title='Pro Django')
- self.fail("Shouldn't be able to find the book")
- except Book.DoesNotExist:
- pass
# But the same query issued explicitly at a database will work.
pro = Book.objects.using('default').get(title='Pro Django')