Simplified QuerySet field.null handling

QuerySet previously had some complex logic for dealing with nullable
fields in negated add_filter() calls. The logic seems to be left over
from a time when the WhereNode wasn't as intelligent about handling
field__in=[] conditions.

Thanks to aaugustin for comments on the patch.
commit 5aa51fa99976ad6133dc6dbb226ecba2c65e6be2 (1 parent: 9350d1d)
Authored by akaariai
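
At the ORM level, the behavior this change has to preserve is Python's view of None in negated lookups: excluding a value should still return rows whose nullable column is NULL. A minimal sketch, assuming a Django shell with the NullableName test model added further below (illustrative only, not part of the commit):

    NullableName.objects.create(name='i1')
    NullableName.objects.create()  # name is left as NULL

    # Raw SQL "name NOT IN ('i3')" would drop the NULL row, because
    # NULL NOT IN (...) evaluates to UNKNOWN. The ORM instead mirrors
    # Python semantics (None != 'i3'), so both rows come back.
    NullableName.objects.exclude(name__in=['i3']).values_list('name', flat=True)
    # -> ['i1', None]  (or ['i1', ''] on backends that store '' as NULL)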
django/db/models/sql/query.py (17 changed lines)
@@ -1193,14 +1193,15 @@ def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
entry.negate()
self.where.add(entry, AND)
break
- if not (lookup_type == 'in'
- and not hasattr(value, 'as_sql')
- and not hasattr(value, '_as_sql')
- and not value) and field.null:
- # Leaky abstraction artifact: We have to specifically
- # exclude the "foo__in=[]" case from this handling, because
- # it's short-circuited in the Where class.
- # We also need to handle the case where a subquery is provided
+ if field.null:
+ # In SQL, NULL = anyvalue returns UNKNOWN, and NOT UNKNOWN
+ # is still UNKNOWN. In Python, however, None == anyvalue is False
+ # (and not False is True), and we want to reflect Python's
+ # view of None handling here. So we need to specifically exclude
+ # NULL values; because we are inside the NOT branch, they will
+ # be included in the final resultset. We are essentially creating
+ # SQL like this here: NOT (col IS NOT NULL), where the first NOT
+ # is added in upper layers of the code.
self.where.add((Constraint(alias, col, None), 'isnull', False), AND)
if can_reuse is not None:
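
To make the comment above concrete, here is a small, self-contained sketch (not from the commit) of SQL's three-valued logic versus Python's two-valued logic; None stands in for SQL NULL/UNKNOWN:

    NULL = None  # stand-in for SQL NULL / UNKNOWN

    def sql_eq(a, b):
        # Any SQL comparison involving NULL yields UNKNOWN (None here).
        if a is NULL or b is NULL:
            return NULL
        return a == b

    def sql_not(x):
        # NOT UNKNOWN is still UNKNOWN, so a NULL row never matches,
        # even after negation -- hence the explicit IS NOT NULL constraint.
        return NULL if x is NULL else (not x)

    assert sql_not(sql_eq(None, 'i1')) is None   # SQL: the NULL row is filtered out
    assert (not (None == 'i1')) is True          # Python: None != 'i1' holds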
tests/regressiontests/queries/models.py (6 changed lines)
@@ -346,3 +346,9 @@ class OneToOneCategory(models.Model):
def __unicode__(self):
return "one2one " + self.new_name
+
+class NullableName(models.Model):
+ name = models.CharField(max_length=20, null=True)
+
+ class Meta:
+ ordering = ['id']
tests/regressiontests/queries/tests.py (38 changed lines)
@@ -1,6 +1,7 @@
from __future__ import absolute_import
import datetime
+from operator import attrgetter
import pickle
import sys
@@ -18,7 +19,7 @@
ManagedModel, Member, NamedCategory, Note, Number, Plaything, PointerA,
Ranking, Related, Report, ReservedName, Tag, TvChef, Valid, X, Food, Eaten,
Node, ObjectA, ObjectB, ObjectC, CategoryItem, SimpleCategory,
- SpecialCategory, OneToOneCategory)
+ SpecialCategory, OneToOneCategory, NullableName)
class BaseQuerysetTest(TestCase):
@@ -1894,3 +1895,38 @@ def test_no_extra_params(self):
DumbCategory.objects.create()
except TypeError:
self.fail("Creation of an instance of a model with only the PK field shouldn't error out after bulk insert refactoring (#17056)")
+
+class NullInExcludeTest(TestCase):
+ def setUp(self):
+ NullableName.objects.create(name='i1')
+ NullableName.objects.create()
+
+ def test_null_in_exclude_qs(self):
+ none_val = '' if connection.features.interprets_empty_strings_as_nulls else None
+ self.assertQuerysetEqual(
+ NullableName.objects.exclude(name__in=[]),
+ ['i1', none_val], attrgetter('name'))
+ self.assertQuerysetEqual(
+ NullableName.objects.exclude(name__in=['i1']),
+ [none_val], attrgetter('name'))
+ self.assertQuerysetEqual(
+ NullableName.objects.exclude(name__in=['i3']),
+ ['i1', none_val], attrgetter('name'))
+ inner_qs = NullableName.objects.filter(name='i1').values_list('name')
+ self.assertQuerysetEqual(
+ NullableName.objects.exclude(name__in=inner_qs),
+ [none_val], attrgetter('name'))
+ # Check that the inner queryset wasn't executed - it should be turned
+ # into a subquery above
+ self.assertIs(inner_qs._result_cache, None)
+
+ @unittest.expectedFailure
+ def test_col_not_in_list_containing_null(self):
+ """
+ The following case is not handled properly because
+ SQL's COL NOT IN (list containing null) handling is too weird to
+ abstract away.
+ """
+ self.assertQuerysetEqual(
+ NullableName.objects.exclude(name__in=[None]),
+ ['i1'], attrgetter('name'))
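
For the expected failure above, the underlying quirk is that SQL "col NOT IN (list containing NULL)" can never be TRUE, only FALSE or UNKNOWN, so no row survives the filter. A rough, self-contained sketch of that (again using None as SQL UNKNOWN; not part of the commit):

    def sql_eq(a, b):
        return None if a is None or b is None else a == b

    def sql_or(a, b):
        if a is True or b is True:
            return True
        return None if a is None or b is None else False

    def sql_not(x):
        return None if x is None else not x

    def sql_not_in(x, values):
        # col NOT IN (v1, v2, ...) is NOT (col = v1 OR col = v2 OR ...)
        matched = False
        for v in values:
            matched = sql_or(matched, sql_eq(x, v))
        return sql_not(matched)

    assert sql_not_in('i1', [None]) is None   # UNKNOWN: even 'i1' is excluded
    assert sql_not_in(None, [None]) is None   # UNKNOWN: the NULL row as well

In Python, by contrast, 'i1' not in [None] is simply True, which is why this case is too awkward to abstract away and is marked as an expected failure.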