Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Fixed #19087 -- Ensured query's base table is never LOUTER joined

This fixes a regression created by join promotion logic refactoring:
01b9c3d

Thanks to Ivan Virabyan for the report.
  • Loading branch information...
commit a62d53c03252bdf82b21b64874efe053160cbdb7 1 parent 4797ad8
@akaariai akaariai authored
View
8 django/db/models/sql/query.py
@@ -702,6 +702,11 @@ def promote_joins(self, aliases, unconditional=False):
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
+ if self.alias_map[alias].rhs_join_col is None:
+ # This is the base table (first FROM entry) - this table
+ # isn't really joined at all in the query, so we should not
+ # alter its join type.
+ continue
parent_alias = self.alias_map[alias].lhs_alias
parent_louter = (parent_alias
and self.alias_map[parent_alias].join_type == self.LOUTER)
@@ -1188,6 +1193,9 @@ def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
for alias in join_list:
if self.alias_map[alias].join_type == self.LOUTER:
j_col = self.alias_map[alias].rhs_join_col
+ # The join promotion logic should never produce
+ # a LOUTER join for the base join - assert that.
+ assert j_col is not None
entry = self.where_class()
entry.add(
(Constraint(alias, j_col, None), 'isnull', True),
View
11 tests/regressiontests/aggregation_regress/tests.py
@@ -878,3 +878,14 @@ def test_type_conversion(self):
connection.ops.convert_values(testData, testField),
testData
)
+
+ def test_annotate_joins(self):
+ """
+ Test that the base table's join isn't promoted to LOUTER. Such a
+ promotion could cause query generation to fail if the query also
+ contained an exclude() on a foreign-key field. Refs #19087.
+ """
+ qs = Book.objects.annotate(n=Count('pk'))
+ self.assertIs(qs.query.alias_map['aggregation_regress_book'].join_type, None)
+ # Check that the query executes without problems.
+ self.assertEqual(len(qs.exclude(publisher=-1)), 6)
Please sign in to comment.
Something went wrong with that request. Please try again.