Skip to content
This repository

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse code

Removed dupe_avoidance from sql/query and sql/compiler.py

The dupe avoidance logic was removed as it doesn't seem to do anything,
it is complicated, and it has nearly zero documentation.

The removal of dupe_avoidance allowed for refactoring of both the
implementation and signature of Query.join(). This refactoring cascades
again to some other parts. The most significant of them is the changes
in qs.combine(), and compiler.select_related_descend().
  • Loading branch information...
commit 68847135bc9acb2c51c2d36797d0a85395f0cd35 1 parent 9bf0eed
Anssi Kääriäinen authored August 10, 2012
11  django/db/models/fields/related.py
@@ -1053,11 +1053,6 @@ def value_to_string(self, obj):
@@ -1293,12 +1288,6 @@ def resolve_through_model(field, model, cls):
19  django/db/models/options.py
@@ -58,7 +58,6 @@ def __init__(self, meta, app_label=None):
58 58
         self.concrete_model = None
59 59
         self.swappable = None
60 60
         self.parents = SortedDict()
61  
-        self.duplicate_targets = {}
62 61
         self.auto_created = False
63 62
 
64 63
         # To handle various inheritance situations, we need to track where
@@ -147,24 +146,6 @@ def _prepare(self, model):
147 146
                         auto_created=True)
148 147
                 model.add_to_class('id', auto)
149 148
 
150  
-        # Determine any sets of fields that are pointing to the same targets
151  
-        # (e.g. two ForeignKeys to the same remote model). The query
152  
-        # construction code needs to know this. At the end of this,
153  
-        # self.duplicate_targets will map each duplicate field column to the
154  
-        # columns it duplicates.
155  
-        collections = {}
156  
-        for column, target in six.iteritems(self.duplicate_targets):
157  
-            try:
158  
-                collections[target].add(column)
159  
-            except KeyError:
160  
-                collections[target] = set([column])
161  
-        self.duplicate_targets = {}
162  
-        for elt in six.itervalues(collections):
163  
-            if len(elt) == 1:
164  
-                continue
165  
-            for column in elt:
166  
-                self.duplicate_targets[column] = elt.difference(set([column]))
167  
-
168 149
     def add_field(self, field):
169 150
         # Insert the given field in the order in which it was created, using
170 151
         # the "creation_counter" attribute of the field.
68  django/db/models/sql/compiler.py
@@ -6,7 +6,7 @@
6 6
 from django.db.models.constants import LOOKUP_SEP
7 7
 from django.db.models.query_utils import select_related_descend
8 8
 from django.db.models.sql.constants import (SINGLE, MULTI, ORDER_DIR,
9  
-        GET_ITERATOR_CHUNK_SIZE, SelectInfo)
  9
+        GET_ITERATOR_CHUNK_SIZE, REUSE_ALL, SelectInfo)
10 10
 from django.db.models.sql.datastructures import EmptyResultSet
11 11
 from django.db.models.sql.expressions import SQLEvaluator
12 12
 from django.db.models.sql.query import get_order_dir, Query
@@ -457,7 +457,7 @@ def _setup_joins(self, pieces, opts, alias):
457 457
         if not alias:
458 458
             alias = self.query.get_initial_alias()
459 459
         field, target, opts, joins, _, _ = self.query.setup_joins(pieces,
460  
-                opts, alias, False)
  460
+                opts, alias, REUSE_ALL)
461 461
         # We will later on need to promote those joins that were added to the
462 462
         # query afresh above.
463 463
         joins_to_promote = [j for j in joins if self.query.alias_refcount[j] < 2]
@@ -574,8 +574,7 @@ def get_grouping(self):
574 574
         return result, params
575 575
 
576 576
     def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
577  
-            used=None, requested=None, restricted=None, nullable=None,
578  
-            dupe_set=None, avoid_set=None):
  577
+            requested=None, restricted=None, nullable=None):
579 578
         """
580 579
         Fill in the information needed for a select_related query. The current
581 580
         depth is measured as the number of connections away from the root model
@@ -590,13 +589,6 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
590 589
             opts = self.query.get_meta()
591 590
             root_alias = self.query.get_initial_alias()
592 591
             self.query.related_select_cols = []
593  
-        if not used:
594  
-            used = set()
595  
-        if dupe_set is None:
596  
-            dupe_set = set()
597  
-        if avoid_set is None:
598  
-            avoid_set = set()
599  
-        orig_dupe_set = dupe_set
600 592
         only_load = self.query.get_loaded_field_names()
601 593
 
602 594
         # Setup for the case when only particular related fields should be
@@ -616,12 +608,6 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
616 608
             if not select_related_descend(f, restricted, requested,
617 609
                                           only_load.get(field_model)):
618 610
                 continue
619  
-            # The "avoid" set is aliases we want to avoid just for this
620  
-            # particular branch of the recursion. They aren't permanently
621  
-            # forbidden from reuse in the related selection tables (which is
622  
-            # what "used" specifies).
623  
-            avoid = avoid_set.copy()
624  
-            dupe_set = orig_dupe_set.copy()
625 611
             table = f.rel.to._meta.db_table
626 612
             promote = nullable or f.null
627 613
             if model:
@@ -637,31 +623,17 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
637 623
                         int_opts = int_model._meta
638 624
                         continue
639 625
                     lhs_col = int_opts.parents[int_model].column
640  
-                    dedupe = lhs_col in opts.duplicate_targets
641  
-                    if dedupe:
642  
-                        avoid.update(self.query.dupe_avoidance.get((id(opts), lhs_col),
643  
-                                ()))
644  
-                        dupe_set.add((opts, lhs_col))
645 626
                     int_opts = int_model._meta
646 627
                     alias = self.query.join((alias, int_opts.db_table, lhs_col,
647  
-                            int_opts.pk.column), exclusions=used,
  628
+                            int_opts.pk.column),
648 629
                             promote=promote)
649 630
                     alias_chain.append(alias)
650  
-                    for (dupe_opts, dupe_col) in dupe_set:
651  
-                        self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
652 631
             else:
653 632
                 alias = root_alias
654 633
 
655  
-            dedupe = f.column in opts.duplicate_targets
656  
-            if dupe_set or dedupe:
657  
-                avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ()))
658  
-                if dedupe:
659  
-                    dupe_set.add((opts, f.column))
660  
-
661 634
             alias = self.query.join((alias, table, f.column,
662 635
                     f.rel.get_related_field().column),
663  
-                    exclusions=used.union(avoid), promote=promote)
664  
-            used.add(alias)
  636
+                    promote=promote)
665 637
             columns, aliases = self.get_default_columns(start_alias=alias,
666 638
                     opts=f.rel.to._meta, as_pairs=True)
667 639
             self.query.related_select_cols.extend(
@@ -671,10 +643,8 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
671 643
             else:
672 644
                 next = False
673 645
             new_nullable = f.null or promote
674  
-            for dupe_opts, dupe_col in dupe_set:
675  
-                self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
676 646
             self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1,
677  
-                    used, next, restricted, new_nullable, dupe_set, avoid)
  647
+                    next, restricted, new_nullable)
678 648
 
679 649
         if restricted:
680 650
             related_fields = [
@@ -686,14 +656,8 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
686 656
                 if not select_related_descend(f, restricted, requested,
687 657
                                               only_load.get(model), reverse=True):
688 658
                     continue
689  
-                # The "avoid" set is aliases we want to avoid just for this
690  
-                # particular branch of the recursion. They aren't permanently
691  
-                # forbidden from reuse in the related selection tables (which is
692  
-                # what "used" specifies).
693  
-                avoid = avoid_set.copy()
694  
-                dupe_set = orig_dupe_set.copy()
695  
-                table = model._meta.db_table
696 659
 
  660
+                table = model._meta.db_table
697 661
                 int_opts = opts
698 662
                 alias = root_alias
699 663
                 alias_chain = []
@@ -708,30 +672,16 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
708 672
                             int_opts = int_model._meta
709 673
                             continue
710 674
                         lhs_col = int_opts.parents[int_model].column
711  
-                        dedupe = lhs_col in opts.duplicate_targets
712  
-                        if dedupe:
713  
-                            avoid.update((self.query.dupe_avoidance.get(id(opts), lhs_col),
714  
-                                ()))
715  
-                            dupe_set.add((opts, lhs_col))
716 675
                         int_opts = int_model._meta
717 676
                         alias = self.query.join(
718 677
                             (alias, int_opts.db_table, lhs_col, int_opts.pk.column),
719  
-                            exclusions=used, promote=True, reuse=used
  678
+                            promote=True,
720 679
                         )
721 680
                         alias_chain.append(alias)
722  
-                        for dupe_opts, dupe_col in dupe_set:
723  
-                            self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
724  
-                    dedupe = f.column in opts.duplicate_targets
725  
-                    if dupe_set or dedupe:
726  
-                        avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ()))
727  
-                        if dedupe:
728  
-                            dupe_set.add((opts, f.column))
729 681
                 alias = self.query.join(
730 682
                     (alias, table, f.rel.get_related_field().column, f.column),
731  
-                    exclusions=used.union(avoid),
732 683
                     promote=True
733 684
                 )
734  
-                used.add(alias)
735 685
                 columns, aliases = self.get_default_columns(start_alias=alias,
736 686
                     opts=model._meta, as_pairs=True, local_only=True)
737 687
                 self.query.related_select_cols.extend(
@@ -743,7 +693,7 @@ def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
743 693
                 new_nullable = True
744 694
 
745 695
                 self.fill_related_selections(model._meta, table, cur_depth+1,
746  
-                    used, next, restricted, new_nullable)
  696
+                    next, restricted, new_nullable)
747 697
 
748 698
     def deferred_to_columns(self):
749 699
         """
3  django/db/models/sql/constants.py
@@ -37,3 +37,6 @@
37 37
     'ASC': ('ASC', 'DESC'),
38 38
     'DESC': ('DESC', 'ASC'),
39 39
 }
  40
+
  41
+# A marker for join-reusability.
  42
+REUSE_ALL = object()
196  django/db/models/sql/query.py
@@ -20,7 +20,7 @@
20 20
 from django.db.models.fields import FieldDoesNotExist
21 21
 from django.db.models.sql import aggregates as base_aggregates_module
22 22
 from django.db.models.sql.constants import (QUERY_TERMS, ORDER_DIR, SINGLE,
23  
-        ORDER_PATTERN, JoinInfo, SelectInfo)
  23
+        ORDER_PATTERN, REUSE_ALL, JoinInfo, SelectInfo)
24 24
 from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin
25 25
 from django.db.models.sql.expressions import SQLEvaluator
26 26
 from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode,
@@ -115,7 +115,6 @@ def __init__(self, model, where=WhereNode):
115 115
         self.default_ordering = True
116 116
         self.standard_ordering = True
117 117
         self.ordering_aliases = []
118  
-        self.dupe_avoidance = {}
119 118
         self.used_aliases = set()
120 119
         self.filter_is_sticky = False
121 120
         self.included_inherited_models = {}
@@ -257,7 +256,6 @@ def clone(self, klass=None, memo=None, **kwargs):
257 256
         obj.standard_ordering = self.standard_ordering
258 257
         obj.included_inherited_models = self.included_inherited_models.copy()
259 258
         obj.ordering_aliases = []
260  
-        obj.dupe_avoidance = self.dupe_avoidance.copy()
261 259
         obj.select = self.select[:]
262 260
         obj.related_select_cols = []
263 261
         obj.tables = self.tables[:]
@@ -460,24 +458,42 @@ def combine(self, rhs, connector):
460 458
         self.remove_inherited_models()
461 459
         # Work out how to relabel the rhs aliases, if necessary.
462 460
         change_map = {}
463  
-        used = set()
464 461
         conjunction = (connector == AND)
465  
-        # Add the joins in the rhs query into the new query.
466  
-        first = True
467  
-        for alias in rhs.tables:
  462
+
  463
+        # Determine which existing joins can be reused. When combining the
  464
+        # query with AND we must recreate all joins for m2m filters. When
  465
+        # combining with OR we can reuse joins. The reason is that in AND
  466
+        # case a single row can't fulfill a condition like:
  467
+        #     revrel__col=1 & revrel__col=2
  468
+        # But, there might be two different related rows matching this
  469
+        # condition. In OR case a single True is enough, so single row is
  470
+        # enough, too.
  471
+        #
  472
+        # Note that we will be creating duplicate joins for non-m2m joins in
  473
+        # the AND case. The results will be correct but this creates too many
  474
+        # joins. This is something that could be fixed later on.
  475
+        reuse = set() if conjunction else set(self.tables)
  476
+        # Base table must be present in the query - this is the same
  477
+        # table on both sides.
  478
+        self.get_initial_alias()
  479
+        # Now, add the joins from rhs query into the new query (skipping base
  480
+        # table).
  481
+        for alias in rhs.tables[1:]:
468 482
             if not rhs.alias_refcount[alias]:
469  
-                # An unused alias.
470 483
                 continue
471  
-            table, _, join_type, lhs, lhs_col, col, _ = rhs.alias_map[alias]
472  
-            promote = join_type == self.LOUTER
  484
+            table, _, join_type, lhs, lhs_col, col, nullable = rhs.alias_map[alias]
  485
+            promote = (join_type == self.LOUTER)
473 486
             # If the left side of the join was already relabeled, use the
474 487
             # updated alias.
475 488
             lhs = change_map.get(lhs, lhs)
476  
-            new_alias = self.join((lhs, table, lhs_col, col),
477  
-                    conjunction and not first, used, promote, not conjunction)
478  
-            used.add(new_alias)
  489
+            new_alias = self.join(
  490
+                (lhs, table, lhs_col, col), reuse=reuse, promote=promote,
  491
+                outer_if_first=not conjunction, nullable=nullable)
  492
+            # We can't reuse the same join again in the query. If we have two
  493
+            # distinct joins for the same connection in rhs query, then the
  494
+            # combined query must have two joins, too.
  495
+            reuse.discard(new_alias)
479 496
             change_map[alias] = new_alias
480  
-            first = False
481 497
 
482 498
         # So that we don't exclude valid results in an "or" query combination,
483 499
         # all joins exclusive to either the lhs or the rhs must be converted
@@ -767,9 +783,11 @@ def relabel_column(col):
767 783
             (key, relabel_column(col)) for key, col in self.aggregates.items())
768 784
 
769 785
         # 2. Rename the alias in the internal table/alias datastructures.
770  
-        for k, aliases in self.join_map.items():
  786
+        for ident, aliases in self.join_map.items():
  787
+            del self.join_map[ident]
771 788
             aliases = tuple([change_map.get(a, a) for a in aliases])
772  
-            self.join_map[k] = aliases
  789
+            ident = (change_map.get(ident[0], ident[0]),) + ident[1:]
  790
+            self.join_map[ident] = aliases
773 791
         for old_alias, new_alias in six.iteritems(change_map):
774 792
             alias_data = self.alias_map[old_alias]
775 793
             alias_data = alias_data._replace(rhs_alias=new_alias)
@@ -844,8 +862,8 @@ def count_active_tables(self):
844 862
         """
845 863
         return len([1 for count in six.itervalues(self.alias_refcount) if count])
846 864
 
847  
-    def join(self, connection, always_create=False, exclusions=(),
848  
-            promote=False, outer_if_first=False, nullable=False, reuse=None):
  865
+    def join(self, connection, reuse=REUSE_ALL, promote=False,
  866
+             outer_if_first=False, nullable=False):
849 867
         """
850 868
         Returns an alias for the join in 'connection', either reusing an
851 869
         existing alias for that join or creating a new one. 'connection' is a
@@ -855,56 +873,40 @@ def join(self, connection, always_create=False, exclusions=(),
855 873
 
856 874
             lhs.lhs_col = table.col
857 875
 
858  
-        If 'always_create' is True and 'reuse' is None, a new alias is always
859  
-        created, regardless of whether one already exists or not. If
860  
-        'always_create' is True and 'reuse' is a set, an alias in 'reuse' that
861  
-        matches the connection will be returned, if possible.  If
862  
-        'always_create' is False, the first existing alias that matches the
863  
-        'connection' is returned, if any. Otherwise a new join is created.
864  
-
865  
-        If 'exclusions' is specified, it is something satisfying the container
866  
-        protocol ("foo in exclusions" must work) and specifies a list of
867  
-        aliases that should not be returned, even if they satisfy the join.
  876
+        The 'reuse' parameter can be used in three ways: it can be REUSE_ALL
  877
+        which means all joins (matching the connection) are reusable, it can
  878
+        be a set containing the aliases that can be reused, or it can be None
  879
+        which means a new join is always created.
868 880
 
869 881
         If 'promote' is True, the join type for the alias will be LOUTER (if
870 882
         the alias previously existed, the join type will be promoted from INNER
871 883
         to LOUTER, if necessary).
872 884
 
873 885
         If 'outer_if_first' is True and a new join is created, it will have the
874  
-        LOUTER join type. This is used when joining certain types of querysets
875  
-        and Q-objects together.
  886
+        LOUTER join type. Used for example when adding ORed filters, where we
  887
+        want to use LOUTER joins except if some other join already restricts
  888
+        the join to INNER join.
876 889
 
877 890
         A join is always created as LOUTER if the lhs alias is LOUTER to make
878  
-        sure we do not generate chains like a LOUTER b INNER c.
  891
+        sure we do not generate chains like t1 LOUTER t2 INNER t3.
879 892
 
880 893
         If 'nullable' is True, the join can potentially involve NULL values and
881 894
         is a candidate for promotion (to "left outer") when combining querysets.
882 895
         """
883 896
         lhs, table, lhs_col, col = connection
884  
-        if lhs in self.alias_map:
885  
-            lhs_table = self.alias_map[lhs].table_name
  897
+        existing = self.join_map.get(connection, ())
  898
+        if reuse == REUSE_ALL:
  899
+            reuse = existing
  900
+        elif reuse is None:
  901
+            reuse = set()
886 902
         else:
887  
-            lhs_table = lhs
888  
-
889  
-        if reuse and always_create and table in self.table_map:
890  
-            # Convert the 'reuse' to case to be "exclude everything but the
891  
-            # reusable set, minus exclusions, for this table".
892  
-            exclusions = set(self.table_map[table]).difference(reuse).union(set(exclusions))
893  
-            always_create = False
894  
-        t_ident = (lhs_table, table, lhs_col, col)
895  
-        if not always_create:
896  
-            for alias in self.join_map.get(t_ident, ()):
897  
-                if alias not in exclusions:
898  
-                    if lhs_table and not self.alias_refcount[self.alias_map[alias].lhs_alias]:
899  
-                        # The LHS of this join tuple is no longer part of the
900  
-                        # query, so skip this possibility.
901  
-                        continue
902  
-                    if self.alias_map[alias].lhs_alias != lhs:
903  
-                        continue
904  
-                    self.ref_alias(alias)
905  
-                    if promote or (lhs and self.alias_map[lhs].join_type == self.LOUTER):
906  
-                        self.promote_joins([alias])
907  
-                    return alias
  903
+            reuse = [a for a in existing if a in reuse]
  904
+        if reuse:
  905
+            alias = reuse[0]
  906
+            self.ref_alias(alias)
  907
+            if promote or (lhs and self.alias_map[lhs].join_type == self.LOUTER):
  908
+                self.promote_joins([alias])
  909
+            return alias
908 910
 
909 911
         # No reuse is possible, so we need a new alias.
910 912
         alias, _ = self.table_alias(table, True)
@@ -915,18 +917,16 @@ def join(self, connection, always_create=False, exclusions=(),
915 917
         elif (promote or outer_if_first
916 918
               or self.alias_map[lhs].join_type == self.LOUTER):
917 919
             # We need to use LOUTER join if asked by promote or outer_if_first,
918  
-            # or if the LHS table is left-joined in the query. Adding inner join
919  
-            # to an existing outer join effectively cancels the effect of the
920  
-            # outer join.
  920
+            # or if the LHS table is left-joined in the query.
921 921
             join_type = self.LOUTER
922 922
         else:
923 923
             join_type = self.INNER
924 924
         join = JoinInfo(table, alias, join_type, lhs, lhs_col, col, nullable)
925 925
         self.alias_map[alias] = join
926  
-        if t_ident in self.join_map:
927  
-            self.join_map[t_ident] += (alias,)
  926
+        if connection in self.join_map:
  927
+            self.join_map[connection] += (alias,)
928 928
         else:
929  
-            self.join_map[t_ident] = (alias,)
  929
+            self.join_map[connection] = (alias,)
930 930
         return alias
931 931
 
932 932
     def setup_inherited_models(self):
@@ -1003,7 +1003,7 @@ def add_aggregate(self, aggregate, model, alias, is_summary):
1003 1003
             # then we need to explore the joins that are required.
1004 1004
 
1005 1005
             field, source, opts, join_list, last, _ = self.setup_joins(
1006  
-                field_list, opts, self.get_initial_alias(), False)
  1006
+                field_list, opts, self.get_initial_alias(), REUSE_ALL)
1007 1007
 
1008 1008
             # Process the join chain to see if it can be trimmed
1009 1009
             col, _, join_list = self.trim_joins(source, join_list, last, False)
@@ -1114,8 +1114,8 @@ def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
1114 1114
 
1115 1115
         try:
1116 1116
             field, target, opts, join_list, last, extra_filters = self.setup_joins(
1117  
-                    parts, opts, alias, True, allow_many, allow_explicit_fk=True,
1118  
-                    can_reuse=can_reuse, negate=negate,
  1117
+                    parts, opts, alias, can_reuse, allow_many,
  1118
+                    allow_explicit_fk=True, negate=negate,
1119 1119
                     process_extras=process_extras)
1120 1120
         except MultiJoin as e:
1121 1121
             self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]),
@@ -1268,9 +1268,8 @@ def add_q(self, q_object, used_aliases=None, force_having=False):
1268 1268
         if self.filter_is_sticky:
1269 1269
             self.used_aliases = used_aliases
1270 1270
 
1271  
-    def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1272  
-            allow_explicit_fk=False, can_reuse=None, negate=False,
1273  
-            process_extras=True):
  1271
+    def setup_joins(self, names, opts, alias, can_reuse, allow_many=True,
  1272
+            allow_explicit_fk=False, negate=False, process_extras=True):
1274 1273
         """
1275 1274
         Compute the necessary table joins for the passage through the fields
1276 1275
         given in 'names'. 'opts' is the Options class for the current model
@@ -1290,14 +1289,9 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1290 1289
         """
1291 1290
         joins = [alias]
1292 1291
         last = [0]
1293  
-        dupe_set = set()
1294  
-        exclusions = set()
1295 1292
         extra_filters = []
1296 1293
         int_alias = None
1297 1294
         for pos, name in enumerate(names):
1298  
-            if int_alias is not None:
1299  
-                exclusions.add(int_alias)
1300  
-            exclusions.add(alias)
1301 1295
             last.append(len(joins))
1302 1296
             if name == 'pk':
1303 1297
                 name = opts.pk.name
@@ -1330,28 +1324,12 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1330 1324
                         opts = int_model._meta
1331 1325
                     else:
1332 1326
                         lhs_col = opts.parents[int_model].column
1333  
-                        dedupe = lhs_col in opts.duplicate_targets
1334  
-                        if dedupe:
1335  
-                            exclusions.update(self.dupe_avoidance.get(
1336  
-                                    (id(opts), lhs_col), ()))
1337  
-                            dupe_set.add((opts, lhs_col))
1338 1327
                         opts = int_model._meta
1339 1328
                         alias = self.join((alias, opts.db_table, lhs_col,
1340  
-                                opts.pk.column), exclusions=exclusions)
  1329
+                                opts.pk.column))
1341 1330
                         joins.append(alias)
1342  
-                        exclusions.add(alias)
1343  
-                        for (dupe_opts, dupe_col) in dupe_set:
1344  
-                            self.update_dupe_avoidance(dupe_opts, dupe_col,
1345  
-                                    alias)
1346 1331
             cached_data = opts._join_cache.get(name)
1347 1332
             orig_opts = opts
1348  
-            dupe_col = direct and field.column or field.field.column
1349  
-            dedupe = dupe_col in opts.duplicate_targets
1350  
-            if dupe_set or dedupe:
1351  
-                if dedupe:
1352  
-                    dupe_set.add((opts, dupe_col))
1353  
-                exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col),
1354  
-                        ()))
1355 1333
 
1356 1334
             if process_extras and hasattr(field, 'extra_filters'):
1357 1335
                 extra_filters.extend(field.extra_filters(names, pos, negate))
@@ -1377,16 +1355,14 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1377 1355
                                 target)
1378 1356
 
1379 1357
                     int_alias = self.join((alias, table1, from_col1, to_col1),
1380  
-                            dupe_multis, exclusions, nullable=True,
1381  
-                            reuse=can_reuse)
  1358
+                            reuse=can_reuse, nullable=True)
1382 1359
                     if int_alias == table2 and from_col2 == to_col2:
1383 1360
                         joins.append(int_alias)
1384 1361
                         alias = int_alias
1385 1362
                     else:
1386 1363
                         alias = self.join(
1387 1364
                                 (int_alias, table2, from_col2, to_col2),
1388  
-                                dupe_multis, exclusions, nullable=True,
1389  
-                                reuse=can_reuse)
  1365
+                                reuse=can_reuse, nullable=True)
1390 1366
                         joins.extend([int_alias, alias])
1391 1367
                 elif field.rel:
1392 1368
                     # One-to-one or many-to-one field
@@ -1402,7 +1378,6 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1402 1378
                                 opts, target)
1403 1379
 
1404 1380
                     alias = self.join((alias, table, from_col, to_col),
1405  
-                                      exclusions=exclusions,
1406 1381
                                       nullable=self.is_nullable(field))
1407 1382
                     joins.append(alias)
1408 1383
                 else:
@@ -1433,11 +1408,9 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1433 1408
                                 target)
1434 1409
 
1435 1410
                     int_alias = self.join((alias, table1, from_col1, to_col1),
1436  
-                            dupe_multis, exclusions, nullable=True,
1437  
-                            reuse=can_reuse)
  1411
+                            reuse=can_reuse, nullable=True)
1438 1412
                     alias = self.join((int_alias, table2, from_col2, to_col2),
1439  
-                            dupe_multis, exclusions, nullable=True,
1440  
-                            reuse=can_reuse)
  1413
+                            reuse=can_reuse, nullable=True)
1441 1414
                     joins.extend([int_alias, alias])
1442 1415
                 else:
1443 1416
                     # One-to-many field (ForeignKey defined on the target model)
@@ -1461,17 +1434,9 @@ def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
1461 1434
                                 opts, target)
1462 1435
 
1463 1436
                     alias = self.join((alias, table, from_col, to_col),
1464  
-                            dupe_multis, exclusions, nullable=True,
1465  
-                            reuse=can_reuse)
  1437
+                            reuse=can_reuse, nullable=True)
1466 1438
                     joins.append(alias)
1467 1439
 
1468  
-            for (dupe_opts, dupe_col) in dupe_set:
1469  
-                if int_alias is None:
1470  
-                    to_avoid = alias
1471  
-                else:
1472  
-                    to_avoid = int_alias
1473  
-                self.update_dupe_avoidance(dupe_opts, dupe_col, to_avoid)
1474  
-
1475 1440
         if pos != len(names) - 1:
1476 1441
             if pos == len(names) - 2:
1477 1442
                 raise FieldError("Join on field %r not permitted. Did you misspell %r for the lookup type?" % (name, names[pos + 1]))
@@ -1538,19 +1503,6 @@ def trim_joins(self, target, join_list, last, trim, nonnull_check=False):
1538 1503
                 penultimate = last.pop()
1539 1504
         return col, alias, join_list
1540 1505
 
1541  
-    def update_dupe_avoidance(self, opts, col, alias):
1542  
-        """
1543  
-        For a column that is one of multiple pointing to the same table, update
1544  
-        the internal data structures to note that this alias shouldn't be used
1545  
-        for those other columns.
1546  
-        """
1547  
-        ident = id(opts)
1548  
-        for name in opts.duplicate_targets[col]:
1549  
-            try:
1550  
-                self.dupe_avoidance[ident, name].add(alias)
1551  
-            except KeyError:
1552  
-                self.dupe_avoidance[ident, name] = set([alias])
1553  
-
1554 1506
     def split_exclude(self, filter_expr, prefix, can_reuse):
1555 1507
         """
1556 1508
         When doing an exclude against any kind of N-to-many relation, we need
@@ -1657,8 +1609,8 @@ def add_fields(self, field_names, allow_m2m=True):
1657 1609
         try:
1658 1610
             for name in field_names:
1659 1611
                 field, target, u2, joins, u3, u4 = self.setup_joins(
1660  
-                        name.split(LOOKUP_SEP), opts, alias, False, allow_m2m,
1661  
-                        True)
  1612
+                        name.split(LOOKUP_SEP), opts, alias, REUSE_ALL,
  1613
+                        allow_m2m, True)
1662 1614
                 final_alias = joins[-1]
1663 1615
                 col = target.column
1664 1616
                 if len(joins) > 1:
@@ -1948,7 +1900,7 @@ def set_start(self, start):
1948 1900
         opts = self.model._meta
1949 1901
         alias = self.get_initial_alias()
1950 1902
         field, col, opts, joins, last, extra = self.setup_joins(
1951  
-                start.split(LOOKUP_SEP), opts, alias, False)
  1903
+                start.split(LOOKUP_SEP), opts, alias, REUSE_ALL)
1952 1904
         select_col = self.alias_map[joins[1]].lhs_join_col
1953 1905
         select_alias = alias
1954 1906
 
12  tests/regressiontests/queries/tests.py
@@ -1046,6 +1046,18 @@ def test_ticket14876(self):
1046 1046
         self.assertQuerysetEqual(q1, ["<Item: i1>"])
1047 1047
         self.assertEqual(str(q1.query), str(q2.query))
1048 1048
 
  1049
+    def test_combine_join_reuse(self):
  1050
+        # Test that we correctly recreate joins having identical connections
  1051
+        # in the rhs query, in case the query is ORed together. Related to
  1052
+        # ticket #18748
  1053
+        Report.objects.create(name='r4', creator=self.a1)
  1054
+        q1 = Author.objects.filter(report__name='r5')
  1055
+        q2 = Author.objects.filter(report__name='r4').filter(report__name='r1')
  1056
+        combined = q1|q2
  1057
+        self.assertEquals(str(combined.query).count('JOIN'), 2)
  1058
+        self.assertEquals(len(combined), 1)
  1059
+        self.assertEquals(combined[0].name, 'a1')
  1060
+
1049 1061
     def test_ticket7095(self):
1050 1062
         # Updates that are filtered on the model being updated are somewhat
1051 1063
         # tricky in MySQL. This exercises that case.

0 notes on commit 6884713

Please sign in to comment.
Something went wrong with that request. Please try again.