5 changes: 3 additions & 2 deletions django/contrib/gis/utils/layermapping.py

@@ -101,7 +101,8 @@ def __init__(self, model, data, mapping, layer=0,
         self.layer = self.ds[layer]

         self.using = using if using is not None else router.db_for_write(model)
-        self.spatial_backend = connections[self.using].ops
+        connection = connections[self.using]
+        self.spatial_backend = connection.ops

         # Setting the mapping & model attributes.
         self.mapping = mapping
@@ -113,7 +114,7 @@ def __init__(self, model, data, mapping, layer=0,

         # Getting the geometry column associated with the model (an
         # exception will be raised if there is no geometry column).
-        if connections[self.using].features.supports_transform:
+        if connection.features.supports_transform:
             self.geo_field = self.geometry_field()
         else:
             transform = False
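The hunk above is the pattern repeated throughout this commit: bind connections[self.using] to a local name once and reuse it, instead of going through the connection handler's __getitem__ on every access. A minimal sketch of why this pays off, using a stand-in handler (the class, alias, and timing harness here are illustrative, not Django's actual implementation):

import threading
import timeit

class ConnectionHandler:
    """Stand-in for django.db.connections: every __getitem__ validates
    the alias and touches thread-local storage before returning."""
    def __init__(self, databases):
        self._databases = databases
        self._local = threading.local()

    def __getitem__(self, alias):
        if alias not in self._databases:  # work done on every lookup
            raise KeyError(alias)
        if not hasattr(self._local, 'conns'):
            self._local.conns = {a: object() for a in self._databases}
        return self._local.conns[alias]

connections = ConnectionHandler({'default': {}})

def repeated():
    for _ in range(100):
        connections['default']            # handler hit 100 times

def cached():
    connection = connections['default']   # handler hit once, as in the patch
    for _ in range(100):
        connection

print('repeated:', timeit.timeit(repeated, number=10_000))
print('cached:  ', timeit.timeit(cached, number=10_000))

On a hot path like LayerMapping's per-feature loop, shaving that per-access overhead is the whole point of the change.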
4 changes: 2 additions & 2 deletions django/db/models/fields/related_descriptors.py

@@ -1100,8 +1100,8 @@ def _get_add_plan(self, db, source_field_name):
             # user-defined intermediary models as they could have other fields
             # causing conflicts which must be surfaced.
             can_ignore_conflicts = (
-                connections[db].features.supports_ignore_conflicts and
-                self.through._meta.auto_created is not False
+                self.through._meta.auto_created is not False and
+                connections[db].features.supports_ignore_conflicts
             )
             # Don't send the signal when inserting duplicate data row
             # for symmetrical reverse entries.
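The reordering above is not cosmetic: `and` short-circuits, and self.through._meta.auto_created is a plain in-memory attribute read, while the feature flag requires a pass through the connection handler. With the cheap operand first, the handler is never consulted for user-defined through models. A small sketch of the principle (the function names are illustrative stand-ins, not the patch's code):

def cheap_attribute_check():
    # stands in for: self.through._meta.auto_created is not False
    print('cheap check ran')
    return False

def handler_feature_check():
    # stands in for: connections[db].features.supports_ignore_conflicts
    print('handler consulted')
    return True

# Python evaluates `and` left to right and stops at the first falsy
# operand, so the handler lookup below never runs.
can_ignore_conflicts = cheap_attribute_check() and handler_feature_check()
# prints only 'cheap check ran'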
20 changes: 10 additions & 10 deletions django/db/models/query.py

@@ -498,7 +498,6 @@ def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
         if not objs:
             return objs
         self._for_write = True
-        connection = connections[self.db]
         opts = self.model._meta
         fields = opts.concrete_fields
         objs = list(objs)
@@ -521,6 +520,7 @@ def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
                 returned_columns = self._batched_insert(
                     objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
                 )
+                connection = connections[self.db]
                 if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
                     assert len(returned_columns) == len(objs_without_pk)
                 for obj_without_pk, results in zip(objs_without_pk, returned_columns):
@@ -551,9 +551,10 @@ def bulk_update(self, objs, fields, batch_size=None):
             return 0
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
-        max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
+        connection = connections[self.db]
+        max_batch_size = connection.ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
-        requires_casting = connections[self.db].features.requires_casted_case_in_updates
+        requires_casting = connection.features.requires_casted_case_in_updates
         batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
         updates = []
         for batch_objs in batches:
@@ -1308,13 +1309,14 @@ def _batched_insert(self, objs, fields, batch_size, ignore_conflicts=False):
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
-        if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
+        connection = connections[self.db]
+        if ignore_conflicts and not connection.features.supports_ignore_conflicts:
             raise NotSupportedError('This database backend does not support ignoring conflicts.')
-        ops = connections[self.db].ops
+        ops = connection.ops
         max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
-        bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
+        bulk_return = connection.features.can_return_rows_from_bulk_insert
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if bulk_return and not ignore_conflicts:
                 inserted_rows.extend(self._insert(
@@ -1523,10 +1525,8 @@ def __iter__(self):
     def iterator(self):
         # Cache some things for performance reasons outside the loop.
         db = self.db
-        compiler = connections[db].ops.compiler('SQLCompiler')(
-            self.query, connections[db], db
-        )
-
+        connection = connections[db]
+        compiler = connection.ops.compiler('SQLCompiler')(self.query, connection, db)
         query = iter(self.query)

         try:
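Two of the query.py hunks go a step further than caching: bulk_create() drops the eager connection lookup at the top of the method and re-acquires it only inside the branch that reads a feature flag, so the `if not objs` early return never touches the handler at all. A self-contained sketch of that shape (every name below is an illustrative stand-in, not Django's API):

class Features:
    can_return_rows_from_bulk_insert = True

class Connection:
    features = Features()

def get_connection():
    # stands in for connections[self.db]
    print('connection handler hit')
    return Connection()

def bulk_insert(objs):
    if not objs:
        return objs                    # early exit: handler never touched
    inserted = list(objs)              # pretend this is the batched INSERT
    connection = get_connection()      # deferred to the point of use
    if connection.features.can_return_rows_from_bulk_insert:
        pass                           # real code copies returned PKs back
    return inserted

bulk_insert([])        # prints nothing
bulk_insert([1, 2])    # prints 'connection handler hit' exactly once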
8 changes: 4 additions & 4 deletions django/db/models/sql/query.py

@@ -2332,10 +2332,10 @@ def is_nullable(self, field):
         # used. The proper fix would be to defer all decisions where
         # is_nullable() is needed to the compiler stage, but that is not easy
         # to do currently.
-        return (
-            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
-            field.empty_strings_allowed
-        ) or field.null
+        return field.null or (
+            field.empty_strings_allowed and
+            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
+        )


 def get_order_dir(field, default='ASC'):
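The same short-circuit reasoning applies here with `or`: field.null is a plain attribute, so nullable fields now return True without ever indexing connections[DEFAULT_DB_ALIAS]. Only non-nullable string fields fall through to the feature flag, which is true on backends such as Oracle that treat the empty string as NULL. A hedged stand-in sketch of the rewritten expression:

class Field:
    def __init__(self, null, empty_strings_allowed=True):
        self.null = null
        self.empty_strings_allowed = empty_strings_allowed

def interprets_empty_strings_as_nulls():
    # stands in for connections[DEFAULT_DB_ALIAS].features.<flag>
    print('feature flag consulted')
    return True

def is_nullable(field):
    return field.null or (
        field.empty_strings_allowed and interprets_empty_strings_as_nulls()
    )

print(is_nullable(Field(null=True)))    # True; flag never consulted
print(is_nullable(Field(null=False)))   # consults the flag, then True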