Permalink
Browse files

Fixed #28996 -- Simplified some boolean constructs and removed trivial continue statements.
  • Loading branch information...
1 parent 4bcec02 commit a38ae914d89809aed6d79337b74a8b31b6d3849a Дилян Палаузов committed with timgraham Jan 12, 2018
@@ -152,9 +152,8 @@ def register(cls, test, list_filter_class, take_priority=False):
@classmethod
def create(cls, field, request, params, model, model_admin, field_path):
for test, list_filter_class in cls._field_list_filters:
- if not test(field):
- continue
- return list_filter_class(field, request, params, model, model_admin, field_path=field_path)
+ if test(field):
+ return list_filter_class(field, request, params, model, model_admin, field_path=field_path)
class RelatedFieldListFilter(FieldListFilter):
@@ -367,9 +367,8 @@ def __init__(self, formset, *args, **kwargs):
def __iter__(self):
fk = getattr(self.formset, "fk", None)
for field in self.fields:
- if fk and fk.name == field:
- continue
- yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
+ if not fk or fk.name != field:
+ yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
class AdminErrorList(forms.utils.ErrorList):
@@ -831,10 +831,7 @@ def get_actions(self, request):
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
- class_actions = getattr(klass, 'actions', [])
- # Avoid trying to iterate over None
- if not class_actions:
- continue
+ class_actions = getattr(klass, 'actions', []) or []
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
@@ -1498,11 +1495,10 @@ def _changeform_view(self, request, object_id, form_url, extra_context):
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
- if form.is_valid():
- form_validated = True
+ form_validated = form.is_valid()
+ if form_validated:
new_object = self.save_form(request, form, change=not add)
else:
- form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
if all_valid(formsets) and form_validated:
@@ -128,10 +128,9 @@ def result_headers(cl):
order_type = ''
new_order_type = 'asc'
sort_priority = 0
- sorted = False
# Is it currently being sorted on?
- if i in ordering_field_columns:
- sorted = True
+ is_sorted = i in ordering_field_columns
+ if is_sorted:
order_type = ordering_field_columns.get(i).lower()
sort_priority = list(ordering_field_columns).index(i) + 1
th_classes.append('sorted %sending' % order_type)
@@ -165,7 +164,7 @@ def make_qs_param(t, n):
yield {
"text": text,
"sortable": True,
- "sorted": sorted,
+ "sorted": is_sorted,
"ascending": order_type == "asc",
"sort_priority": sort_priority,
"url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
@@ -53,11 +53,8 @@ def prepare_lookup_value(key, value):
if key.endswith('__in'):
value = value.split(',')
# if key ends with __isnull, special case '' and the string literals 'false' and '0'
- if key.endswith('__isnull'):
- if value.lower() in ('', 'false', '0'):
- value = False
- else:
- value = True
+ elif key.endswith('__isnull'):
+ value = value.lower() not in ('', 'false', '0')
return value
@@ -378,9 +378,8 @@ def has_related_field_in_list_display(self):
else:
if isinstance(field.remote_field, models.ManyToOneRel):
# <FK>_id field names don't require a join.
- if field_name == field.get_attname():
- continue
- return True
+ if field_name != field.get_attname():
+ return True
return False
def url_for_result(self, result):
@@ -438,8 +438,7 @@ def optgroups(self, name, value, attr=None):
str(option_value) in value and
(has_selected is False or self.allow_multiple_selected)
)
- if selected is True and has_selected is False:
- has_selected = True
+ has_selected |= selected
index = len(default[1])
subgroup = default[1]
subgroup.append(self.create_option(name, option_value, option_label, selected_choices, index))
@@ -41,12 +41,11 @@ def handle(self, **options):
collector.collect([ct])
for obj_type, objs in collector.data.items():
- if objs == {ct}:
- continue
- ct_info.append(' - %s %s object(s)' % (
- len(objs),
- obj_type._meta.label,
- ))
+ if objs != {ct}:
+ ct_info.append(' - %s %s object(s)' % (
+ len(objs),
+ obj_type._meta.label,
+ ))
content_type_display = '\n'.join(ct_info)
self.stdout.write("""Some content types in your database are stale and can be deleted.
Any objects that depend on these content types will also be deleted.
@@ -142,12 +142,9 @@ def _from_wkb(cls, geom_input):
def _from_json(geom_input):
ptr = capi.from_json(geom_input)
if GDAL_VERSION < (2, 0):
- has_srs = True
try:
capi.get_geom_srs(ptr)
except SRSException:
- has_srs = False
- if not has_srs:
srs = SpatialReference(4326)
capi.assign_srs(ptr, srs.ptr)
return ptr
@@ -49,7 +49,14 @@ def __init__(self, *args, **kwargs):
)
)
- if isinstance(coords, (tuple, list)):
+ numpy_coords = not isinstance(coords, (tuple, list))
+ if numpy_coords:
+ shape = coords.shape # Using numpy's shape.
+ if len(shape) != 2:
+ raise TypeError('Too many dimensions.')
+ self._checkdim(shape[1])
+ ndim = shape[1]
+ else:
# Getting the number of coords and the number of dimensions -- which
# must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
ndim = None
@@ -63,14 +70,6 @@ def __init__(self, *args, **kwargs):
self._checkdim(ndim)
elif len(coord) != ndim:
raise TypeError('Dimension mismatch.')
- numpy_coords = False
- else:
- shape = coords.shape # Using numpy's shape.
- if len(shape) != 2:
- raise TypeError('Too many dimensions.')
- self._checkdim(shape[1])
- ndim = shape[1]
- numpy_coords = True
# Creating a coordinate sequence object because it is easier to
# set the points using its methods.
@@ -61,10 +61,8 @@ def _last_modification(self):
modification = os.stat(self._key_to_file()).st_mtime
if settings.USE_TZ:
modification = datetime.datetime.utcfromtimestamp(modification)
- modification = modification.replace(tzinfo=timezone.utc)
- else:
- modification = datetime.datetime.fromtimestamp(modification)
- return modification
+ return modification.replace(tzinfo=timezone.utc)
+ return datetime.datetime.fromtimestamp(modification)
def _expiry_date(self, session_data):
"""
@@ -86,16 +86,15 @@ def hashed_name(self, name, content=None, filename=None):
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
- opened = False
- if content is None:
+ opened = content is None
+ if opened:
if not self.exists(filename):
raise ValueError("The file '%s' could not be found with %r." % (filename, self))
try:
content = self.open(filename)
except IOError:
# Handle directory paths and fragments
return name
- opened = True
try:
file_hash = self.file_hash(clean_name, content)
finally:
@@ -17,13 +17,10 @@
@register(Tags.templates)
def check_setting_app_dirs_loaders(app_configs, **kwargs):
- passed_check = True
- for conf in settings.TEMPLATES:
- if not conf.get('APP_DIRS'):
- continue
- if 'loaders' in conf.get('OPTIONS', {}):
- passed_check = False
- return [] if passed_check else [E001]
+ return [E001] if any(
+ conf.get('APP_DIRS') and 'loaders' in conf.get('OPTIONS', {})
+ for conf in settings.TEMPLATES
+ ) else []
@register(Tags.templates)
@@ -160,10 +160,7 @@ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=
"""
# Check the content-length header to see if we should
# If the post is too large, we cannot use the Memory handler.
- if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
- self.activated = False
- else:
- self.activated = True
+ self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE
def new_file(self, *args, **kwargs):
super().new_file(*args, **kwargs)
@@ -271,9 +271,8 @@ def message(self):
# Use cached DNS_NAME for performance
msg['Message-ID'] = make_msgid(domain=DNS_NAME)
for name, value in self.extra_headers.items():
- if name.lower() == 'from': # From is already handled
- continue
- msg[name] = value
+ if name.lower() != 'from': # From is already handled
+ msg[name] = value
return msg
def recipients(self):
View
@@ -132,11 +132,10 @@ def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, ma
# TimestampSigner.unsign() returns str but base64 and zlib compression
# operate on bytes.
base64d = force_bytes(TimestampSigner(key, salt=salt).unsign(s, max_age=max_age))
- decompress = False
- if base64d[:1] == b'.':
+ decompress = base64d[:1] == b'.'
+ if decompress:
# It's compressed; uncompress it first
base64d = base64d[1:]
- decompress = True
data = b64_decode(base64d)
if decompress:
data = zlib.decompress(data)
@@ -570,12 +570,11 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type,
# db_index=True.
index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
for index_name in index_names:
- if index_name in meta_index_names:
+ if index_name not in meta_index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
- continue
- self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
+ self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
# Change check constraints?
if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
constraint_names = self._constraint_names(model, [old_field.column], check=True)
@@ -261,16 +261,15 @@ def _build_migration_list(self, graph=None):
deps_satisfied = True
operation_dependencies = set()
for dep in operation._auto_deps:
- is_swappable_dep = False
- if dep[0] == "__setting__":
+ is_swappable_dep = dep[0] == '__setting__'
+ if is_swappable_dep:
# We need to temporarily resolve the swappable dependency to prevent
# circular references. While keeping the dependency checks on the
# resolved model we still add the swappable dependencies.
# See #23322
resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
original_dep = dep
dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
- is_swappable_dep = True
if dep[0] != app_label and dep[0] != "__setting__":
# External app dependency. See if it's not yet
# satisfied.
@@ -831,18 +830,18 @@ def _generate_added_field(self, app_label, model_name, field_name):
dependencies.extend(self._get_dependencies_for_foreign_key(field))
# You can't just add NOT NULL fields with no default or fields
# which don't allow empty strings as default.
- preserve_default = True
time_fields = (models.DateField, models.DateTimeField, models.TimeField)
- if (not field.null and not field.has_default() and
- not field.many_to_many and
- not (field.blank and field.empty_strings_allowed) and
- not (isinstance(field, time_fields) and field.auto_now)):
+ preserve_default = (
+ field.null or field.has_default() or field.many_to_many or
+ (field.blank and field.empty_strings_allowed) or
+ (isinstance(field, time_fields) and field.auto_now)
+ )
+ if not preserve_default:
field = field.clone()
if isinstance(field, time_fields) and field.auto_now_add:
field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
else:
field.default = self.questioner.ask_not_null_addition(field_name, model_name)
- preserve_default = False
self.add_operation(
app_label,
operations.AddField(
@@ -367,9 +367,7 @@ def make_state(self, nodes=None, at_end=True, real_apps=None):
plan = []
for node in nodes:
for migration in self.forwards_plan(node):
- if migration not in plan:
- if not at_end and migration in nodes:
- continue
+ if migration in plan or at_end or migration not in nodes:
plan.append(migration)
project_state = ProjectState(real_apps=real_apps)
for node in plan:
@@ -172,10 +172,9 @@ def add_internal_dependencies(self, key, migration):
dependencies find the correct root node.
"""
for parent in migration.dependencies:
- if parent[0] != key[0] or parent[1] == '__first__':
- # Ignore __first__ references to the same app (#22325).
- continue
- self.graph.add_dependency(migration, key, parent, skip_validation=True)
+ # Ignore __first__ references to the same app.
+ if parent[0] == key[0] and parent[1] != '__first__':
+ self.graph.add_dependency(migration, key, parent, skip_validation=True)
def add_external_dependencies(self, key, migration):
for parent in migration.dependencies:
@@ -109,11 +109,7 @@ def _check_related_name_is_valid(self):
related_name = self.remote_field.related_name
if related_name is None:
return []
- is_valid_id = True
- if keyword.iskeyword(related_name):
- is_valid_id = False
- if not related_name.isidentifier():
- is_valid_id = False
+ is_valid_id = not keyword.iskeyword(related_name) and related_name.isidentifier()
if not (is_valid_id or related_name.endswith('+')):
return [
checks.Error(
@@ -753,10 +753,9 @@ def _get_fields(self, forward=True, reverse=True, include_parents=True, include_
# We must keep track of which models we have already seen. Otherwise we
# could include the same field multiple times from different models.
- topmost_call = False
- if seen_models is None:
+ topmost_call = seen_models is None
+ if topmost_call:
seen_models = set()
- topmost_call = True
seen_models.add(self.model)
# Creates a cache key composed of all arguments
@@ -785,9 +784,8 @@ def _get_fields(self, forward=True, reverse=True, include_parents=True, include_
for obj in parent._meta._get_fields(
forward=forward, reverse=reverse, include_parents=include_parents,
include_hidden=include_hidden, seen_models=seen_models):
- if getattr(obj, 'parent_link', False) and obj.model != self.concrete_model:
- continue
- fields.append(obj)
+ if not getattr(obj, 'parent_link', False) or obj.model == self.concrete_model:
+ fields.append(obj)
if reverse and not self.proxy:
# Tree is computed once and cached until the app cache is expired.
# It is composed of a list of fields pointing to the current model
Oops, something went wrong.

0 comments on commit a38ae91

Please sign in to comment.