Skip to content

Commit

Permalink
use "elif" instead of "else" then "if"
Browse files Browse the repository at this point in the history
pylint's `check_elif` plug-in complains:

```
************* Module apport.fileutils
apport/fileutils.py:605:8: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module apport.crashdb
apport/crashdb.py:224:20: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module apport.crashdb_impl.launchpad
apport/crashdb_impl/launchpad.py:895:12: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
apport/crashdb_impl/launchpad.py:955:12: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module apport.packaging_impl.apt_dpkg
apport/packaging_impl/apt_dpkg.py:1291:16: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
apport/packaging_impl/apt_dpkg.py:1431:16: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module problem_report
problem_report.py:323:12: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
problem_report.py:332:16: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
problem_report.py:408:16: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module tests.integration.test_signal_crashes
tests/integration/test_signal_crashes.py:1050:12: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module apport-retrace
bin/apport-retrace:532:20: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
bin/apport-retrace:607:8: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
bin/apport-retrace:681:12: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
************* Module apport-gtk
gtk/apport-gtk:573:16: R5501: Consider using "elif" instead of "else" then "if" to remove one indentation level (else-if-used)
```

Signed-off-by: Benjamin Drung <benjamin.drung@canonical.com>
  • Loading branch information
bdrung authored and schopin-pro committed Dec 7, 2023
1 parent 5eb42d5 commit 960c934
Show file tree
Hide file tree
Showing 9 changed files with 76 additions and 89 deletions.
1 change: 1 addition & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ extension-pkg-whitelist=apt_pkg # wokeignore:rule=whitelist
 # List of plugins (as comma separated values of python module names) to load,
 # usually to register additional checkers.
 load-plugins=
+    pylint.extensions.check_elif,
     pylint.extensions.code_style,
     pylint.extensions.comparison_placement,
     pylint.extensions.consider_refactoring_into_while_condition,
Expand Down
25 changes: 12 additions & 13 deletions apport/crashdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,20 +216,19 @@ def check_duplicate(self, crash_id, report=None):
                     # we have a duplicate only identified by address sig,
                     # close it
                     master_id = addr_match
-                else:
-                    # our bug is a dupe of two different masters, one from
-                    # symbolic, the other from addr matching (see LP#943117);
-                    # make them all duplicates of each other, using the lower
-                    # number as master
-                    if master_id < addr_match:
-                        self.close_duplicate(report, addr_match, master_id)
-                        self._duplicate_db_merge_id(addr_match, master_id)
-                    else:
-                        self.close_duplicate(report, master_id, addr_match)
-                        self._duplicate_db_merge_id(master_id, addr_match)
-                        master_id = addr_match
-                        # no version tracking for address signatures yet
-                        master_ver = None
+                # our bug is a dupe of two different masters, one from
+                # symbolic, the other from addr matching (see LP#943117);
+                # make them all duplicates of each other, using the lower
+                # number as master
+                elif master_id < addr_match:
+                    self.close_duplicate(report, addr_match, master_id)
+                    self._duplicate_db_merge_id(addr_match, master_id)
+                else:
+                    self.close_duplicate(report, master_id, addr_match)
+                    self._duplicate_db_merge_id(master_id, addr_match)
+                    master_id = addr_match
+                    # no version tracking for address signatures yet
+                    master_ver = None

if master_id is not None and master_id != crash_id:
if addr_sig:
Expand Down
14 changes: 6 additions & 8 deletions apport/crashdb_impl/launchpad.py
Original file line number Diff line number Diff line change
Expand Up @@ -891,9 +891,8 @@ def close_duplicate(self, report, crash_id, master_id):
master.tags = master_tags
master.lp_save()

-        else:
-            if bug.duplicate_of:
-                bug.duplicate_of = None
+        elif bug.duplicate_of:
+            bug.duplicate_of = None

# pylint: disable=protected-access
if bug._dirty_attributes: # LP#336866 workaround
Expand Down Expand Up @@ -951,11 +950,10 @@ def mark_retrace_failed(self, crash_id, invalid_msg=None):
a.removeFromBug()
except HTTPError:
pass # LP#249950 workaround
-        else:
-            if "apport-failed-retrace" not in bug.tags:
-                # LP#254901 workaround
-                bug.tags = bug.tags + ["apport-failed-retrace"]
-                bug.lp_save()
+        elif "apport-failed-retrace" not in bug.tags:
+            # LP#254901 workaround
+            bug.tags = bug.tags + ["apport-failed-retrace"]
+            bug.lp_save()

def _mark_dup_checked(self, crash_id, report):
"""Mark crash id as checked for being a duplicate."""
Expand Down
9 changes: 4 additions & 5 deletions apport/fileutils.py
Original file line number Diff line number Diff line change
Expand Up @@ -601,11 +601,10 @@ def get_core_path(pid=None, exe=None, uid=None, timestamp=None, proc_pid_fd=None
if pid is None:
pid = "unknown"
timestamp = "unknown"
-    else:
-        if timestamp is None:
-            with open(f"/proc/{pid}/stat", encoding="utf-8") as stat_file:
-                stat_contents = stat_file.read()
-            timestamp = get_starttime(stat_contents)
+    elif timestamp is None:
+        with open(f"/proc/{pid}/stat", encoding="utf-8") as stat_file:
+            stat_contents = stat_file.read()
+        timestamp = get_starttime(stat_contents)

if exe is None:
exe = get_process_path(proc_pid_fd)
Expand Down
16 changes: 7 additions & 9 deletions apport/packaging_impl/apt_dpkg.py
Original file line number Diff line number Diff line change
Expand Up @@ -1287,13 +1287,12 @@ def install_packages(
                 else:
                     logger.debug("Removing %s which is already the right version", p)
                     real_pkgs.remove(p)
-            else:
-                if pkg_versions.get(p) != cache[p].candidate.version:
-                    logger.debug("Installing %s", p)
-                    cache[p].mark_install(False, False)
-                else:
-                    logger.debug("Removing %s which is already the right version", p)
-                    real_pkgs.remove(p)
+            elif pkg_versions.get(p) != cache[p].candidate.version:
+                logger.debug("Installing %s", p)
+                cache[p].mark_install(False, False)
+            else:
+                logger.debug("Removing %s which is already the right version", p)
+                real_pkgs.remove(p)

last_written = time.time()
# fetch packages
Expand Down Expand Up @@ -1427,9 +1426,8 @@ def _get_primary_mirror_from_apt_sources(apt_dir: str) -> str:
if isinstance(source, SourceEntry):
if source.type == "deb":
uri = source.uri or ""
-            else:
-                if "deb" in source.types:
-                    uri = source.uris[0]
+            elif "deb" in source.types:
+                uri = source.uris[0]

if uri is not None:
if uri.startswith("mirror+file:"):
Expand Down
31 changes: 14 additions & 17 deletions bin/apport-retrace
Original file line number Diff line number Diff line change
Expand Up @@ -528,15 +528,14 @@ Thank you for your understanding, and sorry for the inconvenience!
== f"{sandbox}/usr/lib/debug/.dwz/{target}"
):
pass
-                else:
-                    if not os.path.islink(f"/usr/lib/debug/.dwz/{target}"):
-                        print(
-                            "apport is unlikely to produce a quality retrace"
-                            " if it can not create a symlink in the host"
-                            " system's /usr/lib/debug/.dwz directory to the"
-                            " gdb sandbox's. See LP: #1818918 for details."
-                        )
-                        sys.exit(0)
+                elif not os.path.islink(f"/usr/lib/debug/.dwz/{target}"):
+                    print(
+                        "apport is unlikely to produce a quality retrace"
+                        " if it can not create a symlink in the host"
+                        " system's /usr/lib/debug/.dwz directory to the"
+                        " gdb sandbox's. See LP: #1818918 for details."
+                    )
+                    sys.exit(0)
else:
gdb_sandbox = None

Expand Down Expand Up @@ -603,9 +602,8 @@ Thank you for your understanding, and sorry for the inconvenience!

if options.stdout:
print_traces(report)
-    else:
-        if not options.gdb:
-            modified = True
+    elif not options.gdb:
+        modified = True

if modified:
if not options.report and not options.output:
Expand Down Expand Up @@ -677,12 +675,11 @@ Thank you for your understanding, and sorry for the inconvenience!
)
crashdb.mark_retrace_failed(crashid)

-        else:
-            if options.output == "-":
-                report.write(sys.stdout.detach())
-            else:
-                with open(options.report, "wb") as out:
-                    report.write(out)
+        elif options.output == "-":
+            report.write(sys.stdout.detach())
+        else:
+            with open(options.report, "wb") as out:
+                report.write(out)


if __name__ == "__main__":
Expand Down
9 changes: 4 additions & 5 deletions gtk/apport-gtk
Original file line number Diff line number Diff line change
Expand Up @@ -568,12 +568,11 @@ class GTKUserInterface(apport.ui.UserInterface):
for option in options:
if multiple:
                 b = Gtk.CheckButton.new_with_label(option)
-            else:
-                # use previous radio button as group; work around GNOME#635253
-                if b:
-                    b = Gtk.RadioButton.new_with_label_from_widget(b, option)
-                else:
-                    b = Gtk.RadioButton.new_with_label([], option)
+            # use previous radio button as group; work around GNOME#635253
+            elif b:
+                b = Gtk.RadioButton.new_with_label_from_widget(b, option)
+            else:
+                b = Gtk.RadioButton.new_with_label([], option)
self.w("vbox_choices").pack_start(b, True, True, 0)
self.w("vbox_choices").show_all()

Expand Down
41 changes: 19 additions & 22 deletions problem_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,24 +319,22 @@ def _decompress_line(cls, line, decompressor, value=b""):
raise MalformedProblemReport(str(error)) from None
         if decompressor:
             value += decompressor.decompress(block)
-        else:
-            if isinstance(value, CompressedValue):
-                # check gzip header; if absent, we have legacy zlib
-                # data
-                if value.gzipvalue == b"" and not block.startswith(GZIP_HEADER_START):
-                    value.legacy_zlib = True
-                value.gzipvalue += block
-            else:
-                # lazy initialization of decompressor
-                # skip gzip header, if present
-                if block.startswith(GZIP_HEADER_START):
-                    decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
-                    value = decompressor.decompress(cls._strip_gzip_header(block))
-                else:
-                    # legacy zlib-only format used default block
-                    # size
-                    decompressor = zlib.decompressobj()
-                    value += decompressor.decompress(block)
+        elif isinstance(value, CompressedValue):
+            # check gzip header; if absent, we have legacy zlib
+            # data
+            if value.gzipvalue == b"" and not block.startswith(GZIP_HEADER_START):
+                value.legacy_zlib = True
+            value.gzipvalue += block
+        # lazy initialization of decompressor
+        # skip gzip header, if present
+        elif block.startswith(GZIP_HEADER_START):
+            decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
+            value = decompressor.decompress(cls._strip_gzip_header(block))
+        else:
+            # legacy zlib-only format used default block
+            # size
+            decompressor = zlib.decompressobj()
+            value += decompressor.decompress(block)

return decompressor, value

Expand Down Expand Up @@ -404,12 +402,11 @@ def _get_sorted_keys(self, only_new: bool) -> tuple[list[str], list[str]]:
                     binkeys.append(k)
                 else:
                     asckeys.append(k)
-            else:
-                if not isinstance(v, CompressedValue) and len(v) >= 2 and not v[1]:
-                    # force uncompressed
-                    asckeys.append(k)
-                else:
-                    binkeys.append(k)
+            elif not isinstance(v, CompressedValue) and len(v) >= 2 and not v[1]:
+                # force uncompressed
+                asckeys.append(k)
+            else:
+                binkeys.append(k)

asckeys.sort()
if "ProblemType" in asckeys:
Expand Down
19 changes: 9 additions & 10 deletions tests/integration/test_signal_crashes.py
Original file line number Diff line number Diff line change
Expand Up @@ -1046,17 +1046,16 @@ def do_crash(
self._check_core_file_is_valid(core_path, command)
finally:
os.unlink(core_path)
-        else:
-            if os.path.exists(core_path):
-                try:
-                    os.unlink(core_path)
-                except OSError as error:
-                    sys.stderr.write(
-                        f"WARNING: cannot clean up core file {core_path}:"
-                        f" {str(error)}\n"
-                    )
+        elif os.path.exists(core_path):
+            try:
+                os.unlink(core_path)
+            except OSError as error:
+                sys.stderr.write(
+                    f"WARNING: cannot clean up core file {core_path}:"
+                    f" {str(error)}\n"
+                )
 
-                self.fail("leaves unexpected core file behind")
+            self.fail("leaves unexpected core file behind")

self._check_report(
expect_report=expect_report,
Expand Down

0 comments on commit 960c934

Please sign in to comment.