Commit
updated all python files to use pep-3110 exception syntax for python3 compatibility
heartsucker committed Mar 31, 2019
1 parent d8187a0 commit 10c6d13
Showing 42 changed files with 114 additions and 88 deletions.
4 changes: 2 additions & 2 deletions misc/coding_tools/check-interfaces.py
@@ -46,7 +46,7 @@ def _implements_advice(cls):
for interface in interfaces:
try:
verifyClass(interface, cls)
except Exception, e:
except Exception as e:
print("%s.%s does not correctly implement %s.%s:\n%s"
% (cls.__module__, cls.__name__,
interface.__module__, interface.__name__, e), file=_err)
@@ -89,7 +89,7 @@ def check():
module = relpath.replace(os.sep, '/').replace('/', '.')
try:
__import__(module)
except ImportError, e:
except ImportError as e:
if not is_windows and (' _win' in str(e) or 'win32' in str(e)):
print("Warning: %r imports a Windows-specific module, so we cannot check it (%s).\n"
% (module, str(e)), file=_err)
2 changes: 1 addition & 1 deletion misc/coding_tools/check-miscaptures.py
@@ -15,7 +15,7 @@ def check_file(path):
def check_thing(parser, thing):
try:
ast = parser(thing)
except SyntaxError, e:
except SyntaxError as e:
return e
else:
results = []
1 change: 1 addition & 0 deletions newsfragments/3013.other
@@ -0,0 +1 @@
Updated all Python files to use PEP-3110 exception syntax for Python3 compatibility.
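For context, a minimal illustrative sketch of the syntax change this commit applies throughout the tree (not code from the repository; the module name below is hypothetical): the Python 2-only comma form is replaced by the PEP 3110 "as" form, which also parses under Python 3 and stays unambiguous when catching a tuple of exception types.

    # Old, Python 2-only form removed by this commit (a SyntaxError on Python 3):
    #     except (ImportError, EnvironmentError), e:
    #
    # New, PEP 3110 form (valid on Python 2.6+ and Python 3):
    try:
        import some_optional_module  # hypothetical module, used only to trigger ImportError
    except (ImportError, EnvironmentError) as e:
        print("import failed: %s" % e)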
38 changes: 31 additions & 7 deletions py3_valid_syntax.txt
@@ -5,19 +5,14 @@ misc/build_helpers/gen-package-table.py
misc/build_helpers/run-deprecations.py
misc/build_helpers/show-tool-versions.py
misc/build_helpers/test-osx-pkg.py
misc/coding_tools/check-debugging.py
misc/coding_tools/check-umids.py
misc/coding_tools/coverage2el.py
misc/coding_tools/find-trailing-spaces.py
misc/coding_tools/fixshebangs.py
misc/coding_tools/graph-deps.py
misc/coding_tools/make-canary-files.py
misc/coding_tools/
misc/incident-gatherer/
misc/operations_helpers/
misc/simulators/
release-tools/
setup.py
src/allmydata/_auto_deps.py
src/allmydata/blacklist.py
src/allmydata/check_results.py
src/allmydata/client.py
src/allmydata/codec.py
@@ -27,10 +22,12 @@ src/allmydata/frontends/auth.py
src/allmydata/frontends/__init__.py
src/allmydata/hashtree.py
src/allmydata/history.py
src/allmydata/immutable/checker.py
src/allmydata/immutable/downloader/common.py
src/allmydata/immutable/downloader/fetcher.py
src/allmydata/immutable/downloader/finder.py
src/allmydata/immutable/downloader/__init__.py
src/allmydata/immutable/downloader/share.py
src/allmydata/immutable/downloader/status.py
src/allmydata/immutable/filenode.py
src/allmydata/immutable/__init__.py
@@ -40,6 +37,7 @@ src/allmydata/immutable/offloaded.py
src/allmydata/immutable/repairer.py
src/allmydata/interfaces.py
src/allmydata/introducer/
src/allmydata/magicfolderdb.py
src/allmydata/__main__.py
src/allmydata/monitor.py
src/allmydata/mutable/checker.py
@@ -48,24 +46,40 @@ src/allmydata/mutable/__init__.py
src/allmydata/mutable/layout.py
src/allmydata/mutable/publish.py
src/allmydata/mutable/repairer.py
src/allmydata/mutable/retrieve.py
src/allmydata/mutable/servermap.py
src/allmydata/nodemaker.py
src/allmydata/node.py
src/allmydata/scripts/admin.py
src/allmydata/scripts/backupdb.py
src/allmydata/scripts/cli.py
src/allmydata/scripts/common_http.py
src/allmydata/scripts/common.py
src/allmydata/scripts/create_node.py
src/allmydata/scripts/default_nodedir.py
src/allmydata/scripts/__init__.py
src/allmydata/scripts/magic_folder_cli.py
src/allmydata/scripts/slow_operation.py
src/allmydata/scripts/stats_gatherer.py
src/allmydata/scripts/tahoe_add_alias.py
src/allmydata/scripts/tahoe_backup.py
src/allmydata/scripts/tahoe_check.py
src/allmydata/scripts/tahoe_cp.py
src/allmydata/scripts/tahoe_daemonize.py
src/allmydata/scripts/tahoe_get.py
src/allmydata/scripts/tahoe_invite.py
src/allmydata/scripts/tahoe_ls.py
src/allmydata/scripts/tahoe_manifest.py
src/allmydata/scripts/tahoe_mkdir.py
src/allmydata/scripts/tahoe_mv.py
src/allmydata/scripts/tahoe_put.py
src/allmydata/scripts/tahoe_restart.py
src/allmydata/scripts/tahoe_run.py
src/allmydata/scripts/tahoe_start.py
src/allmydata/scripts/tahoe_status.py
src/allmydata/scripts/tahoe_stop.py
src/allmydata/scripts/tahoe_unlink.py
src/allmydata/scripts/tahoe_webopen.py
src/allmydata/stats.py
src/allmydata/storage/
src/allmydata/test/bench_dirnode.py
@@ -81,6 +95,7 @@ src/allmydata/test/cli/test_daemonize.py
src/allmydata/test/cli/test_invite.py
src/allmydata/test/cli/test_start.py
src/allmydata/test/cli/test_status.py
src/allmydata/test/common.py
src/allmydata/test/common_util.py
src/allmydata/test/common_web.py
src/allmydata/test/eliotutil.py
@@ -109,11 +124,14 @@ src/allmydata/test/test_checker.py
src/allmydata/test/test_client.py
src/allmydata/test/test_configutil.py
src/allmydata/test/test_connections.py
src/allmydata/test/test_crawler.py
src/allmydata/test/test_eliotutil.py
src/allmydata/test/test_encode.py
src/allmydata/test/test_encodingutil.py
src/allmydata/test/test_filenode.py
src/allmydata/test/test_ftp.py
src/allmydata/test/test_happiness.py
src/allmydata/test/test_hashtree.py
src/allmydata/test/test_helper.py
src/allmydata/test/test_hung_server.py
src/allmydata/test/test_i2p_provider.py
@@ -130,6 +148,7 @@ src/allmydata/test/test_netstring.py
src/allmydata/test/test_node.py
src/allmydata/test/test_no_network.py
src/allmydata/test/test_observer.py
src/allmydata/test/test_repairer.py
src/allmydata/test/test_runner.py
src/allmydata/test/test_stats.py
src/allmydata/test/test_storage_client.py
@@ -147,19 +166,23 @@ src/allmydata/test/web/test_root.py
src/allmydata/test/web/test_token.py
src/allmydata/test/web/test_util.py
src/allmydata/unknown.py
src/allmydata/uri.py
src/allmydata/util/abbreviate.py
src/allmydata/util/base32.py
src/allmydata/util/base62.py
src/allmydata/util/cachedir.py
src/allmydata/util/configutil.py
src/allmydata/util/connection_status.py
src/allmydata/util/consumer.py
src/allmydata/util/dbutil.py
src/allmydata/util/deferredutil.py
src/allmydata/util/dictutil.py
src/allmydata/util/eliotutil.py
src/allmydata/util/hashutil.py
src/allmydata/util/i2p_provider.py
src/allmydata/util/idlib.py
src/allmydata/util/__init__.py
src/allmydata/util/iputil.py
src/allmydata/util/keyutil.py
src/allmydata/util/limiter.py
src/allmydata/util/log.py
@@ -184,5 +207,6 @@ src/allmydata/watchdog/
src/allmydata/web/
src/allmydata/windows/__init__.py
src/allmydata/windows/tahoesvc.py
static/
static/tahoe.py
ws_client.py
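py3_valid_syntax.txt is the project's growing whitelist of files and directories whose source already parses under Python 3; the entries added above became eligible once their exception syntax was converted. A rough sketch of how such a list could be verified with the standard library (an assumed helper for illustration, not necessarily the project's actual tooling):

    # check_py3_syntax.py -- parse every whitelisted file with a Python 3 interpreter
    import ast, os, sys

    def check_py3_syntax(listfile="py3_valid_syntax.txt"):
        failures = []
        for line in open(listfile):
            path = line.strip()
            if not path:
                continue
            # Directory entries (trailing slash) cover every .py file beneath them.
            files = ([os.path.join(root, name)
                      for root, _, names in os.walk(path)
                      for name in names if name.endswith(".py")]
                     if path.endswith("/") else [path])
            for fname in files:
                try:
                    with open(fname) as f:
                        ast.parse(f.read(), filename=fname)
                except SyntaxError as e:  # the PEP 3110 form this commit converts to
                    failures.append((fname, e))
        return failures

    if __name__ == "__main__":
        bad = check_py3_syntax()
        for fname, err in bad:
            print("%s: %s" % (fname, err))
        sys.exit(1 if bad else 0)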
6 changes: 3 additions & 3 deletions src/allmydata/__init__.py
@@ -399,7 +399,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
pr_normver = normalized_version(pr_ver)
except verlib.IrrationalVersionError:
continue
except Exception, e:
except Exception as e:
errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
"The version found by import was %r from %r. "
"pkg_resources thought it should be found at %r. "
@@ -416,7 +416,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
imp_normver = normalized_version(imp_ver)
except verlib.IrrationalVersionError:
continue
except Exception, e:
except Exception as e:
errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
"pkg_resources thought it should be version %r at %r. "
"The exception was %s: %s"
@@ -470,7 +470,7 @@ def check_all_requirements():
for requirement in install_requires:
try:
check_requirement(requirement, vers_and_locs)
except (ImportError, PackagingError), e:
except (ImportError, PackagingError) as e:
fatal_errors.append("%s: %s" % (e.__class__.__name__, e))

if fatal_errors:
2 changes: 1 addition & 1 deletion src/allmydata/blacklist.py
@@ -42,7 +42,7 @@ def read_blacklist(self):
si = base32.a2b(si_s) # must be valid base32
self.entries[si] = reason
self.last_mtime = current_mtime
except Exception, e:
except Exception as e:
twisted_log.err(e, "unparseable blacklist file")
raise

2 changes: 1 addition & 1 deletion src/allmydata/dirnode.py
@@ -402,7 +402,7 @@ def _unpack_contents(self, data):
log.msg(format="mutable cap for child %(name)s unpacked from an immutable directory",
name=quote_output(name, encoding='utf-8'),
facility="tahoe.webish", level=log.UNUSUAL)
except CapConstraintError, e:
except CapConstraintError as e:
log.msg(format="unmet constraint on cap for child %(name)s unpacked from a directory:\n"
"%(message)s", message=e.args[0], name=quote_output(name, encoding='utf-8'),
facility="tahoe.webish", level=log.UNUSUAL)
2 changes: 1 addition & 1 deletion src/allmydata/frontends/sftpd.py
@@ -536,7 +536,7 @@ def close(self):
self.is_closed = True
try:
self.f.close()
except Exception, e:
except Exception as e:
self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD)
self.download_done("closed")
return self.done_status
18 changes: 9 additions & 9 deletions src/allmydata/immutable/checker.py
@@ -256,9 +256,9 @@ def _got_share_hashes(sh):
sharehashes = dict(sh)
try:
self.share_hash_tree.set_hashes(sharehashes)
except IndexError, le:
except IndexError as le:
raise BadOrMissingHash(le)
except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
raise BadOrMissingHash(le)
d.addCallback(_got_share_hashes)
return d
@@ -289,9 +289,9 @@ def _got_block_hashes(blockhashes):

try:
self.block_hash_tree.set_hashes(bh)
except IndexError, le:
except IndexError as le:
raise BadOrMissingHash(le)
except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
raise BadOrMissingHash(le)
d.addCallback(_got_block_hashes)
return d
@@ -316,9 +316,9 @@ def _got_crypttext_hashes(hashes):
ct_hashes = dict(enumerate(hashes))
try:
crypttext_hash_tree.set_hashes(ct_hashes)
except IndexError, le:
except IndexError as le:
raise BadOrMissingHash(le)
except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
raise BadOrMissingHash(le)
d.addCallback(_got_crypttext_hashes)
return d
@@ -359,7 +359,7 @@ def _got_data(self, results, blocknum):
sharehashes, blockhashes, blockdata = results
try:
sharehashes = dict(sharehashes)
except ValueError, le:
except ValueError as le:
le.args = tuple(le.args + (sharehashes,))
raise
blockhashes = dict(enumerate(blockhashes))
@@ -373,7 +373,7 @@ def _got_data(self, results, blocknum):
# match the root node of self.share_hash_tree.
try:
self.share_hash_tree.set_hashes(sharehashes)
except IndexError, le:
except IndexError as le:
# Weird -- sharehashes contained index numbers outside of
# the range that fit into this hash tree.
raise BadOrMissingHash(le)
@@ -400,7 +400,7 @@ def _got_data(self, results, blocknum):
# (self.sharenum, blocknum, len(blockdata),
# blockdata[:50], blockdata[-50:], base32.b2a(blockhash)))

except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
# log.WEIRD: indicates undetected disk/network error, or more
# likely a programming error
self.log("hash failure in block=%d, shnum=%d on %s" %
14 changes: 7 additions & 7 deletions src/allmydata/immutable/downloader/share.py
@@ -208,7 +208,7 @@ def loop(self):
level=log.NOISY, parent=self._lp, umid="BaL1zw")
self._do_loop()
# all exception cases call self._fail(), which clears self._alive
except (BadHashError, NotEnoughHashesError, LayoutInvalid), e:
except (BadHashError, NotEnoughHashesError, LayoutInvalid) as e:
# Abandon this share. We do this if we see corruption in the
# offset table, the UEB, or a hash tree. We don't abandon the
# whole share if we see corruption in a data block (we abandon
@@ -225,7 +225,7 @@ def loop(self):
share=repr(self),
level=log.UNUSUAL, parent=self._lp, umid="gWspVw")
self._fail(Failure(e), log.UNUSUAL)
except DataUnavailable, e:
except DataUnavailable as e:
# Abandon this share.
log.msg(format="need data that will never be available"
" from %s: pending=%s, received=%s, unavailable=%s" %
@@ -416,7 +416,7 @@ def _satisfy_UEB(self):
try:
self._node.validate_and_store_UEB(UEB_s)
return True
except (LayoutInvalid, BadHashError), e:
except (LayoutInvalid, BadHashError) as e:
# TODO: if this UEB was bad, we'll keep trying to validate it
# over and over again. Only log.err on the first one, or better
# yet skip all but the first
@@ -452,7 +452,7 @@ def _satisfy_share_hash_tree(self):
try:
self._node.process_share_hashes(share_hashes)
# adds to self._node.share_hash_tree
except (BadHashError, NotEnoughHashesError), e:
except (BadHashError, NotEnoughHashesError) as e:
f = Failure(e)
self._signal_corruption(f, o["share_hashes"], hashlen)
self.had_corruption = True
@@ -481,7 +481,7 @@ def _satisfy_block_hash_tree(self, needed_hashes):
# cannot validate)
try:
self._commonshare.process_block_hashes(block_hashes)
except (BadHashError, NotEnoughHashesError), e:
except (BadHashError, NotEnoughHashesError) as e:
f = Failure(e)
hashnums = ",".join([str(n) for n in sorted(block_hashes.keys())])
log.msg(format="hash failure in block_hashes=(%(hashnums)s),"
@@ -509,7 +509,7 @@ def _satisfy_ciphertext_hash_tree(self, needed_hashes):
# gotten them all
try:
self._node.process_ciphertext_hashes(hashes)
except (BadHashError, NotEnoughHashesError), e:
except (BadHashError, NotEnoughHashesError) as e:
f = Failure(e)
hashnums = ",".join([str(n) for n in sorted(hashes.keys())])
log.msg(format="hash failure in ciphertext_hashes=(%(hashnums)s),"
@@ -553,7 +553,7 @@ def _satisfy_data_block(self, segnum, observers):
# now clear our received data, to dodge the #1170 spans.py
# complexity bug
self._received = DataSpans()
except (BadHashError, NotEnoughHashesError), e:
except (BadHashError, NotEnoughHashesError) as e:
# rats, we have a corrupt block. Notify our clients that they
# need to look elsewhere, and advise the server. Unlike
# corruption in other parts of the share, this doesn't cause us
2 changes: 1 addition & 1 deletion src/allmydata/magicfolderdb.py
@@ -85,7 +85,7 @@ def get_magicfolderdb(dbfile, stderr=sys.stderr,
else:
print("invalid magicfolderdb schema version specified", file=stderr)
return None
except DBError, e:
except DBError as e:
print(e, file=stderr)
return None

6 changes: 3 additions & 3 deletions src/allmydata/mutable/retrieve.py
@@ -756,7 +756,7 @@ def _validate_block(self, results, segnum, reader, server, started):
try:
bht.set_hashes(blockhashes)
except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
IndexError), e:
IndexError) as e:
raise CorruptShareError(server,
reader.shnum,
"block hash tree failure: %s" % e)
@@ -770,7 +770,7 @@ def _validate_block(self, results, segnum, reader, server, started):
try:
bht.set_hashes(leaves={segnum: blockhash})
except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
IndexError), e:
IndexError) as e:
raise CorruptShareError(server,
reader.shnum,
"block hash tree failure: %s" % e)
@@ -788,7 +788,7 @@ def _validate_block(self, results, segnum, reader, server, started):
self.share_hash_tree.set_hashes(hashes=sharehashes,
leaves={reader.shnum: bht[0]})
except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
IndexError), e:
IndexError) as e:
raise CorruptShareError(server,
reader.shnum,
"corrupt hashes: %s" % e)
2 changes: 1 addition & 1 deletion src/allmydata/scripts/backupdb.py
@@ -73,7 +73,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
(sqlite3, db) = get_db(dbfile, stderr, create_version, updaters=UPDATERS,
just_create=just_create, dbname="backupdb")
return BackupDB_v2(sqlite3, db)
except DBError, e:
except DBError as e:
print(e, file=stderr)
return None

