diff --git a/.testr.conf b/.testr.conf index e6fc3d1580..fbe55d9745 100644 --- a/.testr.conf +++ b/.testr.conf @@ -1,4 +1,4 @@ [DEFAULT] -test_command=BRZ_PLUGIN_PATH=-site:-user ./tools/testr-run.py $IDOPTION $LISTOPT +test_command=BRZ_PLUGIN_PATH=-site:-user python3 ./brz selftest --subunit2 $IDOPTION $LISTOPT test_id_option=--load-list $IDFILE test_list_option=--list diff --git a/.travis.yml b/.travis.yml index 6fb65a3328..6219fab51a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,8 +11,6 @@ python: matrix: include: - - python: 2.7 - env: SELFTEST_OPTIONS="--coverage" - python: 3.7 dist: xenial diff --git a/Makefile b/Makefile index d4828a2c23..9855936b35 100644 --- a/Makefile +++ b/Makefile @@ -22,9 +22,7 @@ SHELL=bash PYTHON?=python3 -PYTHON2?=python2 PYTHON3?=python3 -PYTHON27=python27 BRZ_TARGET=release PLUGIN_TARGET=plugin-release PYTHON_BUILDFLAGS= @@ -81,10 +79,7 @@ check-ci: docs extensions # https://github.com/paramiko/paramiko/issues/713 is not a concern # anymore -- vila 2017-05-24 set -o pipefail; \ - BRZ_PLUGIN_PATH=$(BRZ_PLUGIN_PATH) $(PYTHON) -Werror -Wignore::FutureWarning -Wignore::DeprecationWarning -Wignore::ImportWarning -Wignore::ResourceWarning -O \ - ./brz selftest -v --parallel=fork -Oselftest.timeout=120 --subunit2 \ - | subunit-filter -s --passthrough --rename "^" "python2."; \ - BRZ_PLUGIN_PATH=$(BRZ_PLUGIN_PATH) $(PYTHON3) -Werror -Wignore::FutureWarning -Wignore::DeprecationWarning -Wignore::PendingDeprecationWarning -Wignore::ImportWarning -Wignore::ResourceWarning -O \ + BRZ_PLUGIN_PATH=$(BRZ_PLUGIN_PATH) $(PYTHON3) -Werror -Wignore::FutureWarning -Wignore::DeprecationWarning -Wignore::PendingDeprecationWarning -Wignore::ImportWarning -Wignore::ResourceWarning -O \ ./brz selftest -v --parallel=fork -Oselftest.timeout=120 --subunit2 \ | subunit-filter -s --passthrough --rename "^" "python3." @@ -266,11 +261,10 @@ installer: exe copy-docs $(PYTHON) tools/win32/run_script.py cog.py -d -o tools/win32/brz.iss tools/win32/brz.iss.cog iscc /Q tools/win32/brz.iss -py-inst-27: docs - $(PYTHON27) setup.py bdist_wininst --install-script="brz-win32-bdist-postinstall.py" -d . - -python-installer: py-inst-27 +py-inst-37: docs + $(PYTHON37) setup.py bdist_wininst --install-script="brz-win32-bdist-postinstall.py" -d . +python-installer: py-inst-37 copy-docs: docs $(PYTHON) tools/win32/ostools.py copytodir README win32_brz.exe/doc diff --git a/breezy/__init__.py b/breezy/__init__.py index 369284c19e..de293d618c 100644 --- a/breezy/__init__.py +++ b/breezy/__init__.py @@ -129,26 +129,20 @@ def _patch_filesystem_default_encoding(new_enc): The use of intern() may defer breakage is but is not enough, the string object should be secure against module reloading and during teardown. 
""" - is_py3 = sys.version_info > (3,) try: import ctypes old_ptr = ctypes.c_void_p.in_dll(ctypes.pythonapi, "Py_FileSystemDefaultEncoding") - if is_py3: - has_enc = ctypes.c_int.in_dll(ctypes.pythonapi, - "Py_HasFileSystemDefaultEncoding") - as_utf8 = ctypes.PYFUNCTYPE( - ctypes.POINTER(ctypes.c_char), ctypes.py_object)( - ("PyUnicode_AsUTF8", ctypes.pythonapi)) + has_enc = ctypes.c_int.in_dll(ctypes.pythonapi, + "Py_HasFileSystemDefaultEncoding") + as_utf8 = ctypes.PYFUNCTYPE( + ctypes.POINTER(ctypes.c_char), ctypes.py_object)( + ("PyUnicode_AsUTF8", ctypes.pythonapi)) except (ImportError, ValueError): return # No ctypes or not CPython implementation, do nothing - if is_py3: - new_enc = sys.intern(new_enc) - enc_ptr = as_utf8(new_enc) - has_enc.value = 1 - else: - new_enc = intern(new_enc) - enc_ptr = ctypes.c_char_p(new_enc) + new_enc = sys.intern(new_enc) + enc_ptr = as_utf8(new_enc) + has_enc.value = 1 old_ptr.value = ctypes.cast(enc_ptr, ctypes.c_void_p).value if sys.getfilesystemencoding() != new_enc: raise RuntimeError("Failed to change the filesystem default encoding") diff --git a/breezy/_annotator_py.py b/breezy/_annotator_py.py index 8615a984bd..128b976740 100644 --- a/breezy/_annotator_py.py +++ b/breezy/_annotator_py.py @@ -33,10 +33,6 @@ osutils, ui, ) -from .sixish import ( - range, - viewitems, - ) class Annotator(object): @@ -95,7 +91,7 @@ def _get_needed_keys(self, key): vf_keys_needed.add(key) needed_keys = set() next_parent_map.update(self._vf.get_parent_map(parent_lookup)) - for key, parent_keys in viewitems(next_parent_map): + for key, parent_keys in next_parent_map.items(): if parent_keys is None: # No graph versionedfile parent_keys = () next_parent_map[key] = () diff --git a/breezy/_known_graph_py.py b/breezy/_known_graph_py.py index b1375342c5..cade02ad3e 100644 --- a/breezy/_known_graph_py.py +++ b/breezy/_known_graph_py.py @@ -27,10 +27,6 @@ errors, revision, ) -from .sixish import ( - viewitems, - viewvalues, - ) class _KnownGraphNode(object): @@ -88,7 +84,7 @@ def _initialize_nodes(self, parent_map): child_keys, """ nodes = self._nodes - for key, parent_keys in viewitems(parent_map): + for key, parent_keys in parent_map.items(): if key in nodes: node = nodes[key] node.parent_keys = parent_keys @@ -104,11 +100,11 @@ def _initialize_nodes(self, parent_map): parent_node.child_keys.append(key) def _find_tails(self): - return [node for node in viewvalues(self._nodes) + return [node for node in self._nodes.values() if not node.parent_keys] def _find_tips(self): - return [node for node in viewvalues(self._nodes) + return [node for node in self._nodes.values() if not node.child_keys] def _find_gdfo(self): @@ -242,7 +238,7 @@ def heads(self, keys): seen = set() pending = [] min_gdfo = None - for node in viewvalues(candidate_nodes): + for node in candidate_nodes.values(): if node.parent_keys: pending.extend(node.parent_keys) if min_gdfo is None or node.gdfo < min_gdfo: @@ -269,7 +265,7 @@ def topo_sort(self): All parents must occur before all children. 
""" - for node in viewvalues(self._nodes): + for node in self._nodes.values(): if node.gdfo is None: raise errors.GraphCycleError(self._nodes) pending = self._find_tails() @@ -347,7 +343,7 @@ def merge_sort(self, tip_key): """Compute the merge sorted graph output.""" from breezy import tsort as_parent_map = dict((node.key, node.parent_keys) - for node in viewvalues(self._nodes) + for node in self._nodes.values() if node.parent_keys is not None) # We intentionally always generate revnos and never force the # mainline_revisions diff --git a/breezy/_static_tuple_py.py b/breezy/_static_tuple_py.py index e7829df0fc..20cc3561c6 100644 --- a/breezy/_static_tuple_py.py +++ b/breezy/_static_tuple_py.py @@ -76,8 +76,6 @@ def from_sequence(seq): _valid_types = (bytes, str, StaticTuple, int, float, None.__class__, bool) -if sys.version_info < (3,): - _valid_types += (long, unicode) # Have to set it to None first, so that __new__ can determine whether diff --git a/breezy/archive/tar.py b/breezy/archive/tar.py index 274497bc60..4a8b88c3a7 100644 --- a/breezy/archive/tar.py +++ b/breezy/archive/tar.py @@ -19,6 +19,7 @@ from __future__ import absolute_import from contextlib import closing +from io import BytesIO import os import sys import tarfile @@ -28,9 +29,6 @@ osutils, ) from ..export import _export_iter_entries -from ..sixish import ( - BytesIO, - ) def prepare_tarball_item(tree, root, final_path, tree_path, entry, force_mtime=None): @@ -185,16 +183,12 @@ def tar_lzma_generator(tree, dest, root, subdir, force_mtime=None, except ImportError as e: raise errors.DependencyNotPresent('lzma', e) - if sys.version_info[0] == 2: - compressor = lzma.LZMACompressor( - options={"format": compression_format}) - else: - compressor = lzma.LZMACompressor( - format={ - 'xz': lzma.FORMAT_XZ, - 'raw': lzma.FORMAT_RAW, - 'alone': lzma.FORMAT_ALONE, - }[compression_format]) + compressor = lzma.LZMACompressor( + format={ + 'xz': lzma.FORMAT_XZ, + 'raw': lzma.FORMAT_RAW, + 'alone': lzma.FORMAT_ALONE, + }[compression_format]) for chunk in tarball_generator( tree, root, subdir, force_mtime=force_mtime): diff --git a/breezy/bedding.py b/breezy/bedding.py index ebd9cb7b00..8de35c1352 100644 --- a/breezy/bedding.py +++ b/breezy/bedding.py @@ -33,9 +33,6 @@ from . 
import ( errors, ) -from .sixish import ( - PY3, - ) def ensure_config_dir_exists(path=None): @@ -213,13 +210,9 @@ def _get_default_mail_domain(mailname_file='/etc/mailname'): def default_email(): v = os.environ.get('BRZ_EMAIL') if v: - if not PY3: - v = v.decode(osutils.get_user_encoding()) return v v = os.environ.get('EMAIL') if v: - if not PY3: - v = v.decode(osutils.get_user_encoding()) return v name, email = _auto_user_id() if name and email: diff --git a/breezy/bisect.py b/breezy/bisect.py index e70f7e5672..358b782976 100644 --- a/breezy/bisect.py +++ b/breezy/bisect.py @@ -24,9 +24,6 @@ from .commands import Command from .errors import BzrCommandError from .option import Option -from .sixish import ( - text_type, - ) from .trace import note BISECT_INFO_PATH = "bisect" @@ -314,7 +311,7 @@ class cmd_bisect(Command): takes_args = ['subcommand', 'args*'] takes_options = [Option('output', short_name='o', - help='Write log to this file.', type=text_type), + help='Write log to this file.', type=str), 'revision', 'directory'] def _check(self, controldir): diff --git a/breezy/branch.py b/breezy/branch.py index effbe794b0..029cfa07de 100644 --- a/breezy/branch.py +++ b/breezy/branch.py @@ -18,9 +18,9 @@ from .lazy_import import lazy_import lazy_import(globals(), """ +import contextlib import itertools from breezy import ( - cleanup, config as _mod_config, debug, memorytree, @@ -47,10 +47,6 @@ from .hooks import Hooks from .inter import InterObject from .lock import LogicalLockResult -from .sixish import ( - text_type, - viewitems, - ) from .trace import mutter, mutter_callsite, note, is_quiet, warning @@ -371,7 +367,7 @@ def _do_dotted_revno_to_revision_id(self, revno): raise errors.GhostRevisionsHaveNoRevno(revno[0], e.revision_id) revision_id_to_revno = self.get_revision_id_to_revno_map() revision_ids = [revision_id for revision_id, this_revno - in viewitems(revision_id_to_revno) + in revision_id_to_revno.items() if revno == this_revno] if len(revision_ids) == 1: return revision_ids[0] @@ -779,7 +775,7 @@ def set_parent(self, url): # FIXUP this and get_parent in a future branch format bump: # read and rewrite the file. RBC 20060125 if url is not None: - if isinstance(url, text_type): + if isinstance(url, str): try: url.encode('ascii') except UnicodeEncodeError: @@ -2213,7 +2209,7 @@ def pull(self, overwrite=False, stop_revision=None, is being called because it's the master of the primary branch, so it should not run its hooks. """ - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(self.target.lock_write()) bound_location = self.target.get_bound_location() if local and not bound_location: @@ -2392,7 +2388,7 @@ def update_references(self): old_base = self.source.base new_base = self.target.base target_reference_dict = self.target._get_all_reference_info() - for tree_path, (branch_location, file_id) in viewitems(reference_dict): + for tree_path, (branch_location, file_id) in reference_dict.items(): try: branch_location = urlutils.rebase_url(branch_location, old_base, new_base) diff --git a/breezy/branchbuilder.py b/breezy/branchbuilder.py index f708033ee0..fd471a7c2a 100644 --- a/breezy/branchbuilder.py +++ b/breezy/branchbuilder.py @@ -24,9 +24,6 @@ errors, revision, ) -from .sixish import ( - viewitems, - ) class BranchBuilder(object): @@ -48,11 +45,11 @@ class BranchBuilder(object): ... ('add', ('', b'root-id', 'directory', '')), ... ('add', ('filename', b'f-id', 'file', b'content\n'))], ... 
revision_id=b'rev-id') - 'rev-id' + b'rev-id' >>> builder.build_snapshot([b'rev-id'], ... [('modify', ('filename', b'new-content\n'))], ... revision_id=b'rev2-id') - 'rev2-id' + b'rev2-id' >>> builder.finish_series() >>> branch = builder.get_branch() @@ -275,7 +272,7 @@ def _flush_pending(self, tree, pending): tree.unversion(pending.to_unversion_paths) tree.add(pending.to_add_files, pending.to_add_file_ids, pending.to_add_kinds) - for path, content in viewitems(pending.new_contents): + for path, content in pending.new_contents.items(): tree.put_file_bytes_non_atomic(path, content) def get_branch(self): diff --git a/breezy/builtins.py b/breezy/builtins.py index 879a1e8e51..4aeb62593c 100644 --- a/breezy/builtins.py +++ b/breezy/builtins.py @@ -86,12 +86,6 @@ RevisionSpec, RevisionInfo, ) -from .sixish import ( - PY3, - text_type, - viewitems, - viewvalues, -) from .trace import mutter, note, warning, is_quiet, get_verbosity_level @@ -221,7 +215,7 @@ def iter_sibling_branches(control_dir, possible_transports=None): if ref_branch is None or ref_branch.name: if ref_branch is not None: control_dir = ref_branch.controldir - for name, branch in viewitems(control_dir.get_branches()): + for name, branch in control_dir.get_branches().items(): yield name, branch else: repo = ref_branch.controldir.find_repository() @@ -741,7 +735,7 @@ class cmd_add(Command): "anything."), 'verbose', Option('file-ids-from', - type=text_type, + type=str, help='Lookup file ids from this tree.'), ] encoding_type = 'replace' @@ -871,7 +865,7 @@ class cmd_inventory(Command): Option('kind', help='List entries of a particular kind: file, directory, ' 'symlink.', - type=text_type), + type=str), ] takes_args = ['file*'] @@ -1336,7 +1330,7 @@ class cmd_push(Command): help='Create a stacked branch that refers to another branch ' 'for the commit history. Only the work not present in the ' 'referenced branch is included in the branch created.', - type=text_type), + type=str), Option('strict', help='Refuse to push if there are uncommitted changes in' ' the working tree, --no-strict disables the check.'), @@ -1447,7 +1441,7 @@ class cmd_branch(Command): takes_options = ['revision', Option( 'hardlink', help='Hard-link working tree files where possible.'), - Option('files-from', type=text_type, + Option('files-from', type=str, help="Get file contents from this tree."), Option('no-tree', help="Create a branch without a working-tree."), @@ -1619,7 +1613,7 @@ def run(self, location=".", recursive=False): names[name] = active # Only mention the current branch explicitly if it's not # one of the colocated branches - if not any(viewvalues(names)) and active_branch is not None: + if not any(names.values()) and active_branch is not None: self.outf.write("* %s\n" % gettext("(default)")) for name in sorted(names): active = names[name] @@ -1627,8 +1621,7 @@ def run(self, location=".", recursive=False): prefix = "*" else: prefix = " " - self.outf.write("%s %s\n" % ( - prefix, (name if PY3 else name.encode(self.outf.encoding)))) + self.outf.write("%s %s\n" % (prefix, name)) class cmd_checkout(Command): @@ -1662,7 +1655,7 @@ class cmd_checkout(Command): "common operations like diff and status without " "such access, and also support local commits." ), - Option('files-from', type=text_type, + Option('files-from', type=str, help="Get file contents from this tree."), Option('hardlink', help='Hard-link working tree files where possible.' 
@@ -2306,25 +2299,25 @@ class cmd_diff(Command): _see_also = ['status'] takes_args = ['file*'] takes_options = [ - Option('diff-options', type=text_type, + Option('diff-options', type=str, help='Pass these options to the external diff program.'), - Option('prefix', type=text_type, + Option('prefix', type=str, short_name='p', help='Set prefixes added to old and new filenames, as ' 'two values separated by a colon. (eg "old/:new/").'), Option('old', help='Branch/tree to compare from.', - type=text_type, + type=str, ), Option('new', help='Branch/tree to compare to.', - type=text_type, + type=str, ), 'revision', 'change', Option('using', help='Use this command to compare files.', - type=text_type, + type=str, ), RegistryOption('format', short_name='F', @@ -2699,7 +2692,7 @@ class cmd_log(Command): Option('message', help='Show revisions whose message matches this ' 'regular expression.', - type=text_type, + type=str, hidden=True), Option('limit', short_name='l', @@ -2725,23 +2718,23 @@ class cmd_log(Command): short_name='m', help='Show revisions whose properties match this ' 'expression.', - type=text_type), + type=str), ListOption('match-message', help='Show revisions whose message matches this ' 'expression.', - type=text_type), + type=str), ListOption('match-committer', help='Show revisions whose committer matches this ' 'expression.', - type=text_type), + type=str), ListOption('match-author', help='Show revisions whose authors match this ' 'expression.', - type=text_type), + type=str), ListOption('match-bugs', help='Show revisions whose bugs match this ' 'expression.', - type=text_type) + type=str) ] encoding_type = 'replace' @@ -3007,7 +3000,7 @@ class cmd_ls(Command): Option('kind', short_name='k', help=('List entries of a particular kind: file, ' 'directory, symlink, tree-reference.'), - type=text_type), + type=str), 'null', 'show-ids', 'directory', @@ -3331,12 +3324,12 @@ class cmd_export(Command): takes_options = ['directory', Option('format', help="Type of file to export to.", - type=text_type), + type=str), 'revision', Option('filters', help='Apply content filters to export the ' 'convenient form.'), Option('root', - type=text_type, + type=str, help="Name of the root directory inside the exported file."), Option('per-file-timestamps', help='Set modification time of files to that of the last ' @@ -3538,33 +3531,33 @@ class cmd_commit(Command): takes_args = ['selected*'] takes_options = [ ListOption( - 'exclude', type=text_type, short_name='x', + 'exclude', type=str, short_name='x', help="Do not consider changes made to a given path."), - Option('message', type=text_type, + Option('message', type=str, short_name='m', help="Description of the new revision."), 'verbose', Option('unchanged', help='Commit even if nothing has changed.'), - Option('file', type=text_type, + Option('file', type=str, short_name='F', argname='msgfile', help='Take commit message from this file.'), Option('strict', help="Refuse to commit if there are unknown " "files in the working tree."), - Option('commit-time', type=text_type, + Option('commit-time', type=str, help="Manually set a commit time using commit date " "format, e.g. '2009-10-10 08:00:00 +0100'."), ListOption( - 'bugs', type=text_type, + 'bugs', type=str, help="Link to a related bug. 
(see \"brz help bugs\")."), ListOption( - 'fixes', type=text_type, + 'fixes', type=str, help="Mark a bug as being fixed by this revision " "(see \"brz help bugs\")."), ListOption( - 'author', type=text_type, + 'author', type=str, help="Set the author's name, if it's different " "from the committer."), Option('local', @@ -4030,7 +4023,7 @@ def remove_alias(self, alias_name): def print_aliases(self): """Print out the defined aliases in a similar format to bash.""" aliases = _mod_config.GlobalConfig().get_aliases() - for key, value in sorted(viewitems(aliases)): + for key, value in sorted(aliases.items()): self.outf.write('brz alias %s="%s"\n' % (key, value)) @display_command @@ -4147,10 +4140,10 @@ def get_transport_type(typestring): 'breezy.tests', 'parallel_registry'), value_switches=False, ), - Option('randomize', type=text_type, argname="SEED", + Option('randomize', type=str, argname="SEED", help='Randomize the order of tests using the given' ' seed or "now" for the current time.'), - ListOption('exclude', type=text_type, argname="PATTERN", + ListOption('exclude', type=str, argname="PATTERN", short_name='x', help='Exclude tests that match this regular' ' expression.'), @@ -4160,11 +4153,11 @@ def get_transport_type(typestring): help='Output test progress via subunit v2.'), Option('strict', help='Fail on missing dependencies or ' 'known failures.'), - Option('load-list', type=text_type, argname='TESTLISTFILE', + Option('load-list', type=str, argname='TESTLISTFILE', help='Load a test id list from a text file.'), - ListOption('debugflag', type=text_type, short_name='E', + ListOption('debugflag', type=str, short_name='E', help='Turn on a selftest debug flag.'), - ListOption('starting-with', type=text_type, argname='TESTID', + ListOption('starting-with', type=str, argname='TESTID', param_name='starting_with', short_name='s', help='Load only the tests starting with TESTID.'), Option('sync', @@ -5590,7 +5583,7 @@ class cmd_serve(Command): value_switches=True), Option('listen', help='Listen for connections on nominated address.', - type=text_type), + type=str), Option('port', help='Listen for connections on nominated port. 
Passing 0 as ' 'the port number will result in a dynamically allocated ' @@ -5732,10 +5725,10 @@ class cmd_merge_directive(Command): diff='Normal unified diff.', plain='No patch, just directive.'), Option('sign', help='GPG-sign the directive.'), 'revision', - Option('mail-to', type=text_type, + Option('mail-to', type=str, help='Instead of printing the directive, email to this ' 'address.'), - Option('message', type=text_type, short_name='m', + Option('message', type=str, short_name='m', help='Message to use when committing this merge.') ] @@ -5889,19 +5882,19 @@ class cmd_send(Command): help='Branch to generate the submission from, ' 'rather than the one containing the working directory.', short_name='f', - type=text_type), + type=str), Option('output', short_name='o', help='Write merge directive to this file or directory; ' 'use - for stdout.', - type=text_type), + type=str), Option('strict', help='Refuse to send if there are uncommitted changes in' ' the working tree, --no-strict disables the check.'), Option('mail-to', help='Mail the request to this address.', - type=text_type), + type=str), 'revision', 'message', - Option('body', help='Body for the email.', type=text_type), + Option('body', help='Body for the email.', type=str), RegistryOption('format', help='Use the specified output format.', lazy_registry=('breezy.send', 'format_registry')), @@ -5957,9 +5950,9 @@ class cmd_bundle_revisions(cmd_send): help='Branch to generate the submission from, ' 'rather than the one containing the working directory.', short_name='f', - type=text_type), + type=str), Option('output', short_name='o', help='Write directive to this file.', - type=text_type), + type=str), Option('strict', help='Refuse to bundle revisions if there are uncommitted' ' changes in the working tree, --no-strict disables the check.'), @@ -6089,7 +6082,7 @@ def run(self, directory='.', sort=None, show_ids=False, revision=None): from .tag import tag_sort_methods branch, relpath = Branch.open_containing(directory) - tags = list(viewitems(branch.tags.get_tag_dict())) + tags = list(branch.tags.get_tag_dict().items()) if not tags: return @@ -6193,13 +6186,13 @@ class cmd_reconfigure(Command): with_no_trees='Reconfigure repository to not create ' 'working trees on branches by default.' ), - Option('bind-to', help='Branch to bind checkout to.', type=text_type), + Option('bind-to', help='Branch to bind checkout to.', type=str), Option('force', help='Perform reconfiguration even if local changes' ' will be lost.'), Option('stacked-on', help='Reconfigure a branch to be stacked on another branch.', - type=text_type, + type=str, ), Option('unstacked', help='Reconfigure a branch to be unstacked. This ' @@ -6439,11 +6432,11 @@ class cmd_view(Command): ), Option('name', help='Name of the view to define, list or delete.', - type=text_type, + type=str, ), Option('switch', help='Name of the view to switch to.', - type=text_type, + type=str, ), ] @@ -6792,7 +6785,7 @@ class cmd_export_pot(Command): takes_options = [Option('plugin', help='Export help text from named command ' '(defaults to all built in commands).', - type=text_type), + type=str), Option('include-duplicates', help='Output multiple copies of the same msgid ' 'string if it appears more than once.'), @@ -6898,13 +6891,13 @@ class cmd_grep(Command): takes_options = [ 'verbose', 'revision', - Option('color', type=text_type, argname='when', + Option('color', type=str, argname='when', help='Show match in color. 
WHEN is never, always or auto.'), Option('diff', short_name='p', help='Grep for pattern in changeset for each revision.'), - ListOption('exclude', type=text_type, argname='glob', short_name='X', + ListOption('exclude', type=str, argname='glob', short_name='X', help="Skip files whose base name matches GLOB."), - ListOption('include', type=text_type, argname='glob', short_name='I', + ListOption('include', type=str, argname='glob', short_name='I', help="Search only files whose base name matches GLOB."), Option('files-with-matches', short_name='l', help='Print only the name of each input file in ' diff --git a/breezy/bzr/_chk_map_py.py b/breezy/bzr/_chk_map_py.py index 7ac13055c1..da3930f640 100644 --- a/breezy/bzr/_chk_map_py.py +++ b/breezy/bzr/_chk_map_py.py @@ -21,7 +21,6 @@ import zlib import struct -from ..sixish import bytesintern from ..static_tuple import StaticTuple _LeafNode = None @@ -167,4 +166,4 @@ def _bytes_to_text_key(data): """Take a CHKInventory value string and return a (file_id, rev_id) tuple""" sections = data.split(b'\n') kind, file_id = sections[0].split(b': ') - return (bytesintern(file_id), bytesintern(sections[3])) + return (file_id, sections[3]) diff --git a/breezy/bzr/_dirstate_helpers_py.py b/breezy/bzr/_dirstate_helpers_py.py index 3f170a7f0a..72004ff113 100644 --- a/breezy/bzr/_dirstate_helpers_py.py +++ b/breezy/bzr/_dirstate_helpers_py.py @@ -25,9 +25,6 @@ # We cannot import the dirstate module, because it loads this module # All we really need is the IN_MEMORY_MODIFIED constant from .dirstate import DirState, DirstateCorrupt -from ..sixish import ( - range, - ) def pack_stat(st, _b64=binascii.b2a_base64, _pack=struct.Struct('>6L').pack): diff --git a/breezy/bzr/_groupcompress_py.py b/breezy/bzr/_groupcompress_py.py index 27d3fbaf51..3c6229b819 100644 --- a/breezy/bzr/_groupcompress_py.py +++ b/breezy/bzr/_groupcompress_py.py @@ -23,11 +23,6 @@ from __future__ import absolute_import from .. 
import osutils -from ..sixish import ( - indexbytes, - int2byte, - range, - ) class _OutputHandler(object): @@ -55,7 +50,7 @@ def _flush_insert(self): if self.cur_insert_len > 127: raise AssertionError('We cannot insert more than 127 bytes' ' at a time.') - self.out_lines.append(int2byte(self.cur_insert_len)) + self.out_lines.append(bytes([self.cur_insert_len])) self.index_lines.append(False) self.out_lines.extend(self.cur_insert_lines) if self.cur_insert_len < self.min_len_to_index: @@ -71,7 +66,7 @@ def _insert_long_line(self, line): line_len = len(line) for start_index in range(0, line_len, 127): next_len = min(127, line_len - start_index) - self.out_lines.append(int2byte(next_len)) + self.out_lines.append(bytes([next_len])) self.index_lines.append(False) self.out_lines.append(line[start_index:start_index + next_len]) # We don't index long lines, because we won't be able to match @@ -262,7 +257,7 @@ def _flush_insert(self, start_linenum, end_linenum, # Each insert instruction is at most 127 bytes long for start_byte in range(0, insert_length, 127): insert_count = min(insert_length - start_byte, 127) - out_lines.append(int2byte(insert_count)) + out_lines.append(bytes([insert_count])) # Don't index the 'insert' instruction index_lines.append(False) insert = bytes_to_insert[start_byte:start_byte + insert_count] @@ -330,12 +325,12 @@ def decode_base128_int(data): offset = 0 val = 0 shift = 0 - bval = indexbytes(data, offset) + bval = data[offset] while bval >= 0x80: val |= (bval & 0x7F) << shift shift += 7 offset += 1 - bval = indexbytes(data, offset) + bval = data[offset] val |= bval << shift offset += 1 return val, offset @@ -350,7 +345,7 @@ def encode_copy_instruction(offset, length): base_byte = offset & 0xff if base_byte: copy_command |= copy_bit - copy_bytes.append(int2byte(base_byte)) + copy_bytes.append(bytes([base_byte])) offset >>= 8 if length is None: raise ValueError("cannot supply a length of None") @@ -365,9 +360,9 @@ def encode_copy_instruction(offset, length): base_byte = length & 0xff if base_byte: copy_command |= copy_bit - copy_bytes.append(int2byte(base_byte)) + copy_bytes.append(bytes([base_byte])) length >>= 8 - copy_bytes[0] = int2byte(copy_command) + copy_bytes[0] = bytes([copy_command]) return b''.join(copy_bytes) @@ -390,25 +385,25 @@ def decode_copy_instruction(bytes, cmd, pos): offset = 0 length = 0 if (cmd & 0x01): - offset = indexbytes(bytes, pos) + offset = bytes[pos] pos += 1 if (cmd & 0x02): - offset = offset | (indexbytes(bytes, pos) << 8) + offset = offset | (bytes[pos] << 8) pos += 1 if (cmd & 0x04): - offset = offset | (indexbytes(bytes, pos) << 16) + offset = offset | (bytes[pos] << 16) pos += 1 if (cmd & 0x08): - offset = offset | (indexbytes(bytes, pos) << 24) + offset = offset | (bytes[pos] << 24) pos += 1 if (cmd & 0x10): - length = indexbytes(bytes, pos) + length = bytes[pos] pos += 1 if (cmd & 0x20): - length = length | (indexbytes(bytes, pos) << 8) + length = length | (bytes[pos] << 8) pos += 1 if (cmd & 0x40): - length = length | (indexbytes(bytes, pos) << 16) + length = length | (bytes[pos] << 16) pos += 1 if length == 0: length = 65536 @@ -437,7 +432,7 @@ def apply_delta(basis, delta): lines = [] len_delta = len(delta) while pos < len_delta: - cmd = indexbytes(delta, pos) + cmd = delta[pos] pos += 1 if cmd & 0x80: offset, length, pos = decode_copy_instruction(delta, cmd, pos) diff --git a/breezy/bzr/branch.py b/breezy/bzr/branch.py index facc880f22..00afa9ead6 100644 --- a/breezy/bzr/branch.py +++ b/breezy/bzr/branch.py @@ -17,6 +17,7 @@ from 
__future__ import absolute_import +from io import BytesIO import sys from ..lazy_import import lazy_import @@ -50,11 +51,6 @@ only_raises, ) from ..lock import _RelockDebugMixin, LogicalLockResult -from ..sixish import ( - BytesIO, - text_type, - viewitems, - ) from ..trace import ( mutter, ) @@ -283,7 +279,7 @@ def _set_parent_location(self, url): if url is None: self._transport.delete('parent') else: - if isinstance(url, text_type): + if isinstance(url, str): url = url.encode('utf-8') self._transport.put_bytes('parent', url + b'\n', mode=self.controldir._get_file_mode()) @@ -552,7 +548,7 @@ def _set_all_reference_info(self, info_dict): """ s = BytesIO() writer = rio.RioWriter(s) - for file_id, (branch_location, tree_path) in viewitems(info_dict): + for file_id, (branch_location, tree_path) in info_dict.items(): stanza = rio.Stanza(file_id=file_id, branch_location=branch_location) if tree_path is not None: @@ -653,10 +649,7 @@ def get_stacked_on_url(self): if stacked_url is None: raise errors.NotStacked(self) # TODO(jelmer): Clean this up for pad.lv/1696545 - if sys.version_info[0] == 2: - return stacked_url.encode('utf-8') - else: - return stacked_url + return stacked_url def get_rev_id(self, revno, history=None): """Find the revision id of the specified revno.""" diff --git a/breezy/bzr/btree_index.py b/breezy/bzr/btree_index.py index 340e44a44b..3c079d01ea 100644 --- a/breezy/bzr/btree_index.py +++ b/breezy/bzr/btree_index.py @@ -19,6 +19,8 @@ from __future__ import absolute_import, division +from io import BytesIO + from ..lazy_import import lazy_import lazy_import(globals(), """ import bisect @@ -41,13 +43,6 @@ index, ) from .index import _OPTION_NODE_REFS, _OPTION_KEY_ELEMENTS, _OPTION_LEN -from ..sixish import ( - BytesIO, - map, - range, - viewitems, - viewvalues, - ) _BTSIGNATURE = b"B+Tree Graph Index 2\n" @@ -562,13 +557,13 @@ def _get_nodes_by_key(self): if self._nodes_by_key is None: nodes_by_key = {} if self.reference_lists: - for key, (references, value) in viewitems(self._nodes): + for key, (references, value) in self._nodes.items(): key_dict = nodes_by_key for subkey in key[:-1]: key_dict = key_dict.setdefault(subkey, {}) key_dict[key[-1]] = key, value, references else: - for key, (references, value) in viewitems(self._nodes): + for key, (references, value) in self._nodes.items(): key_dict = nodes_by_key for subkey in key[:-1]: key_dict = key_dict.setdefault(subkey, {}) @@ -970,7 +965,7 @@ def _get_internal_nodes(self, node_indexes): def _cache_leaf_values(self, nodes): """Cache directly from key => value, skipping the btree.""" if self._leaf_value_cache is not None: - for node in viewvalues(nodes): + for node in nodes.values(): for key, value in node.all_items(): if key in self._leaf_value_cache: # Don't add the rest of the keys, we've seen this node diff --git a/breezy/bzr/bundle/bundle_data.py b/breezy/bzr/bundle/bundle_data.py index c224799187..98f1600e3f 100644 --- a/breezy/bzr/bundle/bundle_data.py +++ b/breezy/bzr/bundle/bundle_data.py @@ -42,9 +42,6 @@ ) from ...osutils import sha_string, sha_strings, pathjoin from ...revision import Revision, NULL_REVISION -from ...sixish import ( - viewitems, - ) from ..testament import StrictTestament from ...trace import mutter, warning from ...tree import ( @@ -112,7 +109,7 @@ def from_revision(revision): revision_info.timestamp = revision.timestamp revision_info.message = revision.message.split('\n') revision_info.properties = [': '.join(p) for p in - viewitems(revision.properties)] + revision.properties.items()] return 
revision_info @@ -260,7 +257,7 @@ def add_sha(d, revision_id, sha1): count = 0 missing = {} - for revision_id, sha1 in viewitems(rev_to_sha): + for revision_id, sha1 in rev_to_sha.items(): if repository.has_revision(revision_id): testament = StrictTestament.from_revision(repository, revision_id) @@ -768,7 +765,7 @@ def list_files(self, include_root=False, from_dir=None, recursive=True): def sorted_path_id(self): paths = [] - for result in viewitems(self._new_id): + for result in self._new_id.items(): paths.append(result) for id in self.base_tree.all_file_ids(): try: diff --git a/breezy/bzr/bundle/commands.py b/breezy/bzr/bundle/commands.py index 07e2413435..28352623bc 100644 --- a/breezy/bzr/bundle/commands.py +++ b/breezy/bzr/bundle/commands.py @@ -23,6 +23,8 @@ from __future__ import absolute_import +from io import BytesIO + from ... import ( errors, ) @@ -40,10 +42,6 @@ """) from ...commands import Command -from ...sixish import ( - BytesIO, - viewitems, - ) class cmd_bundle_info(Command): @@ -81,7 +79,7 @@ def run(self, location, verbose=False): if file_id is not None: file_ids.add(file_id) self.outf.write(gettext('Records\n')) - for kind, records in sorted(viewitems(by_kind)): + for kind, records in sorted(by_kind.items()): multiparent = sum(1 for b, m, k, r, f in records if len(m.get('parents', [])) > 1) self.outf.write(gettext('{0}: {1} ({2} multiparent)\n').format( diff --git a/breezy/bzr/bundle/serializer/__init__.py b/breezy/bzr/bundle/serializer/__init__.py index 630150bd3f..0abda8a313 100644 --- a/breezy/bzr/bundle/serializer/__init__.py +++ b/breezy/bzr/bundle/serializer/__init__.py @@ -20,6 +20,7 @@ from __future__ import absolute_import import base64 +from io import BytesIO import re from .... import ( @@ -28,9 +29,6 @@ ) from ....diff import internal_diff from ....revision import NULL_REVISION -from ....sixish import ( - BytesIO, - ) # For backwards-compatibility from ....timestamp import unpack_highres_date, format_highres_date diff --git a/breezy/bzr/bundle/serializer/v08.py b/breezy/bzr/bundle/serializer/v08.py index 1b3308ac2b..043a82dd79 100644 --- a/breezy/bzr/bundle/serializer/v08.py +++ b/breezy/bzr/bundle/serializer/v08.py @@ -34,7 +34,6 @@ ) from ....diff import internal_diff from ....revision import NULL_REVISION -from ....sixish import text_type from ...testament import StrictTestament from ....timestamp import ( format_highres_date, @@ -166,7 +165,7 @@ def _write(self, key, value, indent=1, trailing_space_when_empty=False): f.write(b': ') f.write(value) f.write(b'\n') - elif isinstance(value, text_type): + elif isinstance(value, str): f.write(b': ') f.write(value.encode('utf-8')) f.write(b'\n') diff --git a/breezy/bzr/bundle/serializer/v4.py b/breezy/bzr/bundle/serializer/v4.py index 4870947d3b..6b7801d084 100644 --- a/breezy/bzr/bundle/serializer/v4.py +++ b/breezy/bzr/bundle/serializer/v4.py @@ -17,6 +17,9 @@ from __future__ import absolute_import import bz2 +from io import ( + BytesIO, + ) import re from .... import ( @@ -38,10 +41,6 @@ ) from .. 
import bundle_data, serializer as bundle_serializer from ....i18n import ngettext -from ....sixish import ( - BytesIO, - viewitems, - ) class _MPDiffInventoryGenerator(_mod_versionedfile._MPDiffGenerator): @@ -335,7 +334,7 @@ def write_files(self): text_keys = [] altered_fileids = self.repository.fileids_altered_by_revision_ids( self.revision_ids) - for file_id, revision_ids in viewitems(altered_fileids): + for file_id, revision_ids in altered_fileids.items(): for revision_id in revision_ids: text_keys.append((file_id, revision_id)) self._add_mp_records_keys('file', self.repository.texts, text_keys) diff --git a/breezy/bzr/bzrdir.py b/breezy/bzr/bzrdir.py index 950180e875..bd7eaee7ca 100644 --- a/breezy/bzr/bzrdir.py +++ b/breezy/bzr/bzrdir.py @@ -31,9 +31,10 @@ from ..lazy_import import lazy_import lazy_import(globals(), """ +import contextlib + from breezy import ( branch as _mod_branch, - cleanup, lockable_files, lockdir, osutils, @@ -61,7 +62,6 @@ from breezy.i18n import gettext """) -from ..sixish import viewitems from ..trace import ( mutter, note, @@ -377,7 +377,7 @@ def sprout(self, url, revision_id=None, force_new_repo=False, when working locally. :return: The created control directory """ - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: fetch_spec_factory = fetch.FetchSpecFactory() if revision_id is not None: fetch_spec_factory.add_revision_ids([revision_id]) @@ -1151,7 +1151,7 @@ def unregister_feature(cls, name): def check_support_status(self, allow_unsupported, recommend_upgrade=True, basedir=None): - for name, necessity in viewitems(self.features): + for name, necessity in self.features.items(): if name in self._present_features: continue if necessity == b"optional": @@ -1191,7 +1191,7 @@ def as_string(self): """ lines = [self.get_format_string()] lines.extend([(item[1] + b" " + item[0] + b"\n") - for item in sorted(viewitems(self.features))]) + for item in sorted(self.features.items())]) return b"".join(lines) @classmethod diff --git a/breezy/bzr/check.py b/breezy/bzr/check.py index 2bdbfe7c54..37daeb980d 100644 --- a/breezy/bzr/check.py +++ b/breezy/bzr/check.py @@ -55,9 +55,6 @@ from ..branch import Branch from ..check import Check from ..revision import NULL_REVISION -from ..sixish import ( - viewitems, - ) from ..trace import note from ..workingtree import WorkingTree from ..i18n import gettext @@ -119,7 +116,7 @@ def check(self, callback_refs=None, check_repo=True): # landing]. 
distances = set() existences = set() - for ref, wantlist in viewitems(callback_refs): + for ref, wantlist in callback_refs.items(): wanting_items.update(wantlist) kind, value = ref if kind == 'trees': @@ -132,7 +129,7 @@ def check(self, callback_refs=None, check_repo=True): raise AssertionError( 'unknown ref kind for ref %s' % ref) node_distances = repo.get_graph().find_lefthand_distances(distances) - for key, distance in viewitems(node_distances): + for key, distance in node_distances.items(): refs[('lefthand-distance', key)] = distance if key in existences and distance > 0: refs[('revision-existence', key)] = True @@ -218,7 +215,7 @@ def _report_repo_results(self, verbose): note(gettext('%6d revisions missing parents in ancestry'), len(self.missing_parent_links)) if verbose: - for link, linkers in viewitems(self.missing_parent_links): + for link, linkers in self.missing_parent_links.items(): note(gettext(' %s should be in the ancestry for:'), link.decode('utf-8')) for linker in linkers: @@ -314,7 +311,7 @@ def _check_weaves(self, storebar): wrongs, unused_versions = weave_checker.check_file_version_parents( self.repository.texts) self.checked_weaves = weave_checker.file_ids - for text_key, (stored_parents, correct_parents) in viewitems(wrongs): + for text_key, (stored_parents, correct_parents) in wrongs.items(): # XXX not ready for id join/split operations. weave_id = text_key[0] revision_id = text_key[-1] diff --git a/breezy/bzr/chk_map.py b/breezy/bzr/chk_map.py index 58901429cb..7306e4ad60 100644 --- a/breezy/bzr/chk_map.py +++ b/breezy/bzr/chk_map.py @@ -50,11 +50,6 @@ static_tuple, trace, ) -from ..sixish import ( - viewitems, - viewvalues, - ) -from ..sixish import PY3 from ..static_tuple import StaticTuple # approx 4MB @@ -191,10 +186,7 @@ def _read_bytes(self, key): def _dump_tree(self, include_keys=False, encoding='utf-8'): """Return the tree in a string representation.""" self._ensure_root() - if PY3: - def decode(x): return x.decode(encoding) - else: - def decode(x): return x + def decode(x): return x.decode(encoding) res = self._dump_tree_node(self._root_node, prefix=b'', indent='', decode=decode, include_keys=include_keys) res.append('') # Give a trailing '\n' @@ -216,11 +208,11 @@ def _dump_tree_node(self, node, prefix, indent, decode, include_keys=True): if isinstance(node, InternalNode): # Trigger all child nodes to get loaded list(node._iter_nodes(self._store)) - for prefix, sub in sorted(viewitems(node._items)): + for prefix, sub in sorted(node._items.items()): result.extend(self._dump_tree_node(sub, prefix, indent + ' ', decode=decode, include_keys=include_keys)) else: - for key, value in sorted(viewitems(node._items)): + for key, value in sorted(node._items.items()): # Don't use prefix nor indent here to line up when used in # tests in conjunction with assertEqualDiff result.append(' %r %r' % ( @@ -260,7 +252,7 @@ def _create_via_map(klass, store, initial_value, maximum_size=0, result._root_node.set_maximum_size(maximum_size) result._root_node._key_width = key_width delta = [] - for key, value in viewitems(initial_value): + for key, value in initial_value.items(): delta.append((None, key, value)) root_key = result.apply_delta(delta) return root_key @@ -273,9 +265,9 @@ def _create_directly(klass, store, initial_value, maximum_size=0, node._key_width = key_width as_st = StaticTuple.from_sequence node._items = dict((as_st(key), val) - for key, val in viewitems(initial_value)) + for key, val in initial_value.items()) node._raw_size = sum(node._key_value_len(key, value) - 
for key, value in viewitems(node._items)) + for key, value in node._items.items()) node._len = len(node._items) node._compute_search_prefix() node._compute_serialised_prefix() @@ -339,7 +331,7 @@ def process_node(node, path, a_map, pending): node = a_map._get_node(node) if isinstance(node, LeafNode): path = (node._key, path) - for key, value in viewitems(node._items): + for key, value in node._items.items(): # For a LeafNode, the key is a serialized_key, rather than # a search_key, but the heap is using search_keys search_key = node._search_key_func(key) @@ -347,12 +339,12 @@ def process_node(node, path, a_map, pending): else: # type(node) == InternalNode path = (node._key, path) - for prefix, child in viewitems(node._items): + for prefix, child in node._items.items(): heapq.heappush(pending, (prefix, None, child, path)) def process_common_internal_nodes(self_node, basis_node): - self_items = set(viewitems(self_node._items)) - basis_items = set(viewitems(basis_node._items)) + self_items = set(self_node._items.items()) + basis_items = set(basis_node._items.items()) path = (self_node._key, None) for prefix, child in self_items - basis_items: heapq.heappush(self_pending, (prefix, None, child, path)) @@ -361,8 +353,8 @@ def process_common_internal_nodes(self_node, basis_node): heapq.heappush(basis_pending, (prefix, None, child, path)) def process_common_leaf_nodes(self_node, basis_node): - self_items = set(viewitems(self_node._items)) - basis_items = set(viewitems(basis_node._items)) + self_items = set(self_node._items.items()) + basis_items = set(basis_node._items.items()) path = (self_node._key, None) for key, value in self_items - basis_items: prefix = self._search_key_func(key) @@ -779,15 +771,14 @@ def iteritems(self, store, key_filter=None): # Short items, we need to match based on a prefix filters.setdefault(len(key), set()).add(key) if filters: - filters_itemview = viewitems(filters) - for item in viewitems(self._items): + filters_itemview = filters.items() + for item in self._items.items(): for length, length_filter in filters_itemview: if item[0][:length] in length_filter: yield item break else: - for item in viewitems(self._items): - yield item + yield from self._items.items() def _key_value_len(self, key, value): # TODO: Should probably be done without actually joining the key, but @@ -848,7 +839,7 @@ def _split(self, store): common_prefix = self._search_prefix split_at = len(common_prefix) + 1 result = {} - for key, value in viewitems(self._items): + for key, value in self._items.items(): search_key = self._search_key(key) prefix = search_key[:split_at] # TODO: Generally only 1 key can be exactly the right length, @@ -881,7 +872,7 @@ def _split(self, store): for split, node in node_details: new_node.add_node(split, node) result[prefix] = new_node - return common_prefix, list(viewitems(result)) + return common_prefix, list(result.items()) def map(self, store, key, value): """Map key to value.""" @@ -916,7 +907,7 @@ def serialise(self, store): else: lines.append(b'%s\n' % (self._common_serialised_prefix,)) prefix_len = len(self._common_serialised_prefix) - for key, value in sorted(viewitems(self._items)): + for key, value in sorted(self._items.items()): # Always add a final newline value_lines = osutils.chunks_to_lines([value + b'\n']) serialized = b"%s\x00%d\n" % (self._serialise_key(key), @@ -1081,7 +1072,7 @@ def _iter_nodes(self, store, key_filter=None, batch_size=None): # yielding all nodes, yield whatever we have, and queue up a read # for whatever we are missing shortcut = 
True - for prefix, node in viewitems(self._items): + for prefix, node in self._items.items(): if node.__class__ is StaticTuple: keys[node] = (prefix, None) else: @@ -1157,8 +1148,8 @@ def _iter_nodes(self, store, key_filter=None, batch_size=None): else: # The slow way. We walk every item in self._items, and check to # see if there are any matches - length_filters_itemview = viewitems(length_filters) - for prefix, node in viewitems(self._items): + length_filters_itemview = length_filters.items() + for prefix, node in self._items.items(): node_key_filter = [] for length, length_filter in length_filters_itemview: sub_prefix = prefix[:length] @@ -1302,7 +1293,7 @@ def serialise(self, store): :param store: A VersionedFiles honouring the CHK extensions. :return: An iterable of the keys inserted by this operation. """ - for node in viewvalues(self._items): + for node in self._items.values(): if isinstance(node, StaticTuple): # Never deserialised. continue @@ -1319,7 +1310,7 @@ def serialise(self, store): raise AssertionError("_search_prefix should not be None") lines.append(b'%s\n' % (self._search_prefix,)) prefix_len = len(self._search_prefix) - for prefix, node in sorted(viewitems(self._items)): + for prefix, node in sorted(self._items.items()): if isinstance(node, StaticTuple): key = node[0] else: @@ -1361,7 +1352,7 @@ def refs(self): if self._key is None: raise AssertionError("unserialised nodes have no refs.") refs = [] - for value in viewvalues(self._items): + for value in self._items.values(): if isinstance(value, StaticTuple): refs.append(value) else: @@ -1400,7 +1391,7 @@ def unmap(self, store, key, check_remap=True): self._items[search_key] = unmapped if len(self._items) == 1: # this node is no longer needed: - return list(viewvalues(self._items))[0] + return list(self._items.values())[0] if isinstance(unmapped, InternalNode): return self if check_remap: @@ -1450,7 +1441,7 @@ def _check_remap(self, store): if isinstance(node, InternalNode): # Without looking at any leaf nodes, we are sure return self - for key, value in viewitems(node._items): + for key, value in node._items.items(): if new_leaf._map_no_split(key, value): return self trace.mutter("remap generated a new LeafNode") @@ -1539,14 +1530,14 @@ def _read_nodes_from_store(self, keys): # indicate that we keep 100k prefix_refs around while # processing. They *should* be shorter lived than that... # It does cost us ~10s of processing time - prefix_refs = list(viewitems(node._items)) + prefix_refs = list(node._items.items()) items = [] else: prefix_refs = [] # Note: We don't use a StaticTuple here. Profiling showed a # minor memory improvement (0.8MB out of 335MB peak 0.2%) # But a significant slowdown (15s / 145s, or 10%) - items = list(viewitems(node._items)) + items = list(node._items.items()) yield record, node, prefix_refs, items def _read_old_roots(self): diff --git a/breezy/bzr/chk_serializer.py b/breezy/bzr/chk_serializer.py index 4deaaf17ab..46cc605c1f 100644 --- a/breezy/bzr/chk_serializer.py +++ b/breezy/bzr/chk_serializer.py @@ -18,6 +18,10 @@ from __future__ import absolute_import +from io import ( + BytesIO, + ) + from .. import lazy_import lazy_import.lazy_import(globals(), """ @@ -34,9 +38,6 @@ from . 
import ( serializer, ) -from ..sixish import ( - BytesIO, - ) def _validate_properties(props, _decode=cache_utf8._utf8_decode): diff --git a/breezy/bzr/debug_commands.py b/breezy/bzr/debug_commands.py index 6a511353b0..8d50be4d66 100644 --- a/breezy/bzr/debug_commands.py +++ b/breezy/bzr/debug_commands.py @@ -32,7 +32,6 @@ display_command, ) from ..option import Option -from ..sixish import PY3 from . import ( btree_index, ) @@ -118,18 +117,15 @@ def _dump_entries(self, trans, basename): refs_as_tuples = None else: refs_as_tuples = static_tuple.as_tuples(refs) - if PY3: - if refs_as_tuples is not None: - refs_as_tuples = tuple( - tuple(tuple(r.decode('utf-8') - for r in t1) for t1 in t2) - for t2 in refs_as_tuples) - as_tuple = ( - tuple([r.decode('utf-8') for r in node[1]]), - node[2].decode('utf-8'), - refs_as_tuples) - else: - as_tuple = (tuple(node[1]), node[2], refs_as_tuples) + if refs_as_tuples is not None: + refs_as_tuples = tuple( + tuple(tuple(r.decode('utf-8') + for r in t1) for t1 in t2) + for t2 in refs_as_tuples) + as_tuple = ( + tuple([r.decode('utf-8') for r in node[1]]), + node[2].decode('utf-8'), + refs_as_tuples) self.outf.write('%s\n' % (as_tuple,)) diff --git a/breezy/bzr/dirstate.py b/breezy/bzr/dirstate.py index 44eb8d58bd..8272ad521f 100644 --- a/breezy/bzr/dirstate.py +++ b/breezy/bzr/dirstate.py @@ -221,6 +221,7 @@ from __future__ import absolute_import import bisect +import contextlib import errno import operator import os @@ -235,7 +236,6 @@ ) from .. import ( cache_utf8, - cleanup, config, debug, errors, @@ -245,12 +245,6 @@ trace, urlutils, ) -from ..sixish import ( - range, - text_type, - viewitems, - viewvalues, - ) from ..tree import TreeChange @@ -501,7 +495,7 @@ def add(self, path, file_id, kind, stat, fingerprint): # you should never have files called . or ..; just add the directory # in the parent, or according to the special treatment for the root if basename == '.' or basename == '..': - raise errors.InvalidEntryName(path) + raise inventory.InvalidEntryName(path) # now that we've normalised, we need the correct utf8 path and # dirname and basename elements. This single encode and split should be # faster than three separate encodes. @@ -999,7 +993,7 @@ def _bisect_recursive(self, paths): # Directories that need to be read pending_dirs = set() paths_to_search = set() - for entry_list in viewvalues(newly_found): + for entry_list in newly_found.values(): for dir_name_id, trees_info in entry_list: found[dir_name_id] = trees_info found_dir_names.add(dir_name_id[:2]) @@ -1312,7 +1306,7 @@ def from_tree(tree, dir_state_filename, sha1_provider=None): result = DirState.initialize(dir_state_filename, sha1_provider=sha1_provider) try: - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(tree.lock_read()) parent_ids = tree.get_parent_ids() num_parents = len(parent_ids) @@ -1423,8 +1417,8 @@ def update_by_delta(self, delta): fingerprint, new_child_path) self._check_delta_ids_absent(new_ids, delta, 0) try: - self._apply_removals(viewitems(removals)) - self._apply_insertions(viewvalues(insertions)) + self._apply_removals(removals.items()) + self._apply_insertions(insertions.values()) # Validate parents self._after_delta_check_parents(parents, 0) except errors.BzrError as e: @@ -1964,7 +1958,7 @@ def _read_link(self, abspath, old_link): # higher level, because there either won't be anything on disk, # or the thing on disk will be a file. 
fs_encoding = osutils._fs_enc - if isinstance(abspath, text_type): + if isinstance(abspath, str): # abspath is defined as the path to pass to lstat. readlink is # buggy in python < 2.6 (it doesn't encode unicode path into FS # encoding), so we need to encode ourselves knowing that unicode @@ -2768,7 +2762,7 @@ def set_parent_trees(self, trees, ghosts): # --- end generation of full tree mappings # sort and output all the entries - new_entries = self._sort_entries(viewitems(by_path)) + new_entries = self._sort_entries(by_path.items()) self._entries_to_current_state(new_entries) self._parents = [rev_id for rev_id, tree in trees] self._ghosts = list(ghosts) diff --git a/breezy/bzr/fetch.py b/breezy/bzr/fetch.py index 84407b62b9..2dff9f3641 100644 --- a/breezy/bzr/fetch.py +++ b/breezy/bzr/fetch.py @@ -42,9 +42,6 @@ ) from ..i18n import gettext from ..revision import NULL_REVISION -from ..sixish import ( - viewvalues, - ) from ..trace import mutter @@ -212,7 +209,7 @@ def _find_root_ids(self, revs, parent_map, graph): revision_id = tree.get_file_revision(u'') revision_root[revision_id] = root_id # Find out which parents we don't already know root ids for - parents = set(viewvalues(parent_map)) + parents = set(parent_map.values()) parents.difference_update(revision_root) parents.discard(NULL_REVISION) # Limit to revisions present in the versionedfile diff --git a/breezy/bzr/generate_ids.py b/breezy/bzr/generate_ids.py index faaabf7cd0..156175ce75 100644 --- a/breezy/bzr/generate_ids.py +++ b/breezy/bzr/generate_ids.py @@ -31,7 +31,6 @@ from .. import ( lazy_regex, ) -from ..sixish import text_type # the regex removes any weird characters; we don't escape them # but rather just pull them out @@ -71,7 +70,7 @@ def gen_file_id(name): The uniqueness is supplied from _next_id_suffix. """ - if isinstance(name, text_type): + if isinstance(name, str): name = name.encode('ascii', 'replace') # The real randomness is in the _next_id_suffix, the # rest of the identifier is just to be nice. diff --git a/breezy/bzr/groupcompress.py b/breezy/bzr/groupcompress.py index fa0e8ee96c..f6c1f27fe4 100644 --- a/breezy/bzr/groupcompress.py +++ b/breezy/bzr/groupcompress.py @@ -46,12 +46,6 @@ ) from .btree_index import BTreeBuilder from ..lru_cache import LRUSizeCache -from ..sixish import ( - indexbytes, - map, - range, - viewitems, - ) from .versionedfile import ( _KeyRefs, adapter_registry, @@ -80,7 +74,7 @@ def sort_gc_optimal(parent_map): # groupcompress ordering is approximately reverse topological, # properly grouped by file-id. per_prefix_map = {} - for key, value in viewitems(parent_map): + for key, value in parent_map.items(): if isinstance(key, bytes) or len(key) == 1: prefix = b'' else: @@ -392,7 +386,7 @@ def _dump(self, include_text=False): result.append((b'd', content_len, decomp_len, delta_info)) measured_len = 0 while delta_pos < content_len: - c = indexbytes(delta_content, delta_pos) + c = delta_content[delta_pos] delta_pos += 1 if c & 0x80: # Copy (offset, length, @@ -1632,7 +1626,7 @@ def _get_remaining_record_stream(self, keys, orig_keys, ordering, # start with one key, recurse to its oldest parent, then grab # everything in the same group, etc. 
parent_map = dict((key, details[2]) for key, details in - viewitems(locations)) + locations.items()) for key in unadded_keys: parent_map[key] = self._unadded_refs[key] parent_map.update(fallback_parent_map) @@ -2090,10 +2084,10 @@ def add_records(self, records, random_id=False): if changed: result = [] if self._parents: - for key, (value, node_refs) in viewitems(keys): + for key, (value, node_refs) in keys.items(): result.append((key, value, node_refs)) else: - for key, (value, node_refs) in viewitems(keys): + for key, (value, node_refs) in keys.items(): result.append((key, value)) records = result key_dependencies = self._key_dependencies diff --git a/breezy/bzr/groupcompress_repo.py b/breezy/bzr/groupcompress_repo.py index 442f6a11bf..21940a88f3 100644 --- a/breezy/bzr/groupcompress_repo.py +++ b/breezy/bzr/groupcompress_repo.py @@ -59,10 +59,6 @@ from ..bzr.vf_repository import ( StreamSource, ) -from ..sixish import ( - viewitems, - viewvalues, - ) from ..static_tuple import StaticTuple @@ -287,7 +283,7 @@ def _get_referenced_stream(root_keys, parse_leaf_nodes=False): next_keys = set() def handle_internal_node(node): - for prefix, value in viewitems(node._items): + for prefix, value in node._items.items(): # We don't want to request the same key twice, and we # want to order it by the first time it is seen. # Even further, we don't want to request a key which is @@ -549,7 +545,7 @@ def _copy_text_texts(self): ancestor_keys = revision_vf.get_parent_map(revision_vf.keys()) # Strip keys back into revision_ids. ancestors = dict((k[0], tuple([p[0] for p in parents])) - for k, parents in viewitems(ancestor_keys)) + for k, parents in ancestor_keys.items()) del ancestor_keys # TODO: _generate_text_key_index should be much cheaper to generate from # a chk repository, rather than the current implementation @@ -671,7 +667,7 @@ def _filtered_inv_stream(): if search_key_name is None: # Find the name corresponding to the search_key_func search_key_reg = chk_map.search_key_registry - for search_key_name, func in viewitems(search_key_reg): + for search_key_name, func in search_key_reg.items(): if func == chk_inv.id_to_entry._search_key_func: break canonical_inv = inventory.CHKInventory.from_inventory( @@ -748,7 +744,7 @@ def _check_new_inventories(self): # any present parent inventories, which may be used when calculating # deltas for streaming. all_inv_keys = set(corresponding_invs) - for parent_inv_keys in viewvalues(inv_parent_map): + for parent_inv_keys in inv_parent_map.values(): all_inv_keys.update(parent_inv_keys) # Filter out ghost parents. 
all_inv_keys.intersection_update( diff --git a/breezy/bzr/index.py b/breezy/bzr/index.py index 398d31c939..3339b5559e 100644 --- a/breezy/bzr/index.py +++ b/breezy/bzr/index.py @@ -27,6 +27,7 @@ ] from bisect import bisect_right +from io import BytesIO import re from ..lazy_import import lazy_import @@ -41,13 +42,6 @@ debug, errors, ) -from ..sixish import ( - BytesIO, - bytesintern, - viewvalues, - viewitems, - zip, - ) from ..static_tuple import StaticTuple _HEADER_READV = (0, 200) @@ -202,7 +196,7 @@ def _get_nodes_by_key(self): if self._nodes_by_key is None: nodes_by_key = {} if self.reference_lists: - for key, (absent, references, value) in viewitems(self._nodes): + for key, (absent, references, value) in self._nodes.items(): if absent: continue key_dict = nodes_by_key @@ -210,7 +204,7 @@ def _get_nodes_by_key(self): key_dict = key_dict.setdefault(subkey, {}) key_dict[key[-1]] = key, value, references else: - for key, (absent, references, value) in viewitems(self._nodes): + for key, (absent, references, value) in self._nodes.items(): if absent: continue key_dict = nodes_by_key @@ -334,7 +328,7 @@ def finish(self): # forward sorted by key. In future we may consider topological sorting, # at the cost of table scans for direct lookup, or a second index for # direct lookup - nodes = sorted(viewitems(self._nodes)) + nodes = sorted(self._nodes.items()) # if we do not prepass, we don't know how long it will be up front. expected_bytes = None # we only need to pre-pass if we have reference lists at all. @@ -549,7 +543,7 @@ def _buffer_all(self, stream=None): stream.close() del lines[-1] _, _, _, trailers = self._parse_lines(lines, pos) - for key, absent, references, value in viewvalues(self._keys_by_offset): + for key, absent, references, value in self._keys_by_offset.values(): if absent: continue # resolve references: @@ -580,7 +574,7 @@ def external_references(self, ref_list_num): % (ref_list_num, self.node_ref_lists)) refs = set() nodes = self._nodes - for key, (value, ref_lists) in viewitems(nodes): + for key, (value, ref_lists) in nodes.items(): ref_list = ref_lists[ref_list_num] refs.update([ref for ref in ref_list if ref not in nodes]) return refs @@ -589,13 +583,13 @@ def _get_nodes_by_key(self): if self._nodes_by_key is None: nodes_by_key = {} if self.node_ref_lists: - for key, (value, references) in viewitems(self._nodes): + for key, (value, references) in self._nodes.items(): key_dict = nodes_by_key for subkey in key[:-1]: key_dict = key_dict.setdefault(subkey, {}) key_dict[key[-1]] = key, value, references else: - for key, value in viewitems(self._nodes): + for key, value in self._nodes.items(): key_dict = nodes_by_key for subkey in key[:-1]: key_dict = key_dict.setdefault(subkey, {}) @@ -618,10 +612,10 @@ def iter_all_entries(self): if self._nodes is None: self._buffer_all() if self.node_ref_lists: - for key, (value, node_ref_lists) in viewitems(self._nodes): + for key, (value, node_ref_lists) in self._nodes.items(): yield self, key, value, node_ref_lists else: - for key, value in viewitems(self._nodes): + for key, value in self._nodes.items(): yield self, key, value def _read_prefix(self, stream): @@ -1161,8 +1155,7 @@ def _parse_lines(self, lines, pos): raise BadIndexData(self) # keys are tuples. Each element is a string that may occur many # times, so we intern them to save space. 
AB, RC, 200807 - key = tuple([bytesintern(element) - for element in elements[:self._key_length]]) + key = tuple([element for element in elements[:self._key_length]]) if first_key is None: first_key = key absent, references, value = elements[-3:] @@ -1673,11 +1666,11 @@ def iter_all_entries(self): trace.mutter_callsite(3, "iter_all_entries scales with size of history.") if self.reference_lists: - for key, (absent, references, value) in viewitems(self._nodes): + for key, (absent, references, value) in self._nodes.items(): if not absent: yield self, key, value, references else: - for key, (absent, references, value) in viewitems(self._nodes): + for key, (absent, references, value) in self._nodes.items(): if not absent: yield self, key, value @@ -1910,7 +1903,7 @@ def _iter_entries_prefix(index_or_builder, nodes_by_key, keys): if len(elements): dicts = [key_dict] while dicts: - values_view = viewvalues(dicts.pop()) + values_view = dicts.pop().values() # can't be empty or would not exist value = next(iter(values_view)) if isinstance(value, dict): diff --git a/breezy/bzr/inventory.py b/breezy/bzr/inventory.py index 9d8f4043e0..ca1d062039 100644 --- a/breezy/bzr/inventory.py +++ b/breezy/bzr/inventory.py @@ -50,15 +50,18 @@ osutils, trace, ) -from ..sixish import ( - bytesintern, - text_type, - viewitems, - viewvalues, - ) from ..static_tuple import StaticTuple +class InvalidEntryName(errors.InternalBzrError): + + _fmt = "Invalid entry name: %(name)s" + + def __init__(self, name): + errors.BzrError.__init__(self) + self.name = name + + class InventoryEntry(object): """Description of a versioned file. @@ -91,30 +94,30 @@ class InventoryEntry(object): >>> i = Inventory() >>> i.path2id('') - 'TREE_ROOT' + b'TREE_ROOT' >>> i.add(InventoryDirectory(b'123', 'src', ROOT_ID)) - InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None) - >>> i.add(InventoryFile(b'2323', 'hello.c', parent_id='123')) - InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None) + InventoryDirectory(b'123', 'src', parent_id=b'TREE_ROOT', revision=None) + >>> i.add(InventoryFile(b'2323', 'hello.c', parent_id=b'123')) + InventoryFile(b'2323', 'hello.c', parent_id=b'123', sha1=None, len=None, revision=None) >>> shouldbe = {0: '', 1: 'src', 2: 'src/hello.c'} >>> for ix, j in enumerate(i.iter_entries()): - ... print (j[0] == shouldbe[ix], j[1]) + ... print(j[0] == shouldbe[ix], j[1]) ... 
- (True, InventoryDirectory('TREE_ROOT', u'', parent_id=None, revision=None)) - (True, InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None)) - (True, InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None)) - >>> i.add(InventoryFile('2324', 'bye.c', '123')) - InventoryFile('2324', 'bye.c', parent_id='123', sha1=None, len=None, revision=None) - >>> i.add(InventoryDirectory('2325', 'wibble', '123')) - InventoryDirectory('2325', 'wibble', parent_id='123', revision=None) + True InventoryDirectory(b'TREE_ROOT', '', parent_id=None, revision=None) + True InventoryDirectory(b'123', 'src', parent_id=b'TREE_ROOT', revision=None) + True InventoryFile(b'2323', 'hello.c', parent_id=b'123', sha1=None, len=None, revision=None) + >>> i.add(InventoryFile(b'2324', 'bye.c', b'123')) + InventoryFile(b'2324', 'bye.c', parent_id=b'123', sha1=None, len=None, revision=None) + >>> i.add(InventoryDirectory(b'2325', 'wibble', b'123')) + InventoryDirectory(b'2325', 'wibble', parent_id=b'123', revision=None) >>> i.path2id('src/wibble') - '2325' - >>> i.add(InventoryFile('2326', 'wibble.c', '2325')) - InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) - >>> i.get_entry('2326') - InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) + b'2325' + >>> i.add(InventoryFile(b'2326', 'wibble.c', b'2325')) + InventoryFile(b'2326', 'wibble.c', parent_id=b'2325', sha1=None, len=None, revision=None) + >>> i.get_entry(b'2326') + InventoryFile(b'2326', 'wibble.c', parent_id=b'2325', sha1=None, len=None, revision=None) >>> for path, entry in i.iter_entries(): - ... print path + ... print(path) ... src @@ -218,13 +221,13 @@ def __init__(self, file_id, name, parent_id): >>> e.name 'hello.c' >>> e.file_id - '123' + b'123' >>> e = InventoryFile(b'123', 'src/hello.c', ROOT_ID) Traceback (most recent call last): - InvalidEntryName: Invalid entry name: src/hello.c + breezy.bzr.inventory.InvalidEntryName: Invalid entry name: src/hello.c """ if u'/' in name: - raise errors.InvalidEntryName(name=name) + raise InvalidEntryName(name=name) if not isinstance(file_id, bytes): raise TypeError(file_id) self.file_id = file_id @@ -417,7 +420,7 @@ def __init__(self, file_id, name, parent_id): self.children = {} def sorted_children(self): - return sorted(viewitems(self.children)) + return sorted(self.children.items()) def kind_character(self): """See InventoryEntry.kind_character.""" @@ -666,8 +669,8 @@ def id2path(self, file_id): >>> i = Inventory() >>> e = i.add(InventoryDirectory(b'src-id', 'src', ROOT_ID)) - >>> e = i.add(InventoryFile(b'foo-id', 'foo.c', parent_id='src-id')) - >>> print i.id2path(b'foo-id') + >>> e = i.add(InventoryFile(b'foo-id', 'foo.c', parent_id=b'src-id')) + >>> print(i.id2path(b'foo-id')) src/foo.c :raises NoSuchId: If file_id is not present in the inventory. 
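The doctest churn above reduces to one convention: under Python 3, Breezy file ids remain bytes while entry names and paths are text, so reprs gain a b'' prefix and print() is a function. A minimal standalone sketch of that convention (the values here are made up, not taken from the tree):

file_id = b'2325'   # identifiers stay bytes on Python 3
name = 'wibble'     # names and paths are text (str)
assert isinstance(file_id, bytes) and isinstance(name, str)
# repr() shows the b'' prefix, which is why the expected doctest output
# changed from '2325' to b'2325'.
print(repr(file_id))   # -> b'2325'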
@@ -694,10 +697,9 @@ def iter_entries(self, from_dir=None, recursive=True): # unrolling the recursive called changed the time from # 440ms/663ms (inline/total) to 116ms/116ms - children = sorted(viewitems(from_dir.children)) + children = sorted(from_dir.children.items()) if not recursive: - for name, ie in children: - yield name, ie + yield from children return children = deque(children) stack = [(u'', children)] @@ -719,7 +721,7 @@ def iter_entries(self, from_dir=None, recursive=True): continue # But do this child first - new_children = sorted(viewitems(ie.children)) + new_children = sorted(ie.children.items()) new_children = deque(new_children) stack.append((path, new_children)) # Break out of inner loop, so that we start outer loop with child @@ -804,7 +806,7 @@ def add_ancestors(file_id): cur_relpath, cur_dir = stack.pop() child_dirs = [] - for child_name, child_ie in sorted(viewitems(cur_dir.children)): + for child_name, child_ie in sorted(cur_dir.children.items()): child_relpath = cur_relpath + child_name @@ -848,7 +850,7 @@ def entries(self): accum = [] def descend(dir_ie, dir_path): - kids = sorted(viewitems(dir_ie.children)) + kids = sorted(dir_ie.children.items()) for name, ie in kids: child_path = osutils.pathjoin(dir_path, name) accum.append((child_path, ie)) @@ -866,7 +868,7 @@ def get_entry_by_path_partial(self, relpath): or as list of elements. :return: tuple with ie, resolved elements and elements left to resolve """ - if isinstance(relpath, (str, text_type)): + if isinstance(relpath, str): names = osutils.splitpath(relpath) else: names = relpath @@ -903,7 +905,7 @@ def get_entry_by_path(self, relpath): Returns None IFF the path is not found. """ - if isinstance(relpath, (str, text_type)): + if isinstance(relpath, str): names = osutils.splitpath(relpath) else: names = relpath @@ -1000,21 +1002,21 @@ class Inventory(CommonInventory): >>> inv = Inventory() >>> inv.add(InventoryFile(b'123-123', 'hello.c', ROOT_ID)) - InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) + InventoryFile(b'123-123', 'hello.c', parent_id=b'TREE_ROOT', sha1=None, len=None, revision=None) >>> inv.get_entry(b'123-123').name 'hello.c' Id's may be looked up from paths: >>> inv.path2id('hello.c') - '123-123' + b'123-123' >>> inv.has_id(b'123-123') True There are iterators over the contents: >>> [entry[0] for entry in inv.iter_entries()] - ['', u'hello.c'] + ['', 'hello.c'] """ def __init__(self, root_id=ROOT_ID, revision_id=None): @@ -1185,7 +1187,7 @@ def iter_just_entries(self): """ if self.root is None: return () - return iter(viewvalues(self._byid)) + return self._byid.values() def __len__(self): """Returns number of entries.""" @@ -1196,7 +1198,7 @@ def get_entry(self, file_id): >>> inv = Inventory() >>> inv.add(InventoryFile(b'123123', 'hello.c', ROOT_ID)) - InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) + InventoryFile(b'123123', 'hello.c', parent_id=b'TREE_ROOT', sha1=None, len=None, revision=None) >>> inv.get_entry(b'123123').name 'hello.c' """ @@ -1223,7 +1225,7 @@ def _add_child(self, entry): self._byid[entry.file_id] = entry children = getattr(entry, 'children', {}) if children is not None: - for child in viewvalues(children): + for child in children.values(): self._add_child(child) return entry @@ -1279,7 +1281,7 @@ def delete(self, file_id): >>> inv = Inventory() >>> inv.add(InventoryFile(b'123', 'foo.c', ROOT_ID)) - InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, 
revision=None) + InventoryFile(b'123', 'foo.c', parent_id=b'TREE_ROOT', sha1=None, len=None, revision=None) >>> inv.has_id(b'123') True >>> inv.delete(b'123') @@ -1299,11 +1301,11 @@ def __eq__(self, other): >>> i1 == i2 True >>> i1.add(InventoryFile(b'123', 'foo', ROOT_ID)) - InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) + InventoryFile(b'123', 'foo', parent_id=b'TREE_ROOT', sha1=None, len=None, revision=None) >>> i1 == i2 False >>> i2.add(InventoryFile(b'123', 'foo', ROOT_ID)) - InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) + InventoryFile(b'123', 'foo', parent_id=b'TREE_ROOT', sha1=None, len=None, revision=None) >>> i1 == i2 True """ @@ -1374,7 +1376,7 @@ def remove_recursive_id(self, file_id): ie = to_find_delete.pop() to_delete.append(ie.file_id) if ie.kind == 'directory': - to_find_delete.extend(viewvalues(ie.children)) + to_find_delete.extend(ie.children.values()) for file_id in reversed(to_delete): ie = self.get_entry(file_id) del self._byid[file_id] @@ -1623,7 +1625,7 @@ def _bytes_to_utf8name_key(data): # to filter out empty names because of non rich-root... sections = data.split(b'\n') kind, file_id = sections[0].split(b': ') - return (sections[2], bytesintern(file_id), bytesintern(sections[3])) + return (sections[2], file_id, sections[3]) def _bytes_to_entry(self, bytes): """Deserialise a serialised entry.""" @@ -1651,8 +1653,8 @@ def _bytes_to_entry(self, bytes): result.reference_revision = sections[4] else: raise ValueError("Not a serialised entry %r" % bytes) - result.file_id = bytesintern(result.file_id) - result.revision = bytesintern(sections[3]) + result.file_id = result.file_id + result.revision = sections[3] if result.parent_id == b'': result.parent_id = None self._fileid_to_entry_cache[result.file_id] = result @@ -1796,7 +1798,7 @@ def create_by_apply_delta(self, inventory_delta, new_revision_id, continue # This loop could potentially be better by using the id_basename # map to just get the child file ids. - for child in viewvalues(entry.children): + for child in entry.children.values(): if child.file_id not in altered: raise errors.InconsistentDelta(self.id2path(child.file_id), child.file_id, "Child not deleted or reparented when " @@ -1808,7 +1810,7 @@ def create_by_apply_delta(self, inventory_delta, new_revision_id, # re-keying, but its simpler to just output that as a delete+add # to spend less time calculating the delta. 
delta_list = [] - for key, (old_key, value) in viewitems(parent_id_basename_delta): + for key, (old_key, value) in parent_id_basename_delta.items(): if value is not None: delta_list.append((old_key, key, value)) else: @@ -1855,11 +1857,11 @@ def deserialise(klass, chk_store, lines, expected_revision_id): raise errors.BzrError('Duplicate key in inventory: %r\n%r' % (key, bytes)) info[key] = value - revision_id = bytesintern(info[b'revision_id']) - root_id = bytesintern(info[b'root_id']) - search_key_name = bytesintern(info.get(b'search_key_name', b'plain')) - parent_id_basename_to_file_id = bytesintern(info.get( - b'parent_id_basename_to_file_id', None)) + revision_id = info[b'revision_id'] + root_id = info[b'root_id'] + search_key_name = info.get(b'search_key_name', b'plain') + parent_id_basename_to_file_id = info.get( + b'parent_id_basename_to_file_id', None) if not parent_id_basename_to_file_id.startswith(b'sha1:'): raise ValueError('parent_id_basename_to_file_id should be a sha1' ' key not %r' % (parent_id_basename_to_file_id,)) @@ -2174,7 +2176,7 @@ def _make_delta(self, old): def path2id(self, relpath): """See CommonInventory.path2id().""" # TODO: perhaps support negative hits? - if isinstance(relpath, (str, text_type)): + if isinstance(relpath, str): names = osutils.splitpath(relpath) else: names = relpath diff --git a/breezy/bzr/inventorytree.py b/breezy/bzr/inventorytree.py index 8b8231baac..1f3407c246 100644 --- a/breezy/bzr/inventorytree.py +++ b/breezy/bzr/inventorytree.py @@ -53,9 +53,6 @@ inventory as _mod_inventory, ) """) -from ..sixish import ( - viewvalues, - ) from ..tree import ( FileTimestampUnavailable, InterTree, @@ -249,7 +246,7 @@ def iter_child_entries(self, path): ie = self._path2ie(path) if ie.kind != 'directory': raise errors.NotADirectory(path) - return iter(viewvalues(ie.children)) + return ie.children.values() def _get_plan_merge_data(self, path, other, base): from . import versionedfile @@ -509,7 +506,7 @@ class _SmartAddHelper(object): def get_inventory_delta(self): # GZ 2016-06-05: Returning view would probably be fine but currently # Inventory.apply_delta is documented as requiring a list of changes. - return list(viewvalues(self._invdelta)) + return list(self._invdelta.values()) def _get_ie(self, inv_path): """Retrieve the most up to date inventory entry for a path. 
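The bytesintern() calls dropped in the inventory and index hunks above existed because Python 2 could intern native (byte) strings; on CPython 3, sys.intern() accepts str only, so byte keys are now stored as-is. A small sketch of that behaviour, assuming CPython:

import sys

text_key = sys.intern('TREE_ROOT')   # text can still be interned
byte_key = b'TREE_ROOT'              # bytes keys are simply kept as-is
try:
    sys.intern(byte_key)
except TypeError:
    # sys.intern() rejects bytes on Python 3, hence the removal.
    pass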
diff --git a/breezy/bzr/knit.py b/breezy/bzr/knit.py index 3018471942..852251eb23 100644 --- a/breezy/bzr/knit.py +++ b/breezy/bzr/knit.py @@ -54,6 +54,7 @@ from __future__ import absolute_import import operator +from io import BytesIO import os from ..lazy_import import lazy_import @@ -95,12 +96,6 @@ sha_strings, split_lines, ) -from ..sixish import ( - BytesIO, - range, - viewitems, - viewvalues, - ) from ..bzr.versionedfile import ( _KeyRefs, AbsentContentFactory, @@ -954,7 +949,7 @@ def _get_total_build_size(self, keys, positions): next_keys.add(compression_parent) build_keys = next_keys return sum(index_memo[2] - for index_memo in viewvalues(all_build_index_memos)) + for index_memo in all_build_index_memos.values()) class KnitVersionedFiles(VersionedFilesWithFallbacks): @@ -1287,7 +1282,7 @@ def _get_components_positions(self, keys, allow_missing=False): build_details = self._index.get_build_details(pending_components) current_components = set(pending_components) pending_components = set() - for key, details in viewitems(build_details): + for key, details in build_details.items(): (index_memo, compression_parent, parents, record_details) = details if compression_parent is not None: @@ -1394,7 +1389,7 @@ def _get_record_map_unparsed(self, keys, allow_missing=False): # key = component_id, r = record_details, i_m = index_memo, # n = next records = [(key, i_m) for key, (r, i_m, n) - in viewitems(position_map)] + in position_map.items()] # Sort by the index memo, so that we request records from the # same pack file together, and in forward-sorted order records.sort(key=operator.itemgetter(1)) @@ -1526,7 +1521,7 @@ def _get_remaining_record_stream(self, keys, ordering, # map from key to # (record_details, access_memo, compression_parent_key) positions = dict((key, self._build_details_to_components(details)) - for key, details in viewitems(build_details)) + for key, details in build_details.items()) absent_keys = keys.difference(set(positions)) # There may be more absent keys : if we're missing the basis component # and are trying to include the delta closure. @@ -1641,7 +1636,7 @@ def get_sha1s(self, keys): missing = set(keys) record_map = self._get_record_map(missing, allow_missing=True) result = {} - for key, details in viewitems(record_map): + for key, details in record_map.items(): if key not in missing: continue # record entry 2 is the 'digest'. 
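The knit.py rewrites above follow the mechanical pattern used throughout this patch: the sixish view helpers were thin wrappers over the Python 3 dict methods, and dict views are already iterable, so viewitems(d) becomes d.items() and iter(viewvalues(d)) collapses to d.values(). An illustrative sketch with made-up data:

parent_map = {b'rev-2': (b'rev-1',), b'rev-1': ()}
for key, parents in parent_map.items():            # was: viewitems(parent_map)
    pass
tails = [k for k, parents in parent_map.items() if not parents]
values_view = parent_map.values()                  # was: iter(viewvalues(parent_map))
assert list(values_view) == [(b'rev-1',), ()]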
@@ -1871,7 +1866,7 @@ def iter_lines_added_or_present_in_keys(self, keys, pb=None): # we need key, position, length key_records = [] build_details = self._index.get_build_details(keys) - for key, details in viewitems(build_details): + for key, details in build_details.items(): if key in keys: key_records.append((key, details[0])) records_iter = enumerate(self._read_records_iter(key_records)) @@ -2281,8 +2276,8 @@ def _wire_bytes(self): # one line with next ('' for None) # one line with byte count of the record bytes # the record bytes - for key, (record_bytes, (method, noeol), next) in viewitems( - self._raw_record_map): + for key, (record_bytes, (method, noeol), next) in ( + self._raw_record_map.items()): key_bytes = b'\x00'.join(key) parents = self.global_map.get(key, None) if parents is None: @@ -3012,10 +3007,10 @@ def add_records(self, records, random_id=False, del keys[key] result = [] if self._parents: - for key, (value, node_refs) in viewitems(keys): + for key, (value, node_refs) in keys.items(): result.append((key, value, node_refs)) else: - for key, (value, node_refs) in viewitems(keys): + for key, (value, node_refs) in keys.items(): result.append((key, value)) self._add_callback(result) if missing_compression_parents: @@ -3406,7 +3401,7 @@ def _get_build_graph(self, key): self._all_build_details.update(build_details) # new_nodes = self._vf._index._get_entries(this_iteration) pending = set() - for key, details in viewitems(build_details): + for key, details in build_details.items(): (index_memo, compression_parent, parent_keys, record_details) = details self._parent_map[key] = parent_keys diff --git a/breezy/bzr/knitpack_repo.py b/breezy/bzr/knitpack_repo.py index c1605db613..3ce23cc61a 100644 --- a/breezy/bzr/knitpack_repo.py +++ b/breezy/bzr/knitpack_repo.py @@ -70,10 +70,6 @@ PackRepository, RepositoryPackCollection, ) -from ..sixish import ( - viewitems, - zip - ) from ..bzr.vf_repository import ( StreamSource, ) @@ -659,7 +655,7 @@ def _do_copy_nodes(self, nodes, index_map, writer, write_index, pb, request_groups[index].append((key, value)) record_index = 0 pb.update("Copied record", record_index, len(nodes)) - for index, items in viewitems(request_groups): + for index, items in request_groups.items(): pack_readv_requests = [] for key, value in items: # ---- KnitGraphIndex.get_position @@ -751,7 +747,7 @@ def _process_inventory_lines(self, inv_lines): fileid_revisions = repo._find_file_ids_from_xml_inventory_lines( inv_lines, self.revision_keys) text_filter = [] - for fileid, file_revids in viewitems(fileid_revisions): + for fileid, file_revids in fileid_revisions.items(): text_filter.extend([(fileid, file_revid) for file_revid in file_revids]) self._text_filter = text_filter @@ -950,7 +946,7 @@ def _least_readv_node_readv(self, nodes): request_groups[index] = [] request_groups[index].append((key, value, references)) result = [] - for index, items in viewitems(request_groups): + for index, items in request_groups.items(): pack_readv_requests = [] for key, value, references in items: # ---- KnitGraphIndex.get_position diff --git a/breezy/bzr/pack.py b/breezy/bzr/pack.py index 0fa1dcb04a..0f2fca1f35 100644 --- a/breezy/bzr/pack.py +++ b/breezy/bzr/pack.py @@ -22,12 +22,10 @@ from __future__ import absolute_import +from io import BytesIO import re from .. 
import errors -from ..sixish import ( - BytesIO, - ) FORMAT_ONE = b"Bazaar pack format 1 (introduced in 0.18)" diff --git a/breezy/bzr/pack_repo.py b/breezy/bzr/pack_repo.py index bc89a6a027..498b9f159c 100644 --- a/breezy/bzr/pack_repo.py +++ b/breezy/bzr/pack_repo.py @@ -21,10 +21,10 @@ from ..lazy_import import lazy_import lazy_import(globals(), """ +import contextlib import time from breezy import ( - cleanup, config, debug, graph, @@ -60,10 +60,6 @@ MetaDirRepository, RepositoryFormatMetaDir, ) -from ..sixish import ( - reraise, - viewitems, - ) from ..bzr.vf_repository import ( MetaDirVersionedFileRepository, MetaDirVersionedFileRepositoryFormat, @@ -1337,7 +1333,7 @@ def _diff_pack_names(self): # do a two-way diff against our original content current_nodes = set() - for name, sizes in viewitems(self._names): + for name, sizes in self._names.items(): current_nodes.add( (name, b' '.join(b'%d' % size for size in sizes))) @@ -1550,7 +1546,7 @@ def _abort_write_group(self): # FIXME: just drop the transient index. # forget what names there are if self._new_pack is not None: - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: stack.callback(setattr, self, '_new_pack', None) # If we aborted while in the middle of finishing the write # group, _remove_pack_indices could fail because the indexes are @@ -1560,7 +1556,7 @@ def _abort_write_group(self): ignore_missing=True) self._new_pack.abort() for resumed_pack in self._resumed_packs: - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: # See comment in previous finally block. stack.callback(self._remove_pack_indices, resumed_pack, ignore_missing=True) @@ -2099,4 +2095,4 @@ def reload_or_raise(self, retry_exc): is_error = True if is_error: # GZ 2017-03-27: No real reason this needs the original traceback. - reraise(*retry_exc.exc_info) + raise retry_exc.exc_info[1] diff --git a/breezy/bzr/reconcile.py b/breezy/bzr/reconcile.py index 7d53e76121..e4104fe4c0 100644 --- a/breezy/bzr/reconcile.py +++ b/breezy/bzr/reconcile.py @@ -26,7 +26,6 @@ ] from .. 
import ( - cleanup, errors, revision as _mod_revision, ui, diff --git a/breezy/bzr/remote.py b/breezy/bzr/remote.py index ef0aa254d3..1ddeb15317 100644 --- a/breezy/bzr/remote.py +++ b/breezy/bzr/remote.py @@ -59,13 +59,6 @@ from .inventory import Inventory from .inventorytree import InventoryRevisionTree from ..lockable_files import LockableFiles -from ..sixish import ( - get_unbound_function, - map, - text_type, - viewitems, - viewvalues, - ) from .smart import client, vfs, repository as smart_repo from .smart.client import _SmartClient from ..revision import NULL_REVISION @@ -719,7 +712,7 @@ def get_branches(self, possible_transports=None, ignore_fallbacks=False): raise errors.UnexpectedSmartServerResponse(response) body = bencode.bdecode(handler.read_body_bytes()) ret = {} - for name, value in viewitems(body): + for name, value in body.items(): name = name.decode('utf-8') ret[name] = self._open_branch(name, value[0], value[1], possible_transports=possible_transports, @@ -913,12 +906,10 @@ def open_workingtree(self, recommend_upgrade=True): def _path_for_remote_call(self, client): """Return the path to be used for this bzrdir in a remote call.""" remote_path = client.remote_path_from_transport(self.root_transport) - if sys.version_info[0] == 3: - remote_path = remote_path.decode('utf-8') + remote_path = remote_path.decode('utf-8') base_url, segment_parameters = urlutils.split_segment_parameters_raw( remote_path) - if sys.version_info[0] == 3: - base_url = base_url.encode('utf-8') + base_url = base_url.encode('utf-8') return base_url def get_branch_transport(self, branch_format, name=None): @@ -2184,7 +2175,7 @@ def refresh_data(self): def revision_ids_to_search_result(self, result_set): """Convert a set of revision ids to a graph SearchResult.""" result_parents = set() - for parents in viewvalues(self.get_graph().get_parent_map(result_set)): + for parents in self.get_graph().get_parent_map(result_set).values(): result_parents.update(parents) included_keys = result_set.intersection(result_parents) start_keys = result_set.difference(included_keys) @@ -2316,7 +2307,7 @@ def iter_files_bytes(self, desired_files): if not absent: break desired_files = [(key[0], key[1], identifier) - for identifier, key in viewitems(absent)] + for identifier, key in absent.items()] for (identifier, bytes_iterator) in fallback.iter_files_bytes(desired_files): del absent[identifier] yield identifier, bytes_iterator @@ -2363,7 +2354,7 @@ def _get_parent_map_rpc(self, keys): # There is one other "bug" which is that ghosts in # get_revision_graph() are not returned at all. But we won't worry # about that for now. 
- for node_id, parent_ids in viewitems(rg): + for node_id, parent_ids in rg.items(): if parent_ids == (): rg[node_id] = (NULL_REVISION,) rg[NULL_REVISION] = () @@ -3415,9 +3406,8 @@ def _use_default_local_heads_to_fetch(self): self._ensure_real() if isinstance(self._custom_format, bzrbranch.BranchFormatMetadir): branch_class = self._custom_format._branch_class() - heads_to_fetch_impl = get_unbound_function( - branch_class.heads_to_fetch) - if heads_to_fetch_impl is get_unbound_function(branch.Branch.heads_to_fetch): + heads_to_fetch_impl = branch_class.heads_to_fetch + if heads_to_fetch_impl is branch.Branch.heads_to_fetch: return True return False @@ -3693,9 +3683,7 @@ def get_stacked_on_url(self): return self._real_branch.get_stacked_on_url() if response[0] != b'ok': raise errors.UnexpectedSmartServerResponse(response) - if sys.version_info[0] == 3: - return response[1].decode('utf-8') - return response[1] + return response[1].decode('utf-8') def set_stacked_on_url(self, url): branch.Branch.set_stacked_on_url(self, url) @@ -4009,7 +3997,7 @@ def _set_parent_location(self, url): return self._vfs_set_parent_location(url) try: call_url = url or u'' - if isinstance(call_url, text_type): + if isinstance(call_url, str): call_url = call_url.encode('utf-8') response = self._call(b'Branch.set_parent_location', self._remote_path(), self._lock_token, self._repo_lock_token, @@ -4317,7 +4305,7 @@ def set_option(self, value, name, section=None): def _set_config_option(self, value, name, section): if isinstance(value, (bool, int)): value = str(value) - elif isinstance(value, (text_type, str)): + elif isinstance(value, str): pass else: raise TypeError(value) @@ -4337,9 +4325,9 @@ def _set_config_option(self, value, name, section): def _serialize_option_dict(self, option_dict): utf8_dict = {} for key, value in option_dict.items(): - if isinstance(key, text_type): + if isinstance(key, str): key = key.encode('utf8') - if isinstance(value, text_type): + if isinstance(value, str): value = value.encode('utf8') utf8_dict[key] = value return bencode.bencode(utf8_dict) diff --git a/breezy/bzr/repository.py b/breezy/bzr/repository.py index a85618aa4c..42ff8f592d 100644 --- a/breezy/bzr/repository.py +++ b/breezy/bzr/repository.py @@ -32,9 +32,6 @@ Repository, RepositoryFormat, ) -from ..sixish import ( - viewvalues, - ) class MetaDirRepository(Repository): @@ -94,8 +91,8 @@ def _find_parent_ids_of_revisions(self, revision_ids): :return: set of revisions that are parents of revision_ids which are not part of revision_ids themselves """ - parent_ids = set(itertools.chain.from_iterable(viewvalues( - self.get_parent_map(revision_ids)))) + parent_ids = set(itertools.chain.from_iterable( + self.get_parent_map(revision_ids).values())) parent_ids.difference_update(revision_ids) parent_ids.discard(_mod_revision.NULL_REVISION) return parent_ids diff --git a/breezy/bzr/smart/branch.py b/breezy/bzr/smart/branch.py index cab3b545ec..b44c1de4ae 100644 --- a/breezy/bzr/smart/branch.py +++ b/breezy/bzr/smart/branch.py @@ -24,7 +24,6 @@ revision as _mod_revision, ) from ...controldir import ControlDir -from ...sixish import text_type from .request import ( FailedSmartServerResponse, SmartServerRequest, @@ -254,7 +253,7 @@ def do_with_locked_branch(self, branch, *args): return self.do_tip_change_with_locked_branch(branch, *args) except errors.TipChangeRejected as e: msg = e.msg - if isinstance(msg, text_type): + if isinstance(msg, str): msg = msg.encode('utf-8') return FailedSmartServerResponse((b'TipChangeRejected', msg)) diff 
--git a/breezy/bzr/smart/bzrdir.py b/breezy/bzr/smart/bzrdir.py index 285363be4b..c386bbabf3 100644 --- a/breezy/bzr/smart/bzrdir.py +++ b/breezy/bzr/smart/bzrdir.py @@ -35,7 +35,6 @@ from ...controldir import ( network_format_registry, ) -from ...sixish import PY3 from .request import ( FailedSmartServerResponse, SmartServerRequest, @@ -483,10 +482,7 @@ def parse_NoneBytestring(self, arg): def parse_NoneString(self, arg): if not arg: return None - if PY3: - return arg.decode('utf-8') - else: - return arg + return arg.decode('utf-8') def _serialize_NoneTrueFalse(self, arg): if arg is False: diff --git a/breezy/bzr/smart/message.py b/breezy/bzr/smart/message.py index 5c3194e800..d33a18829b 100644 --- a/breezy/bzr/smart/message.py +++ b/breezy/bzr/smart/message.py @@ -21,13 +21,14 @@ except ImportError: # python < 3.7 from collections import deque +from io import ( + BytesIO, + ) + from ... import ( debug, errors, ) -from ...sixish import ( - BytesIO, - ) from ...trace import mutter diff --git a/breezy/bzr/smart/ping.py b/breezy/bzr/smart/ping.py index 77f9edcdd5..a5c5c4247a 100644 --- a/breezy/bzr/smart/ping.py +++ b/breezy/bzr/smart/ping.py @@ -20,7 +20,6 @@ from ...commands import Command from ...lazy_import import lazy_import -from ...sixish import viewitems lazy_import(globals(), """ from breezy.bzr.smart.client import _SmartClient @@ -54,5 +53,5 @@ def run(self, location): if getattr(handler, 'headers', None) is not None: headers = { k.decode('utf-8'): v.decode('utf-8') - for (k, v) in viewitems(handler.headers)} + for (k, v) in handler.headers.items()} self.outf.write('Headers: %r\n' % (headers,)) diff --git a/breezy/bzr/smart/protocol.py b/breezy/bzr/smart/protocol.py index 3a9a16c219..229e31136a 100644 --- a/breezy/bzr/smart/protocol.py +++ b/breezy/bzr/smart/protocol.py @@ -25,12 +25,10 @@ except ImportError: # python < 3.7 from collections import deque +from io import BytesIO import struct import sys -try: - import _thread -except ImportError: - import thread as _thread +import _thread import time import breezy @@ -39,12 +37,7 @@ errors, osutils, ) -from ...sixish import ( - BytesIO, - reraise, -) from . 
import message, request -from ...sixish import text_type from ...trace import log_exception_quietly, mutter from ...bencode import bdecode_as_tuple, bencode @@ -75,7 +68,7 @@ def _decode_tuple(req_line): def _encode_tuple(args): """Encode the tuple args to a bytestream.""" for arg in args: - if isinstance(arg, text_type): + if isinstance(arg, str): raise TypeError(args) return b'\x01'.join(args) + b'\n' @@ -1135,7 +1128,7 @@ def _write_structure(self, args): self._write_func(b's') utf8_args = [] for arg in args: - if isinstance(arg, text_type): + if isinstance(arg, str): utf8_args.append(arg.encode('utf8')) else: utf8_args.append(arg) @@ -1393,8 +1386,9 @@ def call_with_body_stream(self, args, stream): self._write_structure((b'error',)) self._write_end() self._medium_request.finished_writing() + (exc_type, exc_val, exc_tb) = exc_info try: - reraise(*exc_info) + raise exc_val finally: del exc_info else: diff --git a/breezy/bzr/smart/repository.py b/breezy/bzr/smart/repository.py index 6c44b25a40..3046cb100f 100644 --- a/breezy/bzr/smart/repository.py +++ b/breezy/bzr/smart/repository.py @@ -21,10 +21,7 @@ import bz2 import itertools import os -try: - import queue -except ImportError: - import Queue as queue +import queue import sys import tempfile import threading @@ -45,9 +42,6 @@ vf_search, ) from ..bzrdir import BzrDir -from ...sixish import ( - reraise, -) from .request import ( FailedSmartServerResponse, SmartServerRequest, @@ -919,8 +913,9 @@ def do_end(self): if self.insert_thread is not None: self.insert_thread.join() if not self.insert_ok: + (exc_type, exc_val, exc_tb) = self.insert_exception try: - reraise(*self.insert_exception) + raise exc_val finally: del self.insert_exception write_group_tokens, missing_keys = self.insert_result diff --git a/breezy/bzr/smart/request.py b/breezy/bzr/smart/request.py index 3a8e9352ba..d31a9d06f1 100644 --- a/breezy/bzr/smart/request.py +++ b/breezy/bzr/smart/request.py @@ -34,10 +34,7 @@ import threading -try: - from _thread import get_ident -except ImportError: # Python < 3 - from thread import get_ident +from _thread import get_ident from ... import ( branch as _mod_branch, @@ -49,7 +46,6 @@ trace, urlutils, ) -from ...sixish import text_type from ...lazy_import import lazy_import lazy_import(globals(), """ from breezy.bzr import bzrdir @@ -449,7 +445,7 @@ def _translate_error(err): # If it is a DecodeError, than most likely we are starting # with a plain string str_or_unicode = err.object - if isinstance(str_or_unicode, text_type): + if isinstance(str_or_unicode, str): # XXX: UTF-8 might have \x01 (our protocol v1 and v2 seperator # byte) in it, so this encoding could cause broken responses. # Newer clients use protocol v3, so will be fine. 
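The pack_repo, protocol and smart repository hunks above replace the sixish reraise(*exc_info) helper with a plain raise of the stored exception value. A minimal sketch of the Python 3 idiom, using only the standard library (the capture() helper is made up for illustration):

import sys

def capture():
    try:
        1 / 0
    except ZeroDivisionError:
        return sys.exc_info()   # (exc_type, exc_value, exc_tb)

exc_type, exc_value, exc_tb = capture()
try:
    # Python 3 exception instances carry their traceback in
    # __traceback__, so re-raising the instance is enough;
    # with_traceback() attaches one explicitly when needed.
    raise exc_value.with_traceback(exc_tb)
except ZeroDivisionError as err:
    assert err.__traceback__ is not None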
diff --git a/breezy/bzr/smart/server.py b/breezy/bzr/smart/server.py index 837c103ad8..3becc6f7ed 100644 --- a/breezy/bzr/smart/server.py +++ b/breezy/bzr/smart/server.py @@ -435,10 +435,7 @@ def _make_backing_transport(self, transport): self.transport = transport def _get_stdin_stdout(self): - if sys.version_info[0] < 3: - return sys.stdin, sys.stdout - else: - return sys.stdin.buffer, sys.stdout.buffer + return sys.stdin.buffer, sys.stdout.buffer def _make_smart_server(self, host, port, inet, timeout): if timeout is None: diff --git a/breezy/bzr/testament.py b/breezy/bzr/testament.py index 0fa4576dbb..156e7c6262 100644 --- a/breezy/bzr/testament.py +++ b/breezy/bzr/testament.py @@ -78,7 +78,6 @@ contains_linebreaks, sha_strings, ) -from ..sixish import text_type from ..tree import Tree @@ -163,7 +162,7 @@ def _get_entries(self): def _escape_path(self, path): if contains_linebreaks(path): raise ValueError(path) - if not isinstance(path, text_type): + if not isinstance(path, str): # TODO(jelmer): Clean this up for pad.lv/1696545 path = path.decode('ascii') return path.replace(u'\\', u'/').replace(u' ', u'\\ ') @@ -245,7 +244,7 @@ class StrictTestament3(StrictTestament): def _escape_path(self, path): if contains_linebreaks(path): raise ValueError(path) - if not isinstance(path, text_type): + if not isinstance(path, str): # TODO(jelmer): Clean this up for pad.lv/1696545 path = path.decode('ascii') if path == u'': diff --git a/breezy/bzr/versionedfile.py b/breezy/bzr/versionedfile.py index cd796753c2..4e6f715314 100644 --- a/breezy/bzr/versionedfile.py +++ b/breezy/bzr/versionedfile.py @@ -19,6 +19,7 @@ from __future__ import absolute_import from copy import copy +from io import BytesIO import itertools import os import struct @@ -46,12 +47,6 @@ errors, ) from ..registry import Registry -from ..sixish import ( - BytesIO, - viewitems, - viewvalues, - zip, - ) from ..textmerge import TextMerge @@ -327,7 +322,7 @@ def _find_needed_keys(self): refcounts = {} setdefault = refcounts.setdefault just_parents = set() - for child_key, parent_keys in viewitems(parent_map): + for child_key, parent_keys in parent_map.items(): if not parent_keys: # parent_keys may be None if a given VersionedFile claims to # not support graph operations. 
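The _get_stdin_stdout() simplification in the smart server hunk above keeps only the Python 3 branch: text-mode stdio accepts str, while the smart protocol speaks bytes, so the server talks to the underlying binary buffers. A short sketch, assuming an ordinary console where the stdio objects expose a .buffer attribute:

import sys

stdin_bytes = sys.stdin.buffer     # binary reader yielding bytes
stdout_bytes = sys.stdout.buffer   # binary writer accepting bytes
stdout_bytes.write(b'raw protocol bytes\n')
stdout_bytes.flush()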
@@ -1179,7 +1174,7 @@ def get_known_graph_ancestry(self, keys): this_parent_map = self.get_parent_map(pending) parent_map.update(this_parent_map) pending = set(itertools.chain.from_iterable( - viewvalues(this_parent_map))) + this_parent_map.values())) pending.difference_update(parent_map) kg = _mod_graph.KnownGraph(parent_map) return kg @@ -1393,11 +1388,11 @@ def get_parent_map(self, keys): """ prefixes = self._partition_keys(keys) result = {} - for prefix, suffixes in viewitems(prefixes): + for prefix, suffixes in prefixes.items(): path = self._mapper.map(prefix) vf = self._get_vf(path) parent_map = vf.get_parent_map(suffixes) - for key, parents in viewitems(parent_map): + for key, parents in parent_map.items(): result[prefix + (key,)] = tuple( prefix + (parent,) for parent in parents) return result @@ -1449,7 +1444,7 @@ def get_record_stream(self, keys, ordering, include_delta_closure): def _iter_keys_vf(self, keys): prefixes = self._partition_keys(keys) sha1s = {} - for prefix, suffixes in viewitems(prefixes): + for prefix, suffixes in prefixes.items(): path = self._mapper.map(prefix) vf = self._get_vf(path) yield prefix, suffixes, vf @@ -1459,7 +1454,7 @@ def get_sha1s(self, keys): sha1s = {} for prefix, suffixes, vf in self._iter_keys_vf(keys): vf_sha1s = vf.get_sha1s(suffixes) - for suffix, sha1 in viewitems(vf_sha1s): + for suffix, sha1 in vf_sha1s.items(): sha1s[prefix + (suffix,)] = sha1 return sha1s @@ -1659,7 +1654,7 @@ def get_parent_map(self, keys): result.update( _mod_graph.StackedParentsProvider( self._providers).get_parent_map(keys)) - for key, parents in viewitems(result): + for key, parents in result.items(): if parents == (): result[key] = (revision.NULL_REVISION,) return result @@ -1838,7 +1833,7 @@ def add_mpdiffs(self, records): def get_parent_map(self, keys): """See VersionedFiles.get_parent_map.""" - parent_view = viewitems(self._get_parent_map(k for (k,) in keys)) + parent_view = self._get_parent_map(k for (k,) in keys).items() return dict(((k,), tuple((p,) for p in v)) for k, v in parent_view) def get_sha1s(self, keys): @@ -1995,7 +1990,7 @@ def sort_groupcompress(parent_map): # gc-optimal ordering is approximately reverse topological, # properly grouped by file-id. per_prefix_map = {} - for item in viewitems(parent_map): + for item in parent_map.items(): key = item[0] if isinstance(key, bytes) or len(key) == 1: prefix = b'' @@ -2064,4 +2059,4 @@ def satisfy_refs_for_keys(self, keys): self._satisfy_refs_for_key(key) def get_referrers(self): - return set(itertools.chain.from_iterable(viewvalues(self.refs))) + return set(itertools.chain.from_iterable(self.refs.values())) diff --git a/breezy/bzr/vf_repository.py b/breezy/bzr/vf_repository.py index 077bdb1139..c6f5927fc5 100644 --- a/breezy/bzr/vf_repository.py +++ b/breezy/bzr/vf_repository.py @@ -18,6 +18,7 @@ from __future__ import absolute_import +from io import BytesIO from ..lazy_import import lazy_import lazy_import(globals(), """ @@ -77,12 +78,6 @@ RepositoryFormatMetaDir, ) -from ..sixish import ( - BytesIO, - range, - viewitems, - viewvalues, - ) from ..trace import ( mutter @@ -422,7 +417,7 @@ def record_iter_changes(self, tree, basis_revision_id, iter_changes, seen_root = False # Is the root in the basis delta? inv_delta = self._basis_delta modified_rev = self._new_revision_id - for change, head_candidates in viewvalues(changes): + for change, head_candidates in changes.values(): if change.versioned[1]: # versioned in target. 
# Several things may be happening here: # We may have a fork in the per-file graph @@ -1056,7 +1051,7 @@ def get_missing_parent_inventories(self, check_for_missing_texts=True): referrers = frozenset(r[0] for r in key_deps.get_referrers()) file_ids = self.fileids_altered_by_revision_ids(referrers) missing_texts = set() - for file_id, version_ids in viewitems(file_ids): + for file_id, version_ids in file_ids.items(): missing_texts.update( (file_id, version_id) for version_id in version_ids) present_texts = self.texts.get_parent_map(missing_texts) @@ -1232,7 +1227,7 @@ def _find_parent_keys_of_revisions(self, revision_keys): """ parent_map = self.revisions.get_parent_map(revision_keys) parent_keys = set(itertools.chain.from_iterable( - viewvalues(parent_map))) + parent_map.values())) parent_keys.difference_update(revision_keys) parent_keys.discard(_mod_revision.NULL_REVISION) return parent_keys @@ -1312,7 +1307,7 @@ def _do_generate_text_key_index(self, ancestors, text_key_references, pb): # a cache of the text keys to allow reuse; costs a dict of all the # keys, but saves a 2-tuple for every child of a given key. text_key_cache = {} - for text_key, valid in viewitems(text_key_references): + for text_key, valid in text_key_references.items(): if not valid: invalid_keys.add(text_key) else: @@ -1417,7 +1412,7 @@ def _find_file_keys_to_fetch(self, revision_ids, pb): file_ids = self.fileids_altered_by_revision_ids(revision_ids, inv_w) count = 0 num_file_ids = len(file_ids) - for file_id, altered_versions in viewitems(file_ids): + for file_id, altered_versions in file_ids.items(): if pb is not None: pb.update(gettext("Fetch texts"), count, num_file_ids) count += 1 @@ -1631,8 +1626,8 @@ def get_parent_map(self, revision_ids): raise ValueError('get_parent_map(None) is not valid') else: query_keys.append((revision_id,)) - for (revision_id,), parent_keys in viewitems( - self.revisions.get_parent_map(query_keys)): + for (revision_id,), parent_keys in ( + self.revisions.get_parent_map(query_keys)).items(): if parent_keys: result[revision_id] = tuple([parent_revid for (parent_revid,) in parent_keys]) @@ -1657,8 +1652,8 @@ def get_file_graph(self): def revision_ids_to_search_result(self, result_set): """Convert a set of revision ids to a graph SearchResult.""" - result_parents = set(itertools.chain.from_iterable(viewvalues( - self.get_graph().get_parent_map(result_set)))) + result_parents = set(itertools.chain.from_iterable( + self.get_graph().get_parent_map(result_set).values())) included_keys = result_set.intersection(result_parents) start_keys = result_set.difference(included_keys) exclude_keys = result_parents.difference(result_set) @@ -2099,7 +2094,7 @@ def get_stream_for_missing_keys(self, missing_keys): raise AssertionError( 'cannot copy revisions to fill in missing deltas %s' % ( keys['revisions'],)) - for substream_kind, keys in viewitems(keys): + for substream_kind, keys in keys.items(): vf = getattr(self.from_repository, substream_kind) if vf is None and keys: raise AssertionError( @@ -2582,8 +2577,8 @@ def _fetch_parent_invs_for_stacking(self, parent_map, cache): source may be not have _fallback_repositories even though it is stacked.) 
""" - parent_revs = set(itertools.chain.from_iterable(viewvalues( - parent_map))) + parent_revs = set(itertools.chain.from_iterable( + parent_map.values())) present_parents = self.source.get_parent_map(parent_revs) absent_parents = parent_revs.difference(present_parents) parent_invs_keys_for_stacking = self.source.inventories.get_parent_map( @@ -2915,7 +2910,7 @@ def _install_revision(repository, rev, revision_tree, signature, # commit to determine parents. There is a latent/real bug here where # the parents inserted are not those commit would do - in particular # they are not filtered by heads(). RBC, AB - for revision, tree in viewitems(parent_trees): + for revision, tree in parent_trees.items(): try: path = tree.id2path(ie.file_id) except errors.NoSuchId: diff --git a/breezy/bzr/vf_search.py b/breezy/bzr/vf_search.py index 9ef6228153..1c329af704 100644 --- a/breezy/bzr/vf_search.py +++ b/breezy/bzr/vf_search.py @@ -31,9 +31,6 @@ Graph, invert_parent_map, ) -from ..sixish import ( - viewvalues, - ) class AbstractSearchResult(object): @@ -386,7 +383,7 @@ def search_result_from_parent_map(parent_map, missing_keys): # start_set is all the keys in the cache start_set = set(parent_map) # result set is all the references to keys in the cache - result_parents = set(itertools.chain.from_iterable(viewvalues(parent_map))) + result_parents = set(itertools.chain.from_iterable(parent_map.values())) stop_keys = result_parents.difference(start_set) # We don't need to send ghosts back to the server as a position to # stop either. @@ -424,14 +421,14 @@ def _run_search(parent_map, heads, exclude_keys): next_revs = next(s) except StopIteration: break - for parents in viewvalues(s._current_parents): + for parents in s._current_parents.values(): f_heads = heads.intersection(parents) if f_heads: found_heads.update(f_heads) stop_keys = exclude_keys.intersection(next_revs) if stop_keys: s.stop_searching_any(stop_keys) - for parents in viewvalues(s._current_parents): + for parents in s._current_parents.values(): f_heads = heads.intersection(parents) if f_heads: found_heads.update(f_heads) diff --git a/breezy/bzr/weave.py b/breezy/bzr/weave.py index c9c1b71d0c..bd350ae0df 100755 --- a/breezy/bzr/weave.py +++ b/breezy/bzr/weave.py @@ -67,6 +67,7 @@ # FIXME: the conflict markers should be *7* characters from copy import copy +from io import BytesIO import os import patiencediff @@ -85,9 +86,6 @@ ) from ..osutils import dirname, sha, sha_strings, split_lines from ..revision import NULL_REVISION -from ..sixish import ( - BytesIO, - ) from ..trace import mutter from .versionedfile import ( AbsentContentFactory, diff --git a/breezy/bzr/weavefile.py b/breezy/bzr/weavefile.py index 8a0956ea0b..a2495e76f7 100644 --- a/breezy/bzr/weavefile.py +++ b/breezy/bzr/weavefile.py @@ -36,8 +36,6 @@ from __future__ import absolute_import -from ..sixish import bytesintern - # TODO: When extracting a single version it'd be enough to just pass # an iterator returning the weave lines... We don't really need to # deserialize it into memory. 
@@ -164,5 +162,5 @@ def _read_weave_v5(f, w): elif l == b'}\n': w._weave.append((b'}', None)) else: - w._weave.append((bytesintern(l[0:1]), int(l[2:].decode('ascii')))) + w._weave.append((l[0:1], int(l[2:].decode('ascii')))) return w diff --git a/breezy/bzr/workingtree.py b/breezy/bzr/workingtree.py index 67507a143c..581707c42b 100644 --- a/breezy/bzr/workingtree.py +++ b/breezy/bzr/workingtree.py @@ -39,6 +39,7 @@ except ImportError: # python < 3.7 from collections import deque import errno +from io import BytesIO import itertools import operator import os @@ -51,9 +52,9 @@ from .. import lazy_import lazy_import.lazy_import(globals(), """ +import contextlib from breezy import ( cache_utf8, - cleanup, conflicts as _mod_conflicts, globbing, ignores, @@ -73,10 +74,6 @@ ) from ..lock import LogicalLockResult from .inventorytree import InventoryRevisionTree, MutableInventoryTree -from ..sixish import ( - BytesIO, - text_type, - ) from ..trace import mutter, note from ..tree import ( get_canonical_path, @@ -384,7 +381,7 @@ def remove(self, files, verbose=False, to_file=None, keep_files=True, :force: Delete files and directories, even if they are changed and even if the directories are not empty. """ - if isinstance(files, (str, text_type)): + if isinstance(files, str): files = [files] inv_delta = [] @@ -1028,7 +1025,7 @@ def list_files(self, include_root=False, from_dir=None, recursive=True, :param from_dir: start from this directory or None for the root :param recursive: whether to recurse into subdirectories or not """ - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(self.lock_read()) if from_dir is None and include_root is True: yield ('', 'V', 'directory', self.root_inventory.root) @@ -1197,7 +1194,7 @@ def move(self, from_paths, to_dir=None, after=False): if to_dir is None: raise TypeError('You must supply a target directory') # check destination directory - if isinstance(from_paths, (str, text_type)): + if isinstance(from_paths, str): raise ValueError() with self.lock_tree_write(): to_abs = self.abspath(to_dir) diff --git a/breezy/bzr/workingtree_4.py b/breezy/bzr/workingtree_4.py index 320a405988..7c1be7f236 100644 --- a/breezy/bzr/workingtree_4.py +++ b/breezy/bzr/workingtree_4.py @@ -29,13 +29,13 @@ from ..lazy_import import lazy_import lazy_import(globals(), """ +import contextlib import errno import stat from breezy import ( branch as _mod_branch, cache_utf8, - cleanup, controldir, debug, filters as _mod_filters, @@ -74,9 +74,6 @@ realpath, safe_unicode, ) -from ..sixish import ( - viewitems, - ) from ..transport import get_transport_from_path from ..transport.local import LocalTransport from ..tree import ( @@ -700,7 +697,7 @@ def move(self, from_paths, to_dir, after=False): # GZ 2017-03-28: The rollbacks variable was shadowed in the loop below # missing those added here, but there's also no test coverage for this. 
- rollbacks = cleanup.ExitStack() + rollbacks = contextlib.ExitStack() def move_one(old_entry, from_path_utf8, minikind, executable, fingerprint, packed_stat, size, @@ -1044,7 +1041,7 @@ def _paths2ids_using_bisect(self, paths, search_indexes, raise errors.PathsNotVersionedError( [p.decode('utf-8') for p in paths]) - for dir_name_id, trees_info in viewitems(found): + for dir_name_id, trees_info in found.items(): for index in search_indexes: if trees_info[index][0] not in (b'r', b'a'): found_ids.add(dir_name_id[2]) diff --git a/breezy/bzr/xml8.py b/breezy/bzr/xml8.py index 412de17405..cdd0e0880f 100644 --- a/breezy/bzr/xml8.py +++ b/breezy/bzr/xml8.py @@ -36,7 +36,6 @@ unpack_inventory_flat, ) from ..revision import Revision -from ..sixish import unichr from ..errors import BzrError @@ -56,7 +55,7 @@ def _unescaper(match, _map=_xml_unescape_map): except KeyError: if not code.startswith(b'#'): raise - return unichr(int(code[1:])).encode('utf8') + return chr(int(code[1:])).encode('utf8') _unescape_re = lazy_regex.lazy_compile(b'\\&([^;]*);') diff --git a/breezy/bzr/xml_serializer.py b/breezy/bzr/xml_serializer.py index 11187e3565..8ceef13060 100644 --- a/breezy/bzr/xml_serializer.py +++ b/breezy/bzr/xml_serializer.py @@ -49,7 +49,6 @@ errors, lazy_regex, ) -from ..sixish import text_type, bytesintern from . import ( inventory, serializer, @@ -144,10 +143,10 @@ def get_utf8_or_ascii(a_str, _encode_utf8=cache_utf8.encode): # This is fairly optimized because we know what cElementTree does, this is # not meant as a generic function for all cases. Because it is possible for # an 8-bit string to not be ascii or valid utf8. - if a_str.__class__ is text_type: + if a_str.__class__ is str: return _encode_utf8(a_str) else: - return bytesintern(a_str) + return a_str _utf8_re = lazy_regex.lazy_compile(b'[&<>\'\"]|[\x80-\xff]+') @@ -206,7 +205,7 @@ def encode_and_escape(unicode_or_utf8_str, _map=_to_escaped_map): # to check if None, rather than try/KeyError text = _map.get(unicode_or_utf8_str) if text is None: - if isinstance(unicode_or_utf8_str, text_type): + if isinstance(unicode_or_utf8_str, str): # The alternative policy is to do a regular UTF8 encoding # and then escape only XML meta characters. # Performance is equivalent once you use cache_utf8. *However* diff --git a/breezy/cethread.py b/breezy/cethread.py index c7e87cc7cd..79814a9715 100644 --- a/breezy/cethread.py +++ b/breezy/cethread.py @@ -19,10 +19,6 @@ import sys import threading -from .sixish import ( - reraise, -) - class CatchingExceptionThread(threading.Thread): """A thread that keeps track of exceptions. @@ -145,7 +141,7 @@ def join(self, timeout=None): if (self.ignored_exceptions is None or not self.ignored_exceptions(exc_value)): # Raise non ignored exceptions - reraise(exc_class, exc_value, exc_tb) + raise exc_value def pending_exception(self): """Raise the caught exception. diff --git a/breezy/check.py b/breezy/check.py index 537c0f4bc5..1ee6c98d4b 100644 --- a/breezy/check.py +++ b/breezy/check.py @@ -36,8 +36,9 @@ from __future__ import absolute_import +import contextlib + from . 
import ( - cleanup, errors, ) from .controldir import ControlDir @@ -101,7 +102,7 @@ def check_dwim(path, verbose, do_branch=False, do_repo=False, do_tree=False): except errors.NotBranchError: base_tree = branch = repo = None - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: needed_refs = {} if base_tree is not None: # If the tree is a lightweight checkout we won't see it in diff --git a/breezy/cleanup.py b/breezy/cleanup.py deleted file mode 100644 index 426283367e..0000000000 --- a/breezy/cleanup.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (C) 2009, 2010 Canonical Ltd -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -"""Helpers for managing cleanup functions and the errors they might raise. - -This currently just contains a copy of contextlib.ExitStack, available -even on older versions of Python. -""" - -from __future__ import absolute_import - -from collections import deque -import sys - - -try: - from contextlib import ExitStack -except ImportError: - # Copied from the Python standard library on Python 3.4. - # Copyright: Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, - # 2009, 2010, 2011 Python Software Foundation - # - # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 - # -------------------------------------------- - # . - # 1. This LICENSE AGREEMENT is between the Python Software Foundation - # ("PSF"), and the Individual or Organization ("Licensee") accessing and - # otherwise using this software ("Python") in source or binary form and - # its associated documentation. - # . - # 2. Subject to the terms and conditions of this License Agreement, PSF hereby - # grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, - # analyze, test, perform and/or display publicly, prepare derivative works, - # distribute, and otherwise use Python alone or in any derivative version, - # provided, however, that PSF's License Agreement and PSF's notice of copyright, - # i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, - # 2011 Python Software Foundation; All Rights Reserved" are retained in Python - # alone or in any derivative version prepared by Licensee. - # . - # 3. In the event Licensee prepares a derivative work that is based on - # or incorporates Python or any part thereof, and wants to make - # the derivative work available to others as provided herein, then - # Licensee hereby agrees to include in any such work a brief summary of - # the changes made to Python. - # . - # 4. PSF is making Python available to Licensee on an "AS IS" - # basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR - # IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND - # DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS - # FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT - # INFRINGE ANY THIRD PARTY RIGHTS. - # . - # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON - # FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS - # A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, - # OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - # . - # 6. This License Agreement will automatically terminate upon a material - # breach of its terms and conditions. - # . - # 7. Nothing in this License Agreement shall be deemed to create any - # relationship of agency, partnership, or joint venture between PSF and - # Licensee. This License Agreement does not grant permission to use PSF - # trademarks or trade name in a trademark sense to endorse or promote - # products or services of Licensee, or any third party. - # . - # 8. By copying, installing or otherwise using Python, Licensee - # agrees to be bound by the terms and conditions of this License - # Agreement. - - def _reraise_with_existing_context(exc_details): - # Use 3 argument raise in Python 2, - # but use exec to avoid SyntaxError in Python 3 - exc_type, exc_value, exc_tb = exc_details - exec("raise exc_type, exc_value, exc_tb") - - - # Inspired by discussions on http://bugs.python.org/issue13585 - class ExitStack(object): - """Context manager for dynamic management of a stack of exit callbacks - - For example: - - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception - - """ - def __init__(self): - self._exit_callbacks = deque() - - def pop_all(self): - """Preserve the context stack by transferring it to a new instance""" - new_stack = type(self)() - new_stack._exit_callbacks = self._exit_callbacks - self._exit_callbacks = deque() - return new_stack - - def _push_cm_exit(self, cm, cm_exit): - """Helper to correctly register callbacks to __exit__ methods""" - def _exit_wrapper(*exc_details): - return cm_exit(cm, *exc_details) - _exit_wrapper.__self__ = cm - self.push(_exit_wrapper) - - def push(self, exit): - """Registers a callback with the standard __exit__ method signature - - Can suppress exceptions the same way __exit__ methods can. - - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself) - """ - # We use an unbound method rather than a bound method to follow - # the standard lookup behaviour for special methods - _cb_type = type(exit) - try: - exit_method = _cb_type.__exit__ - except AttributeError: - # Not a context manager, so assume its a callable - self._exit_callbacks.append(exit) - else: - self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator - - def callback(self, callback, *args, **kwds): - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. 
- """ - def _exit_wrapper(exc_type, exc, tb): - callback(*args, **kwds) - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection - _exit_wrapper.__wrapped__ = callback - self.push(_exit_wrapper) - return callback # Allow use as a decorator - - def enter_context(self, cm): - """Enters the supplied context manager - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - # We look up the special methods on the type to match the with statement - _cm_type = type(cm) - _exit = _cm_type.__exit__ - result = _cm_type.__enter__(cm) - self._push_cm_exit(cm, _exit) - return result - - def close(self): - """Immediately unwind the context stack""" - self.__exit__(None, None, None) - - def __enter__(self): - return self - - def __exit__(self, *exc_details): - received_exc = exc_details[0] is not None - - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - def _make_context_fixer(frame_exc): - return lambda new_exc, old_exc: None - _fix_exception_context = _make_context_fixer(frame_exc) - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - pending_raise = False - while self._exit_callbacks: - cb = self._exit_callbacks.pop() - try: - if cb(*exc_details): - suppressed_exc = True - pending_raise = False - exc_details = (None, None, None) - except: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - pending_raise = True - exc_details = new_exc_details - if pending_raise: - _reraise_with_existing_context(exc_details) - return received_exc and suppressed_exc diff --git a/breezy/cmd_version_info.py b/breezy/cmd_version_info.py index b183f995e6..36bbcf8d26 100644 --- a/breezy/cmd_version_info.py +++ b/breezy/cmd_version_info.py @@ -34,7 +34,6 @@ ) from .commands import Command from .option import Option, RegistryOption -from .sixish import text_type def _parse_version_info_format(format): @@ -87,7 +86,7 @@ class cmd_version_info(Command): help='Include the revision-history.'), Option('include-file-revisions', help='Include the last revision for each file.'), - Option('template', type=text_type, + Option('template', type=str, help='Template for the output.'), 'revision', ] diff --git a/breezy/commands.py b/breezy/commands.py index e99a96b894..315bd92bb6 100644 --- a/breezy/commands.py +++ b/breezy/commands.py @@ -23,6 +23,7 @@ # TODO: Specific "examples" property on commands for consistent formatting. +import contextlib import os import sys @@ -38,7 +39,6 @@ import breezy from breezy import ( - cleanup, cmdline, debug, trace, @@ -52,9 +52,6 @@ from .option import Option from .plugin import disable_plugins, load_plugins, plugin_name from . 
import errors, registry -from .sixish import ( - string_types, - ) class BzrOptionError(errors.BzrCommandError): @@ -716,7 +713,7 @@ def options(self): r = Option.STD_OPTIONS.copy() std_names = set(r) for o in self.takes_options: - if isinstance(o, string_types): + if isinstance(o, str): o = option.Option.OPTIONS[o] r[o.name] = o if o.name in std_names: @@ -783,7 +780,7 @@ def run(*args, **kwargs): for hook in Command.hooks['pre_command']: hook(self) try: - with cleanup.ExitStack() as self._exit_stack: + with contextlib.ExitStack() as self._exit_stack: return class_run(*args, **kwargs) finally: for hook in Command.hooks['post_command']: @@ -1260,11 +1257,8 @@ def _specified_or_unicode_argv(argv): try: # ensure all arguments are unicode strings for a in argv: - if not isinstance(a, string_types): + if not isinstance(a, str): raise ValueError('not native str or unicode: %r' % (a,)) - if isinstance(a, bytes): - # For Python 2 only allow ascii native strings - a = a.decode('ascii') new_argv.append(a) except (ValueError, UnicodeDecodeError): raise errors.BzrError("argv should be list of unicode strings.") diff --git a/breezy/commit.py b/breezy/commit.py index 3c3b2f87bc..04130928fe 100644 --- a/breezy/commit.py +++ b/breezy/commit.py @@ -57,7 +57,7 @@ ui, ) from .branch import Branch -from .cleanup import ExitStack +from contextlib import ExitStack import breezy.config from .errors import (BzrError, ConflictsInTree, diff --git a/breezy/commit_signature_commands.py b/breezy/commit_signature_commands.py index 37dff8b0af..3ee54ed267 100644 --- a/breezy/commit_signature_commands.py +++ b/breezy/commit_signature_commands.py @@ -28,7 +28,6 @@ from .commands import Command from .option import Option from .i18n import gettext, ngettext -from .sixish import text_type class cmd_sign_my_commits(Command): @@ -102,7 +101,7 @@ class cmd_verify_signatures(Command): help='Comma separated list of GPG key patterns which are' ' acceptable for verification.', short_name='k', - type=text_type,), + type=str,), 'revision', 'verbose', ] diff --git a/breezy/config.py b/breezy/config.py index 7d0ef89b06..cb93238db3 100644 --- a/breezy/config.py +++ b/breezy/config.py @@ -79,6 +79,7 @@ import sys import configobj +from io import BytesIO import breezy from .lazy_import import lazy_import @@ -115,13 +116,6 @@ lazy_regex, registry, ) -from .sixish import ( - binary_type, - BytesIO, - PY3, - string_types, - text_type, - ) CHECK_IF_POSSIBLE = 0 @@ -258,19 +252,6 @@ def signing_policy_from_unicode(signature_string): % signature_string) -def _has_decode_bug(): - """True if configobj will fail to decode to unicode on Python 2.""" - if PY3: - return False - conf = configobj.ConfigObj() - decode = getattr(conf, "_decode", None) - if decode: - result = decode(b"\xc2\xa7", "utf-8") - if isinstance(result[0], str): - return True - return False - - def _has_triplequote_bug(): """True if triple quote logic is reversed, see lp:710410.""" conf = configobj.ConfigObj() @@ -288,12 +269,6 @@ def __init__(self, infile=None, **kwargs): interpolation=False, **kwargs) - if _has_decode_bug(): - def _decode(self, infile, encoding): - if isinstance(infile, str) and encoding: - return infile.decode(encoding).splitlines(True) - return super(ConfigObj, self)._decode(infile, encoding) - if _has_triplequote_bug(): def _get_triple_quote(self, value): quot = super(ConfigObj, self)._get_triple_quote(value) @@ -520,7 +495,7 @@ def get_user_option_as_list(self, option_name, expand=None): otherwise. 
""" l = self.get_user_option(option_name, expand=expand) - if isinstance(l, string_types): + if isinstance(l, str): # A single value, most probably the user forgot (or didn't care to # add) the final ',' l = [l] @@ -563,8 +538,6 @@ def username(self): """ v = os.environ.get('BRZ_EMAIL') if v: - if not PY3: - v = v.decode(osutils.get_user_encoding()) return v v = self._get_user_id() if v: @@ -730,7 +703,7 @@ def from_string(cls, str_or_unicode, file_name=None, save=False): return conf def _create_from_string(self, str_or_unicode, save): - if isinstance(str_or_unicode, text_type): + if isinstance(str_or_unicode, str): str_or_unicode = str_or_unicode.encode('utf-8') self._content = BytesIO(str_or_unicode) # Some tests use in-memory configs, some other always need the config @@ -2129,7 +2102,7 @@ def __init__(self, name, override_from_env=None, raise AssertionError( 'Only empty lists are supported as default values') self.default = u',' - elif isinstance(default, (binary_type, text_type, bool, int, float)): + elif isinstance(default, (bytes, str, bool, int, float)): # Rely on python to convert strings, booleans and integers self.default = u'%s' % (default,) elif callable(default): @@ -2176,8 +2149,6 @@ def get_override(self): try: # If the env variable is defined, its value takes precedence value = os.environ[var] - if not PY3: - value = value.decode(osutils.get_user_encoding()) break except KeyError: continue @@ -2189,8 +2160,6 @@ def get_default(self): try: # If the env variable is defined, its value is the default one value = os.environ[var] - if not PY3: - value = value.decode(osutils.get_user_encoding()) break except KeyError: continue @@ -2198,7 +2167,7 @@ def get_default(self): # Otherwise, fallback to the value defined at registration if callable(self.default): value = self.default() - if not isinstance(value, text_type): + if not isinstance(value, str): raise AssertionError( "Callable default value for '%s' should be unicode" % (self.name)) @@ -2283,7 +2252,7 @@ def __init__(self, name, default=None, default_from_env=None, invalid=invalid, unquote=False) def from_unicode(self, unicode_str): - if not isinstance(unicode_str, string_types): + if not isinstance(unicode_str, str): raise TypeError # Now inject our string directly as unicode. All callers got their # value from configobj, so values that need to be quoted are already @@ -2291,7 +2260,7 @@ def from_unicode(self, unicode_str): _list_converter_config.reset() _list_converter_config._parse([u"list=%s" % (unicode_str,)]) maybe_list = _list_converter_config['list'] - if isinstance(maybe_list, string_types): + if isinstance(maybe_list, str): if maybe_list: # A single value, most probably the user forgot (or didn't care # to add) the final ',' @@ -2323,7 +2292,7 @@ def __init__(self, name, registry, default_from_env=None, self.registry = registry def from_unicode(self, unicode_str): - if not isinstance(unicode_str, string_types): + if not isinstance(unicode_str, str): raise TypeError try: return self.registry.get(unicode_str) @@ -3082,7 +3051,7 @@ def quote(self, value): self._config_obj.list_values = False def unquote(self, value): - if value and isinstance(value, string_types): + if value and isinstance(value, str): # _unquote doesn't handle None nor empty strings nor anything that # is not a string, really. value = self._config_obj._unquote(value) @@ -3499,7 +3468,7 @@ def expand_and_convert(val): # None or ends up being None during expansion or conversion. 
if val is not None: if expand: - if isinstance(val, string_types): + if isinstance(val, str): val = self._expand_options_in_string(val) else: trace.warning('Cannot expand "%s":' @@ -3906,7 +3875,7 @@ class cmd_config(commands.Command): # http://pad.lv/788991 -- vila 20101115 commands.Option('scope', help='Reduce the scope to the specified' ' configuration file.', - type=text_type), + type=str), commands.Option('all', help='Display all the defined values for the matching options.', ), diff --git a/breezy/conflicts.py b/breezy/conflicts.py index d9da28c9fc..c7f8bf3eff 100644 --- a/breezy/conflicts.py +++ b/breezy/conflicts.py @@ -42,7 +42,6 @@ option, registry, ) -from .sixish import text_type CONFLICT_SUFFIXES = ('.THIS', '.BASE', '.OTHER') @@ -77,7 +76,7 @@ def run(self, text=False, directory=u'.'): continue self.outf.write(conflict.path + '\n') else: - self.outf.write(text_type(conflict) + '\n') + self.outf.write(str(conflict) + '\n') resolve_action_registry = registry.Registry() @@ -154,7 +153,7 @@ def run(self, file_list=None, all=False, action=None, directory=None): before - after) trace.note(gettext('Remaining conflicts:')) for conflict in tree.conflicts(): - trace.note(text_type(conflict)) + trace.note(str(conflict)) return 1 else: trace.note(gettext('All conflicts resolved.')) @@ -289,7 +288,7 @@ def to_stanzas(self): def to_strings(self): """Generate strings for the provided conflicts""" for conflict in self: - yield text_type(conflict) + yield str(conflict) def remove_files(self, tree): """Remove the THIS, BASE and OTHER files for listed conflicts""" @@ -362,7 +361,7 @@ def __init__(self, path, file_id=None): self.path = path # the factory blindly transfers the Stanza values to __init__ and # Stanza is purely a Unicode api. - if isinstance(file_id, text_type): + if isinstance(file_id, str): file_id = cache_utf8.encode(file_id) self.file_id = osutils.safe_file_id(file_id) @@ -736,7 +735,7 @@ def __init__(self, action, path, conflict_path, file_id=None, self.conflict_path = conflict_path # the factory blindly transfers the Stanza values to __init__, # so they can be unicode. 
- if isinstance(conflict_file_id, text_type): + if isinstance(conflict_file_id, str): conflict_file_id = cache_utf8.encode(conflict_file_id) self.conflict_file_id = osutils.safe_file_id(conflict_file_id) diff --git a/breezy/crash.py b/breezy/crash.py index ee7c7dd1a6..b3bb3e72ac 100644 --- a/breezy/crash.py +++ b/breezy/crash.py @@ -44,6 +44,7 @@ # to test with apport it's useful to set # export APPORT_IGNORE_OBSOLETE_PACKAGES=1 +from io import StringIO import os import platform import pprint @@ -58,9 +59,6 @@ plugin, trace, ) -from .sixish import ( - StringIO, - ) def report_bug(exc_info, stderr): diff --git a/breezy/delta.py b/breezy/delta.py index 5d263d9ac5..deed609fb4 100644 --- a/breezy/delta.py +++ b/breezy/delta.py @@ -16,13 +16,12 @@ from __future__ import absolute_import +from io import StringIO + from breezy import ( osutils, trace, ) -from .sixish import ( - StringIO, - ) from .tree import TreeChange diff --git a/breezy/diff.py b/breezy/diff.py index 937bb1cb5d..bec3d365e0 100644 --- a/breezy/diff.py +++ b/breezy/diff.py @@ -16,6 +16,7 @@ from __future__ import absolute_import +import contextlib import difflib import os import re @@ -29,7 +30,6 @@ import tempfile from breezy import ( - cleanup, controldir, osutils, textfile, @@ -47,7 +47,6 @@ from .registry import ( Registry, ) -from .sixish import text_type from .trace import mutter, note, warning from .tree import FileTimestampUnavailable @@ -519,7 +518,7 @@ def show_diff_trees(old_tree, new_tree, to_file, specific_files=None, context = DEFAULT_CONTEXT_AMOUNT if format_cls is None: format_cls = DiffTree - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(old_tree.lock_read()) if extra_trees is not None: for tree in extra_trees: @@ -802,7 +801,7 @@ def _get_command(self, old_path, new_path): if sys.platform == 'win32': # Popen doesn't accept unicode on win32 command_encoded = [] for c in command: - if isinstance(c, text_type): + if isinstance(c, str): command_encoded.append(c.encode('mbcs')) else: command_encoded.append(c) diff --git a/breezy/doc/api/branch.txt b/breezy/doc/api/branch.txt index a1d30b53c9..38709d75f2 100644 --- a/breezy/doc/api/branch.txt +++ b/breezy/doc/api/branch.txt @@ -26,7 +26,7 @@ or via the ControlDir: A branch has a history of revisions on it: >>> new_branch.last_revision() - 'null:' + b'null:' We need to write some more documentation, showing push and pull operations. Cloning might also be nice. diff --git a/breezy/email_message.py b/breezy/email_message.py index a3ccf45470..c22dc0ef69 100644 --- a/breezy/email_message.py +++ b/breezy/email_message.py @@ -18,26 +18,14 @@ from __future__ import absolute_import -try: - from email.message import Message - from email.header import Header - from email.mime.multipart import MIMEMultipart - from email.mime.text import MIMEText - from email.utils import formataddr, parseaddr -except ImportError: # python < 3 - from email import ( - Header, - Message, - MIMEMultipart, - MIMEText, - ) - from email.Utils import formataddr, parseaddr +from email.message import Message +from email.header import Header +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.utils import formataddr, parseaddr from . 
import __version__ as _breezy_version from .errors import BzrBadParameterNotUnicode from .osutils import safe_unicode -from .sixish import ( - text_type, - ) from .smtp_connection import SMTPConnection @@ -70,7 +58,7 @@ def __init__(self, from_address, to_address, subject, body=None): self._body = body self._parts = [] - if isinstance(to_address, (bytes, text_type)): + if isinstance(to_address, (bytes, str)): to_address = [to_address] to_addresses = [] @@ -180,7 +168,7 @@ def address_to_encoded_header(address): :param address: An unicode string, or UTF-8 byte string. :return: A possibly RFC2047-encoded string. """ - if not isinstance(address, (str, text_type)): + if not isinstance(address, str): raise BzrBadParameterNotUnicode(address) # Can't call Header over all the address, because that encodes both the # name and the email address, which is not permitted by RFCs. @@ -204,7 +192,7 @@ def string_with_encoding(string_): # avoid base64 when it's not necessary in order to be most compatible # with the capabilities of the receiving side, we check with encode() # and decode() whether the body is actually ascii-only. - if isinstance(string_, text_type): + if isinstance(string_, str): try: return (string_.encode('ascii'), 'ascii') except UnicodeEncodeError: diff --git a/breezy/errors.py b/breezy/errors.py index b41c584edc..972f006464 100644 --- a/breezy/errors.py +++ b/breezy/errors.py @@ -19,9 +19,6 @@ from __future__ import absolute_import -from .sixish import ( - PY3, - ) # TODO: is there any value in providing the .args field used by standard # python exceptions? A list of values with no names seems less useful @@ -108,13 +105,7 @@ def _format(self): getattr(self, '_fmt', None), err) - if PY3: - __str__ = _format - else: - def __str__(self): - return self._format().encode('utf-8') - - __unicode__ = _format + __str__ = _format def __repr__(self): return '%s(%s)' % (self.__class__.__name__, str(self)) @@ -182,15 +173,6 @@ def __init__(self, transport): self.transport = transport -class InvalidEntryName(InternalBzrError): - - _fmt = "Invalid entry name: %(name)s" - - def __init__(self, name): - BzrError.__init__(self) - self.name = name - - class InvalidRevisionNumber(BzrError): _fmt = "Invalid revision number %(revno)s" diff --git a/breezy/export_pot.py b/breezy/export_pot.py index 15453cf6ab..99bdc989d0 100644 --- a/breezy/export_pot.py +++ b/breezy/export_pot.py @@ -39,7 +39,6 @@ option, plugin as _mod_plugin, ) -from .sixish import PY3 from .trace import ( mutter, note, @@ -158,8 +157,6 @@ def poentry(self, path, lineno, s, comment=None): "msgstr \"\"\n" "\n".format( path=path, lineno=lineno, comment=comment, msg=_normalize(s))) - if not PY3: - line = line.decode('utf-8') self.outf.write(line) def poentry_in_context(self, context, string, comment=None): diff --git a/breezy/fetch_ghosts.py b/breezy/fetch_ghosts.py index 779275bda8..0fd9092cec 100644 --- a/breezy/fetch_ghosts.py +++ b/breezy/fetch_ghosts.py @@ -16,7 +16,8 @@ from __future__ import absolute_import -from . 
import cleanup +import contextlib + from .branch import Branch from .trace import note from .errors import NoSuchRevision, BzrCommandError @@ -43,7 +44,7 @@ def __init__(self, this_branch, other_branch): def run(self): lock_other = self.this_branch.base != self.other_branch.base - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(self.this_branch.lock_write()) if lock_other: exit_stack.enter_context(self.other_branch.lock_read()) diff --git a/breezy/git/branch.py b/breezy/git/branch.py index 947bba57b4..205fe0a7bc 100644 --- a/breezy/git/branch.py +++ b/breezy/git/branch.py @@ -19,6 +19,7 @@ from __future__ import absolute_import +import contextlib from io import BytesIO from collections import defaultdict @@ -35,7 +36,6 @@ from .. import ( branch, - cleanup, config, controldir, errors, @@ -51,10 +51,6 @@ from ..revision import ( NULL_REVISION, ) -from ..sixish import ( - text_type, - viewitems, - ) from ..trace import ( is_quiet, mutter, @@ -241,7 +237,7 @@ def merge_to(self, to_tags, overwrite=False, ignore_master=False, master = None else: master = to_tags.branch.get_master_branch() - with cleanup.ExitStack() as es: + with contextlib.ExitStack() as es: if master is not None: es.enter_context(master.lock_write()) updates, conflicts = self._merge_to_non_git( @@ -277,7 +273,7 @@ def __init__(self, branch): def _set_tag_dict(self, to_dict): extra = set(self.refs.allkeys()) - for k, revid in viewitems(to_dict): + for k, revid in to_dict.items(): name = tag_name_to_ref(k) if name in extra: extra.remove(name) @@ -798,7 +794,7 @@ def _iter_tag_refs(self): :return: iterator over (ref_name, tag_name, peeled_sha1, unpeeled_sha1) """ refs = self.repository.controldir.get_refs_container() - for ref_name, unpeeled in viewitems(refs.as_dict()): + for ref_name, unpeeled in refs.as_dict().items(): try: tag_name = ref_to_tag_name(ref_name) except (ValueError, UnicodeDecodeError): @@ -806,7 +802,7 @@ def _iter_tag_refs(self): peeled = refs.get_peeled(ref_name) if peeled is None: peeled = unpeeled - if not isinstance(tag_name, text_type): + if not isinstance(tag_name, str): raise TypeError(tag_name) yield (ref_name, tag_name, peeled, unpeeled) @@ -1049,7 +1045,7 @@ def pull(self, overwrite=False, stop_revision=None, if local and not bound_location: raise errors.LocalRequiresBoundBranch() source_is_master = False - with cleanup.ExitStack() as es: + with contextlib.ExitStack() as es: es.enter_context(self.source.lock_read()) if bound_location: # bound_location comes from a config file, some care has to be @@ -1137,8 +1133,8 @@ def get_changed_refs(old_refs): raise errors.DivergedBranches(self.source, self.target) refs = {self.target.ref: new_ref} result.new_revid = stop_revision - for name, sha in viewitems( - self.source.repository._git.refs.as_dict(b"refs/tags")): + for name, sha in ( + self.source.repository._git.refs.as_dict(b"refs/tags").items()): if sha not in self.source.repository._git: trace.mutter('Ignoring missing SHA: %s', sha) continue @@ -1300,7 +1296,7 @@ def _get_new_refs(self, stop_revision=None, fetch_tags=None, if fetch_tags is None: c = self.source.get_config_stack() fetch_tags = c.get('branch.fetch_tags') - for name, revid in viewitems(self.source.tags.get_tag_dict()): + for name, revid in self.source.tags.get_tag_dict().items(): if self.source.repository.has_revision(revid): ref = tag_name_to_ref(name) if not check_ref_format(ref): @@ -1337,7 +1333,7 @@ def ref_equals(refs, ref, git_sha, revid): # updated that hasn't actually 
been updated. return False # FIXME: Check for diverged branches - for ref, (git_sha, revid) in viewitems(new_refs): + for ref, (git_sha, revid) in new_refs.items(): if ref_equals(ret, ref, git_sha, revid): # Already up to date if git_sha is None: @@ -1374,7 +1370,7 @@ def fetch(self, stop_revision=None, fetch_tags=None, lossy=False, stop_revision = self.source.last_revision() ret = [] if fetch_tags: - for k, v in viewitems(self.source.tags.get_tag_dict()): + for k, v in self.source.tags.get_tag_dict().items(): ret.append((None, v)) ret.append((None, stop_revision)) try: diff --git a/breezy/git/cache.py b/breezy/git/cache.py index 3d96876c79..c728e009de 100644 --- a/breezy/git/cache.py +++ b/breezy/git/cache.py @@ -41,11 +41,6 @@ index as _mod_index, versionedfile, ) -from ..sixish import ( - viewitems, - viewkeys, - viewvalues, - ) from ..transport import ( get_transport_from_path, ) @@ -336,7 +331,7 @@ def lookup_blob_id(self, fileid, revision): def lookup_git_sha(self, sha): if not isinstance(sha, bytes): raise TypeError(sha) - for entry in viewvalues(self._by_sha[sha]): + for entry in self._by_sha[sha].values(): yield entry def lookup_tree_id(self, fileid, revision): @@ -346,13 +341,13 @@ def lookup_commit(self, revid): return self._by_revid[revid] def revids(self): - for key, entries in viewitems(self._by_sha): - for (type, type_data) in viewvalues(entries): + for key, entries in self._by_sha.items(): + for (type, type_data) in entries.values(): if type == "commit": yield type_data[0] def sha1s(self): - return viewkeys(self._by_sha) + return self._by_sha.keys() class SqliteCacheUpdater(CacheUpdater): @@ -711,10 +706,7 @@ def missing_revisions(self, revids): return ret def _keys(self): - try: - return self.db.keys() - except AttributeError: # python < 3 - return self.db.iterkeys() + return self.db.keys() def revids(self): """List the revision ids known.""" diff --git a/breezy/git/commands.py b/breezy/git/commands.py index 0978a64fe4..c0f24b378c 100644 --- a/breezy/git/commands.py +++ b/breezy/git/commands.py @@ -33,10 +33,6 @@ Option, RegistryOption, ) -from ..sixish import ( - text_type, - viewitems, - ) class cmd_git_import(Command): @@ -141,7 +137,7 @@ def run(self, src_location, dest_location=None, colocated=False, dest_format=Non mapping = source_repo.get_mapping() result = interrepo.fetch() with ui.ui_factory.nested_progress_bar() as pb: - for i, (name, sha) in enumerate(viewitems(result.refs)): + for i, (name, sha) in enumerate(result.refs.items()): try: branch_name = ref_to_branch_name(name) except ValueError: @@ -186,7 +182,7 @@ class cmd_git_object(Command): takes_args = ["sha1?"] takes_options = [Option('directory', short_name='d', - help='Location of repository.', type=text_type), + help='Location of repository.', type=str), Option('pretty', help='Pretty-print objects.')] encoding_type = 'exact' @@ -247,7 +243,7 @@ def run(self, location="."): object_store = get_object_store(repo) with object_store.lock_read(): refs = get_refs_container(controldir, object_store) - for k, v in sorted(viewitems(refs.as_dict())): + for k, v in sorted(refs.as_dict().items()): self.outf.write("%s -> %s\n" % (k.decode('utf-8'), v.decode('utf-8'))) @@ -304,7 +300,7 @@ class cmd_git_push_pristine_tar_deltas(Command): takes_options = [Option('directory', short_name='d', - help='Location of repository.', type=text_type)] + help='Location of repository.', type=str)] takes_args = ['target', 'package'] def run(self, target, package, directory='.'): diff --git a/breezy/git/commit.py b/breezy/git/commit.py 
index 030cb3c719..d659d40cfd 100644 --- a/breezy/git/commit.py +++ b/breezy/git/commit.py @@ -39,9 +39,6 @@ from ..repository import ( CommitBuilder, ) -from ..sixish import ( - viewitems, - ) from dulwich.objects import ( Blob, @@ -155,12 +152,11 @@ def update_basis(self, tree): def finish_inventory(self): # eliminate blobs that were removed - self._blobs = {k: v for (k, v) in viewitems( - self._blobs) if v is not None} + self._blobs = {k: v for (k, v) in self._blobs.items() if v is not None} def _iterblobs(self): return ((path, sha, mode) for (path, (mode, sha)) - in viewitems(self._blobs)) + in self._blobs.items()) def commit(self, message): self._validate_unicode_text(message, 'commit message') diff --git a/breezy/git/dir.py b/breezy/git/dir.py index 2715c2384a..9953f84016 100644 --- a/breezy/git/dir.py +++ b/breezy/git/dir.py @@ -19,18 +19,15 @@ from __future__ import absolute_import +import contextlib + from .. import ( branch as _mod_branch, - cleanup, errors as brz_errors, trace, osutils, urlutils, ) -from ..sixish import ( - PY3, - viewitems, - ) from ..transport import ( do_catching_redirections, get_transport_from_path, @@ -192,7 +189,7 @@ def sprout(self, url, revision_id=None, force_new_repo=False, else: wt = None if recurse == 'down': - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: basis = None if wt is not None: basis = wt.basis_tree() @@ -250,7 +247,7 @@ def clone_on_transport(self, transport, revision_id=None, determine_wants = interrepo.determine_wants_all (pack_hint, _, refs) = interrepo.fetch_objects(determine_wants, mapping=default_mapping) - for name, val in viewitems(refs): + for name, val in refs.items(): target_git_repo.refs[name] = val result_dir = self.__class__(transport, target_git_repo, format) if revision_id is not None: @@ -566,8 +563,6 @@ def get_branch_reference(self, name=None): else: base_url = urlutils.local_path_to_url( commondir.decode(osutils._fs_enc)).rstrip('/.git/') + '/' - if not PY3: - params = {k: v.encode('utf-8') for (k, v) in viewitems(params)} return urlutils.join_segment_parameters(base_url, params) return None diff --git a/breezy/git/fetch.py b/breezy/git/fetch.py index 05c6873a05..eefa265e65 100644 --- a/breezy/git/fetch.py +++ b/breezy/git/fetch.py @@ -51,7 +51,6 @@ NULL_REVISION, ) from ..bzr.inventorytree import InventoryRevisionTree -from ..sixish import text_type from ..bzr.testament import ( StrictTestament3, ) @@ -214,7 +213,7 @@ def remove_disappeared_children(base_bzr_tree, path, base_tree, :param lookup_object: Lookup a git object by its SHA1 :return: Inventory delta, as list """ - if not isinstance(path, text_type): + if not isinstance(path, str): raise TypeError(path) ret = [] for name, mode, hexsha in base_tree.iteritems(): diff --git a/breezy/git/git-remote-bzr b/breezy/git/git-remote-bzr index 860f0e494c..0c824ce4a7 100755 --- a/breezy/git/git-remote-bzr +++ b/breezy/git/git-remote-bzr @@ -32,8 +32,6 @@ signal.signal(signal.SIGINT, handle_sigint) import breezy breezy.initialize() -from breezy.sixish import PY3 - from breezy.plugin import load_plugins load_plugins() @@ -48,7 +46,4 @@ parser = optparse.OptionParser() (shortname, url) = args helper = RemoteHelper(open_local_dir(), shortname, open_remote_dir(url)) -if PY3: - helper.process(sys.stdin.buffer, sys.stdout.buffer) -else: - helper.process(sys.stdin, sys.stdout) +helper.process(sys.stdin.buffer, sys.stdout.buffer) diff --git a/breezy/git/git_remote_helper.py b/breezy/git/git_remote_helper.py index 4ebf18ffb7..432d4d7f67 100644 --- 
a/breezy/git/git_remote_helper.py +++ b/breezy/git/git_remote_helper.py @@ -29,7 +29,6 @@ from ..controldir import ControlDir from ..errors import NotBranchError, NoRepositoryPresent from ..repository import InterRepository -from ..sixish import viewitems from ..transport import get_transport_from_path from . import ( @@ -121,7 +120,7 @@ def cmd_list(self, outf, argv): object_store = get_object_store(repo) with object_store.lock_read(): refs = get_refs_container(self.remote_dir, object_store) - for ref, git_sha1 in viewitems(refs.as_dict()): + for ref, git_sha1 in refs.as_dict().items(): ref = ref.replace(b"~", b"_") outf.write(b"%s %s\n" % (git_sha1, ref)) outf.write(b"\n") diff --git a/breezy/git/interrepo.py b/breezy/git/interrepo.py index fb1978fe56..5c06e68af4 100644 --- a/breezy/git/interrepo.py +++ b/breezy/git/interrepo.py @@ -52,10 +52,6 @@ from ..revision import ( NULL_REVISION, ) -from ..sixish import ( - viewitems, - viewvalues, - ) from .. import ( config, trace, @@ -228,7 +224,7 @@ def missing_revisions(self, stop_revisions): new_stop_revids.append(revid) stop_revids = set() parent_map = graph.get_parent_map(new_stop_revids) - for parent_revids in viewvalues(parent_map): + for parent_revids in parent_map.values(): stop_revids.update(parent_revids) pb.update("determining revisions to fetch", len(missing)) return graph.iter_topo_order(missing) @@ -271,7 +267,7 @@ def fetch_refs(self, update_refs, lossy, overwrite=False): for (git_sha, bzr_revid) in new_refs.values() if git_sha is None or not git_sha.startswith(SYMREF)], lossy=lossy) - for name, (gitid, revid) in viewitems(new_refs): + for name, (gitid, revid) in new_refs.items(): if gitid is None: try: gitid = revidmap[revid][0] @@ -361,9 +357,9 @@ def fetch_refs(self, update_refs, lossy, overwrite=False): def git_update_refs(old_refs): ret = {} self.old_refs = { - k: (v, None) for (k, v) in viewitems(old_refs)} + k: (v, None) for (k, v) in old_refs.items()} new_refs = update_refs(self.old_refs) - for name, (gitid, revid) in viewitems(new_refs): + for name, (gitid, revid) in new_refs.items(): if gitid is None: git_sha = self.source_store._lookup_revision_sha1(revid) gitid = unpeel_map.re_unpeel_tag( @@ -414,12 +410,12 @@ def get_determine_wants_heads(self, wants, include_tags=False): def determine_wants(refs): unpeel_lookup = {} - for k, v in viewitems(refs): + for k, v in refs.items(): if k.endswith(ANNOTATED_TAG_SUFFIX): unpeel_lookup[v] = refs[k[:-len(ANNOTATED_TAG_SUFFIX)]] potential = set([unpeel_lookup.get(w, w) for w in wants]) if include_tags: - for k, sha in viewitems(refs): + for k, sha in refs.items(): if k.endswith(ANNOTATED_TAG_SUFFIX): continue if not is_tag(k): @@ -458,8 +454,8 @@ def search_missing_revision_ids(self, if if_present_ids is not None: todo.update(if_present_ids) result_set = todo.difference(self.target.all_revision_ids()) - result_parents = set(itertools.chain.from_iterable(viewvalues( - self.source.get_graph().get_parent_map(result_set)))) + result_parents = set(itertools.chain.from_iterable( + self.source.get_graph().get_parent_map(result_set).values())) included_keys = result_set.intersection(result_parents) start_keys = result_set.difference(included_keys) exclude_keys = result_parents.difference(result_set) @@ -484,7 +480,7 @@ def _target_has_shas(self, shas): def determine_wants_all(self, refs): potential = set() - for k, v in viewitems(refs): + for k, v in refs.items(): # For non-git target repositories, only worry about peeled if v == ZERO_SHA: continue @@ -557,7 +553,7 @@ def 
get_target_heads(self): all_revs = self.target.all_revision_ids() parent_map = self.target.get_parent_map(all_revs) all_parents = set() - for values in viewvalues(parent_map): + for values in parent_map.values(): all_parents.update(values) return set(all_revs) - all_parents @@ -641,12 +637,12 @@ def fetch_refs(self, update_refs, lossy, overwrite=False): def determine_wants(heads): old_refs = dict([(k, (v, None)) - for (k, v) in viewitems(heads.as_dict())]) + for (k, v) in heads.as_dict().items()]) new_refs = update_refs(old_refs) ref_changes.update(new_refs) - return [sha1 for (sha1, bzr_revid) in viewvalues(new_refs)] + return [sha1 for (sha1, bzr_revid) in new_refs.values()] self.fetch_objects(determine_wants, lossy=lossy) - for k, (git_sha, bzr_revid) in viewitems(ref_changes): + for k, (git_sha, bzr_revid) in ref_changes.items(): self.target._git.refs[k] = git_sha new_refs = self.target.controldir.get_refs_container() return None, old_refs, new_refs @@ -700,7 +696,7 @@ def get_determine_wants_revids(self, revids, include_tags=False): def get_determine_wants_branches(self, branches, include_tags=False): def determine_wants(refs): ret = [] - for name, value in viewitems(refs): + for name, value in refs.items(): if value == ZERO_SHA: continue diff --git a/breezy/git/mapping.py b/breezy/git/mapping.py index 9066fac20a..856aa13657 100644 --- a/breezy/git/mapping.py +++ b/breezy/git/mapping.py @@ -39,11 +39,6 @@ NULL_REVISION, Revision, ) -from ..sixish import ( - PY3, - text_type, - viewitems, - ) from .errors import ( NoPushSupport, ) @@ -175,7 +170,7 @@ def revision_id_bzr_to_foreign(cls, bzr_rev_id): def generate_file_id(self, path): # Git paths are just bytestrings # We must just hope they are valid UTF-8.. - if isinstance(path, text_type): + if isinstance(path, str): path = path.encode("utf-8") if path == b"": return ROOT_ID @@ -191,8 +186,7 @@ def parse_file_id(self, file_id): def revid_as_refname(self, revid): if not isinstance(revid, bytes): raise TypeError(revid) - if PY3: - revid = revid.decode('utf-8') + revid = revid.decode('utf-8') quoted_revid = urlutils.quote(revid) return b"refs/bzr/" + quoted_revid.encode('utf-8') @@ -252,11 +246,11 @@ def _extract_hg_metadata(self, rev, message): (message, renames, branch, extra) = extract_hg_metadata(message) if branch is not None: rev.properties[u'hg:extra:branch'] = branch - for name, value in viewitems(extra): + for name, value in extra.items(): rev.properties[u'hg:extra:' + name] = base64.b64encode(value) if renames: rev.properties[u'hg:renames'] = base64.b64encode(bencode.bencode( - [(new, old) for (old, new) in viewitems(renames)])) + [(new, old) for (old, new) in renames.items()])) return message def _extract_bzr_metadata(self, rev, message): @@ -347,7 +341,7 @@ def export_commit(self, rev, tree_sha, parent_lookup, lossy, u'commit-timezone-neg-utc', u'git-implicit-encoding', u'git-gpg-signature', u'git-explicit-encoding', u'author-timestamp', u'file-modes']) - for k, v in viewitems(rev.properties): + for k, v in rev.properties.items(): if k not in mapping_properties: metadata.properties[k] = v if not lossy and metadata: @@ -574,7 +568,7 @@ def show_foreign_revid(cls, foreign_revid): def symlink_to_blob(symlink_target): from dulwich.objects import Blob blob = Blob() - if isinstance(symlink_target, text_type): + if isinstance(symlink_target, str): symlink_target = symlink_target.encode('utf-8') blob.data = symlink_target return blob diff --git a/breezy/git/object_store.py b/breezy/git/object_store.py index 239b4994c6..a2099c3719 
100644 --- a/breezy/git/object_store.py +++ b/breezy/git/object_store.py @@ -47,7 +47,6 @@ from ..revision import ( NULL_REVISION, ) -from ..sixish import viewitems from ..bzr.testament import ( StrictTestament3, ) @@ -399,7 +398,7 @@ def __len__(self): def __iter__(self): return ((self.store[object_id], path) for (object_id, path) in - viewitems(self.objects)) + self.objects.items()) class BazaarObjectStore(BaseObjectStore): diff --git a/breezy/git/remote.py b/breezy/git/remote.py index a034701fda..14b2d77896 100644 --- a/breezy/git/remote.py +++ b/breezy/git/remote.py @@ -49,10 +49,6 @@ UninitializableFormat, ) from ..revisiontree import RevisionTree -from ..sixish import ( - text_type, - viewitems, - ) from ..transport import ( Transport, register_urlparse_netloc_protocol, @@ -122,12 +118,8 @@ import select import tempfile -try: - import urllib.parse as urlparse - from urllib.parse import splituser -except ImportError: - import urlparse - from urllib import splituser +import urllib.parse as urlparse +from urllib.parse import splituser # urlparse only supports a limited number of schemes by default register_urlparse_netloc_protocol('git') @@ -594,7 +586,7 @@ def get_changed_refs(refs): source, self.open_branch(name, nascent_ok=True)) ret[refname] = new_sha if fetch_tags: - for tagname, revid in viewitems(source.tags.get_tag_dict()): + for tagname, revid in source.tags.get_tag_dict().items(): if lossy: new_sha = source_store._lookup_revision_sha1(revid) else: @@ -998,7 +990,7 @@ def _iter_tag_refs(self): if peeled is None: # Let's just hope it's a commit peeled = unpeeled - if not isinstance(tag_name, text_type): + if not isinstance(tag_name, str): raise TypeError(tag_name) yield (ref_name, tag_name, peeled, unpeeled) diff --git a/breezy/git/repository.py b/breezy/git/repository.py index 6bd4795bc7..3c35666992 100644 --- a/breezy/git/repository.py +++ b/breezy/git/repository.py @@ -34,10 +34,6 @@ from ..foreign import ( ForeignRepository, ) -from ..sixish import ( - viewitems, - viewvalues, - ) from .commit import ( GitCommitBuilder, @@ -298,7 +294,7 @@ def iter_files_bytes(self, desired_files): for (file_id, revision_id, identifier) in desired_files: per_revision.setdefault(revision_id, []).append( (file_id, identifier)) - for revid, files in viewitems(per_revision): + for revid, files in per_revision.items(): try: (commit_id, mapping) = self.lookup_bzr_revision_id(revid) except errors.NoSuchRevision: @@ -410,7 +406,7 @@ def get_known_graph_ancestry(self, revision_ids): this_parent_map[revid] = parents parent_map.update(this_parent_map) pending = set() - for values in viewvalues(this_parent_map): + for values in this_parent_map.values(): pending.update(values) pending = pending.difference(parent_map) return _mod_graph.KnownGraph(parent_map) diff --git a/breezy/git/tree.py b/breezy/git/tree.py index bf336d6fa6..7a3b790981 100644 --- a/breezy/git/tree.py +++ b/breezy/git/tree.py @@ -66,10 +66,6 @@ CURRENT_REVISION, NULL_REVISION, ) -from ..sixish import ( - text_type, - viewitems, - ) from .mapping import ( mode_is_executable, @@ -1266,7 +1262,7 @@ def iter_entries_by_dir(self, specific_files=None, key = (posixpath.dirname(path), path) if key not in ret and self.is_versioned(path): ret[key] = self._get_dir_ie(path, self.path2id(key[0])) - return ((path, ie) for ((_, path), ie) in sorted(viewitems(ret))) + return ((path, ie) for ((_, path), ie) in sorted(ret.items())) def iter_references(self): if self.supports_tree_reference(): @@ -1281,9 +1277,9 @@ def _get_dir_ie(self, path, parent_id): 
posixpath.basename(path).strip("/"), parent_id) def _get_file_ie(self, name, path, value, parent_id): - if not isinstance(name, text_type): + if not isinstance(name, str): raise TypeError(name) - if not isinstance(path, text_type): + if not isinstance(path, str): raise TypeError(path) if not isinstance(value, tuple) or len(value) != 10: raise TypeError(value) diff --git a/breezy/git/workingtree.py b/breezy/git/workingtree.py index 88692e73dd..e9c3b2eb5b 100644 --- a/breezy/git/workingtree.py +++ b/breezy/git/workingtree.py @@ -73,7 +73,6 @@ BadReferenceTarget, MutableTree, ) -from ..sixish import text_type from .dir import ( @@ -534,7 +533,7 @@ def _iter_files_recursive(self, from_dir=None, include_dirs=False, recurse_nested=False): if from_dir is None: from_dir = u"" - if not isinstance(from_dir, text_type): + if not isinstance(from_dir, str): raise TypeError(from_dir) encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc) for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir): diff --git a/breezy/gpg.py b/breezy/gpg.py index 0ce10be68f..dd85547784 100644 --- a/breezy/gpg.py +++ b/breezy/gpg.py @@ -37,9 +37,6 @@ from . import ( errors, ) -from .sixish import ( - text_type, - ) # verification results SIGNATURE_VALID = 0 @@ -243,7 +240,7 @@ def sign(self, content, mode): raise GpgNotInstalled( 'Set create_signatures=no to disable creating signatures.') - if isinstance(content, text_type): + if isinstance(content, str): raise errors.BzrBadParameterUnicode('content') plain_text = gpg.Data(content) diff --git a/breezy/graph.py b/breezy/graph.py index b897aa339f..8e08333d63 100644 --- a/breezy/graph.py +++ b/breezy/graph.py @@ -25,10 +25,6 @@ revision, trace, ) -from .sixish import ( - viewitems, - viewvalues, - ) STEP_UNIQUE_SEARCHER_EVERY = 5 @@ -343,7 +339,7 @@ def get_child_map(self, keys): """ parent_map = self._parents_provider.get_parent_map(keys) parent_child = {} - for child, parents in sorted(viewitems(parent_map)): + for child, parents in sorted(parent_map.items()): for parent in parents: parent_child.setdefault(parent, []).append(child) return parent_child @@ -654,7 +650,7 @@ def _collapse_unique_searchers(self, unique_tip_searchers, # TODO: it might be possible to collapse searchers faster when they # only have *some* search tips in common. 
next_unique_searchers = [] - for searchers in viewvalues(unique_search_tips): + for searchers in unique_search_tips.values(): if len(searchers) == 1: # Searching unique tips, go for it next_unique_searchers.append(searchers[0]) @@ -843,7 +839,7 @@ def heads(self, keys): for c in candidate_heads) active_searchers = dict(searchers) # skip over the actual candidate for each searcher - for searcher in viewvalues(active_searchers): + for searcher in active_searchers.values(): next(searcher) # The common walker finds nodes that are common to two or more of the # input keys, so that we don't access all history when a currently @@ -886,11 +882,11 @@ def heads(self, keys): # some searcher has encountered our known common nodes: # just stop it ancestor_set = {ancestor} - for searcher in viewvalues(searchers): + for searcher in searchers.values(): searcher.stop_searching_any(ancestor_set) else: # or it may have been just reached by all the searchers: - for searcher in viewvalues(searchers): + for searcher in searchers.values(): if ancestor not in searcher.seen: break else: @@ -898,7 +894,7 @@ def heads(self, keys): # making it be known as a descendant of all candidates, # so we can stop searching it, and any seen ancestors new_common.add(ancestor) - for searcher in viewvalues(searchers): + for searcher in searchers.values(): seen_ancestors =\ searcher.find_seen_ancestors([ancestor]) searcher.stop_searching_any(seen_ancestors) @@ -1021,7 +1017,7 @@ def iter_ancestry(self, revision_ids): processed.update(pending) next_map = self.get_parent_map(pending) next_pending = set() - for item in viewitems(next_map): + for item in next_map.items(): yield item next_pending.update(p for p in item[1] if p not in processed) ghosts = pending.difference(next_map) @@ -1259,7 +1255,7 @@ def _remove_simple_descendants(self, revisions, parent_map): # for revision in revisions.intersection(descendants): # simple_ancestors.difference_update(descendants[revision]) # return simple_ancestors - for revision, parent_ids in viewitems(parent_map): + for revision, parent_ids in parent_map.items(): if parent_ids is None: continue for parent_id in parent_ids: @@ -1466,7 +1462,7 @@ def _do_query(self, revisions): seen.update(revisions) parent_map = self._parents_provider.get_parent_map(revisions) found_revisions.update(parent_map) - for rev_id, parents in viewitems(parent_map): + for rev_id, parents in parent_map.items(): if parents is None: continue new_found_parents = [p for p in parents if p not in seen] @@ -1509,7 +1505,7 @@ def find_seen_ancestors(self, revisions): all_parents = [] # We don't care if it is a ghost, since it can't be seen if it is # a ghost - for parent_ids in viewvalues(parent_map): + for parent_ids in parent_map.values(): all_parents.extend(parent_ids) next_pending = all_seen.intersection( all_parents).difference(seen_ancestors) @@ -1555,14 +1551,14 @@ def stop_searching_any(self, revisions): stop_rev_references[parent_id] += 1 # if only the stopped revisions reference it, the ref count will be # 0 after this loop - for parents in viewvalues(self._current_parents): + for parents in self._current_parents.values(): for parent_id in parents: try: stop_rev_references[parent_id] -= 1 except KeyError: pass stop_parents = set() - for rev_id, refs in viewitems(stop_rev_references): + for rev_id, refs in stop_rev_references.items(): if refs == 0: stop_parents.add(rev_id) self._next_query.difference_update(stop_parents) @@ -1598,7 +1594,7 @@ def start_searching(self, revisions): def invert_parent_map(parent_map): """Given 
a map from child => parents, create a map of parent=>children""" child_map = {} - for child, parents in viewitems(parent_map): + for child, parents in parent_map.items(): for p in parents: # Any given parent is likely to have only a small handful # of children, many will have only one. So we avoid mem overhead of @@ -1650,7 +1646,7 @@ def collapse_linear_regions(parent_map): # Will not have any nodes removed, even though you do have an # 'uninteresting' linear D->B and E->C children = {} - for child, parents in viewitems(parent_map): + for child, parents in parent_map.items(): children.setdefault(child, []) for p in parents: children.setdefault(p, []).append(child) diff --git a/breezy/grep.py b/breezy/grep.py index c7bc882ee7..066f14a3b9 100644 --- a/breezy/grep.py +++ b/breezy/grep.py @@ -16,6 +16,7 @@ from __future__ import absolute_import +from io import BytesIO import re from .lazy_import import lazy_import @@ -40,9 +41,6 @@ RevisionSpec_revid, RevisionSpec_revno, ) -from .sixish import ( - BytesIO, - ) _user_encoding = osutils.get_user_encoding() diff --git a/breezy/hashcache.py b/breezy/hashcache.py index e2688cc53b..9b77a1f8ba 100644 --- a/breezy/hashcache.py +++ b/breezy/hashcache.py @@ -41,10 +41,6 @@ osutils, trace, ) -from .sixish import ( - text_type, - viewitems, - ) FP_MTIME_COLUMN = 1 @@ -97,7 +93,7 @@ def __init__(self, root, cache_file_name, mode=None, parameters and returns a stack of ContentFilters. If None, no content filtering is performed. """ - if not isinstance(root, text_type): + if not isinstance(root, str): raise ValueError("Base dir for hashcache must be text") self.root = root self.hit_count = 0 @@ -131,7 +127,7 @@ def scan(self): # Stat in inode order as optimisation for at least linux. def inode_order(path_and_cache): return path_and_cache[1][1][3] - for path, cache_val in sorted(viewitems(self._cache), key=inode_order): + for path, cache_val in sorted(self._cache.items(), key=inode_order): abspath = osutils.pathjoin(self.root, path) fp = self._fingerprint(abspath) self.stat_count += 1 @@ -223,7 +219,7 @@ def write(self): new_mode=self._mode) as outf: outf.write(CACHE_HEADER) - for path, c in viewitems(self._cache): + for path, c in self._cache.items(): line_info = [path.encode('utf-8'), b'// ', c[0], b' '] line_info.append(b'%d %d %d %d %d %d' % c[1]) line_info.append(b'\n') diff --git a/breezy/i18n.py b/breezy/i18n.py index fc2c0de2ed..e06596f5c3 100644 --- a/breezy/i18n.py +++ b/breezy/i18n.py @@ -141,24 +141,17 @@ def _get_locale_dir(base): :param base: plugins can specify their own local directory """ - if sys.version_info > (3,): - decode_path = str - else: - fs_enc = sys.getfilesystemencoding() - - def decode_path(path): - return path.decode(fs_enc) if getattr(sys, 'frozen', False): if base is None: - base = os.path.dirname(decode_path(sys.executable)) + base = os.path.dirname(sys.executable) return os.path.join(base, u'locale') else: if base is None: - base = os.path.dirname(decode_path(__file__)) + base = os.path.dirname(__file__) dirpath = os.path.realpath(os.path.join(base, u'locale')) if os.path.exists(dirpath): return dirpath - return os.path.join(decode_path(sys.prefix), u"share", u"locale") + return os.path.join(sys.prefix, u"share", u"locale") def _check_win32_locale(): diff --git a/breezy/iterablefile.py b/breezy/iterablefile.py index 99fa25d7ea..ed94d5e5ab 100644 --- a/breezy/iterablefile.py +++ b/breezy/iterablefile.py @@ -29,8 +29,8 @@ def __init__(self, iterable): def read_n(self, length): """ - >>> IterableFileBase(['This ', 'is ', 'a ', 
'test.']).read_n(8) - 'This is ' + >>> IterableFileBase([b'This ', b'is ', b'a ', b'test.']).read_n(8) + b'This is ' """ def test_length(result): if len(result) >= length: @@ -41,11 +41,11 @@ def test_length(result): def read_to(self, sequence, length=None): """ - >>> f = IterableFileBase(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) - >>> f.read_to('\\n') - 'Th\\n' - >>> f.read_to('\\n') - 'is is \\n' + >>> f = IterableFileBase([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) + >>> f.read_to(b'\\n') + b'Th\\n' + >>> f.read_to(b'\\n') + b'is is \\n' """ def test_contents(result): if length is not None: @@ -79,8 +79,8 @@ def _read(self, result_length): def read_all(self): """ - >>> IterableFileBase(['This ', 'is ', 'a ', 'test.']).read_all() - 'This is a test.' + >>> IterableFileBase([b'This ', b'is ', b'a ', b'test.']).read_all() + b'This is a test.' """ def no_stop(result): return None @@ -88,12 +88,12 @@ def no_stop(result): def push_back(self, contents): """ - >>> f = IterableFileBase(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) - >>> f.read_to('\\n') - 'Th\\n' - >>> f.push_back("Sh") + >>> f = IterableFileBase([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) + >>> f.read_to(b'\\n') + b'Th\\n' + >>> f.push_back(b"Sh") >>> f.read_all() - 'Shis is \\na te\\nst.' + b'Shis is \\na te\\nst.' """ self._buffer = contents + self._buffer @@ -121,7 +121,7 @@ def _check_closed(self): def close(self): """ - >>> f = IterableFile(['This ', 'is ', 'a ', 'test.']) + >>> f = IterableFile([b'This ', b'is ', b'a ', b'test.']) >>> f.closed False >>> f.close() @@ -158,18 +158,18 @@ def flush(self): def __next__(self): """Implementation of the iterator protocol's next() - >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.']) + >>> f = IterableFile([b'This \\n', b'is ', b'a ', b'test.']) >>> next(f) - 'This \\n' + b'This \\n' >>> f.close() >>> next(f) Traceback (most recent call last): ValueError: File is closed. - >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.\\n']) + >>> f = IterableFile([b'This \\n', b'is ', b'a ', b'test.\\n']) >>> next(f) - 'This \\n' + b'This \\n' >>> next(f) - 'is a test.\\n' + b'is a test.\\n' >>> next(f) Traceback (most recent call last): StopIteration @@ -181,9 +181,9 @@ def __next__(self): def __iter__(self): """ - >>> list(IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.'])) - ['Th\\n', 'is is \\n', 'a te\\n', 'st.'] - >>> f = IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) + >>> list(IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.'])) + [b'Th\\n', b'is is \\n', b'a te\\n', b'st.'] + >>> f = IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) >>> f.close() >>> list(f) Traceback (most recent call last): @@ -193,12 +193,12 @@ def __iter__(self): def read(self, length=None): """ - >>> IterableFile(['This ', 'is ', 'a ', 'test.']).read() - 'This is a test.' - >>> f = IterableFile(['This ', 'is ', 'a ', 'test.']) + >>> IterableFile([b'This ', b'is ', b'a ', b'test.']).read() + b'This is a test.' + >>> f = IterableFile([b'This ', b'is ', b'a ', b'test.']) >>> f.read(10) - 'This is a ' - >>> f = IterableFile(['This ', 'is ', 'a ', 'test.']) + b'This is a ' + >>> f = IterableFile([b'This ', b'is ', b'a ', b'test.']) >>> f.close() >>> f.read(10) Traceback (most recent call last): @@ -215,13 +215,13 @@ def read_to(self, sequence, size=None): Read characters until a sequence is found, with optional max size. 
The specified sequence, if found, will be included in the result - >>> f = IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) - >>> f.read_to('i') - 'Th\\ni' - >>> f.read_to('i') - 's i' + >>> f = IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) + >>> f.read_to(b'i') + b'Th\\ni' + >>> f.read_to(b'i') + b's i' >>> f.close() - >>> f.read_to('i') + >>> f.read_to(b'i') Traceback (most recent call last): ValueError: File is closed. """ @@ -230,11 +230,11 @@ def read_to(self, sequence, size=None): def readline(self, size=None): """ - >>> f = IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) + >>> f = IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) >>> f.readline() - 'Th\\n' + b'Th\\n' >>> f.readline(4) - 'is i' + b'is i' >>> f.close() >>> f.readline() Traceback (most recent call last): @@ -244,10 +244,10 @@ def readline(self, size=None): def readlines(self, sizehint=None): """ - >>> f = IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) + >>> f = IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) >>> f.readlines() - ['Th\\n', 'is is \\n', 'a te\\n', 'st.'] - >>> f = IterableFile(['Th\\nis ', 'is \\n', 'a ', 'te\\nst.']) + [b'Th\\n', b'is is \\n', b'a te\\n', b'st.'] + >>> f = IterableFile([b'Th\\nis ', b'is \\n', b'a ', b'te\\nst.']) >>> f.close() >>> f.readlines() Traceback (most recent call last): diff --git a/breezy/library_state.py b/breezy/library_state.py index fe3a04e1ce..3dac5c6107 100644 --- a/breezy/library_state.py +++ b/breezy/library_state.py @@ -23,11 +23,13 @@ ] +import contextlib + + import breezy from .lazy_import import lazy_import lazy_import(globals(), """ from breezy import ( - cleanup, config, osutils, symbol_versioning, @@ -89,7 +91,7 @@ def _start(self): # isolation within the same interpreter. It's not reached on normal # in-process run_bzr calls. If it's broken, we expect that # TestRunBzrSubprocess may fail. - self.exit_stack = cleanup.ExitStack() + self.exit_stack = contextlib.ExitStack() if breezy.version_info[3] == 'final': self.exit_stack.callback( diff --git a/breezy/location.py b/breezy/location.py index cfa7a2af11..d0d40be94d 100644 --- a/breezy/location.py +++ b/breezy/location.py @@ -25,10 +25,6 @@ urlutils, ) from .hooks import Hooks -from .sixish import ( - PY3, - string_types, - ) class LocationHooks(Hooks): @@ -102,7 +98,7 @@ def location_to_url(location, purpose=None): :raise InvalidURL: If the location is already a URL, but not valid. 
:return: Byte string with resulting URL """ - if not isinstance(location, string_types): + if not isinstance(location, str): raise AssertionError("location not a byte or unicode string") if location.startswith(':pserver:'): @@ -120,8 +116,7 @@ def location_to_url(location, purpose=None): path=location, extra='URLs must be properly escaped') location = urlutils.local_path_to_url(location) else: - if PY3: - location = location.decode('ascii') + location = location.decode('ascii') if location.startswith("file:") and not location.startswith("file://"): return urlutils.join(urlutils.local_path_to_url("."), location[5:]) diff --git a/breezy/lockdir.py b/breezy/lockdir.py index 3a36045cb2..34e8caeda2 100644 --- a/breezy/lockdir.py +++ b/breezy/lockdir.py @@ -139,10 +139,6 @@ ) from .i18n import gettext from .osutils import format_delta, rand_chars, get_host_name -from .sixish import ( - PY3, - text_type, - ) from .trace import mutter, note @@ -302,7 +298,7 @@ def _handle_lock_contention(self, other_holder): ui.ui_factory.show_user_warning( 'locks_steal_dead', lock_url=urlutils.join(self.transport.base, self.path), - other_holder_info=text_type(other_holder)) + other_holder_info=str(other_holder)) self.force_break(other_holder) self._trace("stole lock from dead holder") return @@ -406,7 +402,7 @@ def break_lock(self): if ui.ui_factory.confirm_action( u"Break %(lock_info)s", 'breezy.lockdir.break', - dict(lock_info=text_type(holder_info))): + dict(lock_info=str(holder_info))): result = self.force_break(holder_info) ui.ui_factory.show_message( "Broke lock %s" % result.lock_url) @@ -730,16 +726,13 @@ def __repr__(self): """Return a debugging representation of this object.""" return "%s(%r)" % (self.__class__.__name__, self.info_dict) - def __unicode__(self): + def __str__(self): """Return a user-oriented description of this object.""" d = self.to_readable_dict() return (gettext( u'held by %(user)s on %(hostname)s (process #%(pid)s), ' u'acquired %(time_ago)s') % d) - if PY3: - __str__ = __unicode__ - def to_readable_dict(self): """Turn the holder info into a dict of human-readable attributes. diff --git a/breezy/log.py b/breezy/log.py index 69f50ab257..7b32f297ac 100644 --- a/breezy/log.py +++ b/breezy/log.py @@ -50,6 +50,7 @@ from __future__ import absolute_import import codecs +from io import BytesIO import itertools import re import sys @@ -84,11 +85,6 @@ get_terminal_encoding, terminal_width, ) -from .sixish import ( - BytesIO, - range, - zip, - ) from .tree import find_previous_path diff --git a/breezy/lru_cache.py b/breezy/lru_cache.py index 743e9714b5..b731e62190 100644 --- a/breezy/lru_cache.py +++ b/breezy/lru_cache.py @@ -21,10 +21,6 @@ from . import ( trace, ) -from .sixish import ( - viewitems, - viewkeys, - ) _null_key = object() @@ -136,11 +132,11 @@ def keys(self): :return: An unordered list of keys that are currently cached. """ # GZ 2016-06-04: Maybe just make this return the view? - return list(viewkeys(self._cache)) + return list(self._cache.keys()) def as_dict(self): """Get a new dict with the same key:value pairs as the cache""" - return dict((k, n.value) for k, n in viewitems(self._cache)) + return dict((k, n.value) for k, n in self._cache.items()) def cleanup(self): """Clear the cache until it shrinks to the requested size. 
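Note on the recurring substitution in the hunks above: everything breezy.sixish re-exported from six (viewitems, viewkeys, viewvalues, text_type, BytesIO, StringIO) maps onto a plain Python 3 builtin, so the changes are mechanical. A minimal sketch of the equivalences being relied on, using throwaway values rather than anything from Breezy:

    from io import BytesIO, StringIO     # replaces breezy.sixish.BytesIO / StringIO

    cache = {'a': 1, 'b': 2}             # throwaway example dict

    # six's viewitems/viewkeys/viewvalues become plain dict methods:
    assert list(cache.items()) == [('a', 1), ('b', 2)]
    assert set(cache.keys()) == {'a', 'b'}
    assert sorted(cache.values()) == [1, 2]

    # text_type is simply str on Python 3, so isinstance checks lose the alias:
    assert isinstance(u'lock holder', str)

    # the io classes stand in for the old cStringIO-backed aliases:
    assert BytesIO(b'payload').read() == b'payload'
    assert StringIO(u'payload').read() == u'payload'

    # __str__ returning text replaces the __unicode__/__str__ pair:
    class Holder(object):
        def __str__(self):
            return u'held by user on host'

    assert str(Holder()) == 'held by user on host'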
diff --git a/breezy/lsprof.py b/breezy/lsprof.py index d5b4768745..03a9293edd 100644 --- a/breezy/lsprof.py +++ b/breezy/lsprof.py @@ -13,10 +13,7 @@ import operator import os import sys -try: - import _thread -except ImportError: - import thread as _thread +import _thread import threading from _lsprof import Profiler, profiler_entry diff --git a/breezy/mail_client.py b/breezy/mail_client.py index f967745240..d41c697832 100644 --- a/breezy/mail_client.py +++ b/breezy/mail_client.py @@ -32,10 +32,6 @@ urlutils, registry, ) -from .sixish import ( - PY3, - text_type, - ) mail_client_registry = registry.Registry() @@ -235,8 +231,6 @@ def _encode_safe(self, u): :param u: possible unicode string. :return: encoded string if u is unicode, u itself otherwise. """ - if not PY3 and isinstance(u, text_type): - return u.encode(osutils.get_user_encoding(), 'replace') return u def _encode_path(self, path, kind): @@ -248,11 +242,6 @@ def _encode_path(self, path, kind): path itself otherwise. :raise: UnableEncodePath. """ - if not PY3 and isinstance(path, text_type): - try: - return path.encode(osutils.get_user_encoding()) - except UnicodeEncodeError: - raise errors.UnableEncodePath(path, kind) return path diff --git a/breezy/merge.py b/breezy/merge.py index d0152bf86e..38b57907ca 100644 --- a/breezy/merge.py +++ b/breezy/merge.py @@ -16,13 +16,14 @@ from __future__ import absolute_import +import contextlib + from .lazy_import import lazy_import lazy_import(globals(), """ import patiencediff from breezy import ( branch as _mod_branch, - cleanup, conflicts as _mod_conflicts, debug, graph as _mod_graph, @@ -49,9 +50,6 @@ hooks, registry, ) -from .sixish import ( - viewitems, - ) # TODO: Report back as changes are merged in @@ -447,7 +445,7 @@ def set_pending(self): def _add_parent(self): new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id] new_parent_trees = [] - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: for revision_id in new_parents: try: tree = self.revision_tree(revision_id) @@ -654,7 +652,7 @@ def _do_merge_to(self): return merge def do_merge(self): - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: stack.enter_context(self.this_tree.lock_tree_write()) if self.base_tree is not None: stack.enter_context(self.base_tree.lock_read()) @@ -757,7 +755,7 @@ def __init__(self, working_tree, this_tree, base_tree, other_tree, self.do_merge() def do_merge(self): - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: stack.enter_context(self.working_tree.lock_tree_write()) stack.enter_context(self.this_tree.lock_read()) stack.enter_context(self.base_tree.lock_read()) @@ -2255,7 +2253,7 @@ def _remove_external_references(parent_map): filtered_parent_map = {} child_map = {} tails = [] - for key, parent_keys in viewitems(parent_map): + for key, parent_keys in parent_map.items(): culled_parent_keys = [p for p in parent_keys if p in parent_map] if not culled_parent_keys: tails.append(key) diff --git a/breezy/merge_directive.py b/breezy/merge_directive.py index f743ff4397..a03584841b 100644 --- a/breezy/merge_directive.py +++ b/breezy/merge_directive.py @@ -17,13 +17,14 @@ from __future__ import absolute_import import base64 +import contextlib +from io import BytesIO import re from . import lazy_import lazy_import.lazy_import(globals(), """ from breezy import ( branch as _mod_branch, - cleanup, diff, email_message, gpg, @@ -44,9 +45,6 @@ from . 
import ( errors, ) -from .sixish import ( - BytesIO, - ) class MergeRequestBodyParams(object): @@ -599,7 +597,7 @@ def from_objects(klass, repository, revision_id, time, timezone, If the message is not supplied, the message from revision_id will be used for the commit. """ - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(repository.lock_write()) t_revision_id = revision_id if revision_id == b'null:': diff --git a/breezy/mergeable.py b/breezy/mergeable.py index 9cbcda6fa7..e7e37e6719 100644 --- a/breezy/mergeable.py +++ b/breezy/mergeable.py @@ -16,6 +16,8 @@ from __future__ import absolute_import +from io import BytesIO + from .lazy_import import lazy_import lazy_import(globals(), """ from breezy import ( @@ -30,9 +32,6 @@ from . import ( errors, ) -from .sixish import ( - BytesIO, - ) from .trace import note diff --git a/breezy/msgeditor.py b/breezy/msgeditor.py index 4ebb4f88ef..141bcbc035 100644 --- a/breezy/msgeditor.py +++ b/breezy/msgeditor.py @@ -19,6 +19,10 @@ from __future__ import absolute_import import codecs +from io import ( + BytesIO, + StringIO, + ) import os from subprocess import call import sys @@ -34,10 +38,6 @@ ) from .errors import BzrError from .hooks import Hooks -from .sixish import ( - BytesIO, - StringIO, - ) class BadCommitMessageEncoding(BzrError): diff --git a/breezy/multiparent.py b/breezy/multiparent.py index 2f523285ec..0456fc5808 100644 --- a/breezy/multiparent.py +++ b/breezy/multiparent.py @@ -17,6 +17,9 @@ from __future__ import absolute_import import errno +from io import ( + BytesIO, + ) import os from .lazy_import import lazy_import @@ -35,10 +38,6 @@ errors, ) from .i18n import gettext -from .sixish import ( - BytesIO, - range, - ) def topo_iter_keys(vf, keys=None): diff --git a/breezy/multiwalker.py b/breezy/multiwalker.py index 43896ef5f5..fd18f5e11e 100644 --- a/breezy/multiwalker.py +++ b/breezy/multiwalker.py @@ -24,11 +24,6 @@ osutils, ) -from .sixish import ( - text_type, - viewvalues, - ) - class MultiWalker(object): """Walk multiple trees simultaneously, getting combined results.""" @@ -94,10 +89,10 @@ def _lt_path_by_dirblock(path1, path2): # This is stolen from _dirstate_helpers_py.py, only switching it to # Unicode objects. Consider using encode_utf8() and then using the # optimized versions, or maybe writing optimized unicode versions. - if not isinstance(path1, text_type): + if not isinstance(path1, str): raise TypeError("'path1' must be a unicode string, not %s: %r" % (type(path1), path1)) - if not isinstance(path2, text_type): + if not isinstance(path2, str): raise TypeError("'path2' must be a unicode string, not %s: %r" % (type(path2), path2)) return (MultiWalker._path_to_key(path1) < @@ -247,7 +242,7 @@ def _walk_others(self): # might ensure better ordering, in case a caller strictly # requires parents before children. for idx, other_extra in enumerate(self._others_extra): - others = sorted(viewvalues(other_extra), + others = sorted(other_extra.values(), key=lambda x: self._path_to_key(x[0])) for other_path, other_ie in others: file_id = other_ie.file_id diff --git a/breezy/mutabletree.py b/breezy/mutabletree.py index 349e24bac7..ad3033260f 100644 --- a/breezy/mutabletree.py +++ b/breezy/mutabletree.py @@ -29,9 +29,6 @@ tree, ) -from .sixish import ( - text_type, - ) class BadReferenceTarget(errors.InternalBzrError): @@ -101,12 +98,12 @@ def add(self, files, ids=None, kinds=None): TODO: Perhaps callback with the ids and paths as they're added. 
""" - if isinstance(files, (str, text_type)): + if isinstance(files, str): # XXX: Passing a single string is inconsistent and should be # deprecated. if not (ids is None or isinstance(ids, bytes)): raise AssertionError() - if not (kinds is None or isinstance(kinds, (str, text_type))): + if not (kinds is None or isinstance(kinds, str)): raise AssertionError() files = [files] if ids is not None: diff --git a/breezy/option.py b/breezy/option.py index a97b37de09..041fc68b01 100644 --- a/breezy/option.py +++ b/breezy/option.py @@ -27,9 +27,6 @@ registry as _mod_registry, revisionspec, ) -from .sixish import ( - text_type, - ) class BadOptionValue(errors.BzrError): @@ -111,7 +108,7 @@ def _parse_change_str(revstr): >>> _parse_change_str('123..124') Traceback (most recent call last): ... - RangeInChangeOption: Option --change does not accept revision ranges + breezy.errors.RangeInChangeOption: Option --change does not accept revision ranges """ revs = _parse_revision_str(revstr) if len(revs) > 1: @@ -564,9 +561,9 @@ def _verbosity_level_callback(option, opt_str, value, parser): short_name='c', param_name='revision', help='Select changes introduced by the specified revision. See also "help revisionspec".') -_global_option('directory', short_name='d', type=text_type, +_global_option('directory', short_name='d', type=str, help='Branch to operate on, instead of working directory.') -_global_option('file', type=text_type, short_name='F') +_global_option('file', type=str, short_name='F') _global_registry_option('log-format', "Use specified log format.", lazy_registry=('breezy.log', 'log_formatter_registry'), value_switches=True, title='Log format', @@ -574,7 +571,7 @@ def _verbosity_level_callback(option, opt_str, value, parser): _global_registry_option('merge-type', 'Select a particular merge algorithm.', lazy_registry=('breezy.merge', 'merge_type_registry'), value_switches=True, title='Merge algorithm') -_global_option('message', type=text_type, +_global_option('message', type=str, short_name='m', help='Message string.') _global_option('null', short_name='0', diff --git a/breezy/osutils.py b/breezy/osutils.py index da4166a7dc..4db3798430 100644 --- a/breezy/osutils.py +++ b/breezy/osutils.py @@ -52,11 +52,6 @@ from breezy.i18n import gettext """) -from .sixish import ( - PY3, - text_type, - ) - from hashlib import ( md5, sha1 as sha, @@ -91,14 +86,7 @@ def __init__(self, timezone): def get_unicode_argv(): - if PY3: - return sys.argv[1:] - try: - user_encoding = get_user_encoding() - return [a.decode(user_encoding) for a in sys.argv[1:]] - except UnicodeDecodeError: - raise errors.BzrError(gettext("Parameter {0!r} encoding is unsupported by {1} " - "application locale.").format(a, user_encoding)) + return sys.argv[1:] def make_readonly(filename): @@ -331,14 +319,7 @@ def _posix_path_from_environ(key): Note that posix systems use arbitrary byte strings for filesystem objects, so a path that raises BadFilenameEncoding here may still be accessible. 
""" - val = os.environ.get(key, None) - if PY3 or val is None: - return val - try: - return val.decode(_fs_enc) - except UnicodeDecodeError: - # GZ 2011-12-12:Ideally want to include `key` in the exception message - raise errors.BadFilenameEncoding(val, _fs_enc) + return os.environ.get(key, None) def _posix_get_home_dir(): @@ -354,15 +335,7 @@ def _posix_get_home_dir(): def _posix_getuser_unicode(): """Get username from environment or password database as unicode""" - name = getpass.getuser() - if PY3: - return name - user_encoding = get_user_encoding() - try: - return name.decode(user_encoding) - except UnicodeDecodeError: - raise errors.BzrError("Encoding of username %r is unsupported by %s " - "application locale." % (name, user_encoding)) + return getpass.getuser() def _win32_fixdrive(path): @@ -447,10 +420,7 @@ def _rename_wrapper(old, new): return _rename_wrapper -if sys.version_info > (3,): - _getcwd = os.getcwd -else: - _getcwd = os.getcwdu +_getcwd = os.getcwd # Default rename wraps os.rename() @@ -738,12 +708,8 @@ def file_iterator(input_file, readsize=32768): # GZ 2017-09-16: Makes sense in general for hexdigest() result to be text, but # used as bytes through most interfaces so encode with this wrapper. -if PY3: - def _hexdigest(hashobj): - return hashobj.hexdigest().encode() -else: - def _hexdigest(hashobj): - return hashobj.hexdigest() +def _hexdigest(hashobj): + return hashobj.hexdigest().encode() def sha_file(f): @@ -895,7 +861,7 @@ def format_local_date(t, offset=0, timezone='original', date_fmt=None, (date_fmt, tt, offset_str) = \ _format_date(t, offset, timezone, date_fmt, show_offset) date_str = time.strftime(date_fmt, tt) - if not isinstance(date_str, text_type): + if not isinstance(date_str, str): date_str = date_str.decode(get_user_encoding(), 'replace') return date_str + offset_str @@ -1012,10 +978,7 @@ def rand_chars(num): """ s = '' for raw_byte in rand_bytes(num): - if not PY3: - s += ALNUM[ord(raw_byte) % 36] - else: - s += ALNUM[raw_byte % 36] + s += ALNUM[raw_byte % 36] return s @@ -1376,7 +1339,7 @@ def decode_filename(filename): Otherwise it is decoded from the the filesystem's encoding. If decoding fails, a errors.BadFilenameEncoding exception is raised. """ - if isinstance(filename, text_type): + if isinstance(filename, str): return filename try: return filename.decode(_fs_enc) @@ -1391,7 +1354,7 @@ def safe_unicode(unicode_or_utf8_string): Otherwise it is decoded from utf-8. If decoding fails, the exception is wrapped in a BzrBadParameterNotUnicode exception. 
""" - if isinstance(unicode_or_utf8_string, text_type): + if isinstance(unicode_or_utf8_string, str): return unicode_or_utf8_string try: return unicode_or_utf8_string.decode('utf8') @@ -1730,8 +1693,6 @@ def set_or_unset_env(env_variable, value): if orig_val is not None: del os.environ[env_variable] else: - if not PY3 and isinstance(value, text_type): - value = value.encode(get_user_encoding()) os.environ[env_variable] = value return orig_val @@ -2126,9 +2087,7 @@ def get_host_name(): return win32utils.get_host_name() else: import socket - if PY3: - return socket.gethostname() - return socket.gethostname().decode(get_user_encoding()) + return socket.gethostname() # We must not read/write any more than 64k at a time from/to a socket so we @@ -2713,7 +2672,4 @@ def get_fs_type(path): return _FILESYSTEM_FINDER.find(path) -if PY3: - perf_counter = time.perf_counter -else: - perf_counter = time.clock +perf_counter = time.perf_counter diff --git a/breezy/plugin.py b/breezy/plugin.py index 3a7d0c883f..11f922fa81 100644 --- a/breezy/plugin.py +++ b/breezy/plugin.py @@ -63,10 +63,7 @@ _MODULE_PREFIX = "breezy.plugins." -if __debug__ or sys.version_info > (3,): - COMPILED_EXT = ".pyc" -else: - COMPILED_EXT = ".pyo" +COMPILED_EXT = ".pyc" def disable_plugins(state=None): diff --git a/breezy/plugins/bash_completion/bashcomp.py b/breezy/plugins/bash_completion/bashcomp.py index fd1adbc69f..35c347d33c 100644 --- a/breezy/plugins/bash_completion/bashcomp.py +++ b/breezy/plugins/bash_completion/bashcomp.py @@ -26,9 +26,6 @@ option, plugin, ) -from ...sixish import ( - text_type, - ) import breezy import re import sys @@ -428,13 +425,13 @@ class cmd_bash_completion(commands.Command): """ takes_options = [ - option.Option("function-name", short_name="f", type=text_type, argname="name", + option.Option("function-name", short_name="f", type=str, argname="name", help="Name of the generated function (default: _brz)"), option.Option("function-only", short_name="o", type=None, help="Generate only the shell function, don't enable it"), option.Option("debug", type=None, hidden=True, help="Enable shell code useful for debugging"), - option.ListOption("plugin", type=text_type, argname="name", + option.ListOption("plugin", type=str, argname="name", # param_name="selected_plugins", # doesn't work, bug #387117 help="Enable completions for the selected plugin" + " (default: all plugins)"), diff --git a/breezy/plugins/email/emailer.py b/breezy/plugins/email/emailer.py index 4396216e9a..544aec0b53 100644 --- a/breezy/plugins/email/emailer.py +++ b/breezy/plugins/email/emailer.py @@ -80,7 +80,7 @@ def body(self): # We must use StringIO.StringIO because we want a Unicode string that # we can pass to send_email and have that do the proper encoding. - from ...sixish import StringIO + from io import StringIO outf = StringIO() _body = self.config.get('post_commit_body') @@ -137,7 +137,7 @@ def get_diff(self): # We can use a StringIO because show_diff_trees should only write # 8-bit strings. It is an error to write a Unicode string here. - from ...sixish import StringIO + from io import StringIO diff_content = StringIO() diff_options = self.config.get('post_commit_diffoptions') show_diff_trees(tree_old, tree_new, diff_content, None, diff_options) diff --git a/breezy/plugins/fastimport/cmds.py b/breezy/plugins/fastimport/cmds.py index 99fa5fc33f..83d6cf40ee 100644 --- a/breezy/plugins/fastimport/cmds.py +++ b/breezy/plugins/fastimport/cmds.py @@ -20,9 +20,6 @@ from ... 
import controldir from ...commands import Command from ...option import Option, RegistryOption -from ...sixish import ( - text_type, - ) from . import ( @@ -242,10 +239,10 @@ class cmd_fast_import(Command): _see_also = ['fast-export', 'fast-import-filter', 'fast-import-info'] takes_args = ['source', 'destination?'] takes_options = ['verbose', - Option('user-map', type=text_type, + Option('user-map', type=str, help="Path to file containing a map of user-ids.", ), - Option('info', type=text_type, + Option('info', type=str, help="Path to file containing caching hints.", ), Option('trees', @@ -271,10 +268,10 @@ class cmd_fast_import(Command): experimental="Enable experimental features.", value_switches=True, enum_switch=False, ), - Option('import-marks', type=text_type, + Option('import-marks', type=str, help="Import marks from file." ), - Option('export-marks', type=text_type, + Option('export-marks', type=str, help="Export marks to file." ), RegistryOption('format', @@ -322,7 +319,7 @@ def run(self, source, destination='.', verbose=False, info=None, user_map=user_map) def _generate_info(self, source): - from ...sixish import StringIO + from io import StringIO from fastimport import parser from fastimport.errors import ParsingError from ...errors import BzrCommandError @@ -448,20 +445,20 @@ class cmd_fast_export(Command): _see_also = ['fast-import', 'fast-import-filter'] takes_args = ['source?', 'destination?'] takes_options = ['verbose', 'revision', - Option('git-branch', short_name='b', type=text_type, + Option('git-branch', short_name='b', type=str, argname='FILE', help='Name of the git branch to create (default=master).' ), Option('checkpoint', type=int, argname='N', help="Checkpoint every N revisions (default=10000)." ), - Option('marks', type=text_type, argname='FILE', + Option('marks', type=str, argname='FILE', help="Import marks from and export marks to file." ), - Option('import-marks', type=text_type, argname='FILE', + Option('import-marks', type=str, argname='FILE', help="Import marks from file." ), - Option('export-marks', type=text_type, argname='FILE', + Option('export-marks', type=str, argname='FILE', help="Export marks to file." ), Option('plain', diff --git a/breezy/plugins/fastimport/exporter.py b/breezy/plugins/fastimport/exporter.py index 32c80d5145..71d49f0993 100644 --- a/breezy/plugins/fastimport/exporter.py +++ b/breezy/plugins/fastimport/exporter.py @@ -47,10 +47,7 @@ # is not updated (because the parent of commit is already merged, so we don't # set new_git_branch to the previously used name) -try: - from email.utils import parseaddr -except ImportError: # python < 3 - from email.Utils import parseaddr +from email.utils import parseaddr import sys import time import re @@ -66,11 +63,6 @@ progress, trace, ) -from ...sixish import ( - int2byte, - PY3, - viewitems, - ) from . import ( helpers, @@ -140,13 +132,14 @@ def sanitize_ref_name_for_git(refname): :param refname: refname to rewrite :return: new refname """ + import struct new_refname = re.sub( # '/.' in refname or startswith '.' br"/\.|^\." # '..' in refname br"|\.\." # ord(c) < 040 - br"|[" + b"".join([int2byte(x) for x in range(0o40)]) + br"]" + br"|[" + b"".join([bytes([x]) for x in range(0o40)]) + br"]" # c in '\177 ~^:?*[' br"|[\177 ~^:?*[]" # last char in "/." 
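A quick check of the byte handling in the sanitize_ref_name_for_git hunk above: bytes([i]) builds the same one-byte string as six.int2byte(i), so the control-character class in the regex is unchanged. The email.utils import also needs no fallback, since that module name has existed since Python 2.5. Sketch with made-up example values only:

    # bytes([i]) is the Python 3 spelling of six.int2byte(i):
    control_chars = b"".join([bytes([x]) for x in range(0o40)])

    assert bytes([0o40]) == b' '              # 0o40 (32) is the space character
    assert control_chars == bytes(range(32))  # every byte strictly below space
    assert len(control_chars) == 32

    # email.utils.parseaddr replaces the old email.Utils spelling directly.
    # (Address below is invented for illustration.)
    from email.utils import parseaddr
    assert parseaddr('Jane Doe <jane@example.com>') == ('Jane Doe', 'jane@example.com')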
@@ -319,10 +312,7 @@ def dump_stats(self): time_required) def print_cmd(self, cmd): - if PY3: - self.outf.write(b"%s\n" % cmd) - else: - self.outf.write(b"%r\n" % cmd) + self.outf.write(b"%s\n" % cmd) def _save_marks(self): if self.export_marks_file: @@ -651,7 +641,7 @@ def _adjust_path_for_renames(self, path, renamed, revision_id): return path def emit_tags(self): - for tag, revid in viewitems(self.branch.tags.get_tag_dict()): + for tag, revid in self.branch.tags.get_tag_dict().items(): try: mark = self.revid_to_mark[revid] except KeyError: diff --git a/breezy/plugins/fastimport/tests/test_head_tracking.py b/breezy/plugins/fastimport/tests/test_head_tracking.py index 813a140d1b..36eb72d363 100644 --- a/breezy/plugins/fastimport/tests/test_head_tracking.py +++ b/breezy/plugins/fastimport/tests/test_head_tracking.py @@ -17,7 +17,7 @@ from __future__ import absolute_import -from ....sixish import StringIO +from io import StringIO from fastimport import ( commands, diff --git a/breezy/plugins/fastimport/user_mapper.py b/breezy/plugins/fastimport/user_mapper.py index 04a066ff9f..57bc7bdfd0 100644 --- a/breezy/plugins/fastimport/user_mapper.py +++ b/breezy/plugins/fastimport/user_mapper.py @@ -15,10 +15,7 @@ from __future__ import absolute_import -try: - from email.utils import parseaddr -except ImportError: - from email.Utils import parseaddr +from email.utils import parseaddr class UserMapper(object): diff --git a/breezy/plugins/launchpad/cmds.py b/breezy/plugins/launchpad/cmds.py index 7184b7435e..1d63222966 100644 --- a/breezy/plugins/launchpad/cmds.py +++ b/breezy/plugins/launchpad/cmds.py @@ -35,9 +35,6 @@ Option, ListOption, ) -from ...sixish import ( - text_type, - ) class cmd_launchpad_open(Command): @@ -191,13 +188,13 @@ class cmd_lp_propose_merge(Command): hidden = True takes_options = [Option('staging', help='Propose the merge on staging.'), - Option('message', short_name='m', type=text_type, + Option('message', short_name='m', type=str, help='Commit message.'), Option('approve', help=('Mark the proposal as approved immediately, ' 'setting the approved revision to tip.')), Option('fixes', 'The bug this proposal fixes.', str), - ListOption('review', short_name='R', type=text_type, + ListOption('review', short_name='R', type=str, help='Requested reviewer and optional type.')] takes_args = ['submit_branch?'] diff --git a/breezy/plugins/launchpad/lp_api.py b/breezy/plugins/launchpad/lp_api.py index e94a91de63..35818b497a 100644 --- a/breezy/plugins/launchpad/lp_api.py +++ b/breezy/plugins/launchpad/lp_api.py @@ -24,16 +24,10 @@ import re -try: - from urllib.parse import ( - urlparse, - urlunparse, - ) -except ImportError: # python < 3 - from urlparse import ( - urlparse, - urlunparse, - ) +from urllib.parse import ( + urlparse, + urlunparse, + ) from ... import ( branch, diff --git a/breezy/plugins/launchpad/lp_directory.py b/breezy/plugins/launchpad/lp_directory.py index f4152796b3..8680b8072e 100644 --- a/breezy/plugins/launchpad/lp_directory.py +++ b/breezy/plugins/launchpad/lp_directory.py @@ -18,14 +18,8 @@ from __future__ import absolute_import -try: - from urllib.parse import urlsplit -except ImportError: # python < 3 - from urlparse import urlsplit -try: - from xmlrpc.client import Fault -except ImportError: # Python < 3 - from xmlrpclib import Fault +from urllib.parse import urlsplit +from xmlrpc.client import Fault from ... 
import ( debug, diff --git a/breezy/plugins/launchpad/lp_registration.py b/breezy/plugins/launchpad/lp_registration.py index 0649aa5585..469e82cb2e 100644 --- a/breezy/plugins/launchpad/lp_registration.py +++ b/breezy/plugins/launchpad/lp_registration.py @@ -24,22 +24,13 @@ except ImportError: from urlparse import urlsplit, urlunsplit # noqa: F401 import urllib -try: - from xmlrpc.client import ( - __version__ as xmlrpc_version, - Fault, - ProtocolError, - ServerProxy, - Transport, - ) -except ImportError: # python < 3 - from xmlrpclib import ( - __version__ as xmlrpc_version, - Fault, - ProtocolError, - Transport, - ServerProxy, - ) +from xmlrpc.client import ( + __version__ as xmlrpc_version, + Fault, + ProtocolError, + ServerProxy, + Transport, + ) from ... import ( errors, diff --git a/breezy/plugins/launchpad/test_lp_directory.py b/breezy/plugins/launchpad/test_lp_directory.py index 44878c6fff..d93bfc841d 100644 --- a/breezy/plugins/launchpad/test_lp_directory.py +++ b/breezy/plugins/launchpad/test_lp_directory.py @@ -18,11 +18,8 @@ import os -try: - from xmlrpc.client import Fault - from http.client import parse_headers -except ImportError: # python < 3 - from xmlrpclib import Fault +from xmlrpc.client import Fault +from http.client import parse_headers import breezy from ... import ( @@ -32,7 +29,6 @@ ) from ...branch import Branch from ...directory_service import directories -from ...sixish import PY3 from ...tests import ( features, ssl_certs, @@ -428,10 +424,7 @@ class PredefinedRequestHandler(http_server.TestingHTTPRequestHandler): def handle_one_request(self): tcs = self.server.test_case_server requestline = self.rfile.readline() - if PY3: - parse_headers(self.rfile) - else: - self.MessageClass(self.rfile, 0) + parse_headers(self.rfile) if requestline.startswith(b'POST'): # The body should be a single line (or we don't know where it ends # and we don't want to issue a blocking read) diff --git a/breezy/plugins/launchpad/test_lp_service.py b/breezy/plugins/launchpad/test_lp_service.py index 351d7b2cdf..87537da1ae 100644 --- a/breezy/plugins/launchpad/test_lp_service.py +++ b/breezy/plugins/launchpad/test_lp_service.py @@ -17,10 +17,7 @@ """Tests for selection of the right Launchpad service by environment""" import os -try: - from xmlrpc.client import Fault -except ImportError: # python < 3 - from xmlrpclib import Fault +from xmlrpc.client import Fault from .lp_registration import ( InvalidURL, diff --git a/breezy/plugins/launchpad/test_register.py b/breezy/plugins/launchpad/test_register.py index 2746904fb3..42dba7117a 100644 --- a/breezy/plugins/launchpad/test_register.py +++ b/breezy/plugins/launchpad/test_register.py @@ -16,18 +16,11 @@ from io import BytesIO -try: - from xmlrpc.client import ( - loads as xmlrpc_loads, - Transport, - ) -except ImportError: # python < 3 - from xmlrpclib import ( - loads as xmlrpc_loads, - Transport, - ) - -from ...sixish import PY3 +from xmlrpc.client import ( + loads as xmlrpc_loads, + Transport, + ) + from ...tests import TestCaseWithTransport # local import @@ -111,23 +104,17 @@ def make_connection(self, host): raise AssertionError() return InstrumentedXMLRPCConnection(test) - if PY3: - def send_request(self, host, handler_path, request_body, - verbose=None): - self.connected_host = host - test = self.testcase - self.got_request = True - unpacked, method = xmlrpc_loads(request_body) - if None in unpacked: - raise AssertionError( - "xmlrpc result %r shouldn't contain None" % (unpacked,)) - self.sent_params = unpacked - return 
InstrumentedXMLRPCConnection(test) - else: - def send_request(self, connection, handler_path, request_body, - verbose=None): - test = self.testcase - self.got_request = True + def send_request(self, host, handler_path, request_body, + verbose=None): + self.connected_host = host + test = self.testcase + self.got_request = True + unpacked, method = xmlrpc_loads(request_body) + if None in unpacked: + raise AssertionError( + "xmlrpc result %r shouldn't contain None" % (unpacked,)) + self.sent_params = unpacked + return InstrumentedXMLRPCConnection(test) def send_host(self, conn, host): pass diff --git a/breezy/plugins/netrc_credential_store/tests/test_netrc.py b/breezy/plugins/netrc_credential_store/tests/test_netrc.py index 4063f7f785..2e96d5a716 100644 --- a/breezy/plugins/netrc_credential_store/tests/test_netrc.py +++ b/breezy/plugins/netrc_credential_store/tests/test_netrc.py @@ -14,15 +14,13 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO from .... import ( config, errors, osutils, tests, ) -from ....sixish import ( - BytesIO, - ) from ... import netrc_credential_store diff --git a/breezy/plugins/propose/cmds.py b/breezy/plugins/propose/cmds.py index f003eb9dfc..c77a0ec66a 100644 --- a/breezy/plugins/propose/cmds.py +++ b/breezy/plugins/propose/cmds.py @@ -36,7 +36,6 @@ Option, RegistryOption, ) -from ...sixish import text_type from ...trace import note from ... import ( propose as _mod_propose, @@ -139,7 +138,7 @@ class cmd_propose_merge(Command): 'hoster', help='Use the hoster.', lazy_registry=('breezy.plugins.propose.propose', 'hosters')), - ListOption('reviewers', short_name='R', type=text_type, + ListOption('reviewers', short_name='R', type=str, help='Requested reviewers.'), Option('name', help='Name of the new remote branch.', type=str), Option('description', help='Description of the change.', type=str), @@ -148,7 +147,7 @@ class cmd_propose_merge(Command): Option( 'commit-message', help='Set commit message for merge, if supported', type=str), - ListOption('labels', short_name='l', type=text_type, + ListOption('labels', short_name='l', type=str, help='Labels to apply.'), Option('no-allow-lossy', help='Allow fallback to lossy push, if necessary.'), diff --git a/breezy/plugins/propose/github.py b/breezy/plugins/propose/github.py index 3fdd63c533..f9185e4840 100644 --- a/breezy/plugins/propose/github.py +++ b/breezy/plugins/propose/github.py @@ -47,7 +47,6 @@ from ...errors import InvalidHttpResponse, PermissionDenied from ...git.urls import git_url_to_bzr_url from ...i18n import gettext -from ...sixish import PY3 from ...trace import note from ...transport import get_transport from ...transport.http import default_user_agent @@ -222,8 +221,6 @@ def parse_github_branch_url(branch): def github_url_to_bzr_url(url, branch_name): - if not PY3: - branch_name = branch_name.encode('utf-8') return git_url_to_bzr_url(url, branch_name) diff --git a/breezy/plugins/propose/gitlabs.py b/breezy/plugins/propose/gitlabs.py index ae51f97632..fad89825f9 100644 --- a/breezy/plugins/propose/gitlabs.py +++ b/breezy/plugins/propose/gitlabs.py @@ -30,7 +30,6 @@ urlutils, ) from ...git.urls import git_url_to_bzr_url -from ...sixish import PY3 from ...trace import mutter from ...transport import get_transport @@ -261,8 +260,6 @@ def get_merged_at(self): def gitlab_url_to_bzr_url(url, name): - if not PY3: - name = name.encode('utf-8') return git_url_to_bzr_url(url, branch=name) diff --git 
a/breezy/plugins/repodebug/missing_keys_for_stacking_fixer.py b/breezy/plugins/repodebug/missing_keys_for_stacking_fixer.py index 82ddd92cef..84def16a67 100644 --- a/breezy/plugins/repodebug/missing_keys_for_stacking_fixer.py +++ b/breezy/plugins/repodebug/missing_keys_for_stacking_fixer.py @@ -19,7 +19,6 @@ from ...controldir import ControlDir from ...commands import Command, Option from ... import errors -from ...sixish import viewvalues from ...bzr.vf_search import PendingAncestryResult from ...repository import WriteGroup from ...revision import NULL_REVISION @@ -65,7 +64,7 @@ def run(self, branch_url, dry_run=False): revs = raw_r.all_revision_ids() rev_parents = raw_r.get_graph().get_parent_map(revs) needed = set() - map(needed.update, viewvalues(rev_parents)) + map(needed.update, rev_parents.values()) needed.discard(NULL_REVISION) needed = set((rev,) for rev in needed) needed = needed - raw_r.inventories.keys() diff --git a/breezy/plugins/upload/cmds.py b/breezy/plugins/upload/cmds.py index c8ff0f82f9..03926d86fe 100644 --- a/breezy/plugins/upload/cmds.py +++ b/breezy/plugins/upload/cmds.py @@ -39,9 +39,6 @@ ) """) -from ...sixish import ( - text_type, - ) auto_option = config.Option( 'upload_auto', default=False, from_unicode=config.bool_from_store, @@ -483,7 +480,7 @@ class cmd_upload(commands.Command): help='Branch to upload from, ' 'rather than the one containing the working directory.', short_name='d', - type=text_type, + type=str, ), option.Option('auto', 'Trigger an upload from this branch whenever the tip ' diff --git a/breezy/plugins/weave_fmt/repository.py b/breezy/plugins/weave_fmt/repository.py index bca644aed1..765db611b2 100644 --- a/breezy/plugins/weave_fmt/repository.py +++ b/breezy/plugins/weave_fmt/repository.py @@ -23,6 +23,7 @@ from __future__ import absolute_import import gzip +from io import BytesIO import os from ...lazy_import import lazy_import @@ -58,10 +59,6 @@ from ...bzr.repository import ( RepositoryFormatMetaDir, ) -from ...sixish import ( - BytesIO, - text_type, - ) from .store.text import TextStore from ...bzr.versionedfile import ( AbsentContentFactory, @@ -88,7 +85,7 @@ def _serializer(self): return xml5.serializer_v5 def _escape(self, file_or_path): - if not isinstance(file_or_path, (str, text_type)): + if not isinstance(file_or_path, str): file_or_path = '/'.join(file_or_path) if file_or_path == '': return u'' diff --git a/breezy/plugins/weave_fmt/store/text.py b/breezy/plugins/weave_fmt/store/text.py index 39cb621705..6ff23ddb30 100644 --- a/breezy/plugins/weave_fmt/store/text.py +++ b/breezy/plugins/weave_fmt/store/text.py @@ -23,13 +23,11 @@ from __future__ import absolute_import import gzip +from io import BytesIO import os from .... import osutils from ....errors import BzrError, NoSuchFile, FileExists -from ....sixish import ( - BytesIO, - ) from . 
import TransportStore from ....trace import mutter diff --git a/breezy/plugins/weave_fmt/test_repository.py b/breezy/plugins/weave_fmt/test_repository.py index aeadb0153f..38a686430e 100644 --- a/breezy/plugins/weave_fmt/test_repository.py +++ b/breezy/plugins/weave_fmt/test_repository.py @@ -22,6 +22,7 @@ from __future__ import absolute_import +from io import BytesIO from stat import S_ISDIR import sys @@ -39,9 +40,6 @@ from ...bzr.serializer import ( format_registry as serializer_format_registry, ) -from ...sixish import ( - BytesIO, - ) from ...tests import ( TestCase, TestCaseWithTransport, diff --git a/breezy/plugins/weave_fmt/test_store.py b/breezy/plugins/weave_fmt/test_store.py index de7e57e26e..bf6642dbd9 100644 --- a/breezy/plugins/weave_fmt/test_store.py +++ b/breezy/plugins/weave_fmt/test_store.py @@ -16,14 +16,12 @@ """Test Store implementations.""" +from io import BytesIO import os import gzip from ... import errors as errors from ...errors import BzrError -from ...sixish import ( - BytesIO, - ) from .store import TransportStore from .store.text import TextStore from .store.versioned import VersionedFileStore diff --git a/breezy/plugins/weave_fmt/workingtree.py b/breezy/plugins/weave_fmt/workingtree.py index e81e23d02b..30c849c8c5 100644 --- a/breezy/plugins/weave_fmt/workingtree.py +++ b/breezy/plugins/weave_fmt/workingtree.py @@ -18,6 +18,7 @@ from __future__ import absolute_import +from io import BytesIO from ... import ( conflicts as _mod_conflicts, @@ -32,9 +33,6 @@ xml5, ) from ...mutabletree import MutableTree -from ...sixish import ( - BytesIO, - ) from ...transport.local import LocalTransport from ...workingtree import ( WorkingTreeFormat, diff --git a/breezy/plugins/zsh_completion/zshcomp.py b/breezy/plugins/zsh_completion/zshcomp.py index 841dfe94b2..3b45e4865f 100644 --- a/breezy/plugins/zsh_completion/zshcomp.py +++ b/breezy/plugins/zsh_completion/zshcomp.py @@ -26,9 +26,6 @@ option, plugin, ) -from ...sixish import ( - text_type, - ) import breezy import re import sys @@ -266,11 +263,11 @@ class cmd_zsh_completion(commands.Command): """ takes_options = [ - option.Option("function-name", short_name="f", type=text_type, argname="name", + option.Option("function-name", short_name="f", type=str, argname="name", help="Name of the generated function (default: _brz)"), option.Option("debug", type=None, hidden=True, help="Enable shell code useful for debugging"), - option.ListOption("plugin", type=text_type, argname="name", + option.ListOption("plugin", type=str, argname="name", # param_name="selected_plugins", # doesn't work, bug #387117 help="Enable completions for the selected plugin" + " (default: all plugins)"), diff --git a/breezy/registry.py b/breezy/registry.py index a4db96c1fc..0cd197d800 100644 --- a/breezy/registry.py +++ b/breezy/registry.py @@ -20,8 +20,6 @@ from .pyutils import get_named_object -from .sixish import viewitems - class _ObjectGetter(object): """Maintain a reference to an object, and return the object on request. 
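One behavioural wrinkle in the missing_keys_for_stacking_fixer hunk further above: on Python 3, map() is lazy, so map(needed.update, rev_parents.values()) only builds an iterator that is then discarded, and update() is never called, unlike the eager Python 2 map() the old line relied on. A plain loop keeps the side effect; sketch with made-up revision ids:

    rev_parents = {b'rev-2': (b'rev-1',), b'rev-3': (b'rev-1', b'rev-2')}  # made-up data

    needed = set()
    map(needed.update, rev_parents.values())   # lazy on Python 3: nothing happens
    assert needed == set()

    needed = set()
    for parents in rev_parents.values():       # eager equivalent of the old Py2 map()
        needed.update(parents)
    assert needed == {b'rev-1', b'rev-2'}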
@@ -110,11 +108,11 @@ def __init__(self): def aliases(self): """Return a set of the format names which are aliases.""" - return dict(viewitems(self._aliases)) + return dict(self._aliases.items()) def alias_map(self): ret = {} - for alias, target in viewitems(self._aliases): + for alias, target in self._aliases.items(): ret.setdefault(target, []).append(alias) return ret diff --git a/breezy/rename_map.py b/breezy/rename_map.py index 55bc569e31..d6984cd625 100644 --- a/breezy/rename_map.py +++ b/breezy/rename_map.py @@ -16,6 +16,7 @@ from __future__ import absolute_import +from io import BytesIO from . import ( osutils, @@ -23,11 +24,6 @@ trace, ) from .i18n import gettext -from .sixish import ( - BytesIO, - viewitems, - viewvalues, - ) from .ui import ui_factory @@ -107,7 +103,7 @@ def get_all_hits(self, paths): for num, path in enumerate(paths): task.update(gettext('Determining hash hits'), num, len(paths)) hits = self.hitcounts(self.tree.get_file_lines(path)) - all_hits.extend((v, path, k) for k, v in viewitems(hits)) + all_hits.extend((v, path, k) for k, v in hits.items()) return all_hits def file_match(self, paths): @@ -146,7 +142,7 @@ def get_required_parents(self, matches): break required_parents.setdefault(path, []).append(child) require_ids = {} - for parent, children in viewitems(required_parents): + for parent, children in required_parents.items(): child_file_ids = set() for child in children: file_id = matches.get(child) @@ -163,8 +159,8 @@ def match_parents(self, required_parents, missing_parents): parent directories. """ all_hits = [] - for file_id, file_id_children in viewitems(missing_parents): - for path, path_children in viewitems(required_parents): + for file_id, file_id_children in missing_parents.items(): + for path, path_children in required_parents.items(): hits = len(path_children.intersection(file_id_children)) if hits > 0: all_hits.append((hits, path, file_id)) @@ -239,9 +235,9 @@ def guess_renames(klass, from_tree, to_tree, dry_run=False): def _make_inventory_delta(self, matches): delta = [] - file_id_matches = dict((f, p) for p, f in viewitems(matches)) + file_id_matches = dict((f, p) for p, f in matches.items()) file_id_query = [] - for f in viewvalues(matches): + for f in matches.values(): try: file_id_query.append(self.tree.id2path(f)) except errors.NoSuchId: diff --git a/breezy/repository.py b/breezy/repository.py index 253bacec60..9ed06d5db7 100644 --- a/breezy/repository.py +++ b/breezy/repository.py @@ -40,10 +40,6 @@ from .decorators import only_raises from .inter import InterObject from .lock import _RelockDebugMixin, LogicalLockResult -from .sixish import ( - text_type, - viewitems, - ) from .trace import ( log_exception_quietly, note, mutter, mutter_callsite, warning) @@ -110,7 +106,7 @@ def __init__(self, repository, parents, config_stack, timestamp=None, if committer is None: self._committer = self._config_stack.get('email') - elif not isinstance(committer, text_type): + elif not isinstance(committer, str): self._committer = committer.decode() # throw if non-ascii else: self._committer = committer @@ -152,10 +148,10 @@ def _validate_unicode_text(self, text, context): raise ValueError('Invalid value for %s: %r' % (context, text)) def _validate_revprops(self, revprops): - for key, value in viewitems(revprops): + for key, value in revprops.items(): # We know that the XML serializers do not round trip '\r' # correctly, so refuse to accept them - if not isinstance(value, (text_type, str)): + if not isinstance(value, str): raise ValueError('revision 
property (%s) is not a valid' ' (unicode) string: %r' % (key, value)) # TODO(jelmer): Make this repository-format specific @@ -1052,8 +1048,8 @@ def _get_parent_map_no_fallbacks(self, revision_ids): else: query_keys.append((revision_id,)) vf = self.revisions.without_fallbacks() - for (revision_id,), parent_keys in viewitems( - vf.get_parent_map(query_keys)): + for (revision_id,), parent_keys in ( + vf.get_parent_map(query_keys).items()): if parent_keys: result[revision_id] = tuple([parent_revid for (parent_revid,) in parent_keys]) @@ -1184,7 +1180,7 @@ def _check_ascii_revisionid(self, revision_id, method): # weave repositories refuse to store revisionids that are non-ascii. if revision_id is not None: # weaves require ascii revision ids. - if isinstance(revision_id, text_type): + if isinstance(revision_id, str): try: revision_id.encode('ascii') except UnicodeEncodeError: @@ -1620,7 +1616,7 @@ def _strip_NULL_ghosts(revision_graph): # Filter ghosts, and null: if _mod_revision.NULL_REVISION in revision_graph: del revision_graph[_mod_revision.NULL_REVISION] - for key, parents in viewitems(revision_graph): + for key, parents in revision_graph.items(): revision_graph[key] = tuple(parent for parent in parents if parent in revision_graph) return revision_graph diff --git a/breezy/revision.py b/breezy/revision.py index a0fb1f8d99..208a66b232 100644 --- a/breezy/revision.py +++ b/breezy/revision.py @@ -28,9 +28,6 @@ errors, osutils, ) -from .sixish import ( - text_type, - ) NULL_REVISION = b"null:" CURRENT_REVISION = b"current:" @@ -90,10 +87,10 @@ def _check_properties(self): """Verify that all revision properties are OK.""" for name, value in self.properties.items(): # GZ 2017-06-10: What sort of string are properties exactly? - not_text = not isinstance(name, (text_type, str)) + not_text = not isinstance(name, str) if not_text or osutils.contains_whitespace(name): raise ValueError("invalid property name %r" % name) - if not isinstance(value, (text_type, bytes)): + if not isinstance(value, (str, bytes)): raise ValueError("invalid property value %r for %r" % (value, name)) diff --git a/breezy/revisionspec.py b/breezy/revisionspec.py index 0eeed4ad9f..fbb21e4aae 100644 --- a/breezy/revisionspec.py +++ b/breezy/revisionspec.py @@ -37,9 +37,6 @@ registry, trace, ) -from .sixish import ( - text_type, - ) class RevisionInfo(object): @@ -160,7 +157,7 @@ def from_string(spec): """ if spec is None: return RevisionSpec(None, _internal=True) - if not isinstance(spec, (str, text_type)): + if not isinstance(spec, str): raise TypeError("revision spec needs to be text") match = revspec_registry.get_prefix(spec) if match is not None: @@ -471,7 +468,7 @@ def _as_revision_id(self, context_branch): # self.spec comes straight from parsing the command line arguments, # so we expect it to be a Unicode string. Switch it to the internal # representation. - if isinstance(self.spec, text_type): + if isinstance(self.spec, str): return cache_utf8.encode(self.spec) return self.spec diff --git a/breezy/rio.py b/breezy/rio.py index a399a4749a..25e0b66158 100644 --- a/breezy/rio.py +++ b/breezy/rio.py @@ -36,9 +36,6 @@ from . import osutils from .iterablefile import IterableFile -from .sixish import ( - text_type, - ) # XXX: some redundancy is allowing to write stanzas in isolation as well as # through a writer object. 
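For the revision-property hunks above, the type checks now reduce to plain str for property names and str or bytes for values at the Revision level, while the repository-side _validate_revprops accepts str only. A rough sketch of the shape of that last check, not the exact Breezy code:

    def validate_revprops(revprops):
        # mirrors the shape of the check above, with a generic name
        for key, value in revprops.items():
            if not isinstance(value, str):
                raise ValueError(
                    'revision property (%s) is not a valid (unicode) string: %r'
                    % (key, value))

    validate_revprops({u'branch-nick': u'trunk'})        # accepted
    try:
        validate_revprops({u'branch-nick': b'trunk'})    # bytes value is rejected
    except ValueError:
        pass
    else:
        raise AssertionError('bytes value should have been rejected')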
@@ -139,7 +136,7 @@ def add(self, tag, value): raise ValueError("invalid tag %r" % (tag,)) if isinstance(value, bytes): value = value.decode('ascii') - elif isinstance(value, text_type): + elif isinstance(value, str): pass else: raise TypeError("invalid type for rio value: %r of type %s" diff --git a/breezy/shelf_ui.py b/breezy/shelf_ui.py index d194dc7005..81b0197cdc 100644 --- a/breezy/shelf_ui.py +++ b/breezy/shelf_ui.py @@ -16,6 +16,7 @@ from __future__ import absolute_import +import contextlib import patiencediff import shutil import sys @@ -25,7 +26,6 @@ from . import ( builtins, - cleanup, delta, diff, errors, @@ -458,7 +458,7 @@ def __init__(self, tree, manager, shelf_id, apply_changes=True, def run(self): """Perform the unshelving operation.""" - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(self.tree.lock_tree_write()) if self.read_shelf: trace.note(gettext('Using changes with id "%d".') % diff --git a/breezy/sixish.py b/breezy/sixish.py deleted file mode 100644 index 7095c6f318..0000000000 --- a/breezy/sixish.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (C) 2017 Bazaar hackers -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -""" -Module to aid writing a Python dialect compatible with 2.7 and 3.4+. - -Initially pretty much just a subset of six while things get worked out. -""" - -from __future__ import absolute_import - -from six import ( - binary_type, - get_unbound_function, - indexbytes, - int2byte, - PY3, - reraise, - string_types, - text_type, - unichr, - viewitems, - viewkeys, - viewvalues, - ) # noqa: F401 - - -# The io module exists in Python 2.7 but lacks optimisation. Most uses are not -# performance critical, but want to measure before switching from cStringIO. -if PY3: - import io as _io - BytesIO = _io.BytesIO - StringIO = _io.StringIO - from builtins import range, map, zip -else: - from cStringIO import StringIO as BytesIO # noqa: F401 - from StringIO import StringIO # noqa: F401 - from future_builtins import zip, map # noqa: F401 - range = xrange # noqa: F821 - - -# GZ 2017-06-10: Work out if interning bits of inventory is behaviour we want -# to retain outside of StaticTuple, if so need to implement for Python 3. 
-if PY3: - def bytesintern(b): - """Dummy intern() function.""" - return b -else: - bytesintern = intern diff --git a/breezy/smtp_connection.py b/breezy/smtp_connection.py index ec68cd4a76..d499412221 100644 --- a/breezy/smtp_connection.py +++ b/breezy/smtp_connection.py @@ -18,10 +18,7 @@ from __future__ import absolute_import -try: - from email.utils import getaddresses, parseaddr -except ImportError: # python < 3 - from email.Utils import getaddresses, parseaddr +from email.utils import getaddresses, parseaddr import errno import smtplib diff --git a/breezy/tag.py b/breezy/tag.py index 8237460b0c..a874766195 100644 --- a/breezy/tag.py +++ b/breezy/tag.py @@ -25,12 +25,12 @@ from __future__ import absolute_import from collections import defaultdict +import contextlib # NOTE: I was going to call this tags.py, but vim seems to think all files # called tags* are ctags files... mbp 20070220. from .registry import Registry -from .sixish import text_type from .lazy_import import lazy_import lazy_import(globals(), """ import itertools @@ -39,7 +39,6 @@ from breezy import ( bencode, - cleanup, trace, ) """) @@ -286,7 +285,7 @@ def merge_to(self, to_tags, overwrite=False, ignore_master=False): (tagname, source_target, dest_target), or None if no copying was done. """ - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: if self.branch == to_tags.branch: return {}, [] if not self.branch.supports_tags(): @@ -406,7 +405,7 @@ def sort_natural(branch, tags): """ def natural_sort_key(tag): return [f(s) for f, s in - zip(itertools.cycle((text_type.lower, int)), + zip(itertools.cycle((str.lower, int)), re.split('([0-9]+)', tag[0]))] tags.sort(key=natural_sort_key) diff --git a/breezy/tests/__init__.py b/breezy/tests/__init__.py index 36c1617229..698afb5e03 100644 --- a/breezy/tests/__init__.py +++ b/breezy/tests/__init__.py @@ -94,12 +94,6 @@ except ImportError: # lsprof not available pass -from ..sixish import ( - int2byte, - PY3, - string_types, - text_type, - ) from ..bzr.smart import client, request from ..transport import ( memory, @@ -1331,14 +1325,14 @@ def assertEqualDiff(self, a, b, message=None): return if message is None: message = "texts not equal:\n" - if a + ('\n' if isinstance(a, text_type) else b'\n') == b: + if a + ('\n' if isinstance(a, str) else b'\n') == b: message = 'first string is missing a final newline.\n' - if a == b + ('\n' if isinstance(b, text_type) else b'\n'): + if a == b + ('\n' if isinstance(b, str) else b'\n'): message = 'second string is missing a final newline.\n' raise AssertionError(message + self._ndiff_strings( - a if isinstance(a, text_type) else a.decode(), - b if isinstance(b, text_type) else b.decode())) + a if isinstance(a, str) else a.decode(), + b if isinstance(b, str) else b.decode())) def assertEqualMode(self, mode, mode_test): self.assertEqual(mode, mode_test, @@ -1556,7 +1550,7 @@ def assertDocstring(self, expected_docstring, obj): def assertPathExists(self, path): """Fail unless path or paths, which may be abs or relative, exist.""" # TODO(jelmer): Clean this up for pad.lv/1696545 - if not isinstance(path, (bytes, str, text_type)): + if not isinstance(path, (bytes, str)): for p in path: self.assertPathExists(p) else: @@ -1565,7 +1559,7 @@ def assertPathExists(self, path): def assertPathDoesNotExist(self, path): """Fail if path or paths, which may be abs or relative, exist.""" - if not isinstance(path, (str, text_type)): + if not isinstance(path, (str, str)): for p in path: self.assertPathDoesNotExist(p) else: @@ -1940,12 +1934,8 
@@ def _run_bzr_core(self, args, encoding, stdin, stdout, stderr, self.log('run brz: %r', args) - if PY3: - self._last_cmd_stdout = stdout - self._last_cmd_stderr = stderr - else: - self._last_cmd_stdout = codecs.getwriter(encoding)(stdout) - self._last_cmd_stderr = codecs.getwriter(encoding)(stderr) + self._last_cmd_stdout = stdout + self._last_cmd_stderr = stderr old_ui_factory = ui.ui_factory ui.ui_factory = ui_testing.TestUIFactory( @@ -2005,26 +1995,17 @@ def run_bzr_raw(self, args, retcode=0, stdin=None, encoding=None, :keyword error_regexes: A list of expected error messages. If specified they must be seen in the error output of the command. """ - if isinstance(args, string_types): + if isinstance(args, str): args = shlex.split(args) if encoding is None: encoding = osutils.get_user_encoding() - if sys.version_info[0] == 2: - wrapped_stdout = stdout = ui_testing.BytesIOWithEncoding() - wrapped_stderr = stderr = ui_testing.BytesIOWithEncoding() - stdout.encoding = stderr.encoding = encoding - - # FIXME: don't call into logging here - handler = trace.EncodedStreamHandler( - stderr, errors="replace") - else: - stdout = BytesIO() - stderr = BytesIO() - wrapped_stdout = TextIOWrapper(stdout, encoding) - wrapped_stderr = TextIOWrapper(stderr, encoding) - handler = logging.StreamHandler(wrapped_stderr) + stdout = BytesIO() + stderr = BytesIO() + wrapped_stdout = TextIOWrapper(stdout, encoding) + wrapped_stderr = TextIOWrapper(stderr, encoding) + handler = logging.StreamHandler(wrapped_stderr) handler.setLevel(logging.INFO) logger = logging.getLogger('') @@ -2037,9 +2018,8 @@ def run_bzr_raw(self, args, retcode=0, stdin=None, encoding=None, finally: logger.removeHandler(handler) - if PY3: - wrapped_stdout.flush() - wrapped_stderr.flush() + wrapped_stdout.flush() + wrapped_stderr.flush() out = stdout.getvalue() err = stderr.getvalue() @@ -2088,24 +2068,16 @@ def run_bzr(self, args, retcode=0, stdin=None, encoding=None, :keyword error_regexes: A list of expected error messages. If specified they must be seen in the error output of the command. 
""" - if isinstance(args, string_types): + if isinstance(args, str): args = shlex.split(args) if encoding is None: encoding = osutils.get_user_encoding() - if sys.version_info[0] == 2: - stdout = ui_testing.BytesIOWithEncoding() - stderr = ui_testing.BytesIOWithEncoding() - stdout.encoding = stderr.encoding = encoding - # FIXME: don't call into logging here - handler = trace.EncodedStreamHandler( - stderr, errors="replace") - else: - stdout = ui_testing.StringIOWithEncoding() - stderr = ui_testing.StringIOWithEncoding() - stdout.encoding = stderr.encoding = encoding - handler = logging.StreamHandler(stream=stderr) + stdout = ui_testing.StringIOWithEncoding() + stderr = ui_testing.StringIOWithEncoding() + stdout.encoding = stderr.encoding = encoding + handler = logging.StreamHandler(stream=stderr) handler.setLevel(logging.INFO) logger = logging.getLogger('') @@ -2190,7 +2162,7 @@ def run_bzr_subprocess(self, *args, **kwargs): if len(args) == 1: if isinstance(args[0], list): args = args[0] - elif isinstance(args[0], (str, text_type)): + elif isinstance(args[0], str): args = list(shlex.split(args[0])) else: raise ValueError("passing varargs to run_bzr_subprocess") @@ -2395,18 +2367,12 @@ def apply_redirected(self, stdin=None, stdout=None, stderr=None, if getattr(self, "_log_file", None) is not None: stdout = self._log_file else: - if sys.version_info[0] == 2: - stdout = BytesIO() - else: - stdout = StringIO() + stdout = StringIO() if stderr is None: if getattr(self, "_log_file", None is not None): stderr = self._log_file else: - if sys.version_info[0] == 2: - stderr = BytesIO() - else: - stderr = StringIO() + stderr = StringIO() real_stdin = sys.stdin real_stdout = sys.stdout real_stderr = sys.stderr @@ -2818,8 +2784,6 @@ def make_branch_builder(self, relpath, format=None): def overrideEnvironmentForTesting(self): test_home_dir = self.test_home_dir - if not PY3 and isinstance(test_home_dir, text_type): - test_home_dir = test_home_dir.encode(sys.getfilesystemencoding()) self.overrideEnv('HOME', test_home_dir) self.overrideEnv('BRZ_HOME', test_home_dir) self.overrideEnv('GNUPGHOME', os.path.join(test_home_dir, '.gnupg')) @@ -2960,7 +2924,7 @@ def build_tree(self, shape, line_endings='binary', transport=None): if transport is None or transport.is_readonly(): transport = _mod_transport.get_transport_from_path(".") for name in shape: - self.assertIsInstance(name, (str, text_type)) + self.assertIsInstance(name, str) if name[-1] == '/': transport.mkdir(urlutils.escape(name[:-1])) else: @@ -2980,7 +2944,7 @@ def assertInWorkingTree(self, path, root_path='.', tree=None): """Assert whether path or paths are in the WorkingTree""" if tree is None: tree = workingtree.WorkingTree.open(root_path) - if not isinstance(path, (str, text_type)): + if not isinstance(path, str): for p in path: self.assertInWorkingTree(p, tree=tree) else: @@ -2991,7 +2955,7 @@ def assertNotInWorkingTree(self, path, root_path='.', tree=None): """Assert whether path or paths are not in the WorkingTree""" if tree is None: tree = workingtree.WorkingTree.open(root_path) - if not isinstance(path, (str, text_type)): + if not isinstance(path, str): for p in path: self.assertNotInWorkingTree(p, tree=tree) else: @@ -3637,7 +3601,7 @@ def run(self, result): # The traceback is formatted to a string and written in one go # to avoid interleaving lines from multiple failing children. 
tb = traceback.format_exc() - if isinstance(tb, text_type): + if isinstance(tb, str): tb = tb.encode('utf-8') try: stream.write(tb) @@ -4074,7 +4038,6 @@ def _test_suite_testmod_names(): 'breezy.tests.test_chk_serializer', 'breezy.tests.test_chunk_writer', 'breezy.tests.test_clean_tree', - 'breezy.tests.test_cleanup', 'breezy.tests.test_cmdline', 'breezy.tests.test_commands', 'breezy.tests.test_commit', @@ -4310,28 +4273,23 @@ def interesting_module(name): # modules building their suite with loadTestsFromModuleNames suite.addTest(loader.loadTestsFromModuleNames(_test_suite_testmod_names())) - if not PY3: - suite.addTest(loader.loadTestsFromModuleNames(['breezy.doc'])) - - # It's pretty much impossible to write readable doctests that work on - # both Python 2 and Python 3 because of their overreliance on - # consistent repr() return values. - # For now, just run doctests on Python 2 so we now they haven't broken. - for mod in _test_suite_modules_to_doctest(): - if not interesting_module(mod): - # No tests to keep here, move along - continue - try: - # note that this really does mean "report only" -- doctest - # still runs the rest of the examples - doc_suite = IsolatedDocTestSuite( - mod, optionflags=doctest.REPORT_ONLY_FIRST_FAILURE) - except ValueError as e: - print('**failed to get doctest for: %s\n%s' % (mod, e)) - raise - if len(doc_suite._tests) == 0: - raise errors.BzrError("no doctests found in %s" % (mod,)) - suite.addTest(doc_suite) + suite.addTest(loader.loadTestsFromModuleNames(['breezy.doc'])) + + for mod in _test_suite_modules_to_doctest(): + if not interesting_module(mod): + # No tests to keep here, move along + continue + try: + # note that this really does mean "report only" -- doctest + # still runs the rest of the examples + doc_suite = IsolatedDocTestSuite( + mod, optionflags=doctest.REPORT_ONLY_FIRST_FAILURE) + except ValueError as e: + print('**failed to get doctest for: %s\n%s' % (mod, e)) + raise + if len(doc_suite._tests) == 0: + raise errors.BzrError("no doctests found in %s" % (mod,)) + suite.addTest(doc_suite) default_encoding = sys.getdefaultencoding() for name, plugin in _mod_plugin.plugins().items(): @@ -4493,6 +4451,7 @@ def clone_test(test, new_id): return new_test + def permute_tests_for_extension(standard_tests, loader, py_module_name, ext_module_name): """Helper for permutating tests against an extension module. @@ -4526,10 +4485,12 @@ def permute_tests_for_extension(standard_tests, loader, py_module_name, if feature.available(): scenarios.append(('C', {'module': feature.module})) else: - # the compiled module isn't available, so we add a failing test class FailWithoutFeature(TestCase): + def id(self): + return ext_module_name + '.' + super(FailWithoutFeature, self).id() def test_fail(self): self.requireFeature(feature) + # the compiled module isn't available, so we add a failing test suite.addTest(loader.loadTestsFromTestCase(FailWithoutFeature)) result = multiply_tests(standard_tests, scenarios, suite) return result, feature @@ -4588,7 +4549,7 @@ def probe_bad_non_ascii(encoding): for given encoding. """ for i in range(128, 256): - char = int2byte(i) + char = bytes([i]) try: char.decode(encoding) except UnicodeDecodeError: diff --git a/breezy/tests/blackbox/test_command_encoding.py b/breezy/tests/blackbox/test_command_encoding.py index 1f311f5f4e..317ec1d6f8 100644 --- a/breezy/tests/blackbox/test_command_encoding.py +++ b/breezy/tests/blackbox/test_command_encoding.py @@ -18,7 +18,6 @@ from .. 
import TestCaseWithMemoryTransport from ...commands import Command, register_command, plugin_cmds -from ...sixish import PY3 class cmd_echo_exact(Command): diff --git a/breezy/tests/blackbox/test_commit.py b/breezy/tests/blackbox/test_commit.py index 7b81e95cc5..eb5fad7ccf 100644 --- a/breezy/tests/blackbox/test_commit.py +++ b/breezy/tests/blackbox/test_commit.py @@ -31,7 +31,6 @@ msgeditor, ) from ...controldir import ControlDir -from ...sixish import PY3 from .. import ( test_foreign, features, @@ -147,7 +146,7 @@ def test_unicode_commit_message_is_filename(self): out, err = self.run_bzr(['commit', '-m', file_name]) reflags = re.MULTILINE | re.DOTALL | re.UNICODE te = osutils.get_terminal_encoding() - self.assertContainsRe(err if PY3 else err.decode(te), + self.assertContainsRe(err, u'The commit message is a file name:', flags=reflags) @@ -166,7 +165,7 @@ def test_unicode_commit_message_is_filename(self): out, err = self.run_bzr(['commit', '-m', file_name]) reflags = re.MULTILINE | re.DOTALL | re.UNICODE te = osutils.get_terminal_encoding() - self.assertContainsRe(err if PY3 else err.decode(te, 'replace'), + self.assertContainsRe(err, u'The commit message is a file name:', flags=reflags) finally: @@ -188,11 +187,7 @@ def test_non_ascii_file_unversioned_iso_8859_5(self): tree.add(["f"]) out, err = self.run_bzr_raw(["commit", "-m", "Wrong filename", u"\xa7"], encoding="iso-8859-5", retcode=3) - if not PY3: - self.expectFailure("Error messages are always written as UTF-8", - self.assertNotContainsString, err, b"\xc2\xa7") - else: - self.assertNotContainsString(err, b"\xc2\xa7") + self.assertNotContainsString(err, b"\xc2\xa7") self.assertContainsRe(err, b"(?m)not versioned: \"\xfd\"$") def test_warn_about_forgotten_commit_message(self): diff --git a/breezy/tests/blackbox/test_conflicts.py b/breezy/tests/blackbox/test_conflicts.py index e2411763c7..a1b0825f0d 100644 --- a/breezy/tests/blackbox/test_conflicts.py +++ b/breezy/tests/blackbox/test_conflicts.py @@ -19,7 +19,6 @@ tests, workingtree, ) -from breezy.sixish import PY3 from breezy.tests import script, features @@ -98,7 +97,7 @@ def test_messages(self): make_tree_with_conflicts(self, "branch", prefix=u"\xA7") out, err = self.run_bzr(["conflicts", "-d", "branch"], encoding=self.encoding) - self.assertEqual(out if PY3 else out.decode(self.encoding), + self.assertEqual(out, u"Text conflict in \xA7_other_file\n" u"Path conflict: \xA7dir3 / \xA7dir2\n" u"Text conflict in \xA7file\n") @@ -109,7 +108,7 @@ def test_text_conflict_paths(self): make_tree_with_conflicts(self, "branch", prefix=u"\xA7") out, err = self.run_bzr(["conflicts", "-d", "branch", "--text"], encoding=self.encoding) - self.assertEqual(out if PY3 else out.decode(self.encoding), + self.assertEqual(out, u"\xA7_other_file\n" u"\xA7file\n") self.assertEqual(err, "") diff --git a/breezy/tests/blackbox/test_exceptions.py b/breezy/tests/blackbox/test_exceptions.py index f4be08c94f..3f1ac4ea32 100644 --- a/breezy/tests/blackbox/test_exceptions.py +++ b/breezy/tests/blackbox/test_exceptions.py @@ -28,7 +28,6 @@ repository, tests, ) -from breezy.sixish import PY3 from breezy.bzr.groupcompress_repo import RepositoryFormat2a @@ -45,26 +44,6 @@ def test_exception_exitcode(self): self.assertContainsRe( err, br'Bazaar has encountered an internal error') - def test_undecodable_argv(self): - """A user error must be reported if argv is not in the locale encoding - - A subprocess with an environment ascii-only setting is used so the test - can run without worrying about the locale the test 
suite is using. - """ - if os.name != "posix": - raise tests.TestNotApplicable("Needs system beholden to C locales") - if PY3: - raise tests.TestNotApplicable( - "Unable to pass argv to subprocess as bytes") - out, err = self.run_bzr_subprocess([b"\xa0"], - env_changes={ - "LANG": "C", "LC_ALL": "C"}, - universal_newlines=True, - retcode=errors.EXIT_ERROR) - self.assertContainsRe(err, br"^brz: ERROR: .*'\\xa0'.* unsupported", - flags=re.MULTILINE) - self.assertEqual(out, b"") - def test_utf8_default_fs_enc(self): """In the C locale brz treats a posix filesystem as UTF-8 encoded""" if os.name != "posix": @@ -79,10 +58,7 @@ class TestOptParseBugHandling(tests.TestCase): def test_nonascii_optparse(self): """Reasonable error raised when non-ascii in option name on Python 2""" - if PY3: - error_re = u'no such option: -\xe4' - else: - error_re = 'Only ASCII permitted in option names' + error_re = u'no such option: -\xe4' out = self.run_bzr_error([error_re], ['st', u'-\xe4']) diff --git a/breezy/tests/blackbox/test_export.py b/breezy/tests/blackbox/test_export.py index f4faa8d0d0..3c4c79fc93 100644 --- a/breezy/tests/blackbox/test_export.py +++ b/breezy/tests/blackbox/test_export.py @@ -18,6 +18,7 @@ """Black-box tests for brz export. """ +from io import BytesIO import os import stat import tarfile @@ -30,9 +31,6 @@ export, osutils, ) -from ...sixish import ( - BytesIO, - ) from .. import ( features, TestCaseWithTransport, diff --git a/breezy/tests/blackbox/test_log.py b/breezy/tests/blackbox/test_log.py index 604abafa76..7101d28e72 100644 --- a/breezy/tests/blackbox/test_log.py +++ b/breezy/tests/blackbox/test_log.py @@ -29,7 +29,6 @@ osutils, tests, ) -from breezy.sixish import PY3 from breezy.tests import ( test_log, features, @@ -785,12 +784,7 @@ def try_encoding(self, encoding, fail=False): out, err = brz('log', encoding=encoding) if not fail: # Make sure we wrote mu as we expected it to exist - if not PY3: - self.assertNotEqual(-1, out.find(encoded_msg)) - out_unicode = out.decode(encoding) - else: - out_unicode = out - self.assertNotEqual(-1, out_unicode.find(self._message)) + self.assertNotEqual(-1, out.find(self._message)) else: self.assertNotEqual(-1, out.find('Message with ?')) finally: diff --git a/breezy/tests/blackbox/test_non_ascii.py b/breezy/tests/blackbox/test_non_ascii.py index 35b07a254a..0d48265749 100644 --- a/breezy/tests/blackbox/test_non_ascii.py +++ b/breezy/tests/blackbox/test_non_ascii.py @@ -24,7 +24,6 @@ tests, urlutils, ) -from breezy.sixish import PY3 from breezy.tests import EncodingAdapter from breezy.tests.scenarios import load_tests_apply_scenarios @@ -43,11 +42,7 @@ def setUp(self): self.overrideAttr(osutils, '_cached_user_encoding', self.encoding) email = self.info['committer'] + ' ' - if sys.version_info[0] == 2: - self.overrideEnv('BRZ_EMAIL', email.encode( - osutils.get_user_encoding())) - else: - self.overrideEnv('BRZ_EMAIL', email) + self.overrideEnv('BRZ_EMAIL', email) self.create_base() def run_bzr_decode(self, args, encoding=None, fail=False, retcode=None, diff --git a/breezy/tests/blackbox/test_ping.py b/breezy/tests/blackbox/test_ping.py index 7725cbaa80..ee5d163ba7 100644 --- a/breezy/tests/blackbox/test_ping.py +++ b/breezy/tests/blackbox/test_ping.py @@ -17,7 +17,6 @@ """External tests of 'brz ping'""" import breezy -from breezy.sixish import PY3 from breezy import tests @@ -33,12 +32,7 @@ def test_simple_ping(self): out, err = self.run_bzr(['ping', self.get_url('branch')]) self.assertLength(1, self.hpss_calls) self.assertLength(1, 
self.hpss_connections) - if PY3: - self.assertEqual(out, - "Response: (b'ok', b'2')\n" - "Headers: {'Software version': '%s'}\n" % (breezy.version_string,)) - else: - self.assertEqual(out, - "Response: ('ok', '2')\n" - "Headers: {u'Software version': u'%s'}\n" % (breezy.version_string,)) + self.assertEqual(out, + "Response: (b'ok', b'2')\n" + "Headers: {'Software version': '%s'}\n" % (breezy.version_string,)) self.assertEqual(err, "") diff --git a/breezy/tests/blackbox/test_plugins.py b/breezy/tests/blackbox/test_plugins.py index b102624769..173841c676 100644 --- a/breezy/tests/blackbox/test_plugins.py +++ b/breezy/tests/blackbox/test_plugins.py @@ -22,7 +22,6 @@ from ... import ( commands, ) -from ...sixish import PY3 from ..test_plugins import ( BaseTestPlugins, ) @@ -32,10 +31,7 @@ class TestPluginHelp(BaseTestPlugins): def run_bzr_utf8_out(self, *args, **kwargs): out, _ = self.run_bzr(*args, **kwargs) - if PY3: - return out - else: - return out.decode('utf-8') + return out def split_help_commands(self): help = {} diff --git a/breezy/tests/blackbox/test_send.py b/breezy/tests/blackbox/test_send.py index 9b36db39ab..703d19379b 100644 --- a/breezy/tests/blackbox/test_send.py +++ b/breezy/tests/blackbox/test_send.py @@ -15,6 +15,9 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import ( + BytesIO, + ) from ... import ( branch, @@ -23,9 +26,6 @@ ) from ...controldir import ControlDir from ...bzr.bundle import serializer -from ...sixish import ( - BytesIO, - ) from ...transport import memory from .. import ( scenarios, diff --git a/breezy/tests/blackbox/test_serve.py b/breezy/tests/blackbox/test_serve.py index 1c1e9fc0cd..70054fcb26 100644 --- a/breezy/tests/blackbox/test_serve.py +++ b/breezy/tests/blackbox/test_serve.py @@ -20,10 +20,7 @@ import os import signal import sys -try: - from _thread import interrupt_main -except ImportError: # Python < 3 - from thread import interrupt_main +from _thread import interrupt_main import threading diff --git a/breezy/tests/blackbox/test_shell_complete.py b/breezy/tests/blackbox/test_shell_complete.py index a5378b16d5..142b6aa440 100644 --- a/breezy/tests/blackbox/test_shell_complete.py +++ b/breezy/tests/blackbox/test_shell_complete.py @@ -19,7 +19,6 @@ """Black-box tests for 'brz shell-complete'.""" from breezy.tests import TestCaseWithTransport -from breezy.sixish import PY3 class ShellCompleteTests(TestCaseWithTransport): diff --git a/breezy/tests/blackbox/test_status.py b/breezy/tests/blackbox/test_status.py index 653e4fedb4..1267e2a15e 100644 --- a/breezy/tests/blackbox/test_status.py +++ b/breezy/tests/blackbox/test_status.py @@ -23,6 +23,10 @@ """ import codecs +from io import ( + BytesIO, + StringIO, + ) from os import mkdir, chdir, rmdir, unlink import sys @@ -38,11 +42,6 @@ import breezy.branch from ...osutils import pathjoin from ...revisionspec import RevisionSpec -from ...sixish import ( - BytesIO, - StringIO, - PY3, - ) from ...status import show_tree_status from .. 
import TestCaseWithTransport, TestSkipped from ...workingtree import WorkingTree @@ -805,6 +804,4 @@ def test_stdout_latin1(self): added: hell\u00d8 """ - if not PY3: - expected = expected.encode('latin-1') self.assertEqual(stdout, expected) diff --git a/breezy/tests/blackbox/test_tags.py b/breezy/tests/blackbox/test_tags.py index e9b49b3b66..a30decceb0 100644 --- a/breezy/tests/blackbox/test_tags.py +++ b/breezy/tests/blackbox/test_tags.py @@ -31,7 +31,6 @@ from breezy.branch import ( Branch, ) -from breezy.sixish import PY3 from breezy.tests import ( script, TestCaseWithTransport, @@ -392,20 +391,12 @@ def test_conflicting_tags(self): b2.tags.set_tag(tagname, b'revid2') # push should give a warning about the tags out, err = self.run_bzr('push -d one two', encoding='utf-8') - if PY3: - self.assertContainsRe(out, 'Conflicting tags:\n.*' + tagname) - else: - self.assertContainsRe( - out, 'Conflicting tags:\n.*' + tagname.encode('utf-8')) + self.assertContainsRe(out, 'Conflicting tags:\n.*' + tagname) # pull should give a warning about the tags out, err = self.run_bzr('pull -d one two', encoding='utf-8', retcode=1) - if PY3: - self.assertContainsRe(out, - 'Conflicting tags:\n.*' + tagname) - else: - self.assertContainsRe(out, - 'Conflicting tags:\n.*' + tagname.encode('utf-8')) + self.assertContainsRe(out, + 'Conflicting tags:\n.*' + tagname) # merge should give a warning about the tags -- not implemented yet ## out, err = self.run_bzr('merge -d one two', encoding='utf-8') # self.assertContainsRe(out, diff --git a/breezy/tests/blackbox/test_testament.py b/breezy/tests/blackbox/test_testament.py index b6eb384863..f30dc71b97 100644 --- a/breezy/tests/blackbox/test_testament.py +++ b/breezy/tests/blackbox/test_testament.py @@ -18,7 +18,6 @@ import re -from breezy.sixish import PY3 from breezy.tests.test_testament import ( osutils, REV_1_SHORT, diff --git a/breezy/tests/blackbox/test_version.py b/breezy/tests/blackbox/test_version.py index ced319b2f5..022fc75fb1 100644 --- a/breezy/tests/blackbox/test_version.py +++ b/breezy/tests/blackbox/test_version.py @@ -21,7 +21,6 @@ import breezy from breezy import osutils, trace -from breezy.sixish import PY3 from breezy.tests import ( probe_unicode_in_user_encoding, TestCase, @@ -96,10 +95,7 @@ def test_unicode_bzr_home(self): raise TestSkipped('Cannot find a unicode character that works in' ' encoding %s' % (osutils.get_user_encoding(),)) - if PY3: - self.overrideEnv('BRZ_HOME', uni_val) - else: - self.overrideEnv('BRZ_HOME', str_val) + self.overrideEnv('BRZ_HOME', uni_val) self.permit_source_tree_branch_repo() out = self.run_bzr_raw("version")[0] self.assertTrue(len(out) > 0) @@ -147,10 +143,7 @@ def test_unicode_brz_log(self): "Test string %r unrepresentable in user encoding %s" % ( uni_val, enc)) brz_log = os.path.join(self.test_base_dir, uni_val) - if PY3: - self.overrideEnv("BRZ_LOG", brz_log) - else: - self.overrideEnv("BRZ_LOG", brz_log.encode(enc)) + self.overrideEnv("BRZ_LOG", brz_log) out, err = self.run_bzr_subprocess("version") uni_out = out.decode(enc) self.assertContainsRe(uni_out, u"(?m)^ Breezy log file: .*/\xa7$") diff --git a/breezy/tests/blackbox/test_version_info.py b/breezy/tests/blackbox/test_version_info.py index 50b3b57ebc..20b3ca0fd2 100644 --- a/breezy/tests/blackbox/test_version_info.py +++ b/breezy/tests/blackbox/test_version_info.py @@ -18,7 +18,6 @@ import os -from breezy.sixish import PY3 from breezy.tests import TestCaseWithTransport from breezy.version_info_formats import VersionInfoBuilder diff --git 
a/breezy/tests/blackbox/test_whoami.py b/breezy/tests/blackbox/test_whoami.py index 1b92bc8ca0..647b1bedd4 100644 --- a/breezy/tests/blackbox/test_whoami.py +++ b/breezy/tests/blackbox/test_whoami.py @@ -23,7 +23,6 @@ errors, tests, ) -from breezy.sixish import PY3 from ..test_bedding import override_whoami @@ -35,7 +34,7 @@ def assertWhoAmI(self, expected, *cmd_args, **kwargs): self.assertEqual('', err) lines = out.splitlines() self.assertLength(1, lines) - if PY3 and isinstance(expected, bytes): + if isinstance(expected, bytes): expected = expected.decode(kwargs.get('encoding', 'ascii')) self.assertEqual(expected, lines[0].rstrip()) diff --git a/breezy/tests/file_utils.py b/breezy/tests/file_utils.py index b5e7a17ab9..e065701017 100644 --- a/breezy/tests/file_utils.py +++ b/breezy/tests/file_utils.py @@ -14,9 +14,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from ..sixish import ( - BytesIO, - ) +from io import BytesIO class FakeReadFile(object): diff --git a/breezy/tests/fixtures.py b/breezy/tests/fixtures.py index f9660cc2c0..fd3c859e6e 100644 --- a/breezy/tests/fixtures.py +++ b/breezy/tests/fixtures.py @@ -37,7 +37,7 @@ def generate_unicode_names(): >>> n1 = next(gen) >>> n2 = next(gen) >>> type(n1) - <type 'unicode'> + <class 'str'> >>> n1 == n2 False >>> n1.encode('ascii', 'replace') == n1 diff --git a/breezy/tests/http_server.py b/breezy/tests/http_server.py index 275d6fb1d0..8836512189 100644 --- a/breezy/tests/http_server.py +++ b/breezy/tests/http_server.py @@ -15,22 +15,15 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import errno -try: - import http.client as http_client - import http.server as http_server -except ImportError: - import httplib as http_client - import SimpleHTTPServer as http_server +import http.client as http_client +import http.server as http_server import os import posixpath import random import re import socket import sys -try: - from urlparse import urlparse -except ImportError: - from urllib.parse import urlparse +from urllib.parse import urlparse from .. import ( osutils, @@ -340,8 +333,6 @@ def _translate_path(self, path): # abandon query parameters path = urlparse(path)[2] path = posixpath.normpath(urlutils.unquote(path)) - if sys.version_info[0] == 2: - path = path.decode('utf-8') words = path.split('/') path = self._cwd for num, word in enumerate(w for w in words if w): diff --git a/breezy/tests/http_utils.py b/breezy/tests/http_utils.py index ec3e006237..4f5d913742 100644 --- a/breezy/tests/http_utils.py +++ b/breezy/tests/http_utils.py @@ -15,17 +15,12 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import base64 +from io import BytesIO import re -try: - from urllib.request import ( - parse_http_list, - parse_keqv_list, - ) -except ImportError: # python < 3 - from urllib2 import ( - parse_http_list, - parse_keqv_list, - ) +from urllib.request import ( + parse_http_list, + parse_keqv_list, + ) from ..
import ( @@ -34,9 +29,6 @@ tests, transport, ) -from ..sixish import ( - BytesIO, - ) from ..bzr.smart import ( medium, ) diff --git a/breezy/tests/matchers.py b/breezy/tests/matchers.py index 78cdac4c15..3718f97b4d 100644 --- a/breezy/tests/matchers.py +++ b/breezy/tests/matchers.py @@ -45,9 +45,6 @@ from breezy.bzr.smart.request import request_handlers as smart_request_handlers from breezy.bzr.smart import vfs """) -from ..sixish import ( - text_type, - ) from ..tree import find_previous_path from testtools.matchers import Equals, Mismatch, Matcher @@ -157,7 +154,7 @@ def _strip_unreferenced_directories(entries): """ directories = [] for entry in entries: - if isinstance(entry, (str, text_type)): + if isinstance(entry, str): path = entry else: path = entry[0] @@ -177,7 +174,7 @@ def __str__(self): def match(self, tree): include_file_ids = self.entries and not isinstance( - self.entries[0], (str, text_type)) + self.entries[0], str) actual = list(self.get_tree_layout( tree, include_file_ids=include_file_ids)) if not tree.has_versioned_directories(): diff --git a/breezy/tests/per_branch/test_branch.py b/breezy/tests/per_branch/test_branch.py index 705c898f1d..3094793967 100644 --- a/breezy/tests/per_branch/test_branch.py +++ b/breezy/tests/per_branch/test_branch.py @@ -40,9 +40,6 @@ branch as _mod_bzrbranch, remote, ) -from breezy.sixish import ( - text_type, - ) from breezy.tests import ( per_branch, ) @@ -292,7 +289,7 @@ def test_nicks(self): branch = self.make_branch('bzr.dev') # An implicit nick name is set; what it is exactly depends on the # format. - self.assertIsInstance(branch.nick, text_type) + self.assertIsInstance(branch.nick, str) # Set the branch nick explicitly. branch.nick = u"Aaron's branch" # Because the nick has been set explicitly, the nick is now always diff --git a/breezy/tests/per_branch/test_check.py b/breezy/tests/per_branch/test_check.py index 2aa4be9204..6d21afde5b 100644 --- a/breezy/tests/per_branch/test_check.py +++ b/breezy/tests/per_branch/test_check.py @@ -16,15 +16,13 @@ """Tests for branch implementations - test check() functionality""" +from io import BytesIO from ... import ( errors, tests, ui, ) -from ...sixish import ( - BytesIO, - ) from . import TestCaseWithBranch diff --git a/breezy/tests/per_branch/test_push.py b/breezy/tests/per_branch/test_push.py index bcfd6dcc5d..d0e132af90 100644 --- a/breezy/tests/per_branch/test_push.py +++ b/breezy/tests/per_branch/test_push.py @@ -16,6 +16,7 @@ """Tests for branch.push behaviour.""" +from io import BytesIO import os from ... 
import ( @@ -33,9 +34,6 @@ from ...bzr import ( branch as bzrbranch, ) -from ...sixish import ( - BytesIO, - ) from ...bzr.smart import ( client, ) diff --git a/breezy/tests/per_foreign_vcs/test_branch.py b/breezy/tests/per_foreign_vcs/test_branch.py index 1c35c49265..278904c33d 100644 --- a/breezy/tests/per_foreign_vcs/test_branch.py +++ b/breezy/tests/per_foreign_vcs/test_branch.py @@ -29,9 +29,6 @@ from breezy.tests import ( TestCaseWithTransport, ) -from breezy.sixish import ( - text_type, - ) class ForeignBranchFactory(object): @@ -99,8 +96,8 @@ def test_attributes(self): def test__get_nick(self): """Make sure _get_nick is implemented and returns a string.""" branch = self.make_branch() - self.assertIsInstance(branch._get_nick(local=False), text_type) - self.assertIsInstance(branch._get_nick(local=True), text_type) + self.assertIsInstance(branch._get_nick(local=False), str) + self.assertIsInstance(branch._get_nick(local=True), str) def test_null_revid_revno(self): """null: should return revno 0.""" diff --git a/breezy/tests/per_interbranch/test_push.py b/breezy/tests/per_interbranch/test_push.py index e81b470646..e9d40216fc 100644 --- a/breezy/tests/per_interbranch/test_push.py +++ b/breezy/tests/per_interbranch/test_push.py @@ -16,6 +16,8 @@ """Tests for branch.push behaviour.""" +from io import BytesIO + from testtools.matchers import ( Equals, MatchesAny, @@ -37,9 +39,6 @@ from ...controldir import ControlDir from ...memorytree import MemoryTree from ...revision import NULL_REVISION -from ...sixish import ( - BytesIO, - ) from ...bzr.smart.repository import SmartServerRepositoryGetParentMap from . import ( TestCaseWithInterBranch, diff --git a/breezy/tests/per_repository/test_repository.py b/breezy/tests/per_repository/test_repository.py index 08f7b03dd5..0f3ff0780c 100644 --- a/breezy/tests/per_repository/test_repository.py +++ b/breezy/tests/per_repository/test_repository.py @@ -16,6 +16,7 @@ """Tests for repository implementations - tests a repository format.""" +from io import BytesIO import re from ... import ( @@ -42,11 +43,6 @@ from ...bzr import ( knitpack_repo, ) -from ...sixish import ( - BytesIO, - text_type, - unichr, - ) from .. import ( per_repository, test_server, @@ -421,7 +417,7 @@ def assertMessageRoundtrips(self, message): self.assertEqual(rev.message, message) # insist the class is unicode no matter what came in for # consistency. - self.assertIsInstance(rev.message, text_type) + self.assertIsInstance(rev.message, str) def test_commit_unicode_message(self): # a siple unicode message should be preserved @@ -429,7 +425,7 @@ def test_commit_unicode_message(self): def test_commit_unicode_control_characters(self): # a unicode message with control characters should roundtrip too. 
- unichars = [unichr(x) for x in range(256)] + unichars = [chr(x) for x in range(256)] # '\r' is not directly allowed anymore, as it used to be translated # into '\n' anyway unichars[ord('\r')] = u'\n' diff --git a/breezy/tests/per_repository_reference/test_get_record_stream.py b/breezy/tests/per_repository_reference/test_get_record_stream.py index 61b04f6bf4..66b40380ab 100644 --- a/breezy/tests/per_repository_reference/test_get_record_stream.py +++ b/breezy/tests/per_repository_reference/test_get_record_stream.py @@ -22,7 +22,6 @@ from breezy.bzr import ( knit, ) -from breezy.sixish import int2byte from breezy.tests.per_repository_reference import ( TestCaseWithExternalReferenceRepository, ) @@ -107,7 +106,7 @@ def make_complex_split(self): def test_unordered_fetch_simple_split(self): self.make_simple_split() - keys = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCDF')] + keys = [(b'f-id', bytes([r])) for r in bytearray(b'ABCDF')] self.stacked_repo.lock_read() self.addCleanup(self.stacked_repo.unlock) stream = self.stacked_repo.texts.get_record_stream( @@ -122,7 +121,7 @@ def test_unordered_fetch_simple_split(self): def test_unordered_fetch_complex_split(self): self.make_complex_split() - keys = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCDEG')] + keys = [(b'f-id', bytes([r])) for r in bytearray(b'ABCDEG')] self.stacked_repo.lock_read() self.addCleanup(self.stacked_repo.unlock) stream = self.stacked_repo.texts.get_record_stream( @@ -144,11 +143,11 @@ def test_ordered_no_closure(self): # or, because E can be returned before B: # # A C E B D G - keys = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCDEG')] - alt_1 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACBDEG')] - alt_2 = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCEDG')] - alt_3 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACBEDG')] - alt_4 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACEBDG')] + keys = [(b'f-id', bytes([r])) for r in bytearray(b'ABCDEG')] + alt_1 = [(b'f-id', bytes([r])) for r in bytearray(b'ACBDEG')] + alt_2 = [(b'f-id', bytes([r])) for r in bytearray(b'ABCEDG')] + alt_3 = [(b'f-id', bytes([r])) for r in bytearray(b'ACBEDG')] + alt_4 = [(b'f-id', bytes([r])) for r in bytearray(b'ACEBDG')] self.stacked_repo.lock_read() self.addCleanup(self.stacked_repo.unlock) stream = self.stacked_repo.texts.get_record_stream( @@ -168,8 +167,8 @@ def test_ordered_fulltext_simple(self): # Topological ordering allows B & C and D & E to be returned with # either one first, so the required ordering is: # [A (B C) D F] - keys = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCDF')] - alt_1 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACBDF')] + keys = [(b'f-id', bytes([r])) for r in bytearray(b'ABCDF')] + alt_1 = [(b'f-id', bytes([r])) for r in bytearray(b'ACBDF')] self.stacked_repo.lock_read() self.addCleanup(self.stacked_repo.unlock) stream = self.stacked_repo.texts.get_record_stream( @@ -190,11 +189,11 @@ def test_ordered_fulltext_complex(self): # or, because E can be returned before B: # # A C E B D G - keys = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCDEG')] - alt_1 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACBDEG')] - alt_2 = [(b'f-id', int2byte(r)) for r in bytearray(b'ABCEDG')] - alt_3 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACBEDG')] - alt_4 = [(b'f-id', int2byte(r)) for r in bytearray(b'ACEBDG')] + keys = [(b'f-id', bytes([r])) for r in bytearray(b'ABCDEG')] + alt_1 = [(b'f-id', bytes([r])) for r in bytearray(b'ACBDEG')] + alt_2 = [(b'f-id', bytes([r])) for r in bytearray(b'ABCEDG')] + alt_3 = 
[(b'f-id', bytes([r])) for r in bytearray(b'ACBEDG')] + alt_4 = [(b'f-id', bytes([r])) for r in bytearray(b'ACEBDG')] self.stacked_repo.lock_read() self.addCleanup(self.stacked_repo.unlock) stream = self.stacked_repo.texts.get_record_stream( diff --git a/breezy/tests/per_transport.py b/breezy/tests/per_transport.py index a3a4cae66f..8f90ecf71e 100644 --- a/breezy/tests/per_transport.py +++ b/breezy/tests/per_transport.py @@ -20,6 +20,7 @@ TransportTestProviderAdapter. """ +from io import BytesIO import os import stat import sys @@ -39,10 +40,6 @@ TransportNotPossible, ) from ..osutils import getcwd -from ..sixish import ( - BytesIO, - zip, - ) from ..bzr.smart import medium from . import ( TestSkipped, diff --git a/breezy/tests/per_tree/test_test_trees.py b/breezy/tests/per_tree/test_test_trees.py index 2ea570f350..c9d5c4e923 100644 --- a/breezy/tests/per_tree/test_test_trees.py +++ b/breezy/tests/per_tree/test_test_trees.py @@ -17,7 +17,6 @@ """Tests for the test trees used by the per_tree tests.""" from breezy import errors -from breezy.sixish import text_type from breezy.tests import per_tree from breezy.tests import ( TestNotApplicable, @@ -323,7 +322,7 @@ def test_tree_with_utf8(self): for expected, (path, ie) in zip(path_and_ids, path_entries): self.assertEqual(expected[0], path) # Paths should match - self.assertIsInstance(path, text_type) + self.assertIsInstance(path, str) self.assertEqual(expected[1], ie.file_id) self.assertIsInstance(ie.file_id, bytes) self.assertEqual(expected[2], ie.parent_id) @@ -393,7 +392,7 @@ def test_tree_with_merged_utf8(self): for (epath, efid, eparent, erev), (path, ie) in zip(path_and_ids, path_entries): self.assertEqual(epath, path) # Paths should match - self.assertIsInstance(path, text_type) + self.assertIsInstance(path, str) self.assertIsInstance(ie.file_id, bytes) if wt.supports_setting_file_ids(): self.assertEqual(efid, ie.file_id) diff --git a/breezy/tests/per_versionedfile.py b/breezy/tests/per_versionedfile.py index e83ad51c2a..363070ca9d 100644 --- a/breezy/tests/per_versionedfile.py +++ b/breezy/tests/per_versionedfile.py @@ -22,6 +22,7 @@ # considered typical and check that it can be detected/corrected. from gzip import GzipFile +from io import BytesIO import itertools from .. import ( @@ -45,10 +46,6 @@ make_file_factory, make_pack_factory, ) -from ..sixish import ( - BytesIO, - zip, - ) from . import ( TestCase, TestCaseWithMemoryTransport, diff --git a/breezy/tests/per_workingtree/test_eol_conversion.py b/breezy/tests/per_workingtree/test_eol_conversion.py index 23e98eab67..b44e17a89b 100644 --- a/breezy/tests/per_workingtree/test_eol_conversion.py +++ b/breezy/tests/per_workingtree/test_eol_conversion.py @@ -16,12 +16,10 @@ """Tests for eol conversion.""" +from io import BytesIO import sys from ... import rules, status -from ...sixish import ( - BytesIO, - ) from .. import TestSkipped from . import TestCaseWithWorkingTree from ...workingtree import WorkingTree diff --git a/breezy/tests/per_workingtree/test_parents.py b/breezy/tests/per_workingtree/test_parents.py index 33bdbff7a9..7e698e2e19 100644 --- a/breezy/tests/per_workingtree/test_parents.py +++ b/breezy/tests/per_workingtree/test_parents.py @@ -16,6 +16,7 @@ """Tests of the parent related functions of WorkingTrees.""" +from io import BytesIO import os from ... import ( @@ -32,9 +33,6 @@ InventoryRevisionTree, InventoryTree, ) -from ...sixish import ( - BytesIO, - ) from ...tests import TestNotApplicable from ..per_workingtree import TestCaseWithWorkingTree from .. 
import ( diff --git a/breezy/tests/per_workingtree/test_smart_add.py b/breezy/tests/per_workingtree/test_smart_add.py index 2c54d179a4..ed4f0408aa 100644 --- a/breezy/tests/per_workingtree/test_smart_add.py +++ b/breezy/tests/per_workingtree/test_smart_add.py @@ -16,6 +16,7 @@ """Test that we can use smart_add on all Tree implementations.""" +from io import StringIO import os import sys @@ -28,9 +29,6 @@ trace, workingtree, ) -from ...sixish import ( - StringIO, - ) from .. import ( features, per_workingtree, diff --git a/breezy/tests/per_workingtree/test_workingtree.py b/breezy/tests/per_workingtree/test_workingtree.py index e62ebee0e4..0dfe19058b 100644 --- a/breezy/tests/per_workingtree/test_workingtree.py +++ b/breezy/tests/per_workingtree/test_workingtree.py @@ -17,6 +17,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import errno +from io import StringIO import os from ... import ( @@ -41,9 +42,6 @@ from ...bzr.inventory import Inventory from ...mutabletree import MutableTree from ...osutils import pathjoin, getcwd, has_symlinks -from ...sixish import ( - StringIO, - ) from .. import ( features, TestSkipped, diff --git a/breezy/tests/script.py b/breezy/tests/script.py index d5ef9dea9a..b374a69f32 100644 --- a/breezy/tests/script.py +++ b/breezy/tests/script.py @@ -320,19 +320,10 @@ def _write_output(self, output, out_name, out_mode): def do_brz(self, test_case, input, args): encoding = osutils.get_user_encoding() - if sys.version_info[0] == 2: - stdout = ui_testing.BytesIOWithEncoding() - stderr = ui_testing.BytesIOWithEncoding() - stdout.encoding = stderr.encoding = encoding - - # FIXME: don't call into logging here - handler = trace.EncodedStreamHandler( - stderr, errors="replace") - else: - stdout = ui_testing.StringIOWithEncoding() - stderr = ui_testing.StringIOWithEncoding() - stdout.encoding = stderr.encoding = encoding - handler = logging.StreamHandler(stderr) + stdout = ui_testing.StringIOWithEncoding() + stderr = ui_testing.StringIOWithEncoding() + stdout.encoding = stderr.encoding = encoding + handler = logging.StreamHandler(stderr) handler.setLevel(logging.INFO) logger = logging.getLogger('') diff --git a/breezy/tests/stub_sftp.py b/breezy/tests/stub_sftp.py index b78f923de7..7d763859d1 100644 --- a/breezy/tests/stub_sftp.py +++ b/breezy/tests/stub_sftp.py @@ -22,10 +22,7 @@ import os import paramiko import socket -try: - import socketserver -except ImportError: - import SocketServer as socketserver +import socketserver import sys import time diff --git a/breezy/tests/test__btree_serializer.py b/breezy/tests/test__btree_serializer.py index be407bb6eb..9993591109 100644 --- a/breezy/tests/test__btree_serializer.py +++ b/breezy/tests/test__btree_serializer.py @@ -21,9 +21,6 @@ import bisect from .. 
import tests -from ..sixish import ( - int2byte, - ) from .test_btree_index import compiled_btreeparser_feature @@ -60,7 +57,7 @@ def assertFailUnhexlify(self, as_hex): self.assertIs(None, self.module._py_unhexlify(as_hex)) def test_to_hex(self): - raw_bytes = b''.join(map(int2byte, range(256))) + raw_bytes = bytes(range(256)) for i in range(0, 240, 20): self.assertHexlify(raw_bytes[i:i + 20]) self.assertHexlify(raw_bytes[240:] + raw_bytes[0:4]) @@ -70,7 +67,7 @@ def test_from_hex(self): self.assertUnhexlify(b'123456789abcdef0123456789abcdef012345678') self.assertUnhexlify(b'0123456789ABCDEF0123456789ABCDEF01234567') self.assertUnhexlify(b'123456789ABCDEF0123456789ABCDEF012345678') - hex_chars = binascii.hexlify(b''.join(map(int2byte, range(256)))) + hex_chars = binascii.hexlify(bytes(range(256))) for i in range(0, 480, 40): self.assertUnhexlify(hex_chars[i:i + 40]) self.assertUnhexlify(hex_chars[480:] + hex_chars[0:8]) diff --git a/breezy/tests/test__chk_map.py b/breezy/tests/test__chk_map.py index b8f09d2637..619db11910 100644 --- a/breezy/tests/test__chk_map.py +++ b/breezy/tests/test__chk_map.py @@ -22,7 +22,6 @@ from ..bzr import ( chk_map, ) -from ..sixish import int2byte from ..static_tuple import StaticTuple stuple = StaticTuple @@ -65,9 +64,9 @@ def test_255_does_not_include_newline(self): chars_used = set() for char_in in range(256): search_key = self.module._search_key_255( - stuple(int2byte(char_in),)) - chars_used.update(map(int2byte, bytearray(search_key))) - all_chars = {int2byte(x) for x in range(256)} + stuple(bytes([char_in]),)) + chars_used.update([bytes([x]) for x in search_key]) + all_chars = {bytes([x]) for x in range(256)} unused_chars = all_chars.symmetric_difference(chars_used) self.assertEqual({b'\n'}, unused_chars) diff --git a/breezy/tests/test__groupcompress.py b/breezy/tests/test__groupcompress.py index b7f594e9b6..2e998038dc 100644 --- a/breezy/tests/test__groupcompress.py +++ b/breezy/tests/test__groupcompress.py @@ -27,9 +27,6 @@ from .scenarios import ( load_tests_apply_scenarios, ) -from ..sixish import ( - indexbytes, - ) from . 
import ( features, ) @@ -470,7 +467,7 @@ def assertEncode(self, expected, offset, length): self.assertEqual(expected, data) def assertDecode(self, exp_offset, exp_length, exp_newpos, data, pos): - cmd = indexbytes(data, pos) + cmd = data[pos] pos += 1 out = _groupcompress_py.decode_copy_instruction(data, cmd, pos) self.assertEqual((exp_offset, exp_length, exp_newpos), out) diff --git a/breezy/tests/test__rio.py b/breezy/tests/test__rio.py index 5c12b88311..384bf36fac 100644 --- a/breezy/tests/test__rio.py +++ b/breezy/tests/test__rio.py @@ -22,9 +22,6 @@ rio, tests, ) -from ..sixish import ( - text_type, - ) def load_tests(loader, standard_tests, pattern): @@ -56,11 +53,8 @@ def test_empty(self): self.assertFalse(self.module._valid_tag("")) def test_unicode(self): - if text_type is str: - # When str is a unicode type, it is valid for a tag - self.assertTrue(self.module._valid_tag(u"foo")) - else: - self.assertRaises(TypeError, self.module._valid_tag, u"foo") + # When str is a unicode type, it is valid for a tag + self.assertTrue(self.module._valid_tag(u"foo")) def test_non_ascii_char(self): self.assertFalse(self.module._valid_tag("\xb5")) @@ -76,7 +70,7 @@ def assertReadStanza(self, result, line_iter): if s is not None: for tag, value in s.iter_pairs(): self.assertIsInstance(tag, str) - self.assertIsInstance(value, text_type) + self.assertIsInstance(value, str) def assertReadStanzaRaises(self, exception, line_iter): self.assertRaises(exception, self.module._read_stanza_utf8, line_iter) @@ -132,7 +126,7 @@ def assertReadStanza(self, result, line_iter): if s is not None: for tag, value in s.iter_pairs(): self.assertIsInstance(tag, str) - self.assertIsInstance(value, text_type) + self.assertIsInstance(value, str) def assertReadStanzaRaises(self, exception, line_iter): self.assertRaises(exception, self.module._read_stanza_unicode, diff --git a/breezy/tests/test__static_tuple.py b/breezy/tests/test__static_tuple.py index abb9722eee..89e1926287 100644 --- a/breezy/tests/test__static_tuple.py +++ b/breezy/tests/test__static_tuple.py @@ -30,10 +30,6 @@ static_tuple, tests, ) -from breezy.sixish import ( - PY3, - text_type, - ) from breezy.tests import ( features, ) @@ -221,16 +217,6 @@ class subint(int): # But not a subclass, because subint could introduce refcycles self.assertRaises(TypeError, self.module.StaticTuple, subint(2)) - def test_holds_long(self): - if PY3: - self.skipTest("No long type on Python 3") - k1 = self.module.StaticTuple(2**65) - - class sublong(long): - pass - # But not a subclass - self.assertRaises(TypeError, self.module.StaticTuple, sublong(1)) - def test_holds_float(self): k1 = self.module.StaticTuple(1.2) @@ -248,7 +234,7 @@ class substr(bytes): def test_holds_unicode(self): k1 = self.module.StaticTuple(u'\xb5') - class subunicode(text_type): + class subunicode(str): pass self.assertRaises(TypeError, self.module.StaticTuple, subunicode(u'\xb5')) @@ -295,7 +281,7 @@ def test_compare_similar_obj(self): def check_strict_compare(self, k1, k2, mismatched_types): """True if on Python 3 and stricter comparison semantics are used.""" - if PY3 and mismatched_types: + if mismatched_types: for op in ("ge", "gt", "le", "lt"): self.assertRaises(TypeError, getattr(operator, op), k1, k2) return True diff --git a/breezy/tests/test_annotate.py b/breezy/tests/test_annotate.py index 5eb8d9f189..2b2fe13565 100644 --- a/breezy/tests/test_annotate.py +++ b/breezy/tests/test_annotate.py @@ -17,15 +17,12 @@ """Whitebox tests for annotate functionality.""" import codecs -from io import BytesIO 
+from io import BytesIO, StringIO from .. import ( annotate, tests, ) -from ..sixish import ( - StringIO, - ) from .ui_testing import StringIOWithEncoding diff --git a/breezy/tests/test_bad_files.py b/breezy/tests/test_bad_files.py index d8f330b1d7..f69b8f9313 100644 --- a/breezy/tests/test_bad_files.py +++ b/breezy/tests/test_bad_files.py @@ -17,14 +17,12 @@ """Tests being able to ignore bad filetypes.""" +from io import StringIO import os from .. import ( errors, ) -from ..sixish import ( - StringIO, - ) from ..status import show_tree_status from . import TestCaseWithTransport from .features import ( diff --git a/breezy/tests/test_bedding.py b/breezy/tests/test_bedding.py index 6039c4c392..86df52f965 100644 --- a/breezy/tests/test_bedding.py +++ b/breezy/tests/test_bedding.py @@ -20,9 +20,6 @@ import os import sys -from ..sixish import ( - text_type, - ) from .. import ( bedding, osutils, @@ -56,7 +53,7 @@ def test_config_dir(self): self.assertEqual(bedding.config_dir(), self.brz_home) def test_config_dir_is_unicode(self): - self.assertIsInstance(bedding.config_dir(), text_type) + self.assertIsInstance(bedding.config_dir(), str) def test_config_path(self): self.assertEqual(bedding.config_path(), @@ -84,7 +81,7 @@ def test_config_dir(self): self.assertEqual(bedding.config_dir(), self.bzr_home) def test_config_dir_is_unicode(self): - self.assertIsInstance(bedding.config_dir(), text_type) + self.assertIsInstance(bedding.config_dir(), str) def test_config_path(self): self.assertEqual(bedding.config_path(), diff --git a/breezy/tests/test_bisect.py b/breezy/tests/test_bisect.py index 6b31d233af..6fe33fffe5 100644 --- a/breezy/tests/test_bisect.py +++ b/breezy/tests/test_bisect.py @@ -18,7 +18,7 @@ from __future__ import absolute_import -from ..sixish import StringIO +from io import StringIO import os import shutil diff --git a/breezy/tests/test_branch.py b/breezy/tests/test_branch.py index 0aeaf60be1..1a6559806f 100644 --- a/breezy/tests/test_branch.py +++ b/breezy/tests/test_branch.py @@ -22,6 +22,8 @@ also see this file. """ +from io import StringIO + from .. import ( bedding, branch as _mod_branch, @@ -40,9 +42,6 @@ BzrBranch5, BzrBranchFormat5, ) -from ..sixish import ( - StringIO, - ) class TestErrors(tests.TestCase): diff --git a/breezy/tests/test_cache_utf8.py b/breezy/tests/test_cache_utf8.py index 18b228e80f..22d6abf16c 100644 --- a/breezy/tests/test_cache_utf8.py +++ b/breezy/tests/test_cache_utf8.py @@ -19,7 +19,6 @@ from .. import ( cache_utf8, ) -from ..sixish import text_type from . import TestCase @@ -109,7 +108,7 @@ def test_cached_ascii(self): # objects. uni_x = cache_utf8.decode(x) self.assertEqual(u'simple text', uni_x) - self.assertIsInstance(uni_x, text_type) + self.assertIsInstance(uni_x, str) utf8_x = cache_utf8.encode(uni_x) self.assertIs(utf8_x, x) diff --git a/breezy/tests/test_cleanup.py b/breezy/tests/test_cleanup.py deleted file mode 100644 index 8b904e8ce2..0000000000 --- a/breezy/tests/test_cleanup.py +++ /dev/null @@ -1,400 +0,0 @@ -# Copyright (C) 2009, 2010 Canonical Ltd -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -import re - -from ..cleanup import ( - ExitStack, - ) -from ..sixish import PY3 -from .. import ( - tests, - ) - -from contextlib import contextmanager - - -check_exception_chaining = PY3 - - -# Imported from contextlib2's test_contextlib2.py -# Copyright: Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, -# 2009, 2010, 2011 Python Software Foundation -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# . -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# . -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011 Python Software Foundation; All Rights Reserved" are retained in Python -# alone or in any derivative version prepared by Licensee. -# . -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# . -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# . -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# . -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# . -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# . -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. 
- - -class TestExitStack(tests.TestCase): - - def test_no_resources(self): - with ExitStack(): - pass - - def test_callback(self): - expected = [ - ((), {}), - ((1,), {}), - ((1, 2), {}), - ((), dict(example=1)), - ((1,), dict(example=1)), - ((1, 2), dict(example=1)), - ] - result = [] - def _exit(*args, **kwds): - """Test metadata propagation""" - result.append((args, kwds)) - with ExitStack() as stack: - for args, kwds in reversed(expected): - if args and kwds: - f = stack.callback(_exit, *args, **kwds) - elif args: - f = stack.callback(_exit, *args) - elif kwds: - f = stack.callback(_exit, **kwds) - else: - f = stack.callback(_exit) - self.assertIs(f, _exit) - self.assertEqual(result, expected) - - def test_push(self): - exc_raised = ZeroDivisionError - def _expect_exc(exc_type, exc, exc_tb): - self.assertIs(exc_type, exc_raised) - def _suppress_exc(*exc_details): - return True - def _expect_ok(exc_type, exc, exc_tb): - self.assertIsNone(exc_type) - self.assertIsNone(exc) - self.assertIsNone(exc_tb) - class ExitCM(object): - def __init__(self, check_exc): - self.check_exc = check_exc - def __enter__(self): - self.fail("Should not be called!") - def __exit__(self, *exc_details): - self.check_exc(*exc_details) - with ExitStack() as stack: - stack.push(_expect_ok) - cm = ExitCM(_expect_ok) - stack.push(cm) - stack.push(_suppress_exc) - cm = ExitCM(_expect_exc) - stack.push(cm) - stack.push(_expect_exc) - stack.push(_expect_exc) - 1 / 0 - - def test_enter_context(self): - class TestCM(object): - def __enter__(self): - result.append(1) - def __exit__(self, *exc_details): - result.append(3) - - result = [] - cm = TestCM() - with ExitStack() as stack: - @stack.callback # Registered first => cleaned up last - def _exit(): - result.append(4) - self.assertIsNotNone(_exit) - stack.enter_context(cm) - result.append(2) - self.assertEqual(result, [1, 2, 3, 4]) - - def test_close(self): - result = [] - with ExitStack() as stack: - @stack.callback - def _exit(): - result.append(1) - self.assertIsNotNone(_exit) - stack.close() - result.append(2) - self.assertEqual(result, [1, 2]) - - def test_pop_all(self): - result = [] - with ExitStack() as stack: - @stack.callback - def _exit(): - result.append(3) - self.assertIsNotNone(_exit) - new_stack = stack.pop_all() - result.append(1) - result.append(2) - new_stack.close() - self.assertEqual(result, [1, 2, 3]) - - def test_exit_raise(self): - def _raise(): - with ExitStack() as stack: - stack.push(lambda *exc: False) - 1 / 0 - self.assertRaises(ZeroDivisionError, _raise) - - def test_exit_suppress(self): - with ExitStack() as stack: - stack.push(lambda *exc: True) - 1 / 0 - - def test_exit_exception_chaining_reference(self): - # Sanity check to make sure that ExitStack chaining matches - # actual nested with statements - class RaiseExc: - def __init__(self, exc): - self.exc = exc - def __enter__(self): - return self - def __exit__(self, *exc_details): - raise self.exc - - class RaiseExcWithContext: - def __init__(self, outer, inner): - self.outer = outer - self.inner = inner - def __enter__(self): - return self - def __exit__(self, *exc_details): - try: - raise self.inner - except: - raise self.outer - - class SuppressExc: - def __enter__(self): - return self - def __exit__(self, *exc_details): - self.__class__.saved_details = exc_details - return True - - try: - with RaiseExc(IndexError): - with RaiseExcWithContext(KeyError, AttributeError): - with SuppressExc(): - with RaiseExc(ValueError): - 1 / 0 - except IndexError as exc: - if check_exception_chaining: - 
self.assertIsInstance(exc.__context__, KeyError) - self.assertIsInstance(exc.__context__.__context__, AttributeError) - # Inner exceptions were suppressed - self.assertIsNone(exc.__context__.__context__.__context__) - else: - self.fail("Expected IndexError, but no exception was raised") - # Check the inner exceptions - inner_exc = SuppressExc.saved_details[1] - self.assertIsInstance(inner_exc, ValueError) - if check_exception_chaining: - self.assertIsInstance(inner_exc.__context__, ZeroDivisionError) - - def test_exit_exception_chaining(self): - # Ensure exception chaining matches the reference behaviour - def raise_exc(exc): - raise exc - - saved_details = [None] - def suppress_exc(*exc_details): - saved_details[0] = exc_details - return True - - try: - with ExitStack() as stack: - stack.callback(raise_exc, IndexError) - stack.callback(raise_exc, KeyError) - stack.callback(raise_exc, AttributeError) - stack.push(suppress_exc) - stack.callback(raise_exc, ValueError) - 1 / 0 - except IndexError as exc: - if check_exception_chaining: - self.assertIsInstance(exc.__context__, KeyError) - self.assertIsInstance(exc.__context__.__context__, AttributeError) - # Inner exceptions were suppressed - self.assertIsNone(exc.__context__.__context__.__context__) - else: - self.fail("Expected IndexError, but no exception was raised") - # Check the inner exceptions - inner_exc = saved_details[0][1] - self.assertIsInstance(inner_exc, ValueError) - if check_exception_chaining: - self.assertIsInstance(inner_exc.__context__, ZeroDivisionError) - - def test_exit_exception_non_suppressing(self): - # http://bugs.python.org/issue19092 - def raise_exc(exc): - raise exc - - def suppress_exc(*exc_details): - return True - - try: - with ExitStack() as stack: - stack.callback(lambda: None) - stack.callback(raise_exc, IndexError) - except Exception as exc: - self.assertIsInstance(exc, IndexError) - else: - self.fail("Expected IndexError, but no exception was raised") - - try: - with ExitStack() as stack: - stack.callback(raise_exc, KeyError) - stack.push(suppress_exc) - stack.callback(raise_exc, IndexError) - except Exception as exc: - self.assertIsInstance(exc, KeyError) - else: - self.fail("Expected KeyError, but no exception was raised") - - def test_exit_exception_with_correct_context(self): - # http://bugs.python.org/issue20317 - @contextmanager - def gets_the_context_right(exc): - try: - yield - finally: - raise exc - - exc1 = Exception(1) - exc2 = Exception(2) - exc3 = Exception(3) - exc4 = Exception(4) - - # The contextmanager already fixes the context, so prior to the - # fix, ExitStack would try to fix it *again* and get into an - # infinite self-referential loop - try: - with ExitStack() as stack: - stack.enter_context(gets_the_context_right(exc4)) - stack.enter_context(gets_the_context_right(exc3)) - stack.enter_context(gets_the_context_right(exc2)) - raise exc1 - except Exception as exc: - self.assertIs(exc, exc4) - if check_exception_chaining: - self.assertIs(exc.__context__, exc3) - self.assertIs(exc.__context__.__context__, exc2) - self.assertIs(exc.__context__.__context__.__context__, exc1) - self.assertIsNone( - exc.__context__.__context__.__context__.__context__) - - def test_exit_exception_with_existing_context(self): - # Addresses a lack of test coverage discovered after checking in a - # fix for issue 20317 that still contained debugging code. 
- def raise_nested(inner_exc, outer_exc): - try: - raise inner_exc - finally: - raise outer_exc - exc1 = Exception(1) - exc2 = Exception(2) - exc3 = Exception(3) - exc4 = Exception(4) - exc5 = Exception(5) - try: - with ExitStack() as stack: - stack.callback(raise_nested, exc4, exc5) - stack.callback(raise_nested, exc2, exc3) - raise exc1 - except Exception as exc: - self.assertIs(exc, exc5) - if check_exception_chaining: - self.assertIs(exc.__context__, exc4) - self.assertIs(exc.__context__.__context__, exc3) - self.assertIs(exc.__context__.__context__.__context__, exc2) - self.assertIs( - exc.__context__.__context__.__context__.__context__, exc1) - self.assertIsNone( - exc.__context__.__context__.__context__.__context__.__context__) - - def test_body_exception_suppress(self): - def suppress_exc(*exc_details): - return True - try: - with ExitStack() as stack: - stack.push(suppress_exc) - 1 / 0 - except IndexError as exc: - self.fail("Expected no exception, got IndexError") - - def test_exit_exception_chaining_suppress(self): - with ExitStack() as stack: - stack.push(lambda *exc: True) - stack.push(lambda *exc: 1 / 0) - stack.push(lambda *exc: {}[1]) - - def test_excessive_nesting(self): - # The original implementation would die with RecursionError here - with ExitStack() as stack: - for i in range(10000): - stack.callback(int) - - def test_instance_bypass(self): - class Example(object): - pass - cm = Example() - cm.__exit__ = object() - stack = ExitStack() - self.assertRaises(AttributeError, stack.enter_context, cm) - stack.push(cm) - # self.assertIs(stack._exit_callbacks[-1], cm) diff --git a/breezy/tests/test_config.py b/breezy/tests/test_config.py index 3ed612ee45..05b65f8bad 100644 --- a/breezy/tests/test_config.py +++ b/breezy/tests/test_config.py @@ -17,6 +17,7 @@ """Tests for finding and reading the bzr config file[s].""" from textwrap import dedent +from io import BytesIO import os import sys import threading @@ -43,10 +44,6 @@ from ..bzr import ( remote, ) -from ..sixish import ( - BytesIO, - text_type, - ) from ..transport import remote as transport_remote from . import ( features, diff --git a/breezy/tests/test_conflicts.py b/breezy/tests/test_conflicts.py index 12abbed52d..9db46b51d3 100644 --- a/breezy/tests/test_conflicts.py +++ b/breezy/tests/test_conflicts.py @@ -24,7 +24,6 @@ osutils, tests, ) -from ..sixish import text_type from . 
import ( script, scenarios, @@ -131,7 +130,7 @@ class TestPerConflict(tests.TestCase): scenarios = scenarios.multiply_scenarios(vary_by_conflicts()) def test_stringification(self): - text = text_type(self.conflict) + text = str(self.conflict) self.assertContainsString(text, self.conflict.path) self.assertContainsString(text.lower(), "conflict") self.assertContainsString(repr(self.conflict), @@ -142,14 +141,14 @@ def test_stanza_roundtrip(self): o = conflicts.Conflict.factory(**p.as_stanza().as_dict()) self.assertEqual(o, p) - self.assertIsInstance(o.path, text_type) + self.assertIsInstance(o.path, str) if o.file_id is not None: self.assertIsInstance(o.file_id, bytes) conflict_path = getattr(o, 'conflict_path', None) if conflict_path is not None: - self.assertIsInstance(conflict_path, text_type) + self.assertIsInstance(conflict_path, str) conflict_file_id = getattr(o, 'conflict_file_id', None) if conflict_file_id is not None: @@ -176,7 +175,7 @@ def test_stanzas_roundtrip(self): def test_stringification(self): for text, o in zip(example_conflicts.to_strings(), example_conflicts): - self.assertEqual(text, text_type(o)) + self.assertEqual(text, str(o)) # FIXME: The shell-like tests should be converted to real whitebox tests... or diff --git a/breezy/tests/test_crash.py b/breezy/tests/test_crash.py index 84a948e56b..2d055e4d44 100644 --- a/breezy/tests/test_crash.py +++ b/breezy/tests/test_crash.py @@ -16,6 +16,7 @@ import doctest +from io import StringIO import os import sys @@ -27,9 +28,6 @@ plugin, tests, ) -from ..sixish import ( - StringIO, - ) from . import features diff --git a/breezy/tests/test_delta.py b/breezy/tests/test_delta.py index 09e01dfe98..5ea3493ef7 100644 --- a/breezy/tests/test_delta.py +++ b/breezy/tests/test_delta.py @@ -14,6 +14,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import StringIO import os from .. import ( @@ -22,10 +23,6 @@ tests, ) from ..tree import TreeChange -from ..sixish import ( - PY3, - StringIO, - ) class InstrumentedReporter(object): diff --git a/breezy/tests/test_diff.py b/breezy/tests/test_diff.py index b0f2edd347..dd30f04e54 100644 --- a/breezy/tests/test_diff.py +++ b/breezy/tests/test_diff.py @@ -14,6 +14,8 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import contextlib +from io import BytesIO import os import re import subprocess @@ -21,7 +23,6 @@ import tempfile from .. import ( - cleanup, diff, errors, osutils, @@ -30,10 +31,6 @@ revisiontree, tests, ) -from ..sixish import ( - BytesIO, - unichr, - ) from ..tests import ( features, EncodingAdapter, @@ -1031,7 +1028,7 @@ class TestGetTreesAndBranchesToDiffLocked(tests.TestCaseWithTransport): def call_gtabtd(self, path_list, revision_specs, old_url, new_url): """Call get_trees_and_branches_to_diff_locked.""" - exit_stack = cleanup.ExitStack() + exit_stack = contextlib.ExitStack() self.addCleanup(exit_stack.close) return diff.get_trees_and_branches_to_diff_locked( path_list, revision_specs, old_url, new_url, exit_stack) diff --git a/breezy/tests/test_email_message.py b/breezy/tests/test_email_message.py index fab530d2c2..96f1657184 100644 --- a/breezy/tests/test_email_message.py +++ b/breezy/tests/test_email_message.py @@ -20,7 +20,6 @@ from .. 
import __version__ as _breezy_version from ..email_message import EmailMessage from ..errors import BzrBadParameterNotUnicode -from ..sixish import PY3, text_type from ..smtp_connection import SMTPConnection from .. import tests @@ -68,17 +67,9 @@ ''' % {'version': _breezy_version, 'boundary': BOUNDARY} -def final_newline_or_not(msg): - if sys.version_info >= (2, 7, 6): - # Some internals of python's email module changed in an (minor) - # incompatible way: a final newline is appended in 2.7.6... - msg += '\n' - return msg - - def simple_multipart_message(): - msg = _MULTIPART_HEAD + '--%s--' % BOUNDARY - return final_newline_or_not(msg) + msg = _MULTIPART_HEAD + '--%s--\n' % BOUNDARY + return msg def complex_multipart_message(typ): @@ -95,8 +86,8 @@ def complex_multipart_message(typ): d e ---%(boundary)s--''' % {'boundary': BOUNDARY} - msg = final_newline_or_not(msg) +--%(boundary)s-- +''' % {'boundary': BOUNDARY} return msg % (typ,) @@ -170,14 +161,8 @@ def test_setting_headers(self): def test_address_to_encoded_header(self): def decode(s): """Convert a RFC2047-encoded string to a unicode string.""" - if PY3: - return ''.join([chunk.decode(encoding or 'ascii') - for chunk, encoding in decode_header(s)]) - else: - # Cope with python2 stripping whitespace. - # https://bugs.python.org/issue1467619 - return ' '.join([chunk.decode(encoding or 'ascii') - for chunk, encoding in decode_header(s)]) + return ''.join([chunk.decode(encoding or 'ascii') + for chunk, encoding in decode_header(s)]) address = 'jrandom@example.com' encoded = EmailMessage.address_to_encoded_header(address) diff --git a/breezy/tests/test_errors.py b/breezy/tests/test_errors.py index ecf4ef28ea..d10a33c1ec 100644 --- a/breezy/tests/test_errors.py +++ b/breezy/tests/test_errors.py @@ -28,10 +28,6 @@ tests, urlutils, ) -from ..sixish import ( - PY3, - text_type, - ) class TestErrors(tests.TestCase): @@ -49,10 +45,7 @@ def test_no_arg_named_message(self): init = getattr(c, '__init__', None) fmt = getattr(c, '_fmt', None) if init: - if PY3: - args = inspect.getfullargspec(init)[0] - else: - args = inspect.getargspec(init)[0] + args = inspect.getfullargspec(init)[0] self.assertFalse('message' in args, ('Argument name "message" not allowed for ' '"errors.%s.__init__"' % c.__name__)) @@ -365,7 +358,7 @@ def test_duplicate_record_name_error(self): e = errors.DuplicateRecordNameError(b"n\xc3\xa5me") self.assertEqual( u"Container has multiple records with the same name: n\xe5me", - text_type(e)) + str(e)) def test_check_error(self): e = errors.BzrCheckError('example check failure') @@ -409,7 +402,7 @@ def test_tip_change_rejected(self): err = errors.TipChangeRejected(u'Unicode message\N{INTERROBANG}') self.assertEqual( u'Tip change rejected: Unicode message\N{INTERROBANG}', - text_type(err)) + str(err)) def test_error_from_smart_server(self): error_tuple = ('error', 'tuple') @@ -508,10 +501,7 @@ def test_always_str(self): # Unicode error, because it tries to call str() on the string # returned from e.__str__(), and it has non ascii characters s = str(e) - if PY3: - self.assertEqual('Pass through \xb5 and bar', s) - else: - self.assertEqual('Pass through \xc2\xb5 and bar', s) + self.assertEqual('Pass through \xb5 and bar', s) def test_missing_format_string(self): e = ErrorWithNoFormat(param='randomvalue') diff --git a/breezy/tests/test_export.py b/breezy/tests/test_export.py index 87559f2419..e864ce2ce2 100644 --- a/breezy/tests/test_export.py +++ b/breezy/tests/test_export.py @@ -17,6 +17,7 @@ """Tests for breezy.export.""" import 
gzip +from io import BytesIO import os import tarfile import time @@ -29,9 +30,6 @@ ) from ..export import get_root_name from ..archive.tar import tarball_generator -from ..sixish import ( - BytesIO, - ) from . import features diff --git a/breezy/tests/test_fifo_cache.py b/breezy/tests/test_fifo_cache.py index 01678a3f52..7d886fcd75 100644 --- a/breezy/tests/test_fifo_cache.py +++ b/breezy/tests/test_fifo_cache.py @@ -20,11 +20,6 @@ fifo_cache, tests, ) -from ..sixish import ( - viewitems, - viewkeys, - viewvalues, - ) class TestFIFOCache(tests.TestCase): @@ -39,9 +34,9 @@ def test_add_is_present(self): self.assertEqual(2, c.get(1)) self.assertEqual(2, c.get(1, None)) self.assertEqual([1], list(c)) - self.assertEqual({1}, viewkeys(c)) - self.assertEqual([(1, 2)], sorted(viewitems(c))) - self.assertEqual([2], sorted(viewvalues(c))) + self.assertEqual({1}, c.keys()) + self.assertEqual([(1, 2)], sorted(c.items())) + self.assertEqual([2], sorted(c.values())) self.assertEqual({1: 2}, c) def test_cache_size(self): @@ -58,9 +53,9 @@ def test_missing(self): self.assertEqual(None, c.get(1)) self.assertEqual(None, c.get(1, None)) self.assertEqual([], list(c)) - self.assertEqual(set(), viewkeys(c)) - self.assertEqual([], list(viewitems(c))) - self.assertEqual([], list(viewvalues(c))) + self.assertEqual(set(), c.keys()) + self.assertEqual([], list(c.items())) + self.assertEqual([], list(c.values())) self.assertEqual({}, c) def test_add_maintains_fifo(self): @@ -69,16 +64,16 @@ def test_add_maintains_fifo(self): c[2] = 3 c[3] = 4 c[4] = 5 - self.assertEqual({1, 2, 3, 4}, viewkeys(c)) + self.assertEqual({1, 2, 3, 4}, c.keys()) c[5] = 6 # This should pop out the oldest entry - self.assertEqual({2, 3, 4, 5}, viewkeys(c)) + self.assertEqual({2, 3, 4, 5}, c.keys()) # Replacing an item doesn't change the stored keys c[2] = 7 - self.assertEqual({2, 3, 4, 5}, viewkeys(c)) + self.assertEqual({2, 3, 4, 5}, c.keys()) # But it does change the position in the FIFO c[6] = 7 - self.assertEqual({2, 4, 5, 6}, viewkeys(c)) + self.assertEqual({2, 4, 5, 6}, c.keys()) self.assertEqual([4, 5, 2, 6], list(c._queue)) def test_default_after_cleanup_count(self): @@ -90,10 +85,10 @@ def test_default_after_cleanup_count(self): c[4] = 5 c[5] = 6 # So far, everything fits - self.assertEqual({1, 2, 3, 4, 5}, viewkeys(c)) + self.assertEqual({1, 2, 3, 4, 5}, c.keys()) c[6] = 7 # But adding one more should shrink down to after_cleanup_count - self.assertEqual({3, 4, 5, 6}, viewkeys(c)) + self.assertEqual({3, 4, 5, 6}, c.keys()) def test_clear(self): c = fifo_cache.FIFOCache(5) @@ -103,9 +98,9 @@ def test_clear(self): c[4] = 5 c[5] = 6 c.cleanup() - self.assertEqual({2, 3, 4, 5}, viewkeys(c)) + self.assertEqual({2, 3, 4, 5}, c.keys()) c.clear() - self.assertEqual(set(), viewkeys(c)) + self.assertEqual(set(), c.keys()) self.assertEqual([], list(c._queue)) self.assertEqual({}, c) @@ -250,9 +245,9 @@ def test_add_is_present(self): self.assertEqual('2', c.get(1)) self.assertEqual('2', c.get(1, None)) self.assertEqual([1], list(c)) - self.assertEqual({1}, viewkeys(c)) - self.assertEqual([(1, '2')], sorted(viewitems(c))) - self.assertEqual(['2'], sorted(viewvalues(c))) + self.assertEqual({1}, c.keys()) + self.assertEqual([(1, '2')], sorted(c.items())) + self.assertEqual(['2'], sorted(c.values())) self.assertEqual({1: '2'}, c) self.assertEqual(1024 * 1024, c.cache_size()) @@ -264,9 +259,9 @@ def test_missing(self): self.assertEqual(None, c.get(1)) self.assertEqual(None, c.get(1, None)) self.assertEqual([], list(c)) - self.assertEqual(set(), 
viewkeys(c)) - self.assertEqual([], list(viewitems(c))) - self.assertEqual([], list(viewvalues(c))) + self.assertEqual(set(), c.keys()) + self.assertEqual([], list(c.items())) + self.assertEqual([], list(c.values())) self.assertEqual({}, c) def test_add_maintains_fifo(self): diff --git a/breezy/tests/test_filters.py b/breezy/tests/test_filters.py index bf17641c15..77a14f6056 100644 --- a/breezy/tests/test_filters.py +++ b/breezy/tests/test_filters.py @@ -14,6 +14,8 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO + from .. import errors, filters from ..filters import ( ContentFilter, @@ -25,9 +27,6 @@ internal_size_sha_file_byname, ) from ..osutils import sha_string -from ..sixish import ( - BytesIO, - ) from . import TestCase, TestCaseInTempDir diff --git a/breezy/tests/test_generate_docs.py b/breezy/tests/test_generate_docs.py index 20ab0b4ead..75398dba4a 100644 --- a/breezy/tests/test_generate_docs.py +++ b/breezy/tests/test_generate_docs.py @@ -20,11 +20,9 @@ and produce non-empty output. """ +from io import StringIO import breezy.commands -from ..sixish import ( - StringIO, - ) from . import TestCase diff --git a/breezy/tests/test_gpg.py b/breezy/tests/test_gpg.py index b542b09e77..53f906c965 100644 --- a/breezy/tests/test_gpg.py +++ b/breezy/tests/test_gpg.py @@ -17,6 +17,8 @@ """Tests for signing and verifying blobs of data via gpg.""" +from io import BytesIO + # import system imports here import sys @@ -28,9 +30,6 @@ trace, ui, ) -from ..sixish import ( - BytesIO, - ) from . import ( TestCase, features, diff --git a/breezy/tests/test_grep.py b/breezy/tests/test_grep.py index 1fc914a871..a590027a35 100644 --- a/breezy/tests/test_grep.py +++ b/breezy/tests/test_grep.py @@ -21,7 +21,6 @@ import unicodedata as ud from .. import tests, osutils -from ..sixish import PY3 from .._termcolor import color_string, FG from ..tests.features import ( @@ -390,22 +389,16 @@ def test_ver_null_option(self): out, err = self.run_bzr( ['grep', '-r', 'last:1', '--null', 'line[1-3]']) - if not PY3: - out = out.decode('utf-8', 'ignore') nout = ud.normalize(u'NFC', out) self.assertEqual(nout, nref) self.assertEqual(len(out.splitlines()), 1) out, err = self.run_bzr(['grep', '-r', 'last:1', '-Z', 'line[1-3]']) - if not PY3: - out = out.decode('utf-8', 'ignore') nout = ud.normalize(u'NFC', out) self.assertEqual(nout, nref) self.assertEqual(len(out.splitlines()), 1) out, err = self.run_bzr(['grep', '-r', 'last:1', '--null', 'line']) - if not PY3: - out = out.decode('utf-8', 'ignore') nout = ud.normalize(u'NFC', out) self.assertEqual(nout, nref) self.assertEqual(len(out.splitlines()), 1) diff --git a/breezy/tests/test_http.py b/breezy/tests/test_http.py index 98b19f1744..ead7956f7a 100644 --- a/breezy/tests/test_http.py +++ b/breezy/tests/test_http.py @@ -23,12 +23,8 @@ # TODO: Should be renamed to breezy.transport.http.tests? # TODO: What about renaming to breezy.tests.transport.http ? -try: - from http.client import UnknownProtocol, parse_headers - from http.server import SimpleHTTPRequestHandler -except ImportError: # python < 3 - from httplib import UnknownProtocol - from SimpleHTTPServer import SimpleHTTPRequestHandler +from http.client import UnknownProtocol, parse_headers +from http.server import SimpleHTTPRequestHandler import io import socket import sys @@ -50,7 +46,6 @@ from ..bzr import ( remote as _mod_remote, ) -from ..sixish import PY3 from . 
import ( features, http_server, @@ -1949,19 +1944,10 @@ class PredefinedRequestHandler(http_server.TestingHTTPRequestHandler): def _handle_one_request(self): tcs = self.server.test_case_server requestline = self.rfile.readline() - if PY3: - headers = parse_headers(self.rfile) - bytes_read = len(headers.as_bytes()) - bytes_read += headers.as_bytes().count(b'\n') - bytes_read += len(requestline) - else: - headers = self.MessageClass(self.rfile, 0) - # We just read: the request, the headers, an empty line indicating the - # end of the headers. - bytes_read = len(requestline) - for line in headers.headers: - bytes_read += len(line) - bytes_read += len(b'\r\n') + headers = parse_headers(self.rfile) + bytes_read = len(headers.as_bytes()) + bytes_read += headers.as_bytes().count(b'\n') + bytes_read += len(requestline) if requestline.startswith(b'POST'): # The body should be a single line (or we don't know where it ends # and we don't want to issue a blocking read) diff --git a/breezy/tests/test_http_response.py b/breezy/tests/test_http_response.py index da7ac08e38..8d7f0b06d7 100644 --- a/breezy/tests/test_http_response.py +++ b/breezy/tests/test_http_response.py @@ -37,24 +37,16 @@ InvalidHttpResponse. """ -try: - import http.client as http_client -except ImportError: # python < 3 without future - import httplib as http_client +from io import BytesIO -try: - parse_headers = http_client.parse_headers -except AttributeError: # python 2 - parse_headers = http_client.HTTPMessage +import http.client as http_client + +parse_headers = http_client.parse_headers from .. import ( errors, tests, ) -from ..sixish import ( - BytesIO, - PY3, - ) from ..transport.http import ( response, HTTPConnection, @@ -726,10 +718,7 @@ def _build_HTTPMessage(self, raw_headers): # Get rid of the status line status_and_headers.readline() msg = parse_headers(status_and_headers) - if PY3: - return msg.get - else: - return msg.getheader + return msg.get def get_response(self, a_response): """Process a supplied response, and return the result.""" diff --git a/breezy/tests/test_https_urllib.py b/breezy/tests/test_https_urllib.py index 23755c370b..dd3efdd713 100644 --- a/breezy/tests/test_https_urllib.py +++ b/breezy/tests/test_https_urllib.py @@ -79,13 +79,6 @@ def test_from_string(self): class MatchHostnameTests(tests.TestCase): - def setUp(self): - super(MatchHostnameTests, self).setUp() - if sys.version_info < (2, 7, 9): - raise tests.TestSkipped( - 'python version too old to provide proper' - ' https hostname verification') - def test_no_certificate(self): self.assertRaises(ValueError, ssl.match_hostname, {}, "example.com") diff --git a/breezy/tests/test_i18n.py b/breezy/tests/test_i18n.py index 3a80f7d393..08b38e2692 100644 --- a/breezy/tests/test_i18n.py +++ b/breezy/tests/test_i18n.py @@ -24,7 +24,6 @@ errors, workingtree, ) -from ..sixish import PY3 class ZzzTranslations(object): @@ -63,28 +62,16 @@ def test_translation(self): t = trans.zzz('msg') self._check_exact(u'zz\xe5{{msg}}', t) - if PY3: - t = trans.gettext('msg') - self._check_exact(u'zz\xe5{{msg}}', t) - - t = trans.ngettext('msg1', 'msg2', 0) - self._check_exact(u'zz\xe5{{msg2}}', t) - t = trans.ngettext('msg1', 'msg2', 2) - self._check_exact(u'zz\xe5{{msg2}}', t) - - t = trans.ngettext('msg1', 'msg2', 1) - self._check_exact(u'zz\xe5{{msg1}}', t) - else: - t = trans.ugettext('msg') - self._check_exact(u'zz\xe5{{msg}}', t) + t = trans.gettext('msg') + self._check_exact(u'zz\xe5{{msg}}', t) - t = trans.ungettext('msg1', 'msg2', 0) - 
self._check_exact(u'zz\xe5{{msg2}}', t) - t = trans.ungettext('msg1', 'msg2', 2) - self._check_exact(u'zz\xe5{{msg2}}', t) + t = trans.ngettext('msg1', 'msg2', 0) + self._check_exact(u'zz\xe5{{msg2}}', t) + t = trans.ngettext('msg1', 'msg2', 2) + self._check_exact(u'zz\xe5{{msg2}}', t) - t = trans.ungettext('msg1', 'msg2', 1) - self._check_exact(u'zz\xe5{{msg1}}', t) + t = trans.ngettext('msg1', 'msg2', 1) + self._check_exact(u'zz\xe5{{msg1}}', t) class TestGetText(tests.TestCase): @@ -164,11 +151,7 @@ def test_error_message_translation(self): workingtree.WorkingTree.open('./foo') except errors.NotBranchError as e: err = str(e) - if PY3: - self.assertContainsRe(err, u"zz\xe5{{Not a branch: .*}}") - else: - self.assertContainsRe( - err, u"zz\xe5{{Not a branch: .*}}".encode('utf-8')) + self.assertContainsRe(err, u"zz\xe5{{Not a branch: .*}}") def test_topic_help_translation(self): """does topic help get translated?""" diff --git a/breezy/tests/test_ignores.py b/breezy/tests/test_ignores.py index dfbcdd31d6..914d1c1a80 100644 --- a/breezy/tests/test_ignores.py +++ b/breezy/tests/test_ignores.py @@ -16,15 +16,13 @@ """Tests for handling of ignore files""" +from io import BytesIO import os from .. import ( bedding, ignores, ) -from ..sixish import ( - BytesIO, - ) from . import ( TestCase, TestCaseInTempDir, diff --git a/breezy/tests/test_import_tariff.py b/breezy/tests/test_import_tariff.py index 3595d64568..cef6c42e7d 100644 --- a/breezy/tests/test_import_tariff.py +++ b/breezy/tests/test_import_tariff.py @@ -27,7 +27,6 @@ ) from ..controldir import ControlDir from ..bzr.smart import medium -from ..sixish import PY3 from ..transport import remote from . import ( @@ -104,12 +103,8 @@ def check_forbidden_modules(self, err, forbidden_imports): bad_modules = [] for module_name in forbidden_imports: - if PY3: - if err.find("\nimport '%s' " % module_name) != -1: - bad_modules.append(module_name) - else: - if err.find("\nimport %s " % module_name) != -1: - bad_modules.append(module_name) + if err.find("\nimport '%s' " % module_name) != -1: + bad_modules.append(module_name) if bad_modules: self.fail("command loaded forbidden modules %r" diff --git a/breezy/tests/test_index.py b/breezy/tests/test_index.py index 46223dc00b..e07c8b8d52 100644 --- a/breezy/tests/test_index.py +++ b/breezy/tests/test_index.py @@ -21,7 +21,6 @@ tests, transport, ) -from ..sixish import int2byte from ..bzr import ( index as _mod_index, ) @@ -309,7 +308,7 @@ def test_add_node_bad_key(self): builder = _mod_index.GraphIndexBuilder() for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '): self.assertRaises(_mod_index.BadIndexKey, builder.add_node, - (b'a%skey' % int2byte(bad_char), ), b'data') + (b'a%skey' % bytes([bad_char]), ), b'data') self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (), b'data') self.assertRaises(_mod_index.BadIndexKey, builder.add_node, @@ -324,7 +323,7 @@ def test_add_node_bad_key(self): builder = _mod_index.GraphIndexBuilder(key_elements=2) for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '): self.assertRaises(_mod_index.BadIndexKey, builder.add_node, - (b'prefix', b'a%skey' % int2byte(bad_char)), b'data') + (b'prefix', b'a%skey' % bytes([bad_char])), b'data') def test_add_node_bad_data(self): builder = _mod_index.GraphIndexBuilder() diff --git a/breezy/tests/test_inv.py b/breezy/tests/test_inv.py index a24d44997f..2f278ea2f6 100644 --- a/breezy/tests/test_inv.py +++ b/breezy/tests/test_inv.py @@ -23,7 +23,6 @@ tests, workingtree, ) -from ..sixish import text_type from ..bzr import ( chk_map, 
groupcompress, @@ -36,6 +35,7 @@ InventoryFile, InventoryDirectory, InventoryEntry, + InvalidEntryName, TreeReference, mutable_inventory_from_tree, ) @@ -696,7 +696,7 @@ def test_entries_for_empty_inventory(self): class TestInventoryEntry(TestCase): def test_file_invalid_entry_name(self): - self.assertRaises(errors.InvalidEntryName, inventory.InventoryFile, + self.assertRaises(InvalidEntryName, inventory.InventoryFile, b'123', 'a/hello.c', ROOT_ID) def test_file_backslash(self): @@ -1234,7 +1234,7 @@ def test_file_entry_to_bytes(self): b'file-rev-id\nabcdefgh\n100\nY', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) + self.assertIsInstance(ie2.name, str) self.assertEqual((b'filename', b'file-id', b'file-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1251,7 +1251,7 @@ def test_file2_entry_to_bytes(self): b'file-rev-id\n123456\n25\nN', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) + self.assertIsInstance(ie2.name, str) self.assertEqual((b'\xce\xa9name', b'file-id', b'file-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1263,7 +1263,7 @@ def test_dir_entry_to_bytes(self): self.assertEqual(b'dir: dir-id\nparent-id\ndirname\ndir-rev-id', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) + self.assertIsInstance(ie2.name, str) self.assertEqual((b'dirname', b'dir-id', b'dir-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1277,7 +1277,7 @@ def test_dir2_entry_to_bytes(self): b'dir-rev-id', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) + self.assertIsInstance(ie2.name, str) self.assertIs(ie2.parent_id, None) self.assertEqual((b'dir\xce\xa9name', b'dir-id', b'dir-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1292,8 +1292,8 @@ def test_symlink_entry_to_bytes(self): b'link-rev-id\ntarget/path', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) - self.assertIsInstance(ie2.symlink_target, text_type) + self.assertIsInstance(ie2.name, str) + self.assertIsInstance(ie2.symlink_target, str) self.assertEqual((b'linkname', b'link-id', b'link-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1308,8 +1308,8 @@ def test_symlink2_entry_to_bytes(self): b'link-rev-id\ntarget/\xce\xa9path', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) - self.assertIsInstance(ie2.symlink_target, text_type) + self.assertIsInstance(ie2.name, str) + self.assertIsInstance(ie2.symlink_target, str) self.assertEqual((b'link\xce\xa9name', b'link-id', b'link-rev-id'), inv._bytes_to_utf8name_key(bytes)) @@ -1324,7 +1324,7 @@ def test_tree_reference_entry_to_bytes(self): b'tree-rev-id\nref-rev-id', bytes) ie2 = inv._bytes_to_entry(bytes) self.assertEqual(ie, ie2) - self.assertIsInstance(ie2.name, text_type) + self.assertIsInstance(ie2.name, str) self.assertEqual((b'tree\xce\xa9name', b'tree-root-id', b'tree-rev-id'), inv._bytes_to_utf8name_key(bytes)) diff --git a/breezy/tests/test_inventory_delta.py b/breezy/tests/test_inventory_delta.py index 0b642f6fe2..cea4bdd2c8 100644 --- a/breezy/tests/test_inventory_delta.py +++ b/breezy/tests/test_inventory_delta.py @@ -19,6 +19,9 @@ See doc/developer/inventory.txt for more information. 
""" +from io import ( + BytesIO, + ) from ..bzr import ( inventory, inventory_delta, @@ -26,9 +29,6 @@ from ..bzr.inventory_delta import InventoryDeltaError from ..bzr.inventory import Inventory from ..revision import NULL_REVISION -from ..sixish import ( - BytesIO, - ) from . import TestCase ### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ### diff --git a/breezy/tests/test_lockdir.py b/breezy/tests/test_lockdir.py index 55d81a6049..6c3e0be301 100644 --- a/breezy/tests/test_lockdir.py +++ b/breezy/tests/test_lockdir.py @@ -40,9 +40,6 @@ LockDir, LockHeldInfo, ) -from ..sixish import ( - text_type, - ) from . import ( features, TestCase, @@ -668,7 +665,7 @@ def test_repr(self): def test_unicode(self): info = LockHeldInfo.for_this_process(None) - self.assertContainsRe(text_type(info), + self.assertContainsRe(str(info), r'held by .* on .* \(process #\d+\), acquired .* ago') def test_is_locked_by_this_process(self): diff --git a/breezy/tests/test_log.py b/breezy/tests/test_log.py index 0f52f7a077..b289e9aa4b 100644 --- a/breezy/tests/test_log.py +++ b/breezy/tests/test_log.py @@ -14,6 +14,10 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import ( + BytesIO, + StringIO, + ) import os import re @@ -28,11 +32,6 @@ gpg, trace, ) -from ..sixish import ( - BytesIO, - StringIO, - unichr, - ) class TestLogMixin(object): @@ -223,7 +222,7 @@ def test_simple_commit(self): def test_commit_message_with_control_chars(self): wt = self.make_branch_and_tree('.') - msg = u"All 8-bit chars: " + ''.join([unichr(x) for x in range(256)]) + msg = u"All 8-bit chars: " + ''.join([chr(x) for x in range(256)]) msg = msg.replace(u'\r', u'\n') wt.commit(msg) lf = LogCatcher() @@ -241,7 +240,7 @@ def test_commit_message_without_control_chars(self): # newline conversion, neither LF (\x0A) nor CR (\x0D) are # included in the test commit message, even though they are # valid XML 1.0 characters. - msg = "\x09" + ''.join([unichr(x) for x in range(0x20, 256)]) + msg = "\x09" + ''.join([chr(x) for x in range(0x20, 256)]) wt.commit(msg) lf = LogCatcher() log.show_log(wt.branch, lf, verbose=True) diff --git a/breezy/tests/test_matchers.py b/breezy/tests/test_matchers.py index 04e4cbda0a..1186056379 100644 --- a/breezy/tests/test_matchers.py +++ b/breezy/tests/test_matchers.py @@ -19,7 +19,6 @@ from testtools.matchers import * from ..bzr.smart.client import CallHookParams -from ..sixish import PY3 from . 
import ( CapturedCall, @@ -133,14 +132,9 @@ def test_mismatch(self): t.add(['a', 'b', 'b/c'], [b'a-id', b'b-id', b'c-id']) mismatch = HasLayout(['a']).match(t) self.assertIsNot(None, mismatch) - if PY3: - self.assertEqual( - set(("['', 'a', 'b/', 'b/c']", "['a']")), - set(mismatch.describe().split(" != "))) - else: - self.assertEqual( - set(("[u'', u'a', u'b/', u'b/c']", "['a']")), - set(mismatch.describe().split(" != "))) + self.assertEqual( + set(("['', 'a', 'b/', 'b/c']", "['a']")), + set(mismatch.describe().split(" != "))) def test_no_dirs(self): # Some tree/repository formats do not support versioned directories @@ -152,14 +146,9 @@ def test_no_dirs(self): self.assertIs(None, HasLayout(['', 'a', 'b/', 'b/c', 'd/']).match(t)) mismatch = HasLayout([u'', u'a', u'd/']).match(t) self.assertIsNot(None, mismatch) - if PY3: - self.assertEqual( - set(("['', 'a', 'b/', 'b/c']", "['', 'a']")), - set(mismatch.describe().split(" != "))) - else: - self.assertEqual( - set(("[u'', u'a', u'b/', u'b/c']", "[u'', u'a']")), - set(mismatch.describe().split(" != "))) + self.assertEqual( + set(("['', 'a', 'b/', 'b/c']", "['', 'a']")), + set(mismatch.describe().split(" != "))) class TestHasPathRelations(TestCaseWithTransport): @@ -237,11 +226,6 @@ def test_mismatch(self): tree.commit('msg1', rev_id=b'a') tree.commit('msg2', rev_id=b'b') matcher = RevisionHistoryMatches([b'a', b'b', b'c']) - if PY3: - self.assertEqual( - set(("[b'a', b'b']", "[b'a', b'b', b'c']")), - set(matcher.match(tree.branch).describe().split(" != "))) - else: - self.assertEqual( - set(("['a', 'b']", "['a', 'b', 'c']")), - set(matcher.match(tree.branch).describe().split(" != "))) + self.assertEqual( + set(("[b'a', b'b']", "[b'a', b'b', b'c']")), + set(matcher.match(tree.branch).describe().split(" != "))) diff --git a/breezy/tests/test_merge.py b/breezy/tests/test_merge.py index 627840d37d..14e8da12a3 100644 --- a/breezy/tests/test_merge.py +++ b/breezy/tests/test_merge.py @@ -14,11 +14,11 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import contextlib import os from .. import ( branch as _mod_branch, - cleanup, conflicts, errors, memorytree, @@ -37,7 +37,6 @@ from ..errors import UnrelatedBranches, NoCommits from ..merge import transform_tree, merge_inner, _PlanMerge from ..osutils import basename, pathjoin, file_kind -from ..sixish import int2byte from . import ( features, TestCaseWithMemoryTransport, @@ -565,8 +564,8 @@ def setUp(self): self.plan_merge_vf.fallback_versionedfiles.append(self.vf) def add_version(self, key, parents, text): - self.vf.add_lines(key, parents, [int2byte( - c) + b'\n' for c in bytearray(text)]) + self.vf.add_lines( + key, parents, [bytes([c]) + b'\n' for c in bytearray(text)]) def add_rev(self, prefix, revision_id, parents, text): self.add_version((prefix, revision_id), [(prefix, p) for p in parents], @@ -574,7 +573,7 @@ def add_rev(self, prefix, revision_id, parents, text): def add_uncommitted_version(self, key, parents, text): self.plan_merge_vf.add_lines(key, parents, - [int2byte(c) + b'\n' for c in bytearray(text)]) + [bytes([c]) + b'\n' for c in bytearray(text)]) def setup_plan_merge(self): self.add_rev(b'root', b'A', [], b'abc') @@ -3179,7 +3178,7 @@ def do_merge_into(self, location, merge_as): :param merge_as: the path in a tree to add the new directory as. :returns: the conflicts from 'do_merge'. 
""" - with cleanup.ExitStack() as stack: + with contextlib.ExitStack() as stack: # Open and lock the various tree and branch objects wt, subdir_relpath = WorkingTree.open_containing(merge_as) stack.enter_context(wt.lock_write()) diff --git a/breezy/tests/test_merge3.py b/breezy/tests/test_merge3.py index 70a6b1f422..70612de6db 100644 --- a/breezy/tests/test_merge3.py +++ b/breezy/tests/test_merge3.py @@ -14,16 +14,13 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO from .. import ( merge3, tests, ) from ..errors import BinaryFile -from ..sixish import ( - BytesIO, - int2byte, - ) def split_lines(t): @@ -325,7 +322,7 @@ def test_minimal_conflicts_common(self): def test_minimal_conflicts_unique(self): def add_newline(s): """Add a newline to each entry in the string""" - return [(int2byte(x) + b'\n') for x in bytearray(s)] + return [(bytes([x]) + b'\n') for x in bytearray(s)] base_text = add_newline(b"abcdefghijklm") this_text = add_newline(b"abcdefghijklmNOPQRSTUVWXYZ") @@ -343,7 +340,7 @@ def add_newline(s): def test_minimal_conflicts_nonunique(self): def add_newline(s): """Add a newline to each entry in the string""" - return [(int2byte(x) + b'\n') for x in bytearray(s)] + return [(bytes([x]) + b'\n') for x in bytearray(s)] base_text = add_newline(b"abacddefgghij") this_text = add_newline(b"abacddefgghijkalmontfprz") diff --git a/breezy/tests/test_mergeable.py b/breezy/tests/test_mergeable.py index 65c6f5144f..60d8c19eea 100644 --- a/breezy/tests/test_mergeable.py +++ b/breezy/tests/test_mergeable.py @@ -14,10 +14,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -try: - import socketserver -except ImportError: - import SocketServer as socketserver +import socketserver from ..mergeable import read_mergeable_from_url from ..directory_service import directories diff --git a/breezy/tests/test_multiparent.py b/breezy/tests/test_multiparent.py index 42df314e96..d8675bc037 100644 --- a/breezy/tests/test_multiparent.py +++ b/breezy/tests/test_multiparent.py @@ -21,7 +21,6 @@ multiparent, tests, ) -from ..sixish import int2byte LINES_1 = b"a\nb\nc\nd\ne\n".splitlines(True) @@ -184,7 +183,7 @@ class TestVersionedFile(TestCase): def add_version(self, vf, text, version_id, parent_ids): vf.add_version( - [(int2byte(t) + b'\n') for t in bytearray(text)], + [(bytes([t]) + b'\n') for t in bytearray(text)], version_id, parent_ids) def make_vf(self): diff --git a/breezy/tests/test_osutils.py b/breezy/tests/test_osutils.py index 479b9219ad..036724d95a 100644 --- a/breezy/tests/test_osutils.py +++ b/breezy/tests/test_osutils.py @@ -19,6 +19,7 @@ from __future__ import absolute_import, division import errno +from io import BytesIO import os import select import socket @@ -33,11 +34,6 @@ trace, win32utils, ) -from ..sixish import ( - BytesIO, - PY3, - text_type, - ) from . import ( features, file_utils, @@ -405,7 +401,7 @@ def test_format_date(self): self.assertRaises(osutils.UnsupportedTimezoneFormat, osutils.format_date, 0, timezone='foo') self.assertIsInstance(osutils.format_date(0), str) - self.assertIsInstance(osutils.format_local_date(0), text_type) + self.assertIsInstance(osutils.format_local_date(0), str) # Testing for the actual value of the local weekday without # duplicating the code from format_date is difficult. 
# Instead blackbox.test_locale should check for localized @@ -947,10 +943,7 @@ def test_pathjoin(self): osutils._win32_pathjoin('path/to/', 'foo')) def test_pathjoin_late_bugfix(self): - if sys.version_info < (2, 7, 6): - expected = '/foo' - else: - expected = 'C:/foo' + expected = 'C:/foo' self.assertEqual(expected, osutils._win32_pathjoin('C:/path/to/', '/foo')) self.assertEqual(expected, @@ -1409,11 +1402,11 @@ def test_unicode__walkdirs_utf8(self): # all abspaths are Unicode, and encode them back into utf8. for dirdetail, dirblock in osutils._walkdirs_utf8('.'): self.assertIsInstance(dirdetail[0], bytes) - if isinstance(dirdetail[1], text_type): + if isinstance(dirdetail[1], str): dirdetail = (dirdetail[0], dirdetail[1].encode('utf8')) dirblock = [list(info) for info in dirblock] for info in dirblock: - self.assertIsInstance(info[4], text_type) + self.assertIsInstance(info[4], str) info[4] = info[4].encode('utf8') new_dirblock = [] for info in dirblock: @@ -1740,10 +1733,7 @@ def test_unicode(self): % (osutils.get_user_encoding(),)) osutils.set_or_unset_env('BRZ_TEST_ENV_VAR', uni_val) - if PY3: - self.assertEqual(uni_val, os.environ.get('BRZ_TEST_ENV_VAR')) - else: - self.assertEqual(env_val, os.environ.get('BRZ_TEST_ENV_VAR')) + self.assertEqual(uni_val, os.environ.get('BRZ_TEST_ENV_VAR')) def test_unset(self): """Test that passing None will remove the env var""" @@ -2004,13 +1994,9 @@ def setUp(self): def test_failure_to_load(self): self._try_loading() self.assertLength(1, osutils._extension_load_failures) - if PY3: - self.assertEqual( - osutils._extension_load_failures[0], - "No module named 'breezy._fictional_extension_py'") - else: - self.assertEqual(osutils._extension_load_failures[0], - "No module named _fictional_extension_py") + self.assertEqual( + osutils._extension_load_failures[0], + "No module named 'breezy._fictional_extension_py'") def test_report_extension_load_failures_no_warning(self): self.assertTrue(self._try_loading()) @@ -2158,13 +2144,13 @@ class TestPathFromEnviron(tests.TestCase): def test_is_unicode(self): self.overrideEnv('BRZ_TEST_PATH', './anywhere at all/') path = osutils.path_from_environ('BRZ_TEST_PATH') - self.assertIsInstance(path, text_type) + self.assertIsInstance(path, str) self.assertEqual(u'./anywhere at all/', path) def test_posix_path_env_ascii(self): self.overrideEnv('BRZ_TEST_PATH', '/tmp') home = osutils._posix_path_from_environ('BRZ_TEST_PATH') - self.assertIsInstance(home, text_type) + self.assertIsInstance(home, str) self.assertEqual(u'/tmp', home) def test_posix_path_env_unicode(self): @@ -2174,34 +2160,26 @@ def test_posix_path_env_unicode(self): self.assertEqual(u'/home/\xa7test', osutils._posix_path_from_environ('BRZ_TEST_PATH')) osutils._fs_enc = "iso8859-5" - if PY3: - # In Python 3, os.environ returns unicode. - self.assertEqual(u'/home/\xa7test', - osutils._posix_path_from_environ('BRZ_TEST_PATH')) - else: - self.assertEqual(u'/home/\u0407test', - osutils._posix_path_from_environ('BRZ_TEST_PATH')) - osutils._fs_enc = "utf-8" - self.assertRaises( - errors.BadFilenameEncoding, - osutils._posix_path_from_environ, 'BRZ_TEST_PATH') + # In Python 3, os.environ returns unicode. 
+ self.assertEqual(u'/home/\xa7test', + osutils._posix_path_from_environ('BRZ_TEST_PATH')) class TestGetHomeDir(tests.TestCase): def test_is_unicode(self): home = osutils._get_home_dir() - self.assertIsInstance(home, text_type) + self.assertIsInstance(home, str) def test_posix_homeless(self): self.overrideEnv('HOME', None) home = osutils._get_home_dir() - self.assertIsInstance(home, text_type) + self.assertIsInstance(home, str) def test_posix_home_ascii(self): self.overrideEnv('HOME', '/home/test') home = osutils._posix_get_home_dir() - self.assertIsInstance(home, text_type) + self.assertIsInstance(home, str) self.assertEqual(u'/home/test', home) def test_posix_home_unicode(self): @@ -2210,22 +2188,15 @@ def test_posix_home_unicode(self): self.overrideAttr(osutils, "_fs_enc", "iso8859-1") self.assertEqual(u'/home/\xa7test', osutils._posix_get_home_dir()) osutils._fs_enc = "iso8859-5" - if PY3: - # In python 3, os.environ returns unicode - self.assertEqual(u'/home/\xa7test', osutils._posix_get_home_dir()) - else: - self.assertEqual(u'/home/\u0407test', - osutils._posix_get_home_dir()) - osutils._fs_enc = "utf-8" - self.assertRaises(errors.BadFilenameEncoding, - osutils._posix_get_home_dir) + # In python 3, os.environ returns unicode + self.assertEqual(u'/home/\xa7test', osutils._posix_get_home_dir()) class TestGetuserUnicode(tests.TestCase): def test_is_unicode(self): user = osutils.getuser_unicode() - self.assertIsInstance(user, text_type) + self.assertIsInstance(user, str) def envvar_to_override(self): if sys.platform == "win32": @@ -2247,10 +2218,7 @@ def test_unicode_user(self): % (osutils.get_user_encoding(),)) uni_username = u'jrandom' + uni_val encoded_username = uni_username.encode(ue) - if PY3: - self.overrideEnv(self.envvar_to_override(), uni_username) - else: - self.overrideEnv(self.envvar_to_override(), encoded_username) + self.overrideEnv(self.envvar_to_override(), uni_username) self.assertEqual(uni_username, osutils.getuser_unicode()) @@ -2374,7 +2342,7 @@ class GetFsTypeTests(tests.TestCaseInTempDir): def test_returns_string_or_none(self): ret = osutils.get_fs_type(self.test_dir) - self.assertTrue(isinstance(ret, text_type) or ret is None) + self.assertTrue(isinstance(ret, str) or ret is None) def test_returns_most_specific(self): self.overrideAttr( diff --git a/breezy/tests/test_osutils_encodings.py b/breezy/tests/test_osutils_encodings.py index af6e94985b..8507707504 100644 --- a/breezy/tests/test_osutils_encodings.py +++ b/breezy/tests/test_osutils_encodings.py @@ -23,7 +23,6 @@ from .. import ( osutils, ) -from ..sixish import PY3 from . 
import ( TestCase, ) @@ -88,20 +87,11 @@ def make_wrapped_streams(self, stdin_encoding, user_encoding='user_encoding', enable_fake_encodings=True): - if PY3: - sys.stdout = StringIOWithEncoding() - else: - sys.stdout = BytesIOWithEncoding() + sys.stdout = StringIOWithEncoding() sys.stdout.encoding = stdout_encoding - if PY3: - sys.stderr = StringIOWithEncoding() - else: - sys.stderr = BytesIOWithEncoding() + sys.stderr = StringIOWithEncoding() sys.stderr.encoding = stderr_encoding - if PY3: - sys.stdin = StringIOWithEncoding() - else: - sys.stdin = BytesIOWithEncoding() + sys.stdin = StringIOWithEncoding() sys.stdin.encoding = stdin_encoding osutils._cached_user_encoding = user_encoding if enable_fake_encodings: @@ -184,10 +174,7 @@ def setUp(self): self.overrideAttr(osutils, '_cached_user_encoding', None) self.overrideAttr(locale, 'getpreferredencoding', self.get_encoding) self.overrideAttr(locale, 'CODESET', None) - if PY3: - self.overrideAttr(sys, 'stderr', StringIOWithEncoding()) - else: - self.overrideAttr(sys, 'stderr', BytesIOWithEncoding()) + self.overrideAttr(sys, 'stderr', StringIOWithEncoding()) def get_encoding(self, do_setlocale=True): return self._encoding diff --git a/breezy/tests/test_pack.py b/breezy/tests/test_pack.py index d4d2710eb9..a1190bd1df 100644 --- a/breezy/tests/test_pack.py +++ b/breezy/tests/test_pack.py @@ -16,13 +16,12 @@ """Tests for breezy.pack.""" +from io import BytesIO + from .. import errors, tests from ..bzr import ( pack, ) -from ..sixish import ( - BytesIO, - ) class TestContainerSerialiser(tests.TestCase): diff --git a/breezy/tests/test_plugins.py b/breezy/tests/test_plugins.py index eb2d594489..311318be43 100644 --- a/breezy/tests/test_plugins.py +++ b/breezy/tests/test_plugins.py @@ -18,6 +18,7 @@ """Tests for plugins""" import importlib +from io import StringIO import logging import os import sys @@ -30,10 +31,6 @@ tests, ) from ..tests.features import pkg_resources_feature -from ..sixish import ( - StringIO, - viewkeys, - ) # TODO: Write a test for plugin decoration of commands. @@ -301,7 +298,7 @@ def test_plugin_with_bad_api_version_reports(self): log = self.load_and_capture(name) self.assertNotContainsRe(log, r"It supports breezy version") - self.assertEqual({'wants100'}, viewkeys(self.plugin_warnings)) + self.assertEqual({'wants100'}, self.plugin_warnings.keys()) self.assertContainsRe( self.plugin_warnings['wants100'][0], r"It supports breezy version") @@ -700,7 +697,7 @@ def test_partial_imports(self): self.create_plugin_package('ugly') self.overrideEnv('BRZ_DISABLE_PLUGINS', 'bad:ugly') self.load_with_paths(['.']) - self.assertEqual({'good'}, viewkeys(self.plugins)) + self.assertEqual({'good'}, self.plugins.keys()) self.assertPluginModules({ 'good': self.plugins['good'].module, 'bad': None, diff --git a/breezy/tests/test_read_bundle.py b/breezy/tests/test_read_bundle.py index 5e4034ee0b..05033a6b4a 100644 --- a/breezy/tests/test_read_bundle.py +++ b/breezy/tests/test_read_bundle.py @@ -16,14 +16,12 @@ """Test read_bundle works properly across various transports.""" +from io import BytesIO + import breezy.mergeable from ..bzr.bundle.serializer import write_bundle import breezy.bzr.bzrdir from .. import errors -from ..sixish import ( - BytesIO, - text_type, - ) from .. 
import tests from .test_transport import TestTransportImplementation from .per_transport import transport_test_permutations @@ -88,7 +86,7 @@ def create_test_bundle(self): def test_read_mergeable_from_url(self): info = self.read_mergeable_from_url( - text_type(self.get_url(self.bundle_name))) + str(self.get_url(self.bundle_name))) revision = info.real_revisions[-1] self.assertEqual(b'commit-1', revision.revision_id) @@ -107,6 +105,6 @@ def test_read_mergeable_respects_possible_transports(self): # transports (the test will fail even). raise tests.TestSkipped( 'Need a ConnectedTransport to test transport reuse') - url = text_type(self.get_url(self.bundle_name)) + url = str(self.get_url(self.bundle_name)) self.read_mergeable_from_url(url) self.assertEqual(1, len(self.possible_transports)) diff --git a/breezy/tests/test_registry.py b/breezy/tests/test_registry.py index eb00e3d08b..db67c97c05 100644 --- a/breezy/tests/test_registry.py +++ b/breezy/tests/test_registry.py @@ -26,7 +26,6 @@ tests, ) -from ..sixish import viewitems class TestRegistry(tests.TestCase): @@ -218,7 +217,7 @@ def test_registry_alias(self): self.assertEqual({'two': 'one', 'three': 'one'}, a_registry.aliases()) self.assertEqual( {'one': ['three', 'two']}, - {k: sorted(v) for (k, v) in viewitems(a_registry.alias_map())}) + {k: sorted(v) for (k, v) in a_registry.alias_map().items()}) def test_registry_alias_exists(self): a_registry = registry.Registry() diff --git a/breezy/tests/test_remote.py b/breezy/tests/test_remote.py index 6648fb8c2c..d148c04e2a 100644 --- a/breezy/tests/test_remote.py +++ b/breezy/tests/test_remote.py @@ -25,6 +25,7 @@ import base64 import bz2 +from io import BytesIO import tarfile import zlib @@ -69,11 +70,6 @@ NULL_REVISION, Revision, ) -from ..sixish import ( - BytesIO, - PY3, - text_type, - ) from ..bzr.smart import medium, request from ..bzr.smart.client import _SmartClient from ..bzr.smart.repository import ( @@ -339,10 +335,7 @@ def test_unicode_path(self): client.add_success_response(b'yes',) transport = RemoteTransport('bzr://localhost/', _client=client) filename = u'/hell\u00d8' - if PY3: - result = transport.has(filename) - else: - result = transport.has(filename.encode('utf-8')) + result = transport.has(filename) self.assertEqual( [('call', b'has', (filename.encode('utf-8'),))], client._calls) @@ -1787,7 +1780,7 @@ def test_tip_change_rejected(self): branch._set_last_revision, b'rev-id') # The UTF-8 message from the response has been decoded into a unicode # object. - self.assertIsInstance(err.msg, text_type) + self.assertIsInstance(err.msg, str) self.assertEqual(rejection_msg_unicode, err.msg) branch.unlock() self.assertFinished(client) diff --git a/breezy/tests/test_selftest.py b/breezy/tests/test_selftest.py index c84069390e..9723f80434 100644 --- a/breezy/tests/test_selftest.py +++ b/breezy/tests/test_selftest.py @@ -19,7 +19,7 @@ import gc import doctest from functools import reduce -from io import BytesIO, TextIOWrapper +from io import BytesIO, StringIO, TextIOWrapper import os import signal import sys @@ -67,11 +67,6 @@ from ..git import ( workingtree as git_workingtree, ) -from ..sixish import ( - PY3, - StringIO, - text_type, - ) from ..symbol_versioning import ( deprecated_function, deprecated_in, @@ -809,8 +804,8 @@ def _time_hello_world_encoding(self): This is used to exercise the test framework. 
""" - self.time(text_type, b'hello', errors='replace') - self.time(text_type, b'world', errors='replace') + self.time(str, b'hello', errors='replace') + self.time(str, b'world', errors='replace') def test_lsprofiling(self): """Verbose test result prints lsprof statistics from test cases.""" @@ -840,16 +835,10 @@ def test_lsprofiling(self): # and then repeated but with 'world', rather than 'hello'. # this should appear in the output stream of our test result. output = result_stream.getvalue() - if PY3: - self.assertContainsRe(output, - r"LSProf output for \(\(b'hello',\), {'errors': 'replace'}\)") - self.assertContainsRe(output, - r"LSProf output for \(\(b'world',\), {'errors': 'replace'}\)") - else: - self.assertContainsRe(output, - r"LSProf output for \(\('hello',\), {'errors': 'replace'}\)") - self.assertContainsRe(output, - r"LSProf output for \(\('world',\), {'errors': 'replace'}\)\n") + self.assertContainsRe(output, + r"LSProf output for \(\(b'hello',\), {'errors': 'replace'}\)") + self.assertContainsRe(output, + r"LSProf output for \(\(b'world',\), {'errors': 'replace'}\)") self.assertContainsRe(output, r" *CallCount *Recursive *Total\(ms\) *Inline\(ms\) *module:lineno\(function\)\n") self.assertContainsRe(output, @@ -1305,11 +1294,8 @@ class FailureWithUnicode(tests.TestCase): def test_log_unicode(self): self.log(u"\u2606") self.fail("Now print that log!") - if PY3: - bio = BytesIO() - out = TextIOWrapper(bio, 'ascii', 'backslashreplace') - else: - bio = out = StringIO() + bio = BytesIO() + out = TextIOWrapper(bio, 'ascii', 'backslashreplace') self.overrideAttr(osutils, "get_terminal_encoding", lambda trace=False: "ascii") self.run_test_runner( @@ -1982,7 +1968,7 @@ class TestExtraAssertions(tests.TestCase): def test_assert_isinstance(self): self.assertIsInstance(2, int) - self.assertIsInstance(u'', (str, text_type)) + self.assertIsInstance(u'', str) e = self.assertRaises(AssertionError, self.assertIsInstance, None, int) self.assertIn( str(e), @@ -1994,16 +1980,10 @@ def test_assert_isinstance(self): e = self.assertRaises(AssertionError, self.assertIsInstance, None, int, "it's just not") - if PY3: - self.assertEqual( - str(e), - "None is an instance of rather " - "than : it's just not") - else: - self.assertEqual( - str(e), - "None is an instance of " - "rather than : it's just not") + self.assertEqual( + str(e), + "None is an instance of rather " + "than : it's just not") def test_assertEndsWith(self): self.assertEndsWith('foo', 'oo') @@ -2129,11 +2109,8 @@ class SelfTestHelper(object): def run_selftest(self, **kwargs): """Run selftest returning its output.""" - if PY3: - bio = BytesIO() - output = TextIOWrapper(bio, 'utf-8') - else: - bio = output = StringIO() + bio = BytesIO() + output = TextIOWrapper(bio, 'utf-8') old_transport = breezy.tests.default_transport old_root = tests.TestCaseWithMemoryTransport.TEST_ROOT tests.TestCaseWithMemoryTransport.TEST_ROOT = None @@ -2142,9 +2119,8 @@ def run_selftest(self, **kwargs): finally: breezy.tests.default_transport = old_transport tests.TestCaseWithMemoryTransport.TEST_ROOT = old_root - if PY3: - output.flush() - output.detach() + output.flush() + output.detach() bio.seek(0) return bio @@ -3155,17 +3131,8 @@ def doctests(): # plugins can't be tested that way since selftest may be run with # --no-plugins ] - if __doc__ is not None and not PY3: - expected_test_list.extend([ - # modules_to_doctest - 'breezy.timestamp.format_highres_date', - ]) suite = tests.test_suite() - if PY3: - self.assertEqual({"testmod_names"}, set(calls)) - else: - 
self.assertEqual({"testmod_names", "modules_to_doctest"}, - set(calls)) + self.assertEqual({"testmod_names", "modules_to_doctest"}, set(calls)) self.assertSubset(expected_test_list, _test_ids(suite)) def test_test_suite_list_and_start(self): @@ -3497,11 +3464,8 @@ def _inject_stream_into_subunit(self, stream): """To be overridden by subclasses that run tests out of process""" def _run_selftest(self, **kwargs): - if PY3: - bio = BytesIO() - sio = TextIOWrapper(bio, 'utf-8') - else: - sio = bio = StringIO() + bio = BytesIO() + sio = TextIOWrapper(bio, 'utf-8') self._inject_stream_into_subunit(bio) tests.selftest(stream=sio, stop_on_failure=False, **kwargs) sio.flush() diff --git a/breezy/tests/test_server.py b/breezy/tests/test_server.py index ff340b8ed7..1b3156104f 100644 --- a/breezy/tests/test_server.py +++ b/breezy/tests/test_server.py @@ -16,10 +16,7 @@ import errno import socket -try: - import socketserver -except ImportError: - import SocketServer as socketserver +import socketserver import sys import threading diff --git a/breezy/tests/test_shelf_ui.py b/breezy/tests/test_shelf_ui.py index 0f1177168b..4f2c415f25 100644 --- a/breezy/tests/test_shelf_ui.py +++ b/breezy/tests/test_shelf_ui.py @@ -15,6 +15,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO import os import sys from textwrap import dedent @@ -26,9 +27,6 @@ shelf, tests, ) -from ..sixish import ( - BytesIO, - ) from . import script from . import ( features, diff --git a/breezy/tests/test_smart_add.py b/breezy/tests/test_smart_add.py index 44c8fa4863..c32a7ec78d 100644 --- a/breezy/tests/test_smart_add.py +++ b/breezy/tests/test_smart_add.py @@ -14,6 +14,8 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import StringIO + from .. 
import ( add, cache_utf8, @@ -23,9 +25,6 @@ from ..bzr import ( inventory, ) -from ..sixish import ( - StringIO, - ) class AddCustomIDAction(add.AddAction): diff --git a/breezy/tests/test_smart_transport.py b/breezy/tests/test_smart_transport.py index c0d0163944..f25ce29ae0 100644 --- a/breezy/tests/test_smart_transport.py +++ b/breezy/tests/test_smart_transport.py @@ -19,6 +19,7 @@ # all of this deals with byte strings so this is safe import doctest import errno +from io import BytesIO import os import socket import subprocess @@ -55,10 +56,6 @@ test_smart, test_server, ) -from ..sixish import ( - BytesIO, - int2byte, - ) from ..transport import ( http, local, @@ -3169,7 +3166,7 @@ def test_incomplete_message(self): decoder, response_handler = self.make_logging_response_decoder() for byte in bytearray(simple_response): self.assertNotEqual(0, decoder.next_read_size()) - decoder.accept_bytes(int2byte(byte)) + decoder.accept_bytes(bytes([byte])) # Now the response is complete self.assertEqual(0, decoder.next_read_size()) diff --git a/breezy/tests/test_smtp_connection.py b/breezy/tests/test_smtp_connection.py index f2d022732c..be226800fd 100644 --- a/breezy/tests/test_smtp_connection.py +++ b/breezy/tests/test_smtp_connection.py @@ -14,10 +14,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -try: - from email.message import Message -except ImportError: # python < 3 - from email.Message import Message +from email.message import Message import errno import smtplib import socket diff --git a/breezy/tests/test_status.py b/breezy/tests/test_status.py index 0e9e31d0d7..0638d804bf 100644 --- a/breezy/tests/test_status.py +++ b/breezy/tests/test_status.py @@ -14,15 +14,13 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import StringIO from .. import ( config, status as _mod_status, ) from ..revisionspec import RevisionSpec -from ..sixish import ( - StringIO, - ) from ..status import show_pending_merges, show_tree_status from . import TestCaseWithTransport diff --git a/breezy/tests/test_symbol_versioning.py b/breezy/tests/test_symbol_versioning.py index 9ac5d44a9c..cc468f4388 100644 --- a/breezy/tests/test_symbol_versioning.py +++ b/breezy/tests/test_symbol_versioning.py @@ -19,7 +19,6 @@ import warnings from breezy import symbol_versioning -from breezy.sixish import PY3 from breezy.symbol_versioning import ( deprecated_function, deprecated_in, @@ -70,19 +69,10 @@ def deprecated_static(): return 1 def test_deprecated_static(self): - if PY3: - expected_warning = ( - "breezy.tests.test_symbol_versioning.TestDeprecationWarnings." - "deprecated_static " - "was deprecated in version 0.7.0.", DeprecationWarning, 2) - else: - # XXX: The results are not quite right because the class name is not - # shown on Python 2- however it is enough to give people a good indication of - # where the problem is. - expected_warning = ( - "breezy.tests.test_symbol_versioning." - "deprecated_static " - "was deprecated in version 0.7.0.", DeprecationWarning, 2) + expected_warning = ( + "breezy.tests.test_symbol_versioning.TestDeprecationWarnings." 
+ "deprecated_static " + "was deprecated in version 0.7.0.", DeprecationWarning, 2) expected_docstring = ( 'Deprecated static.\n' '\n' diff --git a/breezy/tests/test_textfile.py b/breezy/tests/test_textfile.py index 561fa84369..db3a615060 100644 --- a/breezy/tests/test_textfile.py +++ b/breezy/tests/test_textfile.py @@ -14,11 +14,9 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO from ..errors import BinaryFile -from ..sixish import ( - BytesIO, - ) from . import TestCase, TestCaseInTempDir from ..textfile import text_file, check_text_lines, check_text_path diff --git a/breezy/tests/test_trace.py b/breezy/tests/test_trace.py index f7f8f8eabd..dff4cd4a54 100644 --- a/breezy/tests/test_trace.py +++ b/breezy/tests/test_trace.py @@ -19,6 +19,7 @@ """Tests for trace library""" import errno +from io import StringIO import logging import os import re @@ -30,10 +31,6 @@ errors, trace, ) -from ..sixish import ( - PY3, - StringIO, - ) from . import features, TestCaseInTempDir, TestCase, TestSkipped from ..trace import ( mutter, mutter_callsite, report_exception, @@ -146,11 +143,7 @@ def test_format_unicode_error(self): raise errors.BzrCommandError(u'argument foo\xb5 does not exist') except errors.BzrCommandError: msg = _format_exception() - if PY3: - expected = 'brz: ERROR: argument foo\xb5 does not exist\n' - else: - # GZ 2017-06-10: Pretty bogus, should encode per the output stream - expected = 'brz: ERROR: argument foo\xc2\xb5 does not exist\n' + expected = 'brz: ERROR: argument foo\xb5 does not exist\n' self.assertEqual(msg, expected) def test_format_exception(self): @@ -410,10 +403,7 @@ def test_log_utf8_msg(self): def test_log_utf8_arg(self): logging.getLogger("brz").debug(b"%s", b"\xc2\xa7") - if PY3: - expected = u" DEBUG b'\\xc2\\xa7'\n" - else: - expected = u" DEBUG \xa7\n" + expected = u" DEBUG b'\\xc2\\xa7'\n" self.assertEqual(expected, self.get_log()) def test_log_bytes_msg(self): @@ -426,24 +416,12 @@ def test_log_bytes_msg(self): def test_log_bytes_arg(self): logging.getLogger("brz").debug(b"%s", b"\xa7") log = self.get_log() - if PY3: - self.assertEqual(u" DEBUG b'\\xa7'\n", self.get_log()) - else: - self.assertContainsString(log, "UnicodeDecodeError: ") - self.assertContainsRe( - log, - "Logging record unformattable: ?'%s' % \\(b?'\\\\xa7',\\)\n") + self.assertEqual(u" DEBUG b'\\xa7'\n", self.get_log()) def test_log_mixed_strings(self): logging.getLogger("brz").debug(u"%s", b"\xa7") log = self.get_log() - if PY3: - self.assertEqual(u" DEBUG b'\\xa7'\n", self.get_log()) - else: - self.assertContainsString(log, "UnicodeDecodeError: ") - self.assertContainsRe( - log, - "Logging record unformattable: u'%s' % \\('\\\\xa7',\\)\n") + self.assertEqual(u" DEBUG b'\\xa7'\n", self.get_log()) def test_log_repr_broken(self): class BadRepr(object): diff --git a/breezy/tests/test_transform.py b/breezy/tests/test_transform.py index 2c276da5d1..eecaae9c94 100644 --- a/breezy/tests/test_transform.py +++ b/breezy/tests/test_transform.py @@ -63,11 +63,6 @@ ) from ..merge import Merge3Merger, Merger from ..mutabletree import MutableTree -from ..sixish import ( - BytesIO, - PY3, - text_type, - ) from . 
import ( features, TestCaseInTempDir, @@ -892,7 +887,7 @@ def test_string_conflicts(self): raw_conflicts = resolve_conflicts(tt) cooked_conflicts = cook_conflicts(raw_conflicts, tt) tt.finalize() - conflicts_s = [text_type(c) for c in cooked_conflicts] + conflicts_s = [str(c) for c in cooked_conflicts] self.assertEqual(len(cooked_conflicts), len(conflicts_s)) self.assertEqual(conflicts_s[0], 'Conflict adding file dorothy. ' 'Moved existing file to ' @@ -2611,12 +2606,8 @@ def _override_globals_in_method(self, instance, method_name, globals): new_globals.update(globals) new_func = types.FunctionType(func.__code__, new_globals, func.__name__, func.__defaults__) - if PY3: - setattr(instance, method_name, - types.MethodType(new_func, instance)) - else: - setattr(instance, method_name, - types.MethodType(new_func, instance, instance.__class__)) + setattr(instance, method_name, + types.MethodType(new_func, instance)) self.addCleanup(delattr, instance, method_name) @staticmethod diff --git a/breezy/tests/test_transport.py b/breezy/tests/test_transport.py index 53efc2b269..9211864597 100644 --- a/breezy/tests/test_transport.py +++ b/breezy/tests/test_transport.py @@ -16,6 +16,7 @@ import errno +from io import BytesIO import os import subprocess import sys @@ -28,9 +29,6 @@ transport, urlutils, ) -from ..sixish import ( - BytesIO, - ) from ..transport import ( chroot, fakenfs, diff --git a/breezy/tests/test_urlutils.py b/breezy/tests/test_urlutils.py index d19e38623a..8a378cc237 100644 --- a/breezy/tests/test_urlutils.py +++ b/breezy/tests/test_urlutils.py @@ -23,10 +23,6 @@ from ..errors import ( PathNotChild, ) -from ..sixish import ( - text_type, - PY3, - ) from . import features, TestCaseInTempDir, TestCase, TestSkipped @@ -129,14 +125,6 @@ def test_normalize_url_hybrid(self): eq('http://host/~bob%2525-._', normalize_url(u'http://host/%7Ebob%2525%2D%2E%5F')) - if not PY3: - # On Python 2, normalize verifies URLs when they are not unicode - # (indicating they did not come from the user) - self.assertRaises(urlutils.InvalidURL, normalize_url, - b'http://host/\xb5') - self.assertRaises(urlutils.InvalidURL, - normalize_url, b'http://host/ ') - def test_url_scheme_re(self): # Test paths that may be URLs def test_one(url, scheme_and_path): @@ -449,7 +437,7 @@ def test_win32_unc_path_to_url(self): self.assertEqual( 'file://HOST/path/to/r%C3%A4ksm%C3%B6rg%C3%A5s', result) - self.assertFalse(isinstance(result, text_type)) + self.assertFalse(isinstance(result, str)) def test_win32_local_path_from_url(self): from_url = urlutils._win32_local_path_from_url @@ -678,7 +666,7 @@ def test_unescape_for_display_utf8(self): # Test that URLs are converted to nice unicode strings for display def test(expected, url, encoding='utf-8'): disp_url = urlutils.unescape_for_display(url, encoding=encoding) - self.assertIsInstance(disp_url, text_type) + self.assertIsInstance(disp_url, str) self.assertEqual(expected, disp_url) test('http://foo', 'http://foo') @@ -727,14 +715,9 @@ def test_unescape(self): self.assertEqual('%', urlutils.unescape('%25')) self.assertEqual(u'\xe5', urlutils.unescape('%C3%A5')) - if not PY3: - self.assertRaises(urlutils.InvalidURL, urlutils.unescape, u'\xe5') self.assertRaises((TypeError, urlutils.InvalidURL), urlutils.unescape, b'\xe5') - if not PY3: - self.assertRaises(urlutils.InvalidURL, urlutils.unescape, '%E5') - else: - self.assertEqual('\xe5', urlutils.unescape('%C3%A5')) + self.assertEqual('\xe5', urlutils.unescape('%C3%A5')) def test_escape_unescape(self): self.assertEqual(u'\xe5', 
urlutils.unescape(urlutils.escape(u'\xe5'))) @@ -1140,10 +1123,7 @@ def test_quote_tildes(self): def test_unquote(self): self.assertEqual('%', urlutils.unquote('%25')) - if PY3: - self.assertEqual('\xe5', urlutils.unquote('%C3%A5')) - else: - self.assertEqual('\xc3\xa5', urlutils.unquote('%C3%A5')) + self.assertEqual('\xe5', urlutils.unquote('%C3%A5')) self.assertEqual(u"\xe5", urlutils.unquote(u'\xe5')) def test_unquote_to_bytes(self): diff --git a/breezy/tests/test_version.py b/breezy/tests/test_version.py index ff9a3dd691..704be3f234 100644 --- a/breezy/tests/test_version.py +++ b/breezy/tests/test_version.py @@ -16,6 +16,7 @@ """Tests for versioning of breezy.""" +from io import StringIO import platform import re @@ -24,10 +25,6 @@ version, workingtree, ) -from ..sixish import ( - PY3, - StringIO, - ) from .scenarios import load_tests_apply_scenarios @@ -78,6 +75,5 @@ def test_platform(self): version.show_version(show_config=False, show_copyright=False, to_file=out) expected = r'(?m)^ Platform: %s' % self._platform - if PY3: - expected = expected.encode('utf-8') + expected = expected.encode('utf-8') self.assertContainsRe(out.getvalue(), expected) diff --git a/breezy/tests/test_version_info.py b/breezy/tests/test_version_info.py index f3acf83256..b6bc1246ef 100644 --- a/breezy/tests/test_version_info.py +++ b/breezy/tests/test_version_info.py @@ -16,6 +16,10 @@ """Tests for version_info""" +from io import ( + BytesIO, + StringIO, + ) import os from .. import ( @@ -23,10 +27,6 @@ tests, version_info_formats, ) -from ..sixish import ( - BytesIO, - StringIO, - ) from . import TestCaseWithTransport from ..rio import read_stanzas, read_stanzas_unicode diff --git a/breezy/tests/test_weave.py b/breezy/tests/test_weave.py index 70c2d40076..656a2bd8dd 100644 --- a/breezy/tests/test_weave.py +++ b/breezy/tests/test_weave.py @@ -21,15 +21,13 @@ """test suite for weave algorithm""" +from io import BytesIO from pprint import pformat from .. import ( errors, ) from ..osutils import sha_string -from ..sixish import ( - BytesIO, - ) from . import TestCase, TestCaseInTempDir from ..bzr.weave import Weave, WeaveFormatError, WeaveInvalidChecksum from ..bzr.weavefile import write_weave, read_weave diff --git a/breezy/tests/test_wsgi.py b/breezy/tests/test_wsgi.py index fefaa16e5f..8329174976 100644 --- a/breezy/tests/test_wsgi.py +++ b/breezy/tests/test_wsgi.py @@ -16,10 +16,9 @@ """Tests for WSGI application""" +from io import BytesIO + from .. import tests -from ..sixish import ( - BytesIO, - ) from ..bzr.smart import medium, protocol from ..transport.http import wsgi from ..transport import chroot, memory diff --git a/breezy/tests/test_xml.py b/breezy/tests/test_xml.py index cce4452b19..ae2b45a9a3 100644 --- a/breezy/tests/test_xml.py +++ b/breezy/tests/test_xml.py @@ -14,6 +14,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from io import BytesIO from .. import ( errors, @@ -25,10 +26,6 @@ xml7, xml8, ) -from ..sixish import ( - BytesIO, - text_type, - ) from ..bzr.inventory import Inventory from . 
import TestCase import breezy.bzr.xml5 @@ -491,7 +488,7 @@ def test_revision_ids_are_utf8(self): for parent_id in rev.parent_ids: self.assertIsInstance(parent_id, bytes) self.assertEqual(u'Include \xb5nicode characters\n', rev.message) - self.assertIsInstance(rev.message, text_type) + self.assertIsInstance(rev.message, str) # ie.revision should either be None or a utf-8 revision id inv = s_v5.read_inventory_from_lines(breezy.osutils.split_lines(_inventory_utf8_v5)) @@ -513,7 +510,7 @@ def test_revision_ids_are_utf8(self): for ((exp_path, exp_file_id, exp_parent_id, exp_rev_id), (act_path, act_ie)) in zip(expected, actual): self.assertEqual(exp_path, act_path) - self.assertIsInstance(act_path, text_type) + self.assertIsInstance(act_path, str) self.assertEqual(exp_file_id, act_ie.file_id) self.assertIsInstance(act_ie.file_id, bytes) self.assertEqual(exp_parent_id, act_ie.parent_id) diff --git a/breezy/tests/ui_testing.py b/breezy/tests/ui_testing.py index 018056b1e2..add824782d 100644 --- a/breezy/tests/ui_testing.py +++ b/breezy/tests/ui_testing.py @@ -22,9 +22,6 @@ ui, ) from ..ui import text as ui_text -from ..sixish import ( - text_type, -) class StringIOWithEncoding(io.StringIO): @@ -54,7 +51,7 @@ class TextUIFactory(ui_text.TextUIFactory): def __init__(self, stdin=None, stdout=None, stderr=None): if isinstance(stdin, bytes): stdin = stdin.decode() - if isinstance(stdin, text_type): + if isinstance(stdin, str): stdin = StringIOWithEncoding(stdin) if stdout is None: stdout = StringIOWithEncoding() diff --git a/breezy/trace.py b/breezy/trace.py index 77a00ab89c..5302a7ca53 100644 --- a/breezy/trace.py +++ b/breezy/trace.py @@ -57,6 +57,7 @@ # that. import errno +from io import StringIO import logging import os import sys @@ -82,12 +83,6 @@ errors, ) -from .sixish import ( - PY3, - StringIO, - text_type, - ) - # global verbosity for breezy; controls the log level for stderr; 0=normal; <0 # is quiet; >0 is verbose. 
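# Illustrative sketch, not part of the diff: the sixish helpers being removed
# throughout this series map directly onto Python 3 builtins and the standard
# library. `mapping` below is a placeholder name; the equivalences themselves
# are the ones the hunks above and below apply.
from io import BytesIO, StringIO   # replaces breezy.sixish BytesIO / StringIO

text_type = str                    # sixish text_type is simply str
mapping = {'key': 'value'}
pairs = list(mapping.items())      # replaces viewitems(mapping)
vals = list(mapping.values())      # replaces viewvalues(mapping)
one_byte = bytes([0xa7])           # replaces int2byte(0xa7)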
@@ -162,9 +157,6 @@ def mutter(fmt, *args): fmt = fmt.decode('ascii', 'replace') if args: - if not PY3: - args = tuple( - _Bytes(arg) if isinstance(arg, bytes) else arg for arg in args) out = fmt % args else: out = fmt @@ -306,12 +298,7 @@ def enable_default_logging(): r'%Y-%m-%d %H:%M:%S') # after hooking output into brz_log, we also need to attach a stderr # handler, writing only at level info and with encoding - if sys.version_info[0] == 2: - stderr_handler = EncodedStreamHandler( - sys.stderr, osutils.get_terminal_encoding(), 'replace', - level=logging.INFO) - else: - stderr_handler = logging.StreamHandler(stream=sys.stderr) + stderr_handler = logging.StreamHandler(stream=sys.stderr) logging.getLogger('brz').addHandler(stderr_handler) return memento @@ -615,12 +602,11 @@ def flush(self): def emit(self, record): try: - if not isinstance(record.msg, text_type): + if not isinstance(record.msg, str): msg = record.msg.decode("utf-8") - if PY3: - record.msg = msg + record.msg = msg line = self.format(record) - if not isinstance(line, text_type): + if not isinstance(line, str): line = line.decode("utf-8") self.stream.write(line.encode(self.encoding, self.errors) + b"\n") except Exception: diff --git a/breezy/transform.py b/breezy/transform.py index b46ac4deb6..36366927c9 100644 --- a/breezy/transform.py +++ b/breezy/transform.py @@ -16,6 +16,7 @@ from __future__ import absolute_import +import contextlib import os import errno from stat import S_ISREG, S_IEXEC @@ -33,7 +34,6 @@ from breezy import ( annotate, bencode, - cleanup, controldir, commit, conflicts, @@ -65,11 +65,6 @@ supports_symlinks, ) from .progress import ProgressPhase -from .sixish import ( - text_type, - viewitems, - viewvalues, - ) from .tree import ( find_previous_path, TreeChange, @@ -239,7 +234,7 @@ def fixup_new_roots(self): irrelevant. """ - new_roots = [k for k, v in viewitems(self._new_parent) + new_roots = [k for k, v in self._new_parent.items() if v == ROOT_PARENT] if len(new_roots) < 1: return @@ -480,7 +475,7 @@ def inactive_file_id(self, trans_id): file_id = self.tree_file_id(trans_id) if file_id is not None: return file_id - for key, value in viewitems(self._non_present_ids): + for key, value in self._non_present_ids.items(): if value == trans_id: return key @@ -510,7 +505,7 @@ def by_parent(self): Only new paths and parents of tree files with assigned ids are used. 
""" by_parent = {} - items = list(viewitems(self._new_parent)) + items = list(self._new_parent.items()) items.extend((t, self.final_parent(t)) for t in list(self._tree_id_paths)) for trans_id, parent_id in items: @@ -635,7 +630,7 @@ def _parent_loops(self): def _unversioned_parents(self, by_parent): """If parent directories are versioned, children must be versioned.""" conflicts = [] - for parent_id, children in viewitems(by_parent): + for parent_id, children in by_parent.items(): if parent_id == ROOT_PARENT: continue if self.final_file_id(parent_id) is not None: @@ -697,7 +692,7 @@ def _duplicate_entries(self, by_parent): conflicts = [] if (self._new_name, self._new_parent) == ({}, {}): return conflicts - for children in viewvalues(by_parent): + for children in by_parent.values(): name_ids = [] for child_tid in children: name = self.final_name(child_tid) @@ -732,7 +727,7 @@ def _duplicate_ids(self): removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in self._removed_id)) active_tree_ids = all_ids.difference(removed_tree_ids) - for trans_id, file_id in viewitems(self._new_id): + for trans_id, file_id in self._new_id.items(): if file_id in active_tree_ids: path = self._tree.id2path(file_id) old_trans_id = self.trans_id_tree_path(path) @@ -742,7 +737,7 @@ def _duplicate_ids(self): def _parent_type_conflicts(self, by_parent): """Children must have a directory parent""" conflicts = [] - for parent_id, children in viewitems(by_parent): + for parent_id, children in by_parent.items(): if parent_id == ROOT_PARENT: continue no_children = True @@ -1112,17 +1107,17 @@ def serialize(self, serializer): :param serializer: A Serialiser like pack.ContainerSerializer. """ new_name = {k.encode('utf-8'): v.encode('utf-8') - for k, v in viewitems(self._new_name)} + for k, v in self._new_name.items()} new_parent = {k.encode('utf-8'): v.encode('utf-8') - for k, v in viewitems(self._new_parent)} + for k, v in self._new_parent.items()} new_id = {k.encode('utf-8'): v - for k, v in viewitems(self._new_id)} + for k, v in self._new_id.items()} new_executability = {k.encode('utf-8'): int(v) - for k, v in viewitems(self._new_executability)} + for k, v in self._new_executability.items()} tree_path_ids = {k.encode('utf-8'): v.encode('utf-8') - for k, v in viewitems(self._tree_path_ids)} + for k, v in self._tree_path_ids.items()} non_present_ids = {k: v.encode('utf-8') - for k, v in viewitems(self._non_present_ids)} + for k, v in self._non_present_ids.items()} removed_contents = [trans_id.encode('utf-8') for trans_id in self._removed_contents] removed_id = [trans_id.encode('utf-8') @@ -1140,7 +1135,7 @@ def serialize(self, serializer): } yield serializer.bytes_record(bencode.bencode(attribs), ((b'attribs',),)) - for trans_id, kind in sorted(viewitems(self._new_contents)): + for trans_id, kind in sorted(self._new_contents.items()): if kind == 'file': with open(self._limbo_name(trans_id), 'rb') as cur_file: lines = cur_file.readlines() @@ -1166,18 +1161,18 @@ def deserialize(self, records): attribs = bencode.bdecode(content) self._id_number = attribs[b'_id_number'] self._new_name = {k.decode('utf-8'): v.decode('utf-8') - for k, v in viewitems(attribs[b'_new_name'])} + for k, v in attribs[b'_new_name'].items()} self._new_parent = {k.decode('utf-8'): v.decode('utf-8') - for k, v in viewitems(attribs[b'_new_parent'])} + for k, v in attribs[b'_new_parent'].items()} self._new_executability = { k.decode('utf-8'): bool(v) - for k, v in viewitems(attribs[b'_new_executability'])} + for k, v in 
attribs[b'_new_executability'].items()} self._new_id = {k.decode('utf-8'): v - for k, v in viewitems(attribs[b'_new_id'])} - self._r_new_id = {v: k for k, v in viewitems(self._new_id)} + for k, v in attribs[b'_new_id'].items()} + self._r_new_id = {v: k for k, v in self._new_id.items()} self._tree_path_ids = {} self._tree_id_paths = {} - for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']): + for bytepath, trans_id in attribs[b'_tree_path_ids'].items(): path = bytepath.decode('utf-8') trans_id = trans_id.decode('utf-8') self._tree_path_ids[path] = trans_id @@ -1189,7 +1184,7 @@ def deserialize(self, records): for trans_id in attribs[b'_removed_contents']) self._non_present_ids = { k: v.decode('utf-8') - for k, v in viewitems(attribs[b'_non_present_ids'])} + for k, v in attribs[b'_non_present_ids'].items()} for ((trans_id, kind),), content in records: trans_id = trans_id.decode('utf-8') kind = kind.decode('ascii') @@ -1280,7 +1275,7 @@ def finalize(self): if self._tree is None: return try: - limbo_paths = list(viewvalues(self._limbo_files)) + limbo_paths = list(self._limbo_files.values()) limbo_paths.extend(self._possibly_stale_limbo_files) limbo_paths.sort(reverse=True) for path in limbo_paths: @@ -1748,8 +1743,8 @@ def _generate_limbo_path(self, trans_id): in (trans_id, None)): use_direct_path = True else: - for l_filename, l_trans_id in viewitems( - self._limbo_children_names[parent]): + for l_filename, l_trans_id in ( + self._limbo_children_names[parent].items()): if l_trans_id == trans_id: continue if l_filename.lower() == filename.lower(): @@ -1878,7 +1873,7 @@ def _apply_removals(self, mover): If inventory_delta is None, no inventory delta generation is performed. """ - tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True) + tree_paths = sorted(self._tree_path_ids.items(), reverse=True) with ui.ui_factory.nested_progress_bar() as child_pb: for num, (path, trans_id) in enumerate(tree_paths): # do not attempt to move root into a subdirectory of itself. @@ -1963,7 +1958,7 @@ def _apply_observed_sha1s(self): # problems. (we could observe start time, and finish time, and if # it is less than eg 10% overhead, add a sleep call.) paths = FinalPaths(self) - for trans_id, observed in viewitems(self._observed_sha1s): + for trans_id, observed in self._observed_sha1s.items(): path = paths.get_path(trans_id) self._tree._observed_sha1(path, observed) @@ -2110,7 +2105,7 @@ def all_file_ids(self): tree_ids = set(self._transform._tree.all_file_ids()) tree_ids.difference_update(self._transform.tree_file_id(t) for t in self._transform._removed_id) - tree_ids.update(viewvalues(self._transform._new_id)) + tree_ids.update(self._transform._new_id.values()) return tree_ids def all_versioned_paths(self): @@ -2408,7 +2403,7 @@ def path_content_summary(self, path): executable = None if kind == 'symlink': link_or_sha1 = os.readlink(limbo_name) - if not isinstance(link_or_sha1, text_type): + if not isinstance(link_or_sha1, str): link_or_sha1 = link_or_sha1.decode(osutils._fs_enc) executable = tt._new_executability.get(trans_id, executable) return kind, size, executable, link_or_sha1 @@ -2593,7 +2588,7 @@ def build_tree(tree, wt, accelerator_tree=None, hardlink=False, :param delta_from_tree: If true, build_tree may use the input Tree to generate the inventory delta. 
""" - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(wt.lock_tree_write()) exit_stack.enter_context(tree.lock_read()) if accelerator_tree is not None: @@ -2702,7 +2697,7 @@ def resolver(t, c): precomputed_delta = None conflicts = cook_conflicts(raw_conflicts, tt) for conflict in conflicts: - trace.warning(text_type(conflict)) + trace.warning(str(conflict)) try: wt.add_conflicts(conflicts) except errors.UnsupportedOperation: @@ -2896,7 +2891,7 @@ def revert(working_tree, target_tree, filenames, backups=False, unversioned_filter=working_tree.is_ignored) delta.report_changes(tt.iter_changes(), change_reporter) for conflict in conflicts: - trace.warning(text_type(conflict)) + trace.warning(str(conflict)) pp.next_phase() tt.apply() if working_tree.supports_merge_modified(): diff --git a/breezy/transport/__init__.py b/breezy/transport/__init__.py index 5f3e522a00..2344232fdb 100644 --- a/breezy/transport/__init__.py +++ b/breezy/transport/__init__.py @@ -29,15 +29,11 @@ from __future__ import absolute_import import errno +from io import BytesIO import sys from stat import S_ISDIR -from ..sixish import ( - BytesIO, - PY3, - string_types, - ) from ..trace import ( mutter, ) diff --git a/breezy/transport/gio_transport.py b/breezy/transport/gio_transport.py index a33938fe99..d2939c32cf 100644 --- a/breezy/transport/gio_transport.py +++ b/breezy/transport/gio_transport.py @@ -31,16 +31,10 @@ import random import stat import time -try: - from urllib.parse import ( - urlparse, - urlunparse, - ) -except ImportError: - from urlparse import ( - urlparse, - urlunparse, - ) +from urllib.parse import ( + urlparse, + urlunparse, + ) from .. import ( config, @@ -50,9 +44,6 @@ debug, ui, ) -from ..sixish import ( - text_type, - ) from ..trace import mutter from . 
import ( FileStream, @@ -154,7 +145,7 @@ def __init__(self, base, _from_transport=None): def _relpath_to_url(self, relpath): full_url = urlutils.join(self.url, relpath) - if isinstance(full_url, text_type): + if isinstance(full_url, str): raise urlutils.InvalidURL(full_url) return full_url diff --git a/breezy/transport/http/__init__.py b/breezy/transport/http/__init__.py index 0b9401aa5b..28192685e6 100644 --- a/breezy/transport/http/__init__.py +++ b/breezy/transport/http/__init__.py @@ -35,19 +35,9 @@ import urllib import weakref -try: - import http.client as http_client -except ImportError: - import httplib as http_client -try: - import urllib.request as urllib_request -except ImportError: # python < 3 - import urllib2 as urllib_request -try: - from urllib.parse import urljoin, splitport, splittype, splithost, urlencode -except ImportError: - from urlparse import urljoin - from urllib import splitport, splittype, splithost, urlencode +import http.client as http_client +import urllib.request as urllib_request +from urllib.parse import urljoin, splitport, splittype, splithost, urlencode # TODO: handle_response should be integrated into the http/__init__.py from .response import handle_response @@ -73,11 +63,6 @@ urlutils, ) from ...bzr.smart import medium -from ...sixish import ( - PY3, - reraise, - text_type, -) from ...trace import mutter from ...transport import ( ConnectedTransport, @@ -89,18 +74,6 @@ def default_user_agent(): return 'Breezy/%s' % breezy_version -try: - _ = (ssl.match_hostname, ssl.CertificateError) -except AttributeError: - # Provide fallbacks for python < 2.7.9 - def match_hostname(cert, host): - trace.warning( - '%s cannot be verified, https certificates verification is only' - ' available for python versions >= 2.7.9' % (host,)) - ssl.match_hostname = match_hostname - ssl.CertificateError = ValueError - - # Note for packagers: if there is no package providing certs for your platform, # the curl project produces http://curl.haxx.se/ca/cacert.pem weekly. _ssl_ca_certs_known_locations = [ @@ -186,26 +159,6 @@ def default_ca_reqs(): kerberos = None -class addinfourl(urllib_request.addinfourl): - '''Replacement addinfourl class compatible with python-2.7's xmlrpclib - - In python-2.7, xmlrpclib expects that the response object that it receives - has a getheader method. http_client.HTTPResponse provides this but - urllib_request.addinfourl does not. Add the necessary functions here, ported to - use the internal data structures of addinfourl. - ''' - - def getheader(self, name, default=None): - if self.headers is None: - raise http_client.ResponseNotReady() - return self.headers.getheader(name, default) - - def getheaders(self): - if self.headers is None: - raise http_client.ResponseNotReady() - return list(self.headers.items()) - - class _ReportingFileSocket(object): def __init__(self, filesock, report_activity=None): @@ -284,11 +237,10 @@ class Response(http_client.HTTPResponse): # 8k chunks should be fine. _discarded_buf_size = 8192 - if PY3: - def __init__(self, sock, debuglevel=0, method=None, url=None): - self.url = url - super(Response, self).__init__( - sock, debuglevel=debuglevel, method=method, url=url) + def __init__(self, sock, debuglevel=0, method=None, url=None): + self.url = url + super(Response, self).__init__( + sock, debuglevel=debuglevel, method=method, url=url) def begin(self): """Begin to read the response from the server. 
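# Illustrative sketch, not part of the diff: with Python 2 support gone, the
# try/except import fallbacks above collapse into plain Python 3 imports of
# the standard library modules.
import http.client as http_client
import urllib.request as urllib_request
from urllib.parse import urljoin, urlencode, urlparse, urlunparse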
@@ -424,11 +376,7 @@ class HTTPConnection(AbstractHTTPConnection, http_client.HTTPConnection): def __init__(self, host, port=None, proxied_host=None, report_activity=None, ca_certs=None): AbstractHTTPConnection.__init__(self, report_activity=report_activity) - if PY3: - http_client.HTTPConnection.__init__(self, host, port) - else: - # Use strict=True since we don't support HTTP/0.9 - http_client.HTTPConnection.__init__(self, host, port, strict=True) + http_client.HTTPConnection.__init__(self, host, port) self.proxied_host = proxied_host # ca_certs is ignored, it's only relevant for https @@ -445,13 +393,8 @@ def __init__(self, host, port=None, key_file=None, cert_file=None, proxied_host=None, report_activity=None, ca_certs=None): AbstractHTTPConnection.__init__(self, report_activity=report_activity) - if PY3: - http_client.HTTPSConnection.__init__( - self, host, port, key_file, cert_file) - else: - # Use strict=True since we don't support HTTP/0.9 - http_client.HTTPSConnection.__init__(self, host, port, - key_file, cert_file, strict=True) + http_client.HTTPSConnection.__init__( + self, host, port, key_file, cert_file) self.proxied_host = proxied_host self.ca_certs = ca_certs @@ -547,10 +490,7 @@ def get_method(self): def set_proxy(self, proxy, type): """Set the proxy and remember the proxied host.""" - if PY3: - host, port = splitport(self.host) - else: - host, port = splitport(self.get_host()) + host, port = splitport(self.host) if port is None: # We need to set the default port ourselves way before it gets set # in the HTTP[S]Connection object at build time. @@ -722,17 +662,14 @@ def retry_or_raise(self, http_class, request, first_try): exc_type, exc_val, exc_tb = sys.exc_info() if exc_type == socket.gaierror: # No need to retry, that will not help - if PY3: - origin_req_host = request.origin_req_host - else: - origin_req_host = request.get_origin_req_host() + origin_req_host = request.origin_req_host raise errors.ConnectionError("Couldn't resolve host '%s'" % origin_req_host, orig_error=exc_val) elif isinstance(exc_val, http_client.ImproperConnectionState): # The http_client pipeline is in incorrect state, it's a bug in our # implementation. - reraise(exc_type, exc_val, exc_tb) + raise exc_val else: if first_try: if self._debuglevel >= 2: @@ -771,10 +708,7 @@ def retry_or_raise(self, http_class, request, first_try): # far outside our scope, so closing the # connection and retrying is the best we can # do. 
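# Illustrative sketch, not part of the diff: the accessor methods of the old
# urllib2 Request objects are plain attributes on Python 3's
# urllib.request.Request, which is what the surrounding hunks switch to.
from urllib.request import Request

req = Request('http://example.com/path?q=1')
assert req.selector == '/path?q=1'            # was req.get_selector()
assert req.host == 'example.com'              # was req.get_host()
assert req.origin_req_host == 'example.com'   # was req.get_origin_req_host()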
- if PY3: - selector = request.selector - else: - selector = request.get_selector() + selector = request.selector my_exception = errors.ConnectionError( msg='while sending %s %s:' % (request.get_method(), selector), @@ -786,7 +720,7 @@ def retry_or_raise(self, http_class, request, first_try): url = request.get_full_url() print(' Failed again, %s %r' % (method, url)) print(' Will raise: [%r]' % my_exception) - reraise(type(my_exception), my_exception, exc_tb) + raise my_exception.with_traceback(exc_tb) return response def do_open(self, http_class, request, first_try=True): @@ -813,10 +747,7 @@ def do_open(self, http_class, request, first_try=True): try: method = request.get_method() - if PY3: - url = request.selector - else: - url = request.get_selector() + url = request.selector if sys.version_info[:2] >= (3, 6): connection._send_request(method, url, # FIXME: implements 100-continue @@ -853,57 +784,8 @@ def do_open(self, http_class, request, first_try=True): response = self.retry_or_raise(http_class, request, first_try) convert_to_addinfourl = False - if PY3: - response.msg = response.reason - return response - -# FIXME: HTTPConnection does not fully support 100-continue (the -# server responses are just ignored) - -# if code == 100: -# mutter('Will send the body') -# # We can send the body now -# body = request.data -# if body is None: -# raise URLError("No data given") -# connection.send(body) -# response = connection.getresponse() - - if self._debuglevel >= 2: - print('Receives response: %r' % response) - print(' For: %r(%r)' % (request.get_method(), - request.get_full_url())) - - if convert_to_addinfourl: - # Shamelessly copied from urllib_request - req = request - r = response - r.recv = r.read - fp = socket._fileobject(r, bufsize=65536) - resp = addinfourl(fp, r.msg, req.get_full_url()) - resp.code = r.status - resp.msg = r.reason - resp.version = r.version - if self._debuglevel >= 2: - print('Create addinfourl: %r' % resp) - print(' For: %r(%r)' % (request.get_method(), - request.get_full_url())) - if 'http' in debug.debug_flags: - version = 'HTTP/%d.%d' - try: - version = version % (resp.version / 10, - resp.version % 10) - except: - version = 'HTTP/%r' % resp.version - trace.mutter('< %s %s %s' % (version, resp.code, - resp.msg)) - # Use the raw header lines instead of treating resp.info() as a - # dict since we may miss duplicated headers otherwise. - hdrs = [h.rstrip('\r\n') for h in resp.info().headers] - trace.mutter('< ' + '\n< '.join(hdrs) + '\n') - else: - resp = response - return resp + response.msg = response.reason + return response class HTTPHandler(AbstractHTTPHandler): @@ -997,10 +879,7 @@ def redirect_request(self, req, fp, code, msg, headers, newurl): # and that we MAY avoid following the redirections. But # if we want to be sure, we MUST follow them. 
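# Illustrative sketch, not part of the diff, of the re-raise idiom adopted in
# the hunk above: sixish reraise(type(e), e, tb) becomes the builtin
# `raise e.with_traceback(tb)`. The helper name below is hypothetical.
import sys

def reraise_with_original_traceback(new_exc):
    # Attach the traceback of the exception currently being handled.
    _, _, tb = sys.exc_info()
    raise new_exc.with_traceback(tb)

try:
    try:
        1 / 0
    except ZeroDivisionError:
        reraise_with_original_traceback(RuntimeError("while dividing"))
except RuntimeError:
    pass  # the traceback still points at the original division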
- if PY3: - origin_req_host = req.origin_req_host - else: - origin_req_host = req.get_origin_req_host() + origin_req_host = req.origin_req_host if code in (301, 302, 303, 307): return Request(req.get_method(), newurl, @@ -1185,10 +1064,7 @@ def evaluate_proxy_bypass(self, host, no_proxy): return None def set_proxy(self, request, type): - if PY3: - host = request.host - else: - host = request.get_host() + host = request.host if self.proxy_bypass(host): return request @@ -1323,10 +1199,7 @@ def auth_required(self, request, headers): # Let's be ready for next round self._retry_count = None return None - if PY3: - server_headers = headers.get_all(self.auth_required_header) - else: - server_headers = headers.getheaders(self.auth_required_header) + server_headers = headers.get_all(self.auth_required_header) if not server_headers: # The http error MUST have the associated # header. This must never happen in production code. @@ -1684,10 +1557,7 @@ def auth_match(self, header, auth): return True def build_auth_header(self, auth, request): - if PY3: - selector = request.selector - else: - selector = request.get_selector() + selector = request.selector url_scheme, url_selector = splittype(selector) sel_host, uri = splithost(url_selector) @@ -2005,10 +1875,7 @@ def __init__(self, actual): def getheader(self, name, default=None): if self._actual.headers is None: raise http_client.ResponseNotReady() - if PY3: - return self._actual.headers.get(name, default) - else: - return self._actual.headers.getheader(name, default) + return self._actual.headers.get(name, default) def getheaders(self): if self._actual.headers is None: diff --git a/breezy/transport/http/response.py b/breezy/transport/http/response.py index df5541ba48..80e134a5a5 100644 --- a/breezy/transport/http/response.py +++ b/breezy/transport/http/response.py @@ -24,24 +24,15 @@ from __future__ import absolute_import import cgi +from io import BytesIO import os -try: - import http.client as http_client -except ImportError: # python < 3 - import httplib as http_client -try: - import email.utils as email_utils -except ImportError: # python < 3 - import rfc822 as email_utils +import http.client as http_client +import email.utils as email_utils from ... import ( errors, osutils, ) -from ...sixish import ( - BytesIO, - PY3, - ) class ResponseFile(object): @@ -80,8 +71,6 @@ def read(self, size=None): :param size: The number of bytes to read. Leave unspecified or pass -1 to read to EOF. """ - if size is None and not PY3: - size = -1 data = self._file.read(size) self._pos += len(data) return data @@ -220,10 +209,7 @@ def read_range_definition(self): Parse the headers including the empty line following them so that we are ready to read the data itself. 
""" - if PY3: - self._headers = http_client.parse_headers(self._file) - else: - self._headers = http_client.HTTPMessage(self._file, seekable=0) + self._headers = http_client.parse_headers(self._file) # Extract the range definition content_range = self._headers.get('content-range', None) if content_range is None: diff --git a/breezy/transport/remote.py b/breezy/transport/remote.py index ba892388ff..c0d813a750 100644 --- a/breezy/transport/remote.py +++ b/breezy/transport/remote.py @@ -37,7 +37,6 @@ from ..bzr import ( remote, ) -from ..sixish import PY3 from ..bzr.smart import client, medium @@ -472,13 +471,13 @@ def listable(self): def list_dir(self, relpath): resp = self._call2(b'list_dir', self._remote_path(relpath)) if resp[0] == b'names': - return [name.decode('utf-8') if PY3 else name for name in resp[1:]] + return [name.decode('utf-8') for name in resp[1:]] raise errors.UnexpectedSmartServerResponse(resp) def iter_files_recursive(self): resp = self._call2(b'iter_files_recursive', self._remote_path('')) if resp[0] == b'names': - return [name.decode('utf-8') if PY3 else name for name in resp[1:]] + return [name.decode('utf-8') for name in resp[1:]] raise errors.UnexpectedSmartServerResponse(resp) diff --git a/breezy/transport/sftp.py b/breezy/transport/sftp.py index 13e1379224..e40bd2902c 100644 --- a/breezy/transport/sftp.py +++ b/breezy/transport/sftp.py @@ -49,9 +49,6 @@ ParamikoNotPresent, ) from ..osutils import fancy_rename -from ..sixish import ( - zip, - ) from ..trace import mutter, warning from ..transport import ( FileFileStream, diff --git a/breezy/tree.py b/breezy/tree.py index a5ad0c0e96..1852a18801 100644 --- a/breezy/tree.py +++ b/breezy/tree.py @@ -45,10 +45,6 @@ osutils, ) from .inter import InterObject -from .sixish import ( - text_type, - viewvalues, - ) class FileTimestampUnavailable(errors.BzrError): diff --git a/breezy/ui/__init__.py b/breezy/ui/__init__.py index c593183f36..fcbe5d6de0 100644 --- a/breezy/ui/__init__.py +++ b/breezy/ui/__init__.py @@ -54,12 +54,6 @@ ) """) -from ..sixish import ( - PY3, - string_types, - text_type, - ) - _valid_boolean_strings = dict(yes=True, no=False, y=True, n=False, @@ -89,7 +83,7 @@ def bool_from_string(s, accepted_values=None): if accepted_values is None: accepted_values = _valid_boolean_strings val = None - if isinstance(s, string_types): + if isinstance(s, str): try: val = accepted_values[s.lower()] except KeyError: @@ -319,14 +313,14 @@ def format_user_warning(self, warning_id, message_args): fail = "brz warning: %r, %r" % (warning_id, message_args) warnings.warn("no template for warning: " + fail) # so tests will fail etc - return text_type(fail) + return str(fail) try: - return text_type(template) % message_args + return str(template) % message_args except ValueError as e: fail = "brz unprintable warning: %r, %r, %s" % ( warning_id, message_args, e) warnings.warn(fail) # so tests will fail etc - return text_type(fail) + return str(fail) def choose(self, msg, choices, default=None): """Prompt the user for a list of alternatives. diff --git a/breezy/ui/text.py b/breezy/ui/text.py index ffb36564b6..15192d4e1e 100644 --- a/breezy/ui/text.py +++ b/breezy/ui/text.py @@ -40,9 +40,6 @@ osutils, trace, ) -from ..sixish import ( - text_type, - ) from . import ( NullProgressView, UIFactory, @@ -325,7 +322,7 @@ def prompt(self, prompt, **kwargs): :param kwargs: Dictionary of arguments to insert into the prompt, to allow UIs to reformat the prompt. 
""" - if not isinstance(prompt, text_type): + if not isinstance(prompt, str): raise ValueError("prompt %r not a unicode string" % prompt) if kwargs: # See diff --git a/breezy/upstream_import.py b/breezy/upstream_import.py index 9185ce5204..a2c36ec74d 100644 --- a/breezy/upstream_import.py +++ b/breezy/upstream_import.py @@ -34,9 +34,6 @@ from .errors import (BzrError, NoSuchFile, BzrCommandError, NotBranchError) from .osutils import (pathjoin, isdir, file_iterator, basename, file_kind, splitpath) -from .sixish import ( - text_type, - ) from .trace import warning from .transform import resolve_conflicts, cook_conflicts from .transport import get_transport @@ -258,7 +255,7 @@ def import_archive_to_transform(tree, archive_file, tt): # interpret relative to fs encoding, which would match native # behaviour better. relative_path = member.name - if not isinstance(relative_path, text_type): + if not isinstance(relative_path, str): relative_path = relative_path.decode('utf-8') if prefix is not None: relative_path = relative_path[len(prefix) + 1:] diff --git a/breezy/urlutils.py b/breezy/urlutils.py index 2fc4ad3feb..b4fe91ab16 100644 --- a/breezy/urlutils.py +++ b/breezy/urlutils.py @@ -22,10 +22,7 @@ import re import sys -try: - import urlparse -except ImportError: - from urllib import parse as urlparse +from urllib import parse as urlparse from . import ( errors, @@ -37,12 +34,6 @@ from posixpath import split as _posix_split """) -from .sixish import ( - int2byte, - PY3, - text_type, - unichr, - ) class InvalidURL(errors.PathError): @@ -99,74 +90,14 @@ def dirname(url, exclude_trailing_slash=True): return split(url, exclude_trailing_slash=exclude_trailing_slash)[0] -if PY3: - quote_from_bytes = urlparse.quote_from_bytes - quote = urlparse.quote - unquote_to_bytes = urlparse.unquote_to_bytes -else: - # Private copies of quote and unquote, copied from Python's urllib module - # because urllib unconditionally imports socket, which imports ssl. - - always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz' - '0123456789' '_.-') - _safe_map = {} - for i, c in zip(range(256), ''.join(map(chr, range(256)))): - _safe_map[c] = c if ( - i < 128 and c in always_safe) else '%{0:02X}'.format(i) - _safe_quoters = {} - - def quote_from_bytes(s, safe='/'): - """quote('abc def') -> 'abc%20def' - - Each part of a URL, e.g. the path info, the query, etc., has a - different set of reserved characters that must be quoted. - - RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists - the following reserved characters. - - reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | - "$" | "," - - Each of these characters is reserved in some component of a URL, - but not necessarily in all of them. - - By default, the quote function is intended for quoting the path - section of a URL. Thus, it will not encode '/'. This character - is reserved, but in typical usage the quote function is being - called on a path where the existing slash characters are used as - reserved characters. 
- """ - # fastpath - if not s: - if s is None: - raise TypeError('None object cannot be quoted') - return s - cachekey = (safe, always_safe) - try: - (quoter, safe) = _safe_quoters[cachekey] - except KeyError: - safe_map = _safe_map.copy() - safe_map.update([(c, c) for c in safe]) - quoter = safe_map.__getitem__ - safe = always_safe + safe - _safe_quoters[cachekey] = (quoter, safe) - if not s.rstrip(safe): - return s - return ''.join(map(quoter, s)) - - quote = quote_from_bytes - unquote_to_bytes = urlparse.unquote - - +quote_from_bytes = urlparse.quote_from_bytes +quote = urlparse.quote +unquote_to_bytes = urlparse.unquote_to_bytes unquote = urlparse.unquote def escape(relpath, safe='/~'): """Escape relpath to be a valid url.""" - if not isinstance(relpath, str) and sys.version_info[0] == 2: - # GZ 2019-06-16: Should use _fs_enc instead here really? - relpath = relpath.encode('utf-8') return quote(relpath, safe=safe) @@ -411,7 +342,7 @@ def normalize_url(url): return local_path_to_url(url) prefix = url[:path_start] path = url[path_start:] - if not isinstance(url, text_type): + if not isinstance(url, str): for c in url: if c not in _url_safe_characters: raise InvalidURL(url, 'URLs can only contain specific' @@ -691,28 +622,13 @@ def unescape(url): # try to encode the UNICODE => ASCII, and then decode # it into utf-8. - if PY3: - if isinstance(url, text_type): - try: - url.encode("ascii") - except UnicodeError as e: - raise InvalidURL( - url, 'URL was not a plain ASCII url: %s' % (e,)) - return urlparse.unquote(url) - else: - if isinstance(url, text_type): - try: - url = url.encode("ascii") - except UnicodeError as e: - raise InvalidURL( - url, 'URL was not a plain ASCII url: %s' % (e,)) - unquoted = unquote(url) + if isinstance(url, str): try: - unicode_path = unquoted.decode('utf-8') + url.encode("ascii") except UnicodeError as e: raise InvalidURL( - url, 'Unable to encode the URL as utf-8: %s' % (e,)) - return unicode_path + url, 'URL was not a plain ASCII url: %s' % (e,)) + return urlparse.unquote(url) # These are characters that if escaped, should stay that way @@ -720,8 +636,8 @@ def unescape(url): _no_decode_ords = [ord(c) for c in _no_decode_chars] _no_decode_hex = (['%02x' % o for o in _no_decode_ords] + ['%02X' % o for o in _no_decode_ords]) -_hex_display_map = dict(([('%02x' % o, int2byte(o)) for o in range(256)] - + [('%02X' % o, int2byte(o)) for o in range(256)])) +_hex_display_map = dict(([('%02x' % o, bytes([o])) for o in range(256)] + + [('%02X' % o, bytes([o])) for o in range(256)])) # These entries get mapped to themselves _hex_display_map.update((hex, b'%' + hex.encode('ascii')) for hex in _no_decode_hex) @@ -765,11 +681,10 @@ def _unescape_segment_for_display(segment, encoding): escaped_chunks[j] = _hex_display_map[item[:2]] except KeyError: # Put back the percent symbol - escaped_chunks[j] = b'%' + \ - (item[:2].encode('utf-8') if PY3 else item[:2]) + escaped_chunks[j] = b'%' + (item[:2].encode('utf-8')) except UnicodeDecodeError: - escaped_chunks[j] = unichr(int(item[:2], 16)).encode('utf-8') - escaped_chunks[j] += (item[2:].encode('utf-8') if PY3 else item[2:]) + escaped_chunks[j] = chr(int(item[:2], 16)).encode('utf-8') + escaped_chunks[j] += (item[2:].encode('utf-8')) unescaped = b''.join(escaped_chunks) try: decoded = unescaped.decode('utf-8') @@ -933,7 +848,7 @@ def from_string(cls, url): # unicode. 
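# Illustrative sketch, not part of the diff: with the Python 2 fallbacks above
# removed, percent-quoting goes straight through urllib.parse and stays in
# str, and single byte values come from bytes([o]) rather than int2byte(o).
from urllib.parse import quote, unquote, unquote_to_bytes

assert unquote('%C3%A5') == '\xe5'               # decoded as UTF-8 text
assert unquote_to_bytes('%C3%A5') == b'\xc3\xa5'
assert quote('\xe5', safe='/~') == '%C3%A5'
assert bytes([0xC3]) == b'\xc3'                  # replaces int2byte(0xC3)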
if isinstance(url, str): pass - elif isinstance(url, text_type): + elif isinstance(url, str): try: url = url.encode() except UnicodeEncodeError: @@ -1004,7 +919,7 @@ def _combine_paths(base_path, relpath): # unicode. if isinstance(relpath, str): pass - elif isinstance(relpath, text_type): + elif isinstance(relpath, str): try: relpath = relpath.encode() except UnicodeEncodeError: @@ -1042,8 +957,6 @@ def clone(self, offset=None): """ if offset is not None: relative = unescape(offset) - if sys.version_info[0] == 2: - relative = relative.encode('utf-8') path = self._combine_paths(self.path, relative) path = quote(path, safe="/~") else: diff --git a/breezy/util/_bencode_py.py b/breezy/util/_bencode_py.py index f3be255a35..8590f25f2d 100644 --- a/breezy/util/_bencode_py.py +++ b/breezy/util/_bencode_py.py @@ -144,12 +144,8 @@ def encode_dict(x, r): encode_func = {} encode_func[type(Bencached(0))] = encode_bencached encode_func[int] = encode_int -if sys.version_info < (3,): - encode_func[long] = encode_int - int_to_bytes = str -else: - def int_to_bytes(n): - return b'%d' % n +def int_to_bytes(n): + return b'%d' % n encode_func[bytes] = encode_string encode_func[list] = encode_list encode_func[tuple] = encode_list diff --git a/breezy/version.py b/breezy/version.py index 530a9041d5..0cc3961160 100644 --- a/breezy/version.py +++ b/breezy/version.py @@ -30,7 +30,6 @@ osutils, trace, ) -from .sixish import text_type def show_version(show_config=True, show_copyright=True, to_file=None): @@ -71,7 +70,7 @@ def show_version(show_config=True, show_copyright=True, to_file=None): to_file.write(" Python standard library:" + ' ') to_file.write(os.path.dirname(os.__file__) + '\n') platform_str = platform.platform(aliased=1) - if not isinstance(platform_str, text_type): + if not isinstance(platform_str, str): platform_str = platform_str.decode('utf-8') to_file.write(" Platform: %s\n" % platform_str) to_file.write(" breezy: ") @@ -84,7 +83,7 @@ def show_version(show_config=True, show_copyright=True, to_file=None): if show_config: config_dir = osutils.normpath( bedding.config_dir()) # use native slashes - if not isinstance(config_dir, text_type): + if not isinstance(config_dir, str): config_dir = config_dir.decode(osutils.get_user_encoding()) to_file.write(" Breezy configuration: %s\n" % (config_dir,)) to_file.write(" Breezy log file: ") diff --git a/breezy/version_info_formats/format_custom.py b/breezy/version_info_formats/format_custom.py index fea6d80269..e22c73a2b2 100644 --- a/breezy/version_info_formats/format_custom.py +++ b/breezy/version_info_formats/format_custom.py @@ -26,7 +26,6 @@ NULL_REVISION, ) from breezy.lazy_regex import lazy_compile -from breezy.sixish import PY3 from breezy.version_info_formats import ( create_date_str, VersionInfoBuilder, @@ -76,8 +75,6 @@ def add(self, name, value): def process(self, tpl): unicode_escape = codecs.getdecoder("unicode_escape") tpl = unicode_escape(tpl)[0] - if not PY3: - tpl = tpl.encode('utf-8') pos = 0 while True: match = self._tag_re.search(tpl, pos) diff --git a/breezy/workingtree.py b/breezy/workingtree.py index 8e76dd8a71..aa20ef7bcc 100644 --- a/breezy/workingtree.py +++ b/breezy/workingtree.py @@ -29,6 +29,7 @@ from __future__ import absolute_import +import contextlib import errno import os import sys @@ -41,7 +42,6 @@ import stat from breezy import ( - cleanup, conflicts as _mod_conflicts, filters as _mod_filters, merge, @@ -1002,7 +1002,7 @@ def remove(self, files, verbose=False, to_file=None, keep_files=True, def revert(self, filenames=None, 
old_tree=None, backups=True, pb=None, report_changes=False): from .conflicts import resolve - with cleanup.ExitStack() as exit_stack: + with contextlib.ExitStack() as exit_stack: exit_stack.enter_context(self.lock_tree_write()) if old_tree is None: basis_tree = self.basis_tree() diff --git a/setup.py b/setup.py index df2fa33113..949a14e1b5 100755 --- a/setup.py +++ b/setup.py @@ -12,8 +12,8 @@ import copy import glob -if sys.version_info < (2, 7): - sys.stderr.write("[ERROR] Not a supported Python version. Need 2.7+\n") +if sys.version_info < (3, 5): + sys.stderr.write("[ERROR] Not a supported Python version. Need 3.5+\n") sys.exit(1) @@ -64,7 +64,6 @@ def get_long_description(): ], 'install_requires': [ 'configobj', - 'six>=1.9.0', 'patiencediff', # Technically, Breezy works without these two dependencies too. But there's # no way to enable them by default and let users opt out. @@ -595,12 +594,6 @@ def run(self): includes.append(module) additional_packages = set() - if sys.version.startswith('2.7'): - additional_packages.add('xml.etree') - else: - import warnings - warnings.warn('Unknown Python version.\n' - 'Please check setup.py script for compatibility.') # Although we currently can't enforce it, we consider it an error for # py2exe to report any files are "missing". Such modules we know aren't diff --git a/tools/rst2prettyhtml.py b/tools/rst2prettyhtml.py index a312707833..85637b4bbc 100755 --- a/tools/rst2prettyhtml.py +++ b/tools/rst2prettyhtml.py @@ -2,7 +2,7 @@ import errno import os -from breezy.sixish import StringIO +from io import StringIO import sys try: diff --git a/tools/testr-run.py b/tools/testr-run.py deleted file mode 100755 index 02af465237..0000000000 --- a/tools/testr-run.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/python3 - -import argparse -import subprocess -from subunit.v2 import StreamResultToBytes -import sys -import tempfile -from testrepository.testlist import parse_enumeration, parse_list, write_list - - -def main(): - parser = argparse.ArgumentParser( - description="Test runner that supports both Python 2 and Python 3.") - - parser.add_argument( - "--load-list", metavar="PATH", help="Path to read list of tests to run from.", - type=str) - parser.add_argument( - "--list", help="List available tests.", action="store_true") - - args = parser.parse_args() - - if args.list: - testids = [] - output = subprocess.check_output( - ['python2', './brz', 'selftest', '--subunit2', '--list']) - for n in parse_enumeration(output): - testids.append('python2.' + n) - - output = subprocess.check_output( - ['python3', './brz', 'selftest', '--subunit2', '--list']) - for n in parse_enumeration(output): - testids.append('python3.' 
+ n) - stream = StreamResultToBytes(sys.stdout) - for testid in testids: - stream.status(test_id=testid, test_status='exists') - else: - if args.load_list: - py2_tests = [] - py3_tests = [] - with open(args.load_list, 'rb') as f: - all_tests = parse_list(f.read()) - for testname in all_tests: - if testname.startswith("python2."): - py2_tests.append(testname[len('python2.'):].strip()) - elif testname.startswith("python3."): - py3_tests.append(testname[len('python3.'):].strip()) - else: - sys.stderr.write("unknown prefix %s\n" % testname) - if py2_tests: - with tempfile.NamedTemporaryFile() as py2f: - write_list(py2f, py2_tests) - py2f.flush() - subprocess.call( - 'python2 ./brz selftest --subunit2 --load-list=%s | subunit-filter -s --passthrough --rename "^" "python2."' % py2f.name, shell=True) - - if py3_tests: - with tempfile.NamedTemporaryFile() as py3f: - write_list(py3f, py3_tests) - py3f.flush() - subprocess.call( - 'python3 ./brz selftest --subunit2 --load-list=%s | subunit-filter -s --passthrough --rename "^" "python3."' % py3f.name, shell=True) - else: - subprocess.call( - 'python2 ./brz selftest --subunit2 | subunit-filter -s --passthrough --rename "^" "python2."', shell=True) - subprocess.call( - 'python3 ./brz selftest --subunit2 | subunit-filter -s --passthrough --rename "^" "python3."', shell=True) - - -if __name__ == '__main__': - main()