diff --git a/AUTHORS b/AUTHORS index a0aa707c7..3504c498a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -33,5 +33,6 @@ Contributors are: -Steven Whitman -Stefan Stancu -César Izurieta +-Santos Gallegos Portions derived from other open source works and are clearly marked. diff --git a/git/cmd.py b/git/cmd.py index 50b1e3212..b8256b266 100644 --- a/git/cmd.py +++ b/git/cmd.py @@ -4,7 +4,9 @@ # This module is part of GitPython and is released under # the BSD License: http://www.opensource.org/licenses/bsd-license.php -from contextlib import contextmanager +import contextlib +import re + import io import logging import os @@ -19,6 +21,7 @@ import threading from collections import OrderedDict from textwrap import dedent +import mock from git.compat import ( string_types, @@ -31,7 +34,7 @@ is_posix, is_win, ) -from git.exc import CommandError +from git.exc import CommandError, UnsafeOptionError, UnsafeProtocolError from git.util import is_cygwin_git, cygpath, expand_path from .exc import ( @@ -59,6 +62,11 @@ __all__ = ('Git',) +@contextlib.contextmanager +def nullcontext(enter_result=None): + yield enter_result + + # ============================================================================== ## @name Utilities # ------------------------------------------------------------------------------ @@ -124,6 +132,59 @@ def pump_stream(cmdline, name, stream, is_decode, handler): return finalizer(process) +def _safer_popen_windows(command, shell, env=None, **kwargs): + """Call :class:`subprocess.Popen` on Windows but don't include a CWD in the search. + This avoids an untrusted search path condition where a file like ``git.exe`` in a + malicious repository would be run when GitPython operates on the repository. The + process using GitPython may have an untrusted repository's working tree as its + current working directory. Some operations may temporarily change to that directory + before running a subprocess. 
In addition, while by default GitPython does not run + external commands with a shell, it can be made to do so, in which case the CWD of + the subprocess, which GitPython usually sets to a repository working tree, can + itself be searched automatically by the shell. This wrapper covers all those cases. + :note: This currently works by setting the ``NoDefaultCurrentDirectoryInExePath`` + environment variable during subprocess creation. It also takes care of passing + Windows-specific process creation flags, but that is unrelated to path search. + :note: The current implementation contains a race condition on :attr:`os.environ`. + GitPython isn't thread-safe, but a program using it on one thread should ideally + be able to mutate :attr:`os.environ` on another, without unpredictable results. + See comments in https://github.com/gitpython-developers/GitPython/pull/1650. + """ + # CREATE_NEW_PROCESS_GROUP is needed for some ways of killing it afterwards. See: + # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal + # https://docs.python.org/3/library/subprocess.html#subprocess.CREATE_NEW_PROCESS_GROUP + creationflags = subprocess.CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP + + # When using a shell, the shell is the direct subprocess, so the variable must be + # set in its environment, to affect its search behavior. (The "1" can be any value.) + if shell: + safer_env = {} if env is None else dict(env) + safer_env["NoDefaultCurrentDirectoryInExePath"] = "1" + else: + safer_env = env + + # When not using a shell, the current process does the search in a CreateProcessW + # API call, so the variable must be set in our environment. With a shell, this is + # unnecessary, in versions where https://github.com/python/cpython/issues/101283 is + # patched. If not, in the rare case the ComSpec environment variable is unset, the + # shell is searched for unsafely. 
Setting NoDefaultCurrentDirectoryInExePath in all + # cases, as here, is simpler and protects against that. (The "1" can be any value.) + with patch_env("NoDefaultCurrentDirectoryInExePath", "1"): + return Popen( + command, + shell=shell, + env=safer_env, + creationflags=creationflags, + **kwargs + ) + + +if os.name == "nt": + safer_popen = _safer_popen_windows +else: + safer_popen = Popen + + def dashify(string): return string.replace('_', '-') @@ -144,11 +205,6 @@ def dict_to_slots_and__excluded_are_none(self, d, excluded=()): # value of Windows process creation flag taken from MSDN CREATE_NO_WINDOW = 0x08000000 -## CREATE_NEW_PROCESS_GROUP is needed to allow killing it afterwards, -# see https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal -PROC_CREATIONFLAGS = (CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP - if is_win else 0) - class Git(LazyMixin): @@ -171,9 +227,49 @@ class Git(LazyMixin): _excluded_ = ('cat_file_all', 'cat_file_header', '_version_info') + re_unsafe_protocol = re.compile("(.+)::.+") + def __getstate__(self): return slots_to_dict(self, exclude=self._excluded_) + @classmethod + def check_unsafe_protocols(cls, url): + """ + Check for unsafe protocols. + Apart from the usual protocols (http, git, ssh), + Git allows "remote helpers" that have the form `::
`, + one of these helpers (`ext::`) can be used to invoke any arbitrary command. + See: + - https://git-scm.com/docs/gitremote-helpers + - https://git-scm.com/docs/git-remote-ext + """ + match = cls.re_unsafe_protocol.match(url) + if match: + protocol = match.group(1) + raise UnsafeProtocolError( + "The `" + protocol + "::` protocol looks suspicious, use `allow_unsafe_protocols=True` to allow it." + ) + + @classmethod + def check_unsafe_options(cls, options, unsafe_options): + """ + Check for unsafe options. + Some options that are passed to `git ` can be used to execute + arbitrary commands, this are blocked by default. + """ + # Options can be of the form `foo` or `--foo bar` `--foo=bar`, + # so we need to check if they start with "--foo" or if they are equal to "foo". + bare_unsafe_options = [ + option.lstrip("-") + for option in unsafe_options + ] + for option in options: + for unsafe_option, bare_option in zip(unsafe_options, bare_unsafe_options): + if option.startswith(unsafe_option) or option == bare_option: + raise UnsafeOptionError( + unsafe_option +" is not allowed, use `allow_unsafe_options=True` to allow it." + ) + def __setstate__(self, d): dict_to_slots_and__excluded_are_none(self, d, excluded=self._excluded_) @@ -705,11 +801,15 @@ def execute(self, command, cmd_not_found_exception = OSError if kill_after_timeout: raise GitCommandError(command, '"kill_after_timeout" feature is not supported on Windows.') + + # Only search PATH, not CWD. This must be in the *caller* environment. The "1" can be any value. 
+ patch_caller_env = unittest.mock.patch.dict(os.environ, {"NoDefaultCurrentDirectoryInExePath": "1"}) else: if sys.version_info[0] > 2: cmd_not_found_exception = FileNotFoundError # NOQA # exists, flake8 unknown @UndefinedVariable else: cmd_not_found_exception = OSError + patch_caller_env = nullcontext() # end handle stdout_sink = (PIPE @@ -721,19 +821,18 @@ def execute(self, command, log.debug("Popen(%s, cwd=%s, universal_newlines=%s, shell=%s, istream=%s)", command, cwd, universal_newlines, shell, istream_ok) try: - proc = Popen(command, - env=env, - cwd=cwd, - bufsize=-1, - stdin=istream, - stderr=PIPE, - stdout=stdout_sink, - shell=shell is not None and shell or self.USE_SHELL, - close_fds=is_posix, # unsupported on windows - universal_newlines=universal_newlines, - creationflags=PROC_CREATIONFLAGS, - **subprocess_kwargs - ) + proc = safer_popen( + command, + env=env, + cwd=cwd, + bufsize=-1, + stdin=istream, + stderr=PIPE, + stdout=stdout_sink, + shell=shell is not None and shell or self.USE_SHELL, + universal_newlines=universal_newlines, + **subprocess_kwargs + ) except cmd_not_found_exception as err: raise GitCommandNotFound(command, err) @@ -862,7 +961,7 @@ def update_environment(self, **kwargs): del self._environment[key] return old_env - @contextmanager + @contextlib.contextmanager def custom_environment(self, **kwargs): """ A context manager around the above ``update_environment`` method to restore the @@ -1082,7 +1181,7 @@ def get_object_data(self, ref): :note: not threadsafe""" hexsha, typename, size, stream = self.stream_object_data(ref) data = stream.read(size) - del(stream) + del (stream) return (hexsha, typename, size, data) def stream_object_data(self, ref): diff --git a/git/exc.py b/git/exc.py index 4865da944..b9ea93abf 100644 --- a/git/exc.py +++ b/git/exc.py @@ -25,6 +25,14 @@ class NoSuchPathError(GitError, OSError): """ Thrown if a path could not be access by the system. 
""" +class UnsafeProtocolError(GitError): + """Thrown if unsafe protocols are passed without being explicitly allowed.""" + + +class UnsafeOptionError(GitError): + """Thrown if unsafe options are passed without being explicitly allowed.""" + + class CommandError(UnicodeMixin, GitError): """Base class for exceptions thrown at every stage of `Popen()` execution. diff --git a/git/index/base.py b/git/index/base.py index 04a3934d6..404dc8ea8 100644 --- a/git/index/base.py +++ b/git/index/base.py @@ -18,11 +18,7 @@ defenc, mviter, ) -from git.exc import ( - GitCommandError, - CheckoutError, - InvalidGitRepositoryError -) +from git.exc import GitCommandError, CheckoutError, InvalidGitRepositoryError from git.objects import ( Blob, Submodule, @@ -38,7 +34,7 @@ file_contents_ro, to_native_path_linux, unbare_repo, - to_bin_sha + to_bin_sha, ) from gitdb.base import IStream from gitdb.db import MemoryDB @@ -54,25 +50,19 @@ write_tree_from_cache, stat_mode_to_index_mode, S_IFGITLINK, - run_commit_hook + run_commit_hook, ) from .typ import ( BaseIndexEntry, IndexEntry, ) -from .util import ( - TemporaryFileSwap, - post_clear_cache, - default_index, - git_working_dir -) +from .util import TemporaryFileSwap, post_clear_cache, default_index, git_working_dir -__all__ = ('IndexFile', 'CheckoutError') +__all__ = ("IndexFile", "CheckoutError") class IndexFile(LazyMixin, diff.Diffable, Serializable): - """ Implements an Index that can be manipulated using a native implementation in order to save git command function calls wherever possible. 
@@ -93,8 +83,9 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable): Make sure you use index.write() once you are done manipulating the index directly before operating on it using the git command""" + __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path") - _VERSION = 2 # latest version we support + _VERSION = 2 # latest version we support S_IFGITLINK = S_IFGITLINK # a submodule def __init__(self, repo, file_path=None): @@ -105,7 +96,7 @@ def __init__(self, repo, file_path=None): repository's index on demand.""" self.repo = repo self.version = self._VERSION - self._extension_data = b'' + self._extension_data = b"" self._file_path = file_path or self._index_path() def _set_cache_(self, attr): @@ -142,23 +133,25 @@ def _index_path(self): @property def path(self): - """ :return: Path to the index file we are representing """ + """:return: Path to the index file we are representing""" return self._file_path def _delete_entries_cache(self): """Safely clear the entries cache so it can be recreated""" try: - del(self.entries) + del self.entries except AttributeError: # fails in python 2.6.5 with this exception pass # END exception handling - #{ Serializable Interface + # { Serializable Interface def _deserialize(self, stream): """Initialize this instance with index values read from the given stream""" - self.version, self.entries, self._extension_data, conten_sha = read_cache(stream) # @UnusedVariable + self.version, self.entries, self._extension_data, conten_sha = read_cache( + stream + ) # @UnusedVariable return self def _entries_sorted(self): @@ -173,7 +166,7 @@ def _serialize(self, stream, ignore_extension_data=False): write_cache(entries, stream, extension_data) return self - #} END serializable interface + # } END serializable interface def write(self, file_path=None, ignore_extension_data=False): """Write the current state to our file path or to the given one @@ -242,7 +235,7 @@ def merge_tree(self, rhs, base=None): be raised at the first 
conflicting path. If you want to have proper merge resolution to be done by yourself, you have to commit the changed index ( or make a valid tree from it ) and retry with a three-way - index.from_tree call. """ + index.from_tree call.""" # -i : ignore working tree status # --aggressive : handle more merge cases # -m : do an actual merge @@ -256,7 +249,7 @@ def merge_tree(self, rhs, base=None): @classmethod def new(cls, repo, *tree_sha): - """ Merge the given treeish revisions into a new index which is returned. + """Merge the given treeish revisions into a new index which is returned. This method behaves like git-read-tree --aggressive when doing the merge. :param repo: The repository treeish are located in. @@ -268,12 +261,18 @@ def new(cls, repo, *tree_sha): New IndexFile instance. Its path will be undefined. If you intend to write such a merged Index, supply an alternate file_path to its 'write' method.""" - base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha]) + base_entries = aggressive_tree_merge( + repo.odb, [to_bin_sha(str(t)) for t in tree_sha] + ) inst = cls(repo) # convert to entries dict - entries = dict(izip(((e.path, e.stage) for e in base_entries), - (IndexEntry.from_base(e) for e in base_entries))) + entries = dict( + izip( + ((e.path, e.stage) for e in base_entries), + (IndexEntry.from_base(e) for e in base_entries), + ) + ) inst.entries = entries return inst @@ -314,7 +313,9 @@ def from_tree(cls, repo, *treeish, **kwargs): it will be temporarily moved out of the way to assure there are no unsuspected interferences.""" if len(treeish) == 0 or len(treeish) > 3: - raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish)) + raise ValueError( + "Please specify between 1 and 3 treeish, got %i" % len(treeish) + ) arg_list = [] # ignore that working tree and index possibly are out of date @@ -327,7 +328,7 @@ def from_tree(cls, repo, *treeish, **kwargs): # tmp file created in git home directory to be 
sure renaming # works - /tmp/ dirs could be on another device - tmp_index = tempfile.mktemp('', '', repo.git_dir) + tmp_index = tempfile.mktemp("", "", repo.git_dir) arg_list.append("--index-output=%s" % tmp_index) arg_list.extend(treeish) @@ -335,12 +336,12 @@ def from_tree(cls, repo, *treeish, **kwargs): # as it considers existing entries. moving it essentially clears the index. # Unfortunately there is no 'soft' way to do it. # The TemporaryFileSwap assure the original file get put back - index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index')) + index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, "index")) try: repo.git.read_tree(*arg_list, **kwargs) index = cls(repo, tmp_index) - index.entries # force it to read the file as we will delete the temp-file - del(index_handler) # release as soon as possible + index.entries # force it to read the file as we will delete the temp-file + del index_handler # release as soon as possible finally: if osp.exists(tmp_index): os.remove(tmp_index) @@ -356,8 +357,10 @@ def _iter_expand_paths(self, paths): Note: git will add items multiple times even if a glob overlapped with manually specified paths or if paths where specified multiple times - we respect that and do not prune""" + def raise_exc(e): raise e + r = self.repo.working_tree_dir rs = r + os.sep for path in paths: @@ -367,18 +370,18 @@ def raise_exc(e): # END make absolute path try: - st = os.lstat(abs_path) # handles non-symlinks as well + st = os.lstat(abs_path) # handles non-symlinks as well except OSError: # the lstat call may fail as the path may contain globs as well pass else: if S_ISLNK(st.st_mode): - yield abs_path.replace(rs, '') + yield abs_path.replace(rs, "") continue # end check symlink # resolve globs if possible - if '?' in path or '*' in path or '[' in path: + if "?" 
in path or "*" in path or "[" in path: resolved_paths = glob.glob(abs_path) # not abs_path in resolved_paths: # a glob() resolving to the same path we are feeding it with @@ -388,24 +391,27 @@ def raise_exc(e): # whose name contains wildcard characters. if abs_path not in resolved_paths: for f in self._iter_expand_paths(glob.glob(abs_path)): - yield f.replace(rs, '') + yield f.replace(rs, "") continue # END glob handling try: - for root, dirs, files in os.walk(abs_path, onerror=raise_exc): # @UnusedVariable + for root, dirs, files in os.walk( + abs_path, onerror=raise_exc + ): # @UnusedVariable for rela_file in files: # add relative paths only - yield osp.join(root.replace(rs, ''), rela_file) + yield osp.join(root.replace(rs, ""), rela_file) # END for each file in subdir # END for each subdirectory except OSError: # was a file or something that could not be iterated - yield path.replace(rs, '') + yield path.replace(rs, "") # END path exception handling # END for each path - def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress, - read_from_stdout=True): + def _write_path_to_stdin( + self, proc, filepath, item, fmakeexc, fprogress, read_from_stdout=True + ): """Write path to proc.stdin and make sure it processes the item, including progress. :return: stdout string @@ -441,7 +447,7 @@ def iter_blobs(self, predicate=lambda t: True): :param predicate: Function(t) returning True if tuple(stage, Blob) should be yielded by the iterator. A default filter, the BlobFilter, allows you to yield blobs - only if they match a given list of paths. 
""" + only if they match a given list of paths.""" for entry in mviter(self.entries): blob = entry.to_blob(self.repo) blob.size = entry.size @@ -467,8 +473,8 @@ def unmerged_blobs(self): for stage, blob in self.iter_blobs(is_unmerged_blob): path_map.setdefault(blob.path, []).append((stage, blob)) # END for each unmerged blob - for l in mviter(path_map): - l.sort() + for i in mviter(path_map): + i.sort() return path_map @classmethod @@ -499,7 +505,7 @@ def resolve_blobs(self, iter_blobs): # delete all possible stages for stage in (1, 2, 3): try: - del(self.entries[(blob.path, stage)]) + del self.entries[(blob.path, stage)] except KeyError: pass # END ignore key errors @@ -530,7 +536,7 @@ def write_tree(self): does not yet exist in the object database. This could happen if you added Entries to the index directly. :raise ValueError: if there are no entries in the cache - :raise UnmergedEntriesError: """ + :raise UnmergedEntriesError:""" # we obtain no lock as we just flush our contents to disk as tree # If we are a new index, the entries access will load our data accordingly mdb = MemoryDB() @@ -542,7 +548,7 @@ def write_tree(self): # note: additional deserialization could be saved if write_tree_from_cache # would return sorted tree entries - root_tree = Tree(self.repo, binsha, path='') + root_tree = Tree(self.repo, binsha, path="") root_tree._cache = tree_items return root_tree @@ -562,11 +568,14 @@ def _to_relative_path(self, path): if self.repo.bare: raise InvalidGitRepositoryError("require non-bare repository") if not path.startswith(self.repo.working_tree_dir): - raise ValueError("Absolute path %r is not in git repository at %r" % (path, self.repo.working_tree_dir)) + raise ValueError( + "Absolute path %r is not in git repository at %r" + % (path, self.repo.working_tree_dir) + ) return os.path.relpath(path, self.repo.working_tree_dir) def _preprocess_add_items(self, items): - """ Split the items into two lists of path strings and BaseEntries. 
""" + """Split the items into two lists of path strings and BaseEntries.""" paths = [] entries = [] @@ -584,19 +593,28 @@ def _preprocess_add_items(self, items): def _store_path(self, filepath, fprogress): """Store file at filepath in the database and return the base index entry - Needs the git_working_dir decorator active ! This must be assured in the calling code""" - st = os.lstat(filepath) # handles non-symlinks as well + Needs the git_working_dir decorator active ! This must be assured in the calling code + """ + st = os.lstat(filepath) # handles non-symlinks as well if S_ISLNK(st.st_mode): # in PY3, readlink is string, but we need bytes. In PY2, it's just OS encoded bytes, we assume UTF-8 - open_stream = lambda: BytesIO(force_bytes(os.readlink(filepath), encoding=defenc)) + open_stream = lambda: BytesIO( + force_bytes(os.readlink(filepath), encoding=defenc) + ) else: - open_stream = lambda: open(filepath, 'rb') + open_stream = lambda: open(filepath, "rb") with open_stream() as stream: fprogress(filepath, False, filepath) istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream)) fprogress(filepath, True, filepath) - return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode), - istream.binsha, 0, to_native_path_linux(filepath))) + return BaseIndexEntry( + ( + stat_mode_to_index_mode(st.st_mode), + istream.binsha, + 0, + to_native_path_linux(filepath), + ) + ) @unbare_repo @git_working_dir @@ -612,13 +630,16 @@ def _entries_for_paths(self, paths, path_rewriter, fprogress, entries): abspath = osp.join(self.repo.working_tree_dir, gitrelative_path) # end obtain relative and absolute paths - blob = Blob(self.repo, Blob.NULL_BIN_SHA, - stat_mode_to_index_mode(os.stat(abspath).st_mode), - to_native_path_linux(gitrelative_path)) + blob = Blob( + self.repo, + Blob.NULL_BIN_SHA, + stat_mode_to_index_mode(os.stat(abspath).st_mode), + to_native_path_linux(gitrelative_path), + ) # TODO: variable undefined entries.append(BaseIndexEntry.from_blob(blob)) # END for 
each path - del(paths[:]) + del paths[:] # END rewrite paths # HANDLE PATHS @@ -629,8 +650,15 @@ def _entries_for_paths(self, paths, path_rewriter, fprogress, entries): # END path handling return entries_added - def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None, - write=True, write_extension_data=False): + def add( + self, + items, + force=True, + fprogress=lambda *args: None, + path_rewriter=None, + write=True, + write_extension_data=False, + ): """Add files from the working tree, specific blobs or BaseIndexEntries to the index. @@ -738,20 +766,26 @@ def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=Non # That way, we are OK on a bare repository as well. # If there are no paths, the rewriter has nothing to do either if paths: - entries_added.extend(self._entries_for_paths(paths, path_rewriter, fprogress, entries)) + entries_added.extend( + self._entries_for_paths(paths, path_rewriter, fprogress, entries) + ) # HANDLE ENTRIES if entries: null_mode_entries = [e for e in entries if e.mode == 0] if null_mode_entries: raise ValueError( - "At least one Entry has a null-mode - please use index.remove to remove files for clarity") + "At least one Entry has a null-mode - please use index.remove to remove files for clarity" + ) # END null mode should be remove # HANLDE ENTRY OBJECT CREATION # create objects if required, otherwise go with the existing shas - null_entries_indices = [i for i, e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA] + null_entries_indices = [ + i for i, e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA + ] if null_entries_indices: + @git_working_dir def handle_null_entries(self): for ei in null_entries_indices: @@ -760,8 +794,15 @@ def handle_null_entries(self): # update null entry entries[ei] = BaseIndexEntry( - (null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path)) + ( + null_entry.mode, + new_entry.binsha, + null_entry.stage, + null_entry.path, + ) + ) # 
END for each entry index + # end closure handle_null_entries(self) # END null_entry handling @@ -771,7 +812,9 @@ def handle_null_entries(self): # all object sha's if path_rewriter: for i, e in enumerate(entries): - entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e))) + entries[i] = BaseIndexEntry( + (e.mode, e.binsha, e.stage, path_rewriter(e)) + ) # END for each entry # END handle path rewriting @@ -848,7 +891,7 @@ def remove(self, items, working_tree=False, **kwargs): List(path_string, ...) list of repository relative paths that have been removed effectively. This is interesting to know in case you have provided a directory or - globs. Paths are relative to the repository. """ + globs. Paths are relative to the repository.""" args = [] if not working_tree: args.append("--cached") @@ -890,14 +933,16 @@ def move(self, items, skip_errors=False, **kwargs): GitCommandError: If git could not handle your request""" args = [] if skip_errors: - args.append('-k') + args.append("-k") paths = self._items_to_rela_paths(items) if len(paths) < 2: - raise ValueError("Please provide at least one source and one destination of the move operation") + raise ValueError( + "Please provide at least one source and one destination of the move operation" + ) - was_dry_run = kwargs.pop('dry_run', kwargs.pop('n', None)) - kwargs['dry_run'] = True + was_dry_run = kwargs.pop("dry_run", kwargs.pop("n", None)) + kwargs["dry_run"] = True # first execute rename in dryrun so the command tells us what it actually does # ( for later output ) @@ -907,7 +952,7 @@ def move(self, items, skip_errors=False, **kwargs): # parse result - first 0:n/2 lines are 'checking ', the remaining ones # are the 'renaming' ones which we parse for ln in xrange(int(len(mvlines) / 2), len(mvlines)): - tokens = mvlines[ln].split(' to ') + tokens = mvlines[ln].split(" to ") assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln] # [0] = Renaming x @@ -921,14 +966,22 @@ def move(self, items, 
skip_errors=False, **kwargs): # END handle dryrun # now apply the actual operation - kwargs.pop('dry_run') + kwargs.pop("dry_run") self.repo.git.mv(args, paths, **kwargs) return out - def commit(self, message, parent_commits=None, head=True, author=None, - committer=None, author_date=None, commit_date=None, - skip_hooks=False): + def commit( + self, + message, + parent_commits=None, + head=True, + author=None, + committer=None, + author_date=None, + commit_date=None, + skip_hooks=False, + ): """Commit the current default index file, creating a commit object. For more information on the arguments, see tree.commit. @@ -938,20 +991,28 @@ def commit(self, message, parent_commits=None, head=True, author=None, or `--no-verify` on the command line. :return: Commit object representing the new commit""" if not skip_hooks: - run_commit_hook('pre-commit', self) + run_commit_hook("pre-commit", self) self._write_commit_editmsg(message) - run_commit_hook('commit-msg', self, self._commit_editmsg_filepath()) + run_commit_hook("commit-msg", self, self._commit_editmsg_filepath()) message = self._read_commit_editmsg() self._remove_commit_editmsg() tree = self.write_tree() - rval = Commit.create_from_tree(self.repo, tree, message, parent_commits, - head, author=author, committer=committer, - author_date=author_date, commit_date=commit_date) + rval = Commit.create_from_tree( + self.repo, + tree, + message, + parent_commits, + head, + author=author, + committer=committer, + author_date=author_date, + commit_date=commit_date, + ) if not skip_hooks: - run_commit_hook('post-commit', self) + run_commit_hook("post-commit", self) return rval - + def _write_commit_editmsg(self, message): with open(self._commit_editmsg_filepath(), "wb") as commit_editmsg_file: commit_editmsg_file.write(message.encode(defenc)) @@ -970,7 +1031,7 @@ def _commit_editmsg_filepath(self): def _flush_stdin_and_wait(cls, proc, ignore_stdout=False): proc.stdin.flush() proc.stdin.close() - stdout = '' + stdout = "" if not 
ignore_stdout: stdout = proc.stdout.read() proc.stdout.close() @@ -1020,7 +1081,7 @@ def checkout(self, paths=None, force=False, fprogress=lambda *args: None, **kwar the working tree will not be deleted. This behaviour is fundamentally different to *head.checkout*, i.e. if you want git-checkout like behaviour, use head.checkout instead of index.checkout. - """ + """ args = ["--index"] if force: args.append("--force") @@ -1035,20 +1096,27 @@ def handle_stderr(proc, iter_checked_out_files): failed_files = [] failed_reasons = [] unknown_lines = [] - endings = (' already exists', ' is not in the cache', ' does not exist at stage', ' is unmerged') + endings = ( + " already exists", + " is not in the cache", + " does not exist at stage", + " is unmerged", + ) for line in stderr.splitlines(): - if not line.startswith("git checkout-index: ") and not line.startswith("git-checkout-index: "): + if not line.startswith("git checkout-index: ") and not line.startswith( + "git-checkout-index: " + ): is_a_dir = " is a directory" unlink_issue = "unable to unlink old '" - already_exists_issue = ' already exists, no checkout' # created by entry.c:checkout_entry(...) + already_exists_issue = " already exists, no checkout" # created by entry.c:checkout_entry(...) 
if line.endswith(is_a_dir): - failed_files.append(line[:-len(is_a_dir)]) + failed_files.append(line[: -len(is_a_dir)]) failed_reasons.append(is_a_dir) elif line.startswith(unlink_issue): - failed_files.append(line[len(unlink_issue):line.rfind("'")]) + failed_files.append(line[len(unlink_issue): line.rfind("'")]) failed_reasons.append(unlink_issue) elif line.endswith(already_exists_issue): - failed_files.append(line[:-len(already_exists_issue)]) + failed_files.append(line[: -len(already_exists_issue)]) failed_reasons.append(already_exists_issue) else: unknown_lines.append(line) @@ -1057,7 +1125,7 @@ def handle_stderr(proc, iter_checked_out_files): for e in endings: if line.endswith(e): - failed_files.append(line[20:-len(e)]) + failed_files.append(line[20: -len(e)]) failed_reasons.append(e) break # END if ending matches @@ -1069,12 +1137,16 @@ def handle_stderr(proc, iter_checked_out_files): valid_files = list(set(iter_checked_out_files) - set(failed_files)) raise CheckoutError( "Some files could not be checked out from the index due to local modifications", - failed_files, valid_files, failed_reasons) + failed_files, + valid_files, + failed_reasons, + ) + # END stderr handler if paths is None: args.append("--all") - kwargs['as_process'] = 1 + kwargs["as_process"] = 1 fprogress(None, False, None) proc = self.repo.git.checkout_index(*args, **kwargs) proc.wait() @@ -1092,11 +1164,13 @@ def handle_stderr(proc, iter_checked_out_files): self.entries args.append("--stdin") - kwargs['as_process'] = True - kwargs['istream'] = subprocess.PIPE + kwargs["as_process"] = True + kwargs["istream"] = subprocess.PIPE proc = self.repo.git.checkout_index(args, **kwargs) # FIXME: Reading from GIL! 
- make_exc = lambda: GitCommandError(("git-checkout-index",) + tuple(args), 128, proc.stderr.read()) + make_exc = lambda: GitCommandError( + ("git-checkout-index",) + tuple(args), 128, proc.stderr.read() + ) checked_out_files = [] for path in paths: @@ -1108,13 +1182,14 @@ def handle_stderr(proc, iter_checked_out_files): self.entries[(co_path, 0)] except KeyError: folder = co_path - if not folder.endswith('/'): - folder += '/' + if not folder.endswith("/"): + folder += "/" for entry in mviter(self.entries): if entry.path.startswith(folder): p = entry.path - self._write_path_to_stdin(proc, p, p, make_exc, - fprogress, read_from_stdout=False) + self._write_path_to_stdin( + proc, p, p, make_exc, fprogress, read_from_stdout=False + ) checked_out_files.append(p) path_is_directory = True # END if entry is in directory @@ -1122,8 +1197,9 @@ def handle_stderr(proc, iter_checked_out_files): # END path exception handlnig if not path_is_directory: - self._write_path_to_stdin(proc, co_path, path, make_exc, - fprogress, read_from_stdout=False) + self._write_path_to_stdin( + proc, co_path, path, make_exc, fprogress, read_from_stdout=False + ) checked_out_files.append(co_path) # END path is a file # END for each path @@ -1135,7 +1211,9 @@ def handle_stderr(proc, iter_checked_out_files): assert "Should not reach this point" @default_index - def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs): + def reset( + self, commit="HEAD", working_tree=False, paths=None, head=False, **kwargs + ): """Reset the index to reflect the tree at the given commit. This will not adjust our HEAD reference as opposed to HEAD.reset by default. @@ -1167,7 +1245,7 @@ def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwa checkout the files according to their state in the index. If you want git-reset like behaviour, use *HEAD.reset* instead. 
- :return: self """ + :return: self""" # what we actually want to do is to merge the tree into our existing # index, which is what git-read-tree does new_inst = type(self).from_tree(self.repo, commit) @@ -1183,7 +1261,7 @@ def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwa except KeyError: # if key is not in theirs, it musn't be in ours try: - del(self.entries[key]) + del self.entries[key] except KeyError: pass # END handle deletion keyerror @@ -1197,7 +1275,9 @@ def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwa # END handle working tree if head: - self.repo.head.set_commit(self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit) + self.repo.head.set_commit( + self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit + ) # END handle head change return self @@ -1226,14 +1306,16 @@ def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwar if isinstance(other, Object): # invert the existing R flag - cur_val = kwargs.get('R', False) - kwargs['R'] = not cur_val + cur_val = kwargs.get("R", False) + kwargs["R"] = not cur_val return other.diff(self.Index, paths, create_patch, **kwargs) # END diff against other item handling # if other is not None here, something is wrong if other is not None: - raise ValueError("other must be None, Diffable.Index, a Tree or Commit, was %r" % other) + raise ValueError( + "other must be None, Diffable.Index, a Tree or Commit, was %r" % other + ) # diff against working copy - can be handled by superclass natively return super(IndexFile, self).diff(other, paths, create_patch, **kwargs) diff --git a/git/index/fun.py b/git/index/fun.py index c8912dd23..e88cd862d 100644 --- a/git/index/fun.py +++ b/git/index/fun.py @@ -13,7 +13,7 @@ ) import subprocess -from git.cmd import PROC_CREATIONFLAGS, handle_process_output +from git.cmd import handle_process_output, safer_popen from git.compat import ( PY3, defenc, @@ -23,14 +23,11 @@ safe_encode, safe_decode, ) 
-from git.exc import ( - UnmergedEntriesError, - HookExecutionError -) +from git.exc import UnmergedEntriesError, HookExecutionError from git.objects.fun import ( tree_to_stream, traverse_tree_recursive, - traverse_trees_recursive + traverse_trees_recursive, ) from git.util import IndexFileSHA1Writer, finalize_process from gitdb.base import IStream @@ -38,28 +35,28 @@ import os.path as osp -from .typ import ( - BaseIndexEntry, - IndexEntry, - CE_NAMEMASK, - CE_STAGESHIFT -) -from .util import ( - pack, - unpack -) +from .typ import BaseIndexEntry, IndexEntry, CE_NAMEMASK, CE_STAGESHIFT +from .util import pack, unpack -S_IFGITLINK = S_IFLNK | S_IFDIR # a submodule +S_IFGITLINK = S_IFLNK | S_IFDIR # a submodule CE_NAMEMASK_INV = ~CE_NAMEMASK -__all__ = ('write_cache', 'read_cache', 'write_tree_from_cache', 'entry_key', - 'stat_mode_to_index_mode', 'S_IFGITLINK', 'run_commit_hook', 'hook_path') +__all__ = ( + "write_cache", + "read_cache", + "write_tree_from_cache", + "entry_key", + "stat_mode_to_index_mode", + "S_IFGITLINK", + "run_commit_hook", + "hook_path", +) def hook_path(name, git_dir): """:return: path to the given named hook in the given git repository directory""" - return osp.join(git_dir, 'hooks', name) + return osp.join(git_dir, "hooks", name) def run_commit_hook(name, index, *args): @@ -67,30 +64,29 @@ def run_commit_hook(name, index, *args): :param name: name of hook, like 'pre-commit' :param index: IndexFile instance :param args: arguments passed to hook file - :raises HookExecutionError: """ + :raises HookExecutionError:""" hp = hook_path(name, index.repo.git_dir) if not os.access(hp, os.X_OK): return env = os.environ.copy() - env['GIT_INDEX_FILE'] = safe_decode(index.path) if PY3 else safe_encode(index.path) - env['GIT_EDITOR'] = ':' + env["GIT_INDEX_FILE"] = safe_decode(index.path) if PY3 else safe_encode(index.path) + env["GIT_EDITOR"] = ":" try: - cmd = subprocess.Popen([hp] + list(args), + cmd = safer_popen([hp] + list(args), env=env, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=index.repo.working_dir, - close_fds=is_posix, - creationflags=PROC_CREATIONFLAGS,) + close_fds=is_posix) except Exception as ex: raise HookExecutionError(hp, ex) else: stdout = [] stderr = [] handle_process_output(cmd, stdout.append, stderr.append, finalize_process) - stdout = ''.join(stdout) - stderr = ''.join(stderr) + stdout = "".join(stdout) + stderr = "".join(stderr) if cmd.returncode != 0: stdout = force_text(stdout, defenc) stderr = force_text(stderr, defenc) @@ -101,11 +97,11 @@ def run_commit_hook(name, index, *args): def stat_mode_to_index_mode(mode): """Convert the given mode from a stat call to the corresponding index mode and return it""" - if S_ISLNK(mode): # symlinks + if S_ISLNK(mode): # symlinks return S_IFLNK - if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK: # submodules + if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK: # submodules return S_IFGITLINK - return S_IFREG | 0o644 | (mode & 0o111) # blobs with or without executable bit + return S_IFREG | 0o644 | (mode & 0o111) # blobs with or without executable bit def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer): @@ -134,17 +130,28 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1 # body for entry in entries: beginoffset = tell() - write(entry[4]) # ctime - write(entry[5]) # mtime + write(entry[4]) # ctime + write(entry[5]) # mtime path = entry[3] path = force_bytes(path, encoding=defenc) - plen = len(path) & CE_NAMEMASK # path length + plen = len(path) & CE_NAMEMASK # path length assert plen == len(path), "Path %s too long to fit into index" % entry[3] - flags = plen | (entry[2] & CE_NAMEMASK_INV) # clear possible previous values - write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0], - entry[8], entry[9], entry[10], entry[1], flags)) + flags = plen | (entry[2] & CE_NAMEMASK_INV) # clear possible previous values + write( + pack( + ">LLLLLL20sH", + entry[6], + entry[7], + 
entry[0], + entry[8], + entry[9], + entry[10], + entry[1], + flags, + ) + ) write(path) - real_size = ((tell() - beginoffset + 8) & ~7) + real_size = (tell() - beginoffset + 8) & ~7 write(b"\0" * ((beginoffset + real_size) - tell())) # END for each entry @@ -195,14 +202,17 @@ def read_cache(stream): beginoffset = tell() ctime = unpack(">8s", read(8))[0] mtime = unpack(">8s", read(8))[0] - (dev, ino, mode, uid, gid, size, sha, flags) = \ - unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2)) + (dev, ino, mode, uid, gid, size, sha, flags) = unpack( + ">LLLLLL20sH", read(20 + 4 * 6 + 2) + ) path_size = flags & CE_NAMEMASK path = read(path_size).decode(defenc) - real_size = ((tell() - beginoffset + 8) & ~7) + real_size = (tell() - beginoffset + 8) & ~7 read((beginoffset + real_size) - tell()) - entry = IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size)) + entry = IndexEntry( + (mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size) + ) # entry_key would be the method to use, but we safe the effort entries[(path, entry.stage)] = entry count += 1 @@ -215,8 +225,10 @@ def read_cache(stream): # 4 bytes length of chunk # repeated 0 - N times extension_data = stream.read(~0) - assert len(extension_data) > 19, "Index Footer was not at least a sha on content as it was only %i bytes in size"\ - % len(extension_data) + assert len(extension_data) > 19, ( + "Index Footer was not at least a sha on content as it was only %i bytes in size" + % len(extension_data) + ) content_sha = extension_data[-20:] @@ -246,7 +258,7 @@ def write_tree_from_cache(entries, odb, sl, si=0): raise UnmergedEntriesError(entry) # END abort on unmerged ci += 1 - rbound = entry.path.find('/', si) + rbound = entry.path.find("/", si) if rbound == -1: # its not a tree tree_items_append((entry.binsha, entry.mode, entry.path[si:])) @@ -256,7 +268,7 @@ def write_tree_from_cache(entries, odb, sl, si=0): xi = ci while xi < end: oentry = entries[xi] - orbound = oentry.path.find('/', si) + 
orbound = oentry.path.find("/", si) if orbound == -1 or oentry.path[si:orbound] != base: break # END abort on base mismatch @@ -265,7 +277,9 @@ def write_tree_from_cache(entries, odb, sl, si=0): # enter recursion # ci - 1 as we want to count our current item as well - sha, tree_entry_list = write_tree_from_cache(entries, odb, slice(ci - 1, xi), rbound + 1) # @UnusedVariable + sha, tree_entry_list = write_tree_from_cache( + entries, odb, slice(ci - 1, xi), rbound + 1 + ) # @UnusedVariable tree_items_append((sha, S_IFDIR, base)) # skip ahead @@ -283,7 +297,9 @@ def write_tree_from_cache(entries, odb, sl, si=0): def _tree_entry_to_baseindexentry(tree_entry, stage): - return BaseIndexEntry((tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2])) + return BaseIndexEntry( + (tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2]) + ) def aggressive_tree_merge(odb, tree_shas): @@ -301,7 +317,7 @@ def aggressive_tree_merge(odb, tree_shas): # one and two way is the same for us, as we don't have to handle an existing # index, instrea if len(tree_shas) in (1, 2): - for entry in traverse_tree_recursive(odb, tree_shas[-1], ''): + for entry in traverse_tree_recursive(odb, tree_shas[-1], ""): out_append(_tree_entry_to_baseindexentry(entry, 0)) # END for each entry return out @@ -311,7 +327,7 @@ def aggressive_tree_merge(odb, tree_shas): raise ValueError("Cannot handle %i trees at once" % len(tree_shas)) # three trees - for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ''): + for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ""): if base is not None: # base version exists if ours is not None: @@ -320,8 +336,15 @@ def aggressive_tree_merge(odb, tree_shas): # it exists in all branches, if it was changed in both # its a conflict, otherwise we take the changed version # This should be the most common branch, so it comes first - if(base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0]) or \ - (base[1] != 
ours[1] and base[1] != theirs[1] and ours[1] != theirs[1]): + if ( + base[0] != ours[0] + and base[0] != theirs[0] + and ours[0] != theirs[0] + ) or ( + base[1] != ours[1] + and base[1] != theirs[1] + and ours[1] != theirs[1] + ): # changed by both out_append(_tree_entry_to_baseindexentry(base, 1)) out_append(_tree_entry_to_baseindexentry(ours, 2)) diff --git a/git/objects/fun.py b/git/objects/fun.py index 38dce0a5d..60d791415 100644 --- a/git/objects/fun.py +++ b/git/objects/fun.py @@ -1,27 +1,25 @@ """Module with functions which are supposed to be as fast as possible""" + from stat import S_ISDIR -from git.compat import ( - byte_ord, - safe_decode, - defenc, - xrange, - text_type, - bchr -) +from git.compat import byte_ord, safe_decode, defenc, xrange, text_type, bchr -__all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive', - 'traverse_tree_recursive') +__all__ = ( + "tree_to_stream", + "tree_entries_from_data", + "traverse_trees_recursive", + "traverse_tree_recursive", +) def tree_to_stream(entries, write): """Write the give list of entries into a stream using its write method :param entries: **sorted** list of tuples with (binsha, mode, name) :param write: write method which takes a data string""" - ord_zero = ord('0') - bit_mask = 7 # 3 bits set + ord_zero = ord("0") + bit_mask = 7 # 3 bits set for binsha, mode, name in entries: - mode_str = b'' + mode_str = b"" for i in xrange(6): mode_str = bchr(((mode >> (i * 3)) & bit_mask) + ord_zero) + mode_str # END for each 8 octal value @@ -38,7 +36,7 @@ def tree_to_stream(entries, write): # takes the input literally, which appears to be utf8 on linux. 
if isinstance(name, text_type): name = name.encode(defenc) - write(b''.join((mode_str, b' ', name, b'\0', binsha))) + write(b"".join((mode_str, b" ", name, b"\0", binsha))) # END for each item @@ -46,8 +44,8 @@ def tree_entries_from_data(data): """Reads the binary representation of a tree and returns tuples of Tree items :param data: data block with tree data (as bytes) :return: list(tuple(binsha, mode, tree_relative_path), ...)""" - ord_zero = ord('0') - space_ord = ord(' ') + ord_zero = ord("0") + space_ord = ord(" ") len_data = len(data) i = 0 out = [] @@ -81,7 +79,7 @@ def tree_entries_from_data(data): # byte is NULL, get next 20 i += 1 - sha = data[i:i + 20] + sha = data[i: i + 20] i = i + 20 out.append((sha, mode, name)) # END for each byte in data stream @@ -156,8 +154,8 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix): # END skip already done items entries = [None for _ in range(nt)] entries[ti] = item - sha, mode, name = item # its faster to unpack @UnusedVariable - is_dir = S_ISDIR(mode) # type mode bits + sha, mode, name = item # its faster to unpack @UnusedVariable + is_dir = S_ISDIR(mode) # type mode bits # find this item in all other tree data items # wrap around, but stop one before our current index, hence @@ -169,8 +167,13 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix): # if we are a directory, enter recursion if is_dir: - out.extend(traverse_trees_recursive( - odb, [((ei and ei[0]) or None) for ei in entries], path_prefix + name + '/')) + out.extend( + traverse_trees_recursive( + odb, + [((ei and ei[0]) or None) for ei in entries], + path_prefix + name + "/", + ) + ) else: out_append(tuple(_to_full_path(e, path_prefix) for e in entries)) # END handle recursion @@ -180,7 +183,7 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix): # END for each item # we are done with one tree, set all its data empty - del(tree_data[:]) + del tree_data[:] # END for each tree_data chunk return out @@ -199,7 +202,7 @@ def 
traverse_tree_recursive(odb, tree_sha, path_prefix): # unpacking/packing is faster than accessing individual items for sha, mode, name in data: if S_ISDIR(mode): - entries.extend(traverse_tree_recursive(odb, sha, path_prefix + name + '/')) + entries.extend(traverse_tree_recursive(odb, sha, path_prefix + name + "/")) else: entries.append((sha, mode, path_prefix + name)) # END for each item diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py index a75826eb3..08205efa3 100644 --- a/git/objects/submodule/base.py +++ b/git/objects/submodule/base.py @@ -13,16 +13,8 @@ defenc, is_win, ) -from git.config import ( - SectionConstraint, - GitConfigParser, - cp -) -from git.exc import ( - InvalidGitRepositoryError, - NoSuchPathError, - RepositoryDirtyError -) +from git.config import SectionConstraint, GitConfigParser, cp +from git.exc import InvalidGitRepositoryError, NoSuchPathError, RepositoryDirtyError from git.objects.base import IndexObject, Object from git.objects.util import Traversable from git.util import ( @@ -31,7 +23,7 @@ to_native_path_linux, RemoteProgress, rmtree, - unbare_repo + unbare_repo, ) from git.util import HIDE_WINDOWS_KNOWN_ERRORS @@ -42,22 +34,25 @@ sm_name, sm_section, SubmoduleConfigParser, - find_first_remote_branch + find_first_remote_branch, ) __all__ = ["Submodule", "UpdateProgress"] -log = logging.getLogger('git.objects.submodule.base') +log = logging.getLogger("git.objects.submodule.base") log.addHandler(logging.NullHandler()) class UpdateProgress(RemoteProgress): - """Class providing detailed progress information to the caller who should derive from it and implement the ``update(...)`` message""" - CLONE, FETCH, UPDWKTREE = [1 << x for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes + 3)] + + CLONE, FETCH, UPDWKTREE = [ + 1 << x + for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes + 3) + ] _num_op_codes = RemoteProgress._num_op_codes + 3 __slots__ = () @@ -74,7 +69,6 @@ class 
UpdateProgress(RemoteProgress): # mechanism which cause plenty of trouble of the only reason for packages and # modules is refactoring - subpackages shouldn't depend on parent packages class Submodule(IndexObject, Iterable, Traversable): - """Implements access to a git submodule. They are special in that their sha represents a commit in the submodule's repository which is to be checked out at the path of this instance. @@ -84,18 +78,30 @@ class Submodule(IndexObject, Iterable, Traversable): All methods work in bare and non-bare repositories.""" _id_attribute_ = "name" - k_modules_file = '.gitmodules' - k_head_option = 'branch' - k_head_default = 'master' - k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status + k_modules_file = ".gitmodules" + k_head_option = "branch" + k_head_default = "master" + k_default_mode = ( + stat.S_IFDIR | stat.S_IFLNK + ) # submodules are directories with link-status # this is a bogus type for base class compatibility - type = 'submodule' - - __slots__ = ('_parent_commit', '_url', '_branch_path', '_name', '__weakref__') - _cache_attrs = ('path', '_url', '_branch_path') - - def __init__(self, repo, binsha, mode=None, path=None, name=None, parent_commit=None, url=None, branch_path=None): + type = "submodule" + + __slots__ = ("_parent_commit", "_url", "_branch_path", "_name", "__weakref__") + _cache_attrs = ("path", "_url", "_branch_path") + + def __init__( + self, + repo, + binsha, + mode=None, + path=None, + name=None, + parent_commit=None, + url=None, + branch_path=None, + ): """Initialize this instance with its attributes. 
We only document the ones that differ from ``IndexObject`` @@ -103,7 +109,8 @@ def __init__(self, repo, binsha, mode=None, path=None, name=None, parent_commit= :param binsha: binary sha referring to a commit in the remote repository, see url parameter :param parent_commit: see set_parent_commit() :param url: The url to the remote repository which is the submodule - :param branch_path: full (relative) path to ref to checkout when cloning the remote repository""" + :param branch_path: full (relative) path to ref to checkout when cloning the remote repository + """ super(Submodule, self).__init__(repo, binsha, mode, path) self.size = 0 self._parent_commit = parent_commit @@ -116,20 +123,26 @@ def __init__(self, repo, binsha, mode=None, path=None, name=None, parent_commit= self._name = name def _set_cache_(self, attr): - if attr in ('path', '_url', '_branch_path'): + if attr in ("path", "_url", "_branch_path"): reader = self.config_reader() # default submodule values try: - self.path = reader.get('path') + self.path = reader.get("path") except cp.NoSectionError: - raise ValueError("This submodule instance does not exist anymore in '%s' file" - % osp.join(self.repo.working_tree_dir, '.gitmodules')) + raise ValueError( + "This submodule instance does not exist anymore in '%s' file" + % osp.join(self.repo.working_tree_dir, ".gitmodules") + ) # end - self._url = reader.get('url') + self._url = reader.get("url") # git-python extension values - optional - self._branch_path = reader.get_value(self.k_head_option, git.Head.to_full_path(self.k_head_default)) - elif attr == '_name': - raise AttributeError("Cannot retrieve the name of a submodule if it was not set initially") + self._branch_path = reader.get_value( + self.k_head_option, git.Head.to_full_path(self.k_head_default) + ) + elif attr == "_name": + raise AttributeError( + "Cannot retrieve the name of a submodule if it was not set initially" + ) else: super(Submodule, self)._set_cache_(attr) # END handle attribute name @@ 
-165,8 +178,13 @@ def __str__(self): return self._name def __repr__(self): - return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)"\ - % (type(self).__name__, self._name, self.path, self.url, self.branch_path) + return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)" % ( + type(self).__name__, + self._name, + self.path, + self.url, + self.branch_path, + ) @classmethod def _config_parser(cls, repo, parent_commit, read_only): @@ -186,17 +204,23 @@ def _config_parser(cls, repo, parent_commit, read_only): if not repo.bare and parent_matches_head: fp_module = osp.join(repo.working_tree_dir, cls.k_modules_file) else: - assert parent_commit is not None, "need valid parent_commit in bare repositories" + assert ( + parent_commit is not None + ), "need valid parent_commit in bare repositories" try: fp_module = cls._sio_modules(parent_commit) except KeyError: - raise IOError("Could not find %s file in the tree of parent commit %s" % - (cls.k_modules_file, parent_commit)) + raise IOError( + "Could not find %s file in the tree of parent commit %s" + % (cls.k_modules_file, parent_commit) + ) # END handle exceptions # END handle non-bare working tree if not read_only and (repo.bare or not parent_matches_head): - raise ValueError("Cannot write blobs of 'historical' submodule configurations") + raise ValueError( + "Cannot write blobs of 'historical' submodule configurations" + ) # END handle writes of historical submodules return SubmoduleConfigParser(fp_module, read_only=read_only) @@ -232,13 +256,22 @@ def _config_parser_constrained(self, read_only): @classmethod def _module_abspath(cls, parent_repo, path, name): if cls._need_gitfile_submodules(parent_repo.git): - return osp.join(parent_repo.git_dir, 'modules', name) + return osp.join(parent_repo.git_dir, "modules", name) else: return osp.join(parent_repo.working_tree_dir, path) # end @classmethod - def _clone_repo(cls, repo, url, path, name, **kwargs): + def _clone_repo( + cls, + repo, + url, + path, + name, + 
allow_unsafe_options=False, + allow_unsafe_protocols=False, + **kwargs + ): """:return: Repo instance of newly cloned repository :param repo: our parent repository :param url: url to clone from @@ -248,14 +281,20 @@ def _clone_repo(cls, repo, url, path, name, **kwargs): module_abspath = cls._module_abspath(repo, path, name) module_checkout_path = module_abspath if cls._need_gitfile_submodules(repo.git): - kwargs['separate_git_dir'] = module_abspath + kwargs["separate_git_dir"] = module_abspath module_abspath_dir = osp.dirname(module_abspath) if not osp.isdir(module_abspath_dir): os.makedirs(module_abspath_dir) module_checkout_path = osp.join(repo.working_tree_dir, path) # end - clone = git.Repo.clone_from(url, module_checkout_path, **kwargs) + clone = git.Repo.clone_from( + url, + module_checkout_path, + allow_unsafe_options=allow_unsafe_options, + allow_unsafe_protocols=allow_unsafe_protocols, + **kwargs + ) if cls._need_gitfile_submodules(repo.git): cls._write_git_file_and_module_config(module_checkout_path, module_abspath) # end @@ -264,20 +303,26 @@ def _clone_repo(cls, repo, url, path, name, **kwargs): @classmethod def _to_relative_path(cls, parent_repo, path): """:return: a path guaranteed to be relative to the given parent-repository - :raise ValueError: if path is not contained in the parent repository's working tree""" + :raise ValueError: if path is not contained in the parent repository's working tree + """ path = to_native_path_linux(path) - if path.endswith('/'): + if path.endswith("/"): path = path[:-1] # END handle trailing slash if osp.isabs(path): working_tree_linux = to_native_path_linux(parent_repo.working_tree_dir) if not path.startswith(working_tree_linux): - raise ValueError("Submodule checkout path '%s' needs to be within the parents repository at '%s'" - % (working_tree_linux, path)) - path = path[len(working_tree_linux.rstrip('/')) + 1:] + raise ValueError( + "Submodule checkout path '%s' needs to be within the parents repository at '%s'" + 
% (working_tree_linux, path) + ) + path = path[len(working_tree_linux.rstrip("/")) + 1:] if not path: - raise ValueError("Absolute submodule path '%s' didn't yield a valid relative path" % path) + raise ValueError( + "Absolute submodule path '%s' didn't yield a valid relative path" + % path + ) # end verify converted relative path makes sense # end convert to a relative path @@ -294,23 +339,39 @@ def _write_git_file_and_module_config(cls, working_tree_dir, module_abspath): :param working_tree_dir: directory to write the .git file into :param module_abspath: absolute path to the bare repository """ - git_file = osp.join(working_tree_dir, '.git') + git_file = osp.join(working_tree_dir, ".git") rela_path = osp.relpath(module_abspath, start=working_tree_dir) if is_win: if osp.isfile(git_file): os.remove(git_file) - with open(git_file, 'wb') as fp: + with open(git_file, "wb") as fp: fp.write(("gitdir: %s" % rela_path).encode(defenc)) - with GitConfigParser(osp.join(module_abspath, 'config'), - read_only=False, merge_includes=False) as writer: - writer.set_value('core', 'worktree', - to_native_path_linux(osp.relpath(working_tree_dir, start=module_abspath))) + with GitConfigParser( + osp.join(module_abspath, "config"), read_only=False, merge_includes=False + ) as writer: + writer.set_value( + "core", + "worktree", + to_native_path_linux( + osp.relpath(working_tree_dir, start=module_abspath) + ), + ) - #{ Edit Interface + # { Edit Interface @classmethod - def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): + def add( + cls, + repo, + name, + path, + url=None, + branch=None, + no_checkout=False, + allow_unsafe_options=False, + allow_unsafe_protocols=False, + ): """Add a new submodule to the given repository. This will alter the index as well as the .gitmodules file, but will not create a new commit. 
If the submodule already exists, no matter if the configuration differs @@ -339,7 +400,9 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): :note: works atomically, such that no change will be done if the repository update fails for instance""" if repo.bare: - raise InvalidGitRepositoryError("Cannot add submodules to bare repositories") + raise InvalidGitRepositoryError( + "Cannot add submodules to bare repositories" + ) # END handle bare repos path = cls._to_relative_path(repo, path) @@ -351,7 +414,14 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): # END assure url correctness # INSTANTIATE INTERMEDIATE SM - sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name, url='invalid-temporary') + sm = cls( + repo, + cls.NULL_BIN_SHA, + cls.k_default_mode, + path, + name, + url="invalid-temporary", + ) if sm.exists(): # reretrieve submodule from tree try: @@ -374,30 +444,45 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): if has_module and url is not None: if url not in [r.url for r in sm.module().remotes]: raise ValueError( - "Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath)) + "Specified URL '%s' does not match any remote url of the repository at '%s'" + % (url, sm.abspath) + ) # END check url # END verify urls match mrepo = None if url is None: if not has_module: - raise ValueError("A URL was not given and existing repository did not exsit at %s" % path) + raise ValueError( + "A URL was not given and existing repository did not exsit at %s" + % path + ) # END check url mrepo = sm.module() urls = [r.url for r in mrepo.remotes] if not urls: - raise ValueError("Didn't find any remote url in repository at %s" % sm.abspath) + raise ValueError( + "Didn't find any remote url in repository at %s" % sm.abspath + ) # END verify we have url url = urls[0] else: # clone new repo - kwargs = {'n': no_checkout} + kwargs = {"n": no_checkout} if not 
branch_is_default: - kwargs['b'] = br.name + kwargs["b"] = br.name # END setup checkout-branch # _clone_repo(cls, repo, url, path, name, **kwargs): - mrepo = cls._clone_repo(repo, url, path, name, **kwargs) + mrepo = cls._clone_repo( + repo, + url, + path, + name, + allow_unsafe_options=allow_unsafe_options, + allow_unsafe_protocols=allow_unsafe_protocols, + **kwargs + ) # END verify url ## See #525 for ensuring git urls in config-files valid under Windows. @@ -408,13 +493,13 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): # a38efa84daef914e4de58d1905a500d8d14aaf45 mymodule (v0.9.0-1-ga38efa8) # -a38efa84daef914e4de58d1905a500d8d14aaf45 submodules/intermediate/one with sm.repo.config_writer() as writer: - writer.set_value(sm_section(name), 'url', url) + writer.set_value(sm_section(name), "url", url) # update configuration and index index = sm.repo.index with sm.config_writer(index=index, write=False) as writer: - writer.set_value('url', url) - writer.set_value('path', path) + writer.set_value("url", url) + writer.set_value("path", path) sm._url = url if not branch_is_default: @@ -428,8 +513,18 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): return sm - def update(self, recursive=False, init=True, to_latest_revision=False, progress=None, dry_run=False, - force=False, keep_going=False): + def update( + self, + recursive=False, + init=True, + to_latest_revision=False, + progress=None, + dry_run=False, + force=False, + keep_going=False, + allow_unsafe_options=False, + allow_unsafe_protocols=False, + ): """Update the repository of this submodule to point to the checkout we point at with the binsha of this instance. 
@@ -464,7 +559,7 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= if progress is None: progress = UpdateProgress() # END handle progress - prefix = '' + prefix = "" if dry_run: prefix = "DRY-RUN: " # END handle prefix @@ -487,17 +582,27 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= op |= BEGIN # END handle start - progress.update(op, i, len_rmts, prefix + "Fetching remote %s of submodule %r" - % (remote, self.name)) - #=============================== + progress.update( + op, + i, + len_rmts, + prefix + + "Fetching remote %s of submodule %r" % (remote, self.name), + ) + # =============================== if not dry_run: remote.fetch(progress=progress) # END handle dry-run - #=============================== + # =============================== if i == len_rmts - 1: op |= END # END handle end - progress.update(op, i, len_rmts, prefix + "Done fetching remote of submodule %r" % self.name) + progress.update( + op, + i, + len_rmts, + prefix + "Done fetching remote of submodule %r" % self.name, + ) # END fetch new data except InvalidGitRepositoryError: if not init: @@ -510,25 +615,48 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= try: os.rmdir(checkout_module_abspath) except OSError: - raise OSError("Module directory at %r does already exist and is non-empty" - % checkout_module_abspath) + raise OSError( + "Module directory at %r does already exist and is non-empty" + % checkout_module_abspath + ) # END handle OSError # END handle directory removal # don't check it out at first - nonetheless it will create a local # branch according to the remote-HEAD if possible - progress.update(BEGIN | CLONE, 0, 1, prefix + "Cloning url '%s' to '%s' in submodule %r" % - (self.url, checkout_module_abspath, self.name)) + progress.update( + BEGIN | CLONE, + 0, + 1, + prefix + + "Cloning url '%s' to '%s' in submodule %r" + % (self.url, checkout_module_abspath, self.name), + ) if not 
dry_run: - mrepo = self._clone_repo(self.repo, self.url, self.path, self.name, n=True) + mrepo = self._clone_repo( + self.repo, + self.url, + self.path, + self.name, + n=True, + allow_unsafe_options=allow_unsafe_options, + allow_unsafe_protocols=allow_unsafe_protocols, + ) # END handle dry-run - progress.update(END | CLONE, 0, 1, prefix + "Done cloning to %s" % checkout_module_abspath) + progress.update( + END | CLONE, + 0, + 1, + prefix + "Done cloning to %s" % checkout_module_abspath, + ) if not dry_run: # see whether we have a valid branch to checkout try: # find a remote which has our branch - we try to be flexible - remote_branch = find_first_remote_branch(mrepo.remotes, self.branch_name) + remote_branch = find_first_remote_branch( + mrepo.remotes, self.branch_name + ) local_branch = mkhead(mrepo, self.branch_path) # have a valid branch, but no checkout - make sure we can figure @@ -537,10 +665,15 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= # END initial checkout + branch creation # make sure HEAD is not detached - mrepo.head.set_reference(local_branch, logmsg="submodule: attaching head to %s" % local_branch) + mrepo.head.set_reference( + local_branch, + logmsg="submodule: attaching head to %s" % local_branch, + ) mrepo.head.ref.set_tracking_branch(remote_branch) except (IndexError, InvalidGitRepositoryError): - log.warn("Failed to checkout tracking branch %s", self.branch_path) + log.warn( + "Failed to checkout tracking branch %s", self.branch_path + ) # END handle tracking branch # NOTE: Have to write the repo config file as well, otherwise @@ -548,7 +681,7 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= # Maybe this is a good way to assure it doesn't get into our way, but # we want to stay backwards compatible too ... . Its so redundant ! 
with self.repo.config_writer() as writer: - writer.set_value(sm_section(self.name), 'url', self.url) + writer.set_value(sm_section(self.name), "url", self.url) # END handle dry_run # END handle initialization @@ -562,7 +695,10 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= # END handle dry_run if mrepo is not None and to_latest_revision: - msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir + msg_base = ( + "Cannot update to latest revision in repository at %r as " + % mrepo.working_dir + ) if not is_detached: rref = mrepo.head.ref.tracking_branch() if rref is not None: @@ -570,7 +706,11 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= binsha = rcommit.binsha hexsha = rcommit.hexsha else: - log.error("%s a tracking branch was not set for local branch '%s'", msg_base, mrepo.head.ref) + log.error( + "%s a tracking branch was not set for local branch '%s'", + msg_base, + mrepo.head.ref, + ) # END handle remote ref else: log.error("%s there was no local tracking branch", msg_base) @@ -594,21 +734,38 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= log.debug(msg) else: msg = "Skipping %s on branch '%s' of submodule repo '%s' as it contains un-pushed commits" - msg %= (is_detached and "checkout" or "reset", mrepo.head, mrepo) + msg %= ( + is_detached and "checkout" or "reset", + mrepo.head, + mrepo, + ) log.info(msg) may_reset = False # end handle force # end handle if we are in the future - if may_reset and not force and mrepo.is_dirty(index=True, working_tree=True, untracked_files=True): - raise RepositoryDirtyError(mrepo, "Cannot reset a dirty repository") + if ( + may_reset + and not force + and mrepo.is_dirty( + index=True, working_tree=True, untracked_files=True + ) + ): + raise RepositoryDirtyError( + mrepo, "Cannot reset a dirty repository" + ) # end handle force and dirty state # end handle empty repo # end verify 
future/past - progress.update(BEGIN | UPDWKTREE, 0, 1, prefix + - "Updating working tree at %s for submodule %r to revision %s" - % (self.path, self.name, hexsha)) + progress.update( + BEGIN | UPDWKTREE, + 0, + 1, + prefix + + "Updating working tree at %s for submodule %r to revision %s" + % (self.path, self.name, hexsha), + ) if not dry_run and may_reset: if is_detached: @@ -621,8 +778,12 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= mrepo.head.reset(hexsha, index=True, working_tree=True) # END handle checkout # if we may reset/checkout - progress.update(END | UPDWKTREE, 0, 1, prefix + "Done updating working tree for submodule %r" - % self.name) + progress.update( + END | UPDWKTREE, + 0, + 1, + prefix + "Done updating working tree for submodule %r" % self.name, + ) # END update to new commit only if needed except Exception as err: if not keep_going: @@ -636,8 +797,15 @@ def update(self, recursive=False, init=True, to_latest_revision=False, progress= # in dry_run mode, the module might not exist if mrepo is not None: for submodule in self.iter_items(self.module()): - submodule.update(recursive, init, to_latest_revision, progress=progress, dry_run=dry_run, - force=force, keep_going=keep_going) + submodule.update( + recursive, + init, + to_latest_revision, + progress=progress, + dry_run=dry_run, + force=force, + keep_going=keep_going, + ) # END handle recursive update # END handle dry run # END for each submodule @@ -665,7 +833,9 @@ def move(self, module_path, configuration=True, module=True): in an inconsistent state if a sub-step fails for some reason """ if module + configuration < 1: - raise ValueError("You must specify to move at least the module or the configuration of the submodule") + raise ValueError( + "You must specify to move at least the module or the configuration of the submodule" + ) # END handle input module_checkout_path = self._to_relative_path(self.repo, module_path) @@ -675,9 +845,13 @@ def move(self, 
module_path, configuration=True, module=True): return self # END handle no change - module_checkout_abspath = join_path_native(self.repo.working_tree_dir, module_checkout_path) + module_checkout_abspath = join_path_native( + self.repo.working_tree_dir, module_checkout_path + ) if osp.isfile(module_checkout_abspath): - raise ValueError("Cannot move repository onto a file: %s" % module_checkout_abspath) + raise ValueError( + "Cannot move repository onto a file: %s" % module_checkout_abspath + ) # END handle target files index = self.repo.index @@ -713,9 +887,11 @@ def move(self, module_path, configuration=True, module=True): os.renames(cur_path, module_checkout_abspath) renamed_module = True - if osp.isfile(osp.join(module_checkout_abspath, '.git')): + if osp.isfile(osp.join(module_checkout_abspath, ".git")): module_abspath = self._module_abspath(self.repo, self.path, self.name) - self._write_git_file_and_module_config(module_checkout_abspath, module_abspath) + self._write_git_file_and_module_config( + module_checkout_abspath, module_abspath + ) # end handle git file rewrite # END move physical module @@ -727,16 +903,20 @@ def move(self, module_path, configuration=True, module=True): try: ekey = index.entry_key(self.path, 0) entry = index.entries[ekey] - del(index.entries[ekey]) - nentry = git.IndexEntry(entry[:3] + (module_checkout_path,) + entry[4:]) + del index.entries[ekey] + nentry = git.IndexEntry( + entry[:3] + (module_checkout_path,) + entry[4:] + ) index.entries[tekey] = nentry except KeyError: - raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path)) + raise InvalidGitRepositoryError( + "Submodule's entry at %r did not exist" % (self.path) + ) # END handle submodule doesn't exist # update configuration - with self.config_writer(index=index) as writer: # auto-write - writer.set_value('path', module_checkout_path) + with self.config_writer(index=index) as writer: # auto-write + writer.set_value("path", module_checkout_path) 
self.path = module_checkout_path # END handle configuration flag except Exception: @@ -782,7 +962,9 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): :raise InvalidGitRepositoryError: thrown if the repository cannot be deleted :raise OSError: if directories or files could not be removed""" if not (module or configuration): - raise ValueError("Need to specify to delete at least the module, or the configuration") + raise ValueError( + "Need to specify to delete at least the module, or the configuration" + ) # END handle parameters # Recursively remove children of this submodule @@ -790,12 +972,14 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): for csm in self.children(): nc += 1 csm.remove(module, force, configuration, dry_run) - del(csm) + del csm # end if configuration and not dry_run and nc > 0: # Assure we don't leave the parent repository in a dirty state, and commit our changes # It's important for recursive, unforced, deletions to work as expected - self.module().index.commit("Removed at least one of child-modules of '%s'" % self.name) + self.module().index.commit( + "Removed at least one of child-modules of '%s'" % self.name + ) # end handle recursion # DELETE REPOSITORY WORKING TREE @@ -814,7 +998,9 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): elif osp.isdir(mp): method = rmtree elif osp.exists(mp): - raise AssertionError("Cannot forcibly delete repository as it was neither a link, nor a directory") + raise AssertionError( + "Cannot forcibly delete repository as it was neither a link, nor a directory" + ) # END handle brutal deletion if not dry_run: assert method @@ -825,7 +1011,8 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): if mod.is_dirty(index=True, working_tree=True, untracked_files=True): raise InvalidGitRepositoryError( "Cannot delete module at %s with any modifications, unless force is specified" - % 
mod.working_tree_dir) + % mod.working_tree_dir + ) # END check for dirt # figure out whether we have new commits compared to the remotes @@ -842,29 +1029,34 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): # not a single remote branch contained all our commits if len(rrefs) and num_branches_with_new_commits == len(rrefs): raise InvalidGitRepositoryError( - "Cannot delete module at %s as there are new commits" % mod.working_tree_dir) + "Cannot delete module at %s as there are new commits" + % mod.working_tree_dir + ) # END handle new commits # have to manually delete references as python's scoping is # not existing, they could keep handles open ( on windows this is a problem ) if len(rrefs): - del(rref) + del rref # END handle remotes - del(rrefs) - del(remote) + del rrefs + del remote # END for each remote # finally delete our own submodule if not dry_run: self._clear_cache() wtd = mod.working_tree_dir - del(mod) # release file-handles (windows) + del mod # release file-handles (windows) import gc + gc.collect() try: rmtree(wtd) except Exception as ex: if HIDE_WINDOWS_KNOWN_ERRORS: - raise SkipTest("FIXME: fails with: PermissionError\n %s", ex) + raise SkipTest( + "FIXME: fails with: PermissionError\n %s", ex + ) else: raise # END delete tree if possible @@ -892,7 +1084,7 @@ def remove(self, module=True, force=False, configuration=True, dry_run=False): # first the index-entry parent_index = self.repo.index try: - del(parent_index.entries[parent_index.entry_key(self.path, 0)]) + del parent_index.entries[parent_index.entry_key(self.path, 0)] except KeyError: pass # END delete entry @@ -930,7 +1122,10 @@ def set_parent_commit(self, commit, check=True): pcommit = self.repo.commit(commit) pctree = pcommit.tree if self.k_modules_file not in pctree: - raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file)) + raise ValueError( + "Tree of commit %s did not contain the %s file" + % (commit, 
self.k_modules_file) + ) # END handle exceptions prev_pc = self._parent_commit @@ -940,7 +1135,10 @@ def set_parent_commit(self, commit, check=True): parser = self._config_parser(self.repo, self._parent_commit, read_only=True) if not parser.has_section(sm_section(self.name)): self._parent_commit = prev_pc - raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit)) + raise ValueError( + "Submodule at path %r did not exist in parent commit %s" + % (self.path, commit) + ) # END handle submodule did not exist # END handle checking mode @@ -1010,7 +1208,9 @@ def rename(self, new_name): # .git/modules mod = self.module() if mod.has_separate_working_tree(): - destination_module_abspath = self._module_abspath(self.repo, self.path, new_name) + destination_module_abspath = self._module_abspath( + self.repo, self.path, new_name + ) source_dir = mod.git_dir # Let's be sure the submodule name is not so obviously tied to a directory if destination_module_abspath.startswith(mod.git_dir): @@ -1019,14 +1219,16 @@ def rename(self, new_name): source_dir = tmp_dir # end handle self-containment os.renames(source_dir, destination_module_abspath) - self._write_git_file_and_module_config(mod.working_tree_dir, destination_module_abspath) + self._write_git_file_and_module_config( + mod.working_tree_dir, destination_module_abspath + ) # end move separate git repository return self - #} END edit interface + # } END edit interface - #{ Query Interface + # { Query Interface @unbare_repo def module(self): @@ -1041,9 +1243,13 @@ def module(self): return repo # END handle repo uninitialized except (InvalidGitRepositoryError, NoSuchPathError): - raise InvalidGitRepositoryError("No valid repository at %s" % module_checkout_abspath) + raise InvalidGitRepositoryError( + "No valid repository at %s" % module_checkout_abspath + ) else: - raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_checkout_abspath) + raise 
InvalidGitRepositoryError( + "Repository at %r was not yet checked out" % module_checkout_abspath + ) # END handle exceptions def module_exists(self): @@ -1117,7 +1323,8 @@ def url(self): @property def parent_commit(self): """:return: Commit instance with the tree containing the .gitmodules file - :note: will always point to the current head's commit if it was not set explicitly""" + :note: will always point to the current head's commit if it was not set explicitly + """ if self._parent_commit is None: return self.repo.commit() return self._parent_commit @@ -1146,29 +1353,30 @@ def config_reader(self): def children(self): """ :return: IterableList(Submodule, ...) an iterable list of submodules instances - which are children of this submodule or 0 if the submodule is not checked out""" + which are children of this submodule or 0 if the submodule is not checked out + """ return self._get_intermediate_items(self) - #} END query interface + # } END query interface - #{ Iterable Interface + # { Iterable Interface @classmethod - def iter_items(cls, repo, parent_commit='HEAD'): + def iter_items(cls, repo, parent_commit="HEAD"): """:return: iterator yielding Submodule instances available in the given repository""" - pc = repo.commit(parent_commit) # parent commit instance + pc = repo.commit(parent_commit) # parent commit instance try: parser = cls._config_parser(repo, pc, read_only=True) except IOError: return # END handle empty iterator - rt = pc.tree # root tree + rt = pc.tree # root tree for sms in parser.sections(): n = sm_name(sms) - p = parser.get(sms, 'path') - u = parser.get(sms, 'url') + p = parser.get(sms, "path") + u = parser.get(sms, "url") b = cls.k_head_default if parser.has_option(sms, cls.k_head_option): b = str(parser.get(sms, cls.k_head_option)) @@ -1201,4 +1409,4 @@ def iter_items(cls, repo, parent_commit='HEAD'): yield sm # END for each section - #} END iterable interface + # } END iterable interface diff --git a/git/objects/tree.py 
b/git/objects/tree.py index d6134e308..e9487f93f 100644 --- a/git/objects/tree.py +++ b/git/objects/tree.py @@ -13,10 +13,7 @@ from .submodule.base import Submodule from git.compat import string_types -from .fun import ( - tree_entries_from_data, - tree_to_stream -) +from .fun import tree_entries_from_data, tree_to_stream from git.compat import PY3 @@ -74,12 +71,12 @@ def merge_sort(a, cmp): class TreeModifier(object): - """A utility class providing methods to alter the underlying cache in a list-like fashion. Once all adjustments are complete, the _cache, which really is a reference to the cache of a tree, will be sorted. Assuring it will be in a serializable state""" - __slots__ = '_cache' + + __slots__ = "_cache" def __init__(self, cache): self._cache = cache @@ -93,7 +90,7 @@ def _index_by_name(self, name): # END for each item in cache return -1 - #{ Interface + # { Interface def set_done(self): """Call this method once you are done modifying the tree information. It may be called several times, but be aware that each call will cause @@ -101,9 +98,10 @@ def set_done(self): :return self:""" merge_sort(self._cache, git_cmp) return self - #} END interface - #{ Mutators + # } END interface + + # { Mutators def add(self, sha, mode, name, force=False): """Add the given item to the tree. 
If an item with the given name already exists, nothing will be done, but a ValueError will be raised if the @@ -115,7 +113,7 @@ def add(self, sha, mode, name, force=False): :param force: If True, an item with your name and information will overwrite any existing item with the same name, no matter which information it has :return: self""" - if '/' in name: + if "/" in name: raise ValueError("Name must not contain '/' characters") if (mode >> 12) not in Tree._map_id_to_type: raise ValueError("Invalid object type according to mode %o" % mode) @@ -148,13 +146,12 @@ def __delitem__(self, name): """Deletes an item with the given name if it exists""" index = self._index_by_name(name) if index > -1: - del(self._cache[index]) + del self._cache[index] - #} END mutators + # } END mutators class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable): - """Tree objects represent an ordered list of Blobs and other Trees. ``Tree as a list``:: @@ -170,7 +167,7 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable): __slots__ = "_cache" # actual integer ids for comparison - commit_id = 0o16 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link + commit_id = 0o16 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link blob_id = 0o10 symlink_id = 0o12 tree_id = 0o04 @@ -178,7 +175,7 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable): _map_id_to_type = { commit_id: Submodule, blob_id: Blob, - symlink_id: Blob + symlink_id: Blob, # tree id added once Tree is defined } @@ -208,7 +205,9 @@ def _iter_convert_to_object(self, iterable): try: yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path) except KeyError: - raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path)) + raise TypeError( + "Unknown mode %o found in tree data for path '%s'" % (mode, path) + ) # END for each item def join(self, file): @@ -217,13 +216,13 @@ def join(self, file): :raise KeyError: if given file or tree 
does not exist in tree""" msg = "Blob or Tree named %r not found" - if '/' in file: + if "/" in file: tree = self item = self - tokens = file.split('/') + tokens = file.split("/") for i, token in enumerate(tokens): item = tree[token] - if item.type == 'tree': + if item.type == "tree": tree = item else: # safety assertion - blobs are at the end of the path @@ -237,9 +236,10 @@ def join(self, file): return item else: for info in self._cache: - if info[2] == file: # [2] == name - return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], - join_path(self.path, info[2])) + if info[2] == file: # [2] == name + return self._map_id_to_type[info[1] >> 12]( + self.repo, info[0], info[1], join_path(self.path, info[2]) + ) # END for each obj raise KeyError(msg % file) # END handle long paths @@ -271,12 +271,20 @@ def cache(self): See the ``TreeModifier`` for more information on how to alter the cache""" return TreeModifier(self._cache) - def traverse(self, predicate=lambda i, d: True, - prune=lambda i, d: False, depth=-1, branch_first=True, - visit_once=False, ignore_self=1): + def traverse( + self, + predicate=lambda i, d: True, + prune=lambda i, d: False, + depth=-1, + branch_first=True, + visit_once=False, + ignore_self=1, + ): """For documentation, see util.Traversable.traverse Trees are set to visit_once = False to gain more performance in the traversal""" - return super(Tree, self).traverse(predicate, prune, depth, branch_first, visit_once, ignore_self) + return super(Tree, self).traverse( + predicate, prune, depth, branch_first, visit_once, ignore_self + ) # List protocol def __getslice__(self, i, j): @@ -291,7 +299,9 @@ def __len__(self): def __getitem__(self, item): if isinstance(item, int): info = self._cache[item] - return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2])) + return self._map_id_to_type[info[1] >> 12]( + self.repo, info[0], info[1], join_path(self.path, info[2]) + ) if isinstance(item, 
string_types): # compatibility diff --git a/git/refs/symbolic.py b/git/refs/symbolic.py index a8ca6538f..64255c4d1 100644 --- a/git/refs/symbolic.py +++ b/git/refs/symbolic.py @@ -1,9 +1,6 @@ import os -from git.compat import ( - string_types, - defenc -) +from git.compat import string_types, defenc from git.objects import Object, Commit from git.util import ( join_path, @@ -11,12 +8,9 @@ to_native_path_linux, assure_directory_exists, hex_to_bin, - LockedFD -) -from gitdb.exc import ( - BadObject, - BadName + LockedFD, ) +from gitdb.exc import BadObject, BadName import os.path as osp @@ -27,20 +21,20 @@ def _git_dir(repo, path): - """ Find the git dir that's appropriate for the path""" + """Find the git dir that's appropriate for the path""" name = "%s" % (path,) - if name in ['HEAD', 'ORIG_HEAD', 'FETCH_HEAD', 'index', 'logs']: + if name in ["HEAD", "ORIG_HEAD", "FETCH_HEAD", "index", "logs"]: return repo.git_dir return repo.common_dir class SymbolicReference(object): - """Represents a special case of a reference such that this reference is symbolic. It does not point to a specific commit, but to another Head, which itself specifies a commit. A typical example for a symbolic reference is HEAD.""" + __slots__ = ("repo", "path") _resolve_ref_on_create = False _points_to_commits_only = True @@ -59,7 +53,7 @@ def __repr__(self): return '' % (self.__class__.__name__, self.path) def __eq__(self, other): - if hasattr(other, 'path'): + if hasattr(other, "path"): return self.path == other.path return False @@ -83,19 +77,19 @@ def abspath(self): @classmethod def _get_packed_refs_path(cls, repo): - return osp.join(repo.common_dir, 'packed-refs') + return osp.join(repo.common_dir, "packed-refs") @classmethod def _iter_packed_refs(cls, repo): """Returns an iterator yielding pairs of sha1/path pairs (as bytes) for the corresponding refs. 
:note: The packed refs file will be kept open as long as we iterate""" try: - with open(cls._get_packed_refs_path(repo), 'rt') as fp: + with open(cls._get_packed_refs_path(repo), "rt") as fp: for line in fp: line = line.strip() if not line: continue - if line.startswith('#'): + if line.startswith("#"): # "# pack-refs with: peeled fully-peeled sorted" # the git source code shows "peeled", # "fully-peeled" and "sorted" as the keywords @@ -104,18 +98,23 @@ def _iter_packed_refs(cls, repo): # I looked at master on 2017-10-11, # commit 111ef79afe, after tag v2.15.0-rc1 # from repo https://github.com/git/git.git - if line.startswith('# pack-refs with:') and 'peeled' not in line: - raise TypeError("PackingType of packed-Refs not understood: %r" % line) + if ( + line.startswith("# pack-refs with:") + and "peeled" not in line + ): + raise TypeError( + "PackingType of packed-Refs not understood: %r" % line + ) # END abort if we do not understand the packing scheme continue # END parse comment # skip dereferenced tag object entries - previous line was actual # tag reference for it - if line[0] == '^': + if line[0] == "^": continue - yield tuple(line.split(' ', 1)) + yield tuple(line.split(" ", 1)) # END for each line except (OSError, IOError): return @@ -145,12 +144,12 @@ def _get_ref_info_helper(cls, repo, ref_path): tokens = None repodir = _git_dir(repo, ref_path) try: - with open(osp.join(repodir, ref_path), 'rt') as fp: + with open(osp.join(repodir, ref_path), "rt") as fp: value = fp.read().rstrip() # Don't only split on spaces, but on whitespace, which allows to parse lines like # 60b64ef992065e2600bfef6187a97f92398a9144 branch 'master' of git-server:/path/to/repo tokens = value.split() - assert(len(tokens) != 0) + assert len(tokens) != 0 except (OSError, IOError): # Probably we are just packed, find our entry in the packed refs file # NOTE: We are not a symbolic ref if we are in a packed file, as these @@ -167,7 +166,7 @@ def _get_ref_info_helper(cls, repo, ref_path): 
raise ValueError("Reference at %r does not exist" % ref_path) # is it a reference ? - if tokens[0] == 'ref:': + if tokens[0] == "ref:": return (None, tokens[1]) # its a commit @@ -190,20 +189,25 @@ def _get_object(self): always point to the actual object as it gets re-created on each query""" # have to be dynamic here as we may be a tag which can point to anything # Our path will be resolved to the hexsha which will be used accordingly - return Object.new_from_sha(self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path))) + return Object.new_from_sha( + self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path)) + ) def _get_commit(self): """ :return: Commit object we point to, works for detached and non-detached - SymbolicReferences. The symbolic reference will be dereferenced recursively.""" + SymbolicReferences. The symbolic reference will be dereferenced recursively. + """ obj = self._get_object() - if obj.type == 'tag': + if obj.type == "tag": obj = obj.object # END dereference tag if obj.type != Commit.type: - raise TypeError("Symbolic Reference pointed to object %r, commit was required" % obj) + raise TypeError( + "Symbolic Reference pointed to object %r, commit was required" % obj + ) # END handle type return obj @@ -264,7 +268,9 @@ def set_object(self, object, logmsg=None): # @ReservedAssignment return self._get_reference().set_object(object, logmsg) commit = property(_get_commit, set_commit, doc="Query or set commits directly") - object = property(_get_object, set_object, doc="Return the object our ref currently refers to") + object = property( + _get_object, set_object, doc="Return the object our ref currently refers to" + ) def _get_reference(self): """:return: Reference Object we point to @@ -272,7 +278,9 @@ def _get_reference(self): to a reference, but to a commit""" sha, target_ref_path = self._get_ref_info(self.repo, self.path) if target_ref_path is None: - raise TypeError("%s is a detached symbolic reference as it points 
to %r" % (self, sha)) + raise TypeError( + "%s is a detached symbolic reference as it points to %r" % (self, sha) + ) return self.from_path(self.repo, target_ref_path) def set_reference(self, ref, logmsg=None): @@ -302,7 +310,7 @@ def set_reference(self, ref, logmsg=None): write_value = ref.hexsha elif isinstance(ref, string_types): try: - obj = self.repo.rev_parse(ref + "^{}") # optionally deref tags + obj = self.repo.rev_parse(ref + "^{}") # optionally deref tags write_value = obj.hexsha except (BadObject, BadName): raise ValueError("Could not extract object from %s" % ref) @@ -332,7 +340,7 @@ def set_reference(self, ref, logmsg=None): fd = lfd.open(write=True, stream=True) ok = True try: - fd.write(write_value.encode('ascii') + b'\n') + fd.write(write_value.encode("ascii") + b"\n") lfd.commit() ok = True finally: @@ -345,7 +353,9 @@ def set_reference(self, ref, logmsg=None): return self # aliased reference - reference = property(_get_reference, set_reference, doc="Returns the Reference we point to") + reference = property( + _get_reference, set_reference, doc="Returns the Reference we point to" + ) ref = reference def is_valid(self): @@ -378,7 +388,8 @@ def log(self): applied to this reference .. note:: As the log is parsed every time, its recommended to cache it for use - instead of calling this method repeatedly. It should be considered read-only.""" + instead of calling this method repeatedly. It should be considered read-only. 
+ """ return RefLog.from_file(RefLog.path(self)) def log_append(self, oldbinsha, message, newbinsha=None): @@ -397,9 +408,13 @@ def log_append(self, oldbinsha, message, newbinsha=None): except ValueError: committer_or_reader = self.repo.config_reader() # end handle newly cloned repositories - return RefLog.append_entry(committer_or_reader, RefLog.path(self), oldbinsha, - (newbinsha is None and self.commit.binsha) or newbinsha, - message) + return RefLog.append_entry( + committer_or_reader, + RefLog.path(self), + oldbinsha, + (newbinsha is None and self.commit.binsha) or newbinsha, + message, + ) def log_entry(self, index): """:return: RefLogEntry at the given index @@ -421,7 +436,7 @@ def to_full_path(cls, path): if not cls._common_path_default: return full_ref_path if not path.startswith(cls._common_path_default + "/"): - full_ref_path = '%s/%s' % (cls._common_path_default, path) + full_ref_path = "%s/%s" % (cls._common_path_default, path) return full_ref_path @classmethod @@ -443,7 +458,7 @@ def delete(cls, repo, path): # check packed refs pack_file_path = cls._get_packed_refs_path(repo) try: - with open(pack_file_path, 'rb') as reader: + with open(pack_file_path, "rb") as reader: new_lines = [] made_change = False dropped_last_line = False @@ -453,8 +468,11 @@ def delete(cls, repo, path): # If we deleted the last line and this one is a tag-reference object, # we drop it as well line = line.decode(defenc) - if (line.startswith('#') or full_ref_path not in line) and \ - (not dropped_last_line or dropped_last_line and not line.startswith('^')): + if (line.startswith("#") or full_ref_path not in line) and ( + not dropped_last_line + or dropped_last_line + and not line.startswith("^") + ): new_lines.append(line) dropped_last_line = False continue @@ -468,8 +486,8 @@ def delete(cls, repo, path): if made_change: # write-binary is required, otherwise windows will # open the file in text mode and change LF to CRLF ! 
- with open(pack_file_path, 'wb') as fd: - fd.writelines(l.encode(defenc) for l in new_lines) + with open(pack_file_path, "wb") as fd: + fd.writelines(i.encode(defenc) for i in new_lines) except (OSError, IOError): pass # it didn't exist at all @@ -502,11 +520,13 @@ def _create(cls, repo, path, resolve, reference, force, logmsg=None): target_data = target.path if not resolve: target_data = "ref: " + target_data - with open(abs_ref_path, 'rb') as fd: + with open(abs_ref_path, "rb") as fd: existing_data = fd.read().decode(defenc).strip() if existing_data != target_data: - raise OSError("Reference at %r does already exist, pointing to %r, requested was %r" % - (full_ref_path, existing_data, target_data)) + raise OSError( + "Reference at %r does already exist, pointing to %r, requested was %r" + % (full_ref_path, existing_data, target_data) + ) # END no force handling ref = cls(repo, full_ref_path) @@ -514,7 +534,7 @@ def _create(cls, repo, path, resolve, reference, force, logmsg=None): return ref @classmethod - def create(cls, repo, path, reference='HEAD', force=False, logmsg=None): + def create(cls, repo, path, reference="HEAD", force=False, logmsg=None): """Create a new symbolic reference, hence a reference pointing to another reference. :param repo: @@ -543,7 +563,9 @@ def create(cls, repo, path, reference='HEAD', force=False, logmsg=None): already exists. :note: This does not alter the current HEAD, index or Working Tree""" - return cls._create(repo, path, cls._resolve_ref_on_create, reference, force, logmsg) + return cls._create( + repo, path, cls._resolve_ref_on_create, reference, force, logmsg + ) def rename(self, new_path, force=False): """Rename self to a new path @@ -558,7 +580,8 @@ def rename(self, new_path, force=False): already exists. 
It will be overwritten in that case :return: self - :raise OSError: In case a file at path but a different contents already exists """ + :raise OSError: In case a file at path but a different contents already exists + """ new_path = self.to_full_path(new_path) if self.path == new_path: return self @@ -568,9 +591,9 @@ def rename(self, new_path, force=False): if osp.isfile(new_abs_path): if not force: # if they point to the same file, its not an error - with open(new_abs_path, 'rb') as fd1: + with open(new_abs_path, "rb") as fd1: f1 = fd1.read().strip() - with open(cur_abs_path, 'rb') as fd2: + with open(cur_abs_path, "rb") as fd2: f2 = fd2.read().strip() if f1 != f2: raise OSError("File at path %r already exists" % new_abs_path) @@ -598,18 +621,22 @@ def _iter_items(cls, repo, common_path=None): # walk loose refs # Currently we do not follow links - for root, dirs, files in os.walk(join_path_native(repo.common_dir, common_path)): - if 'refs' not in root.split(os.sep): # skip non-refs subfolders - refs_id = [d for d in dirs if d == 'refs'] + for root, dirs, files in os.walk( + join_path_native(repo.common_dir, common_path) + ): + if "refs" not in root.split(os.sep): # skip non-refs subfolders + refs_id = [d for d in dirs if d == "refs"] if refs_id: - dirs[0:] = ['refs'] + dirs[0:] = ["refs"] # END prune non-refs folders for f in files: - if f == 'packed-refs': + if f == "packed-refs": continue abs_path = to_native_path_linux(join_path(root, f)) - rela_paths.add(abs_path.replace(to_native_path_linux(repo.common_dir) + '/', "")) + rela_paths.add( + abs_path.replace(to_native_path_linux(repo.common_dir) + "/", "") + ) # END for each file in root directory # END for each directory to walk @@ -645,8 +672,13 @@ def iter_items(cls, repo, common_path=None): ref which is not detached and pointing to a valid ref List is lexicographically sorted - The returned objects represent actual subclasses, such as Head or TagReference""" - return (r for r in cls._iter_items(repo, 
common_path) if r.__class__ == SymbolicReference or not r.is_detached) + The returned objects represent actual subclasses, such as Head or TagReference + """ + return ( + r + for r in cls._iter_items(repo, common_path) + if r.__class__ == SymbolicReference or not r.is_detached + ) @classmethod def from_path(cls, repo, path): @@ -662,7 +694,15 @@ def from_path(cls, repo, path): # Names like HEAD are inserted after the refs module is imported - we have an import dependency # cycle and don't want to import these names in-function from . import HEAD, Head, RemoteReference, TagReference, Reference - for ref_type in (HEAD, Head, RemoteReference, TagReference, Reference, SymbolicReference): + + for ref_type in ( + HEAD, + Head, + RemoteReference, + TagReference, + Reference, + SymbolicReference, + ): try: instance = ref_type(repo, path) if instance.__class__ == SymbolicReference and instance.is_detached: @@ -672,7 +712,9 @@ def from_path(cls, repo, path): pass # END exception handling # END for each type to try - raise ValueError("Could not find reference type suitable to handle path %r" % path) + raise ValueError( + "Could not find reference type suitable to handle path %r" % path + ) def is_remote(self): """:return: True if this symbolic reference points to a remote branch""" diff --git a/git/remote.py b/git/remote.py index 4f32540f0..7ac212090 100644 --- a/git/remote.py +++ b/git/remote.py @@ -398,6 +398,23 @@ class Remote(LazyMixin, Iterable): __slots__ = ("repo", "name", "_config_reader") _id_attribute_ = "name" + unsafe_git_fetch_options = [ + # This option allows users to execute arbitrary commands. + # https://git-scm.com/docs/git-fetch#Documentation/git-fetch.txt---upload-packltupload-packgt + "--upload-pack", + ] + unsafe_git_pull_options = [ + # This option allows users to execute arbitrary commands. 
+ # https://git-scm.com/docs/git-pull#Documentation/git-pull.txt---upload-packltupload-packgt + "--upload-pack" + ] + unsafe_git_push_options = [ + # This option allows users to execute arbitrary commands. + # https://git-scm.com/docs/git-push#Documentation/git-push.txt---execltgit-receive-packgt + "--receive-pack", + "--exec", + ] + def __init__(self, repo, name): """Initialize a remote instance @@ -483,7 +500,13 @@ def iter_items(cls, repo): yield Remote(repo, section[lbound + 1:rbound]) # END for each configuration section - def set_url(self, new_url, old_url=None, **kwargs): + def set_url( + self, + new_url, + old_url=None, + allow_unsafe_protocols=False, + **kwargs + ): """Configure URLs on current remote (cf command git remote set_url) This command manages URLs on the remote. @@ -492,15 +515,23 @@ def set_url(self, new_url, old_url=None, **kwargs): :param old_url: when set, replaces this URL with new_url for the remote :return: self """ + if not allow_unsafe_protocols: + Git.check_unsafe_protocols(new_url) + scmd = 'set-url' kwargs['insert_kwargs_after'] = scmd if old_url: - self.repo.git.remote(scmd, self.name, new_url, old_url, **kwargs) + self.repo.git.remote(scmd, "--", self.name, new_url, old_url, **kwargs) else: - self.repo.git.remote(scmd, self.name, new_url, **kwargs) + self.repo.git.remote(scmd, "--", self.name, new_url, **kwargs) return self - def add_url(self, url, **kwargs): + def add_url( + self, + url, + allow_unsafe_protocols=False, + **kwargs + ): """Adds a new url on current remote (special case of git remote set_url) This command adds new URLs to a given remote, making it possible to have @@ -509,7 +540,7 @@ def add_url(self, url, **kwargs): :param url: string being the URL to add as an extra remote URL :return: self """ - return self.set_url(url, add=True) + return self.set_url(url, add=True, allow_unsafe_protocols=allow_unsafe_protocols) def delete_url(self, url, **kwargs): """Deletes a new url on current remote (special case of git remote 
set_url) @@ -598,7 +629,7 @@ def stale_refs(self): return out_refs @classmethod - def create(cls, repo, name, url, **kwargs): + def create(cls, repo, name, url, allow_unsafe_protocols=False, **kwargs): """Create a new remote to the given repository :param repo: Repository instance that is to receive the new remote :param name: Desired name of the remote @@ -608,7 +639,10 @@ def create(cls, repo, name, url, **kwargs): :raise GitCommandError: in case an origin with that name already exists""" scmd = 'add' kwargs['insert_kwargs_after'] = scmd - repo.git.remote(scmd, name, Git.polish_url(url), **kwargs) + url = Git.polish_url(url) + if not allow_unsafe_protocols: + Git.check_unsafe_protocols(url) + repo.git.remote(scmd, "--", name, url, **kwargs) return cls(repo, name) # add is an alias @@ -684,7 +718,7 @@ def _get_fetch_info_from_stderr(self, proc, progress): # read head information with open(osp.join(self.repo.common_dir, 'FETCH_HEAD'), 'rb') as fp: - fetch_head_info = [l.decode(defenc) for l in fp.readlines()] + fetch_head_info = [i.decode(defenc) for i in fp.readlines()] l_fil = len(fetch_info_lines) l_fhi = len(fetch_head_info) @@ -749,7 +783,14 @@ def _assert_refspec(self): finally: config.release() - def fetch(self, refspec=None, progress=None, **kwargs): + def fetch( + self, + refspec=None, + progress=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, + **kwargs + ): """Fetch the latest changes for this remote :param refspec: @@ -785,14 +826,29 @@ def fetch(self, refspec=None, progress=None, **kwargs): else: args = [refspec] - proc = self.repo.git.fetch(self, *args, as_process=True, with_stdout=False, + if not allow_unsafe_protocols: + for ref in args: + if ref: + Git.check_unsafe_protocols(ref) + + if not allow_unsafe_options: + Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_fetch_options) + + proc = self.repo.git.fetch("--", self, *args, as_process=True, with_stdout=False, universal_newlines=True, v=True, 
**kwargs) res = self._get_fetch_info_from_stderr(proc, progress) if hasattr(self.repo.odb, 'update_cache'): self.repo.odb.update_cache() return res - def pull(self, refspec=None, progress=None, **kwargs): + def pull( + self, + refspec=None, + progress=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, + **kwargs + ): """Pull changes from the given branch, being the same as a fetch followed by a merge of branch with your local branch. @@ -804,14 +860,29 @@ def pull(self, refspec=None, progress=None, **kwargs): # No argument refspec, then ensure the repo's config has a fetch refspec. self._assert_refspec() kwargs = add_progress(kwargs, self.repo.git, progress) - proc = self.repo.git.pull(self, refspec, with_stdout=False, as_process=True, + + refspec = Git._Git__unpack_args(refspec or []) + if not allow_unsafe_protocols: + for ref in refspec: + Git.check_unsafe_protocols(ref) + + if not allow_unsafe_options: + Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_pull_options) + + proc = self.repo.git.pull("--", self, refspec, with_stdout=False, as_process=True, universal_newlines=True, v=True, **kwargs) res = self._get_fetch_info_from_stderr(proc, progress) if hasattr(self.repo.odb, 'update_cache'): self.repo.odb.update_cache() return res - def push(self, refspec=None, progress=None, **kwargs): + def push( + self, + refspec=None, + progress=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, + **kwargs): """Push changes from source branch in refspec to target branch in refspec. 
:param refspec: see 'fetch' method @@ -839,7 +910,16 @@ def push(self, refspec=None, progress=None, **kwargs): If the operation fails completely, the length of the returned IterableList will be null.""" kwargs = add_progress(kwargs, self.repo.git, progress) - proc = self.repo.git.push(self, refspec, porcelain=True, as_process=True, + + refspec = Git._Git__unpack_args(refspec or []) + if not allow_unsafe_protocols: + for ref in refspec: + Git.check_unsafe_protocols(ref) + + if not allow_unsafe_options: + Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=self.unsafe_git_push_options) + + proc = self.repo.git.push("--", self, refspec, porcelain=True, as_process=True, universal_newlines=True, **kwargs) return self._get_push_info(proc, progress) @@ -853,7 +933,7 @@ def config_reader(self): def _clear_cache(self): try: - del(self._config_reader) + del (self._config_reader) except AttributeError: pass # END handle exception diff --git a/git/repo/base.py b/git/repo/base.py index f35870803..8f81c2208 100644 --- a/git/repo/base.py +++ b/git/repo/base.py @@ -37,10 +37,9 @@ import gitdb try: - import pathlib + from pathlib import Path except ImportError: - pathlib = None - + from pathlib2 import Path log = logging.getLogger(__name__) @@ -79,6 +78,18 @@ class Repo(object): re_author_committer_start = re.compile(r'^(author|committer)') re_tab_full_line = re.compile(r'^\t(.*)$') + unsafe_git_clone_options = [ + # This option allows users to execute arbitrary commands. + # https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---upload-packltupload-packgt + "--upload-pack", + "-u", + # Users can override configuration variables + # like `protocol.allow` or `core.gitProxy` to execute arbitrary commands. 
+ # https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---configltkeygtltvaluegt + "--config", + "-c", + ] + # invariants # represents the configuration level of a configuration file config_level = ("system", "user", "global", "repository") @@ -931,7 +942,18 @@ def init(cls, path=None, mkdir=True, odbt=GitCmdObjectDB, expand_vars=True, **kw return cls(path, odbt=odbt) @classmethod - def _clone(cls, git, url, path, odb_default_type, progress, multi_options=None, **kwargs): + def _clone( + cls, + git, + url, + path, + odb_default_type, + progress, + multi_options=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, + **kwargs + ): if progress is not None: progress = to_progress_instance(progress) @@ -956,7 +978,15 @@ def _clone(cls, git, url, path, odb_default_type, progress, multi_options=None, multi = None if multi_options: multi = ' '.join(multi_options).split(' ') - proc = git.clone(multi, Git.polish_url(url), clone_path, with_extended_output=True, as_process=True, + + if not allow_unsafe_protocols: + Git.check_unsafe_protocols(str(url)) + if not allow_unsafe_options: + Git.check_unsafe_options(options=list(kwargs.keys()), unsafe_options=cls.unsafe_git_clone_options) + if not allow_unsafe_options and multi_options: + Git.check_unsafe_options(options=multi_options, unsafe_options=cls.unsafe_git_clone_options) + + proc = git.clone(multi, "--", Git.polish_url(url), clone_path, with_extended_output=True, as_process=True, v=True, universal_newlines=True, **add_progress(kwargs, git, progress)) if progress: handle_process_output(proc, None, progress.new_message_handler(), finalize_process, decode_streams=False) @@ -986,7 +1016,15 @@ def _clone(cls, git, url, path, odb_default_type, progress, multi_options=None, # END handle remote repo return repo - def clone(self, path, progress=None, multi_options=None, **kwargs): + def clone( + self, + path, + progress=None, + multi_options=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, 
+ **kwargs + ): """Create a clone from this repository. :param path: is the full path of the new repo (traditionally ends with ./.git). @@ -995,16 +1033,37 @@ option per list item which is passed exactly as specified to clone. For example ['--config core.filemode=false', '--config core.ignorecase', '--recurse-submodule=repo1_path', '--recurse-submodule=repo2_path'] + :param allow_unsafe_protocols: Allow unsafe protocols to be used, like ext :param kwargs: * odbt = ObjectDatabase Type, allowing to determine the object database implementation used by the returned Repo instance * All remaining keyword arguments are given to the git-clone command :return: ``git.Repo`` (the newly cloned repo)""" - return self._clone(self.git, self.common_dir, path, type(self.odb), progress, multi_options, **kwargs) + return self._clone( + self.git, + self.common_dir, + path, + type(self.odb), + progress, + multi_options, + allow_unsafe_protocols=allow_unsafe_protocols, + allow_unsafe_options=allow_unsafe_options, + **kwargs + ) @classmethod - def clone_from(cls, url, to_path, progress=None, env=None, multi_options=None, **kwargs): + def clone_from( + cls, + url, + to_path, + progress=None, + env=None, + multi_options=None, + allow_unsafe_protocols=False, + allow_unsafe_options=False, + **kwargs + ): """Create a clone from the given URL :param url: valid git url, see http://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS @@ -1013,11 +1072,22 @@ def clone_from(cls, url, to_path, progress=None, env=None, multi_options=None, * :param env: Optional dictionary containing the desired environment variables. 
:param mutli_options: See ``clone`` method :param kwargs: see the ``clone`` method + :param allow_unsafe_protocols: Allow unsafe protocols to be used, like ext :return: Repo instance pointing to the cloned directory""" git = Git(os.getcwd()) if env is not None: git.update_environment(**env) - return cls._clone(git, url, to_path, GitCmdObjectDB, progress, multi_options, **kwargs) + return cls._clone( + git, + url, + to_path, + GitCmdObjectDB, + progress, + multi_options, + allow_unsafe_protocols=allow_unsafe_protocols, + allow_unsafe_options=allow_unsafe_options, + **kwargs + ) def archive(self, ostream, treeish=None, prefix=None, **kwargs): """Archive the tree at the given revision. @@ -1044,7 +1114,7 @@ def archive(self, ostream, treeish=None, prefix=None, **kwargs): path = [path] # end assure paths is list - self.git.archive(treeish, *path, **kwargs) + self.git.archive("--", treeish, *path, **kwargs) return self def has_separate_working_tree(self): diff --git a/git/test/lib/helper.py b/git/test/lib/helper.py index 1c06010f4..7d5f9008e 100644 --- a/git/test/lib/helper.py +++ b/git/test/lib/helper.py @@ -16,6 +16,7 @@ import textwrap import time import unittest +import virtualenv from git.compat import string_types, is_win from git.util import rmtree, cwd @@ -36,7 +37,8 @@ __all__ = ( 'fixture_path', 'fixture', 'StringProcessAdapter', 'with_rw_directory', 'with_rw_repo', 'with_rw_and_rw_remote_repo', - 'TestBase', 'VirtualEnvironment', + 'TestCase', 'SkipTest', 'skipIf', 'GIT_REPO', 'GIT_DAEMON_PORT' ) @@ -83,13 +85,13 @@ def with_rw_directory(func): test succeeds, but leave it otherwise to aid additional debugging""" @wraps(func) - def wrapper(self): + def wrapper(self, *args, **kwargs): path = tempfile.mktemp(prefix=func.__name__) os.mkdir(path) keep = False try: try: - return func(self, path) + return func(self, path, *args, **kwargs) except Exception: log.info("Test %s.%s failed, output is at %r\n", type(self).__name__, func.__name__, path) 
@@ -379,3 +381,49 @@ def _make_file(self, rela_path, data, repo=None): with open(abs_path, "w") as fp: fp.write(data) return abs_path + + +class VirtualEnvironment: + """A newly created Python virtual environment for use in a test.""" + + __slots__ = ("_env_dir",) + + def __init__(self, env_dir, with_pip): + # On Python2 virtualenv the pip option and symlinks options aren't available + if os.name == "nt": + self._env_dir = osp.realpath(env_dir) + # venv.create(self.env_dir, symlinks=False, with_pip=with_pip) + virtualenv.cli_run([self.env_dir]) + else: + self._env_dir = env_dir + # venv.create(self.env_dir, symlinks=True, with_pip=with_pip) + virtualenv.cli_run([self.env_dir]) + + @property + def env_dir(self): + """The top-level directory of the environment.""" + return self._env_dir + + @property + def python(self): + """Path to the Python executable in the environment.""" + return self._executable("python") + + @property + def pip(self): + """Path to the pip executable in the environment, or RuntimeError if absent.""" + return self._executable("pip") + + @property + def sources(self): + """Path to a src directory in the environment, which may not exist yet.""" + return os.path.join(self.env_dir, "src") + + def _executable(self, basename): + if os.name == "nt": + path = osp.join(self.env_dir, "Scripts", basename + ".exe") + else: + path = osp.join(self.env_dir, "bin", basename) + if osp.isfile(path) or osp.islink(path): + return path + raise RuntimeError("no regular file or symlink " + str(path)) diff --git a/git/test/performance/test_streams.py b/git/test/performance/test_streams.py index 2e3772a02..5ff8fed18 100644 --- a/git/test/performance/test_streams.py +++ b/git/test/performance/test_streams.py @@ -106,7 +106,7 @@ def test_large_data_streaming(self, rwrepo): gitsha = proc.stdout.read().strip() proc.wait() gelapsed_add = time() - st - del(data) + del (data) assert gitsha == bin_to_hex(binsha) # we do it the same way, right ? 
# as its the same sha, we reuse our path diff --git a/git/test/test_git.py b/git/test/test_git.py index 4a189267e..8126244ac 100644 --- a/git/test/test_git.py +++ b/git/test/test_git.py @@ -4,10 +4,16 @@ # # This module is part of GitPython and is released under # the BSD License: http://www.opensource.org/licenses/bsd-license.php +from __future__ import print_function + +import contextlib import os +import shutil import subprocess import sys from tempfile import TemporaryFile +from backports.tempfile import TemporaryDirectory +import six from git import ( Git, @@ -38,8 +44,39 @@ except ImportError: import mock +try: + from pathlib import Path +except ImportError: + from pathlib2 import Path + from git.compat import is_win +@contextlib.contextmanager +def _chdir(new_dir): + """Context manager to temporarily change directory. Not reentrant.""" + old_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(old_dir) + + +@contextlib.contextmanager +def _patch_out_env(name): + try: + old_value = os.environ[name] + except KeyError: + old_value = None + else: + del os.environ[name] + try: + yield + finally: + if old_value is not None: + os.environ[name] = old_value + + class TestGit(TestBase): @@ -100,6 +137,24 @@ def test_it_transforms_kwargs_into_git_command_arguments(self): def test_it_executes_git_to_shell_and_returns_result(self): assert_match(r'^git version [\d\.]{2}.*$', self.git.execute(["git", "version"])) + def test_it_executes_git_not_from_cwd(self): + with TemporaryDirectory() as tmpdir: + if is_win: + # Copy an actual binary executable that is not git. + other_exe_path = os.path.join(os.getenv("WINDIR"), "system32", "hostname.exe") + impostor_path = os.path.join(tmpdir, "git.exe") + shutil.copy(other_exe_path, impostor_path) + else: + # Create a shell script that doesn't do anything. 
+ impostor_path = os.path.join(tmpdir, "git") + with open(impostor_path, mode="w") as file: + print("#!/bin/sh", file=file) + os.chmod(impostor_path, 0o755) + + with _chdir(tmpdir): + # six.assertRegex(self.git.execute(["git", "version"]).encode("UTF-8"), r"^git version\b") + self.assertRegexpMatches(self.git.execute(["git", "version"]), r"^git version\b") + def test_it_accepts_stdin(self): filename = fixture_path("cat_file_blob") with open(filename, 'r') as fh: @@ -287,7 +342,7 @@ def counter_stderr(line): stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, - creationflags=cmd.PROC_CREATIONFLAGS, + # creationflags=cmd.PROC_CREATIONFLAGS, ) handle_process_output(proc, counter_stdout, counter_stderr, finalize_process) diff --git a/git/test/test_index.py b/git/test/test_index.py index a30d314b5..4738815f3 100644 --- a/git/test/test_index.py +++ b/git/test/test_index.py @@ -212,7 +212,7 @@ def test_index_file_from_tree(self, rw_repo): assert unmerged_blob_map # pick the first blob at the first stage we find and use it as resolved version - three_way_index.resolve_blobs(l[0][1] for l in unmerged_blob_map.values()) + three_way_index.resolve_blobs(i[0][1] for i in unmerged_blob_map.values()) tree = three_way_index.write_tree() assert isinstance(tree, Tree) num_blobs = 0 diff --git a/git/test/test_refs.py b/git/test/test_refs.py index 348c3d482..b73cb64fb 100644 --- a/git/test/test_refs.py +++ b/git/test/test_refs.py @@ -14,13 +14,10 @@ Commit, SymbolicReference, GitCommandError, - RefLog + RefLog, ) from git.objects.tag import TagObject -from git.test.lib import ( - TestBase, - with_rw_repo -) +from git.test.lib import TestBase, with_rw_repo from git.util import Actor import git.refs as refs @@ -32,7 +29,7 @@ class TestRefs(TestBase): def test_from_path(self): # should be able to create any reference directly for ref_type in (Reference, Head, TagReference, RemoteReference): - for name in ('rela_name', 'path/rela_name'): + for name in ("rela_name", 
"path/rela_name"): full_path = ref_type.to_full_path(name) instance = ref_type.from_path(self.rorepo, full_path) assert isinstance(instance, ref_type) @@ -54,7 +51,7 @@ def test_tag_base(self): tag_object_refs.append(tag) tagobj = tag.tag # have no dict - self.failUnlessRaises(AttributeError, setattr, tagobj, 'someattr', 1) + self.failUnlessRaises(AttributeError, setattr, tagobj, "someattr", 1) assert isinstance(tagobj, TagObject) assert tagobj.tag == tag.name assert isinstance(tagobj.tagger, Actor) @@ -63,18 +60,18 @@ def test_tag_base(self): assert tagobj.message assert tag.object == tagobj # can't assign the object - self.failUnlessRaises(AttributeError, setattr, tag, 'object', tagobj) + self.failUnlessRaises(AttributeError, setattr, tag, "object", tagobj) # END if we have a tag object # END for tag in repo-tags assert tag_object_refs - assert isinstance(self.rorepo.tags['0.1.5'], TagReference) + assert isinstance(self.rorepo.tags["0.1.5"], TagReference) def test_tags_author(self): tag = self.rorepo.tags[0] tagobj = tag.tag assert isinstance(tagobj.tagger, Actor) tagger_name = tagobj.tagger.name - assert tagger_name == 'Michael Trier' + assert tagger_name == "Michael Trier" def test_tags(self): # tag refs can point to tag objects or to commits @@ -92,7 +89,7 @@ def test_tags(self): assert len(s) == ref_count assert len(s | s) == ref_count - @with_rw_repo('HEAD', bare=False) + @with_rw_repo("HEAD", bare=False) def test_heads(self, rwrepo): for head in rwrepo.heads: assert head.name @@ -100,8 +97,8 @@ def test_heads(self, rwrepo): assert "refs/heads" in head.path prev_object = head.object cur_object = head.object - assert prev_object == cur_object # represent the same git object - assert prev_object is not cur_object # but are different instances + assert prev_object == cur_object # represent the same git object + assert prev_object is not cur_object # but are different instances with head.config_writer() as writer: tv = "testopt" @@ -119,17 +116,29 @@ def 
test_heads(self, rwrepo): assert head.tracking_branch() == remote_ref head.set_tracking_branch(None) assert head.tracking_branch() is None - - special_name = 'feature#123' - special_name_remote_ref = SymbolicReference.create(rwrepo, 'refs/remotes/origin/%s' % special_name) - gp_tracking_branch = rwrepo.create_head('gp_tracking#123') - special_name_remote_ref = rwrepo.remotes[0].refs[special_name] # get correct type + + special_name = "feature#123" + special_name_remote_ref = SymbolicReference.create( + rwrepo, "refs/remotes/origin/%s" % special_name + ) + gp_tracking_branch = rwrepo.create_head("gp_tracking#123") + special_name_remote_ref = rwrepo.remotes[0].refs[ + special_name + ] # get correct type gp_tracking_branch.set_tracking_branch(special_name_remote_ref) - assert gp_tracking_branch.tracking_branch().path == special_name_remote_ref.path - - git_tracking_branch = rwrepo.create_head('git_tracking#123') - rwrepo.git.branch('-u', special_name_remote_ref.name, git_tracking_branch.name) - assert git_tracking_branch.tracking_branch().name == special_name_remote_ref.name + assert ( + gp_tracking_branch.tracking_branch().path + == special_name_remote_ref.path + ) + + git_tracking_branch = rwrepo.create_head("git_tracking#123") + rwrepo.git.branch( + "-u", special_name_remote_ref.name, git_tracking_branch.name + ) + assert ( + git_tracking_branch.tracking_branch().name + == special_name_remote_ref.name + ) # END for each head # verify REFLOG gets altered @@ -139,7 +148,7 @@ def test_heads(self, rwrepo): pcommit = cur_head.commit.parents[0].parents[0] hlog_len = len(head.log()) blog_len = len(cur_head.log()) - assert head.set_reference(pcommit, 'detached head') is head + assert head.set_reference(pcommit, "detached head") is head # one new log-entry thlog = head.log() assert len(thlog) == hlog_len + 1 @@ -150,23 +159,25 @@ def test_heads(self, rwrepo): assert len(cur_head.log()) == blog_len # head changes once again, cur_head doesn't change - 
head.set_reference(cur_head, 'reattach head') + head.set_reference(cur_head, "reattach head") assert len(head.log()) == hlog_len + 2 assert len(cur_head.log()) == blog_len # adjusting the head-ref also adjust the head, so both reflogs are # altered - cur_head.set_commit(pcommit, 'changing commit') + cur_head.set_commit(pcommit, "changing commit") assert len(cur_head.log()) == blog_len + 1 assert len(head.log()) == hlog_len + 3 # with automatic dereferencing - assert head.set_commit(cur_commit, 'change commit once again') is head + assert head.set_commit(cur_commit, "change commit once again") is head assert len(head.log()) == hlog_len + 4 assert len(cur_head.log()) == blog_len + 2 # a new branch has just a single entry - other_head = Head.create(rwrepo, 'mynewhead', pcommit, logmsg='new head created') + other_head = Head.create( + rwrepo, "mynewhead", pcommit, logmsg="new head created" + ) log = other_head.log() assert len(log) == 1 assert log[0].oldhexsha == pcommit.NULL_HEX_SHA @@ -179,21 +190,21 @@ def test_refs(self): assert len(types_found) >= 3 def test_is_valid(self): - assert not Reference(self.rorepo, 'refs/doesnt/exist').is_valid() + assert not Reference(self.rorepo, "refs/doesnt/exist").is_valid() assert self.rorepo.head.is_valid() assert self.rorepo.head.reference.is_valid() - assert not SymbolicReference(self.rorepo, 'hellothere').is_valid() + assert not SymbolicReference(self.rorepo, "hellothere").is_valid() def test_orig_head(self): assert type(self.rorepo.head.orig_head()) == SymbolicReference - @with_rw_repo('0.1.6') + @with_rw_repo("0.1.6") def test_head_checkout_detached_head(self, rw_repo): res = rw_repo.remotes.origin.refs.master.checkout() assert isinstance(res, SymbolicReference) - assert res.name == 'HEAD' + assert res.name == "HEAD" - @with_rw_repo('0.1.6') + @with_rw_repo("0.1.6") def test_head_reset(self, rw_repo): cur_head = rw_repo.head old_head_commit = cur_head.commit @@ -201,7 +212,9 @@ def test_head_reset(self, rw_repo): 
cur_head.reset(new_head_commit, index=True) # index only assert cur_head.reference.commit == new_head_commit - self.failUnlessRaises(ValueError, cur_head.reset, new_head_commit, index=False, working_tree=True) + self.failUnlessRaises( + ValueError, cur_head.reset, new_head_commit, index=False, working_tree=True + ) new_head_commit = new_head_commit.parents[0] cur_head.reset(new_head_commit, index=True, working_tree=True) # index + wt assert cur_head.reference.commit == new_head_commit @@ -211,7 +224,13 @@ def test_head_reset(self, rw_repo): cur_head.reset(cur_head, paths="test") cur_head.reset(new_head_commit, paths="lib") # hard resets with paths don't work, its all or nothing - self.failUnlessRaises(GitCommandError, cur_head.reset, new_head_commit, working_tree=True, paths="lib") + self.failUnlessRaises( + GitCommandError, + cur_head.reset, + new_head_commit, + working_tree=True, + paths="lib", + ) # we can do a mixed reset, and then checkout from the index though cur_head.reset(new_head_commit) @@ -251,7 +270,7 @@ def test_head_reset(self, rw_repo): self.failUnlessRaises(ValueError, setattr, cur_head, "reference", "that") # head handling - commit = 'HEAD' + commit = "HEAD" prev_head_commit = cur_head.commit for count, new_name in enumerate(("my_new_head", "feature/feature1")): actual_commit = commit + "^" * count @@ -263,7 +282,9 @@ def test_head_reset(self, rw_repo): Head.create(rw_repo, new_name, new_head.commit) # its not fine with a different value - self.failUnlessRaises(OSError, Head.create, rw_repo, new_name, new_head.commit.parents[0]) + self.failUnlessRaises( + OSError, Head.create, rw_repo, new_name, new_head.commit.parents[0] + ) # force it new_head = Head.create(rw_repo, new_name, actual_commit, force=True) @@ -272,7 +293,9 @@ def test_head_reset(self, rw_repo): assert new_head.rename("hello").name == "hello" assert new_head.rename("hello/world").name == "hello/world" - assert new_head.rename(old_name).name == old_name and new_head.path == old_path + 
assert ( + new_head.rename(old_name).name == old_name and new_head.path == old_path + ) # rename with force tmp_head = Head.create(rw_repo, "tmphead") @@ -326,7 +349,7 @@ def test_head_reset(self, rw_repo): remote_head_name = "HEAD" if remote_head_name in refs: RemoteReference.delete(rw_repo, refs[remote_head_name]) - del(refs[remote_head_name]) + del refs[remote_head_name] # END handle HEAD deletion RemoteReference.delete(rw_repo, *refs) @@ -354,13 +377,13 @@ def test_head_reset(self, rw_repo): # setting a non-commit as commit fails, but succeeds as object head_tree = head.commit.tree - self.failUnlessRaises(ValueError, setattr, head, 'commit', head_tree) - assert head.commit == old_commit # and the ref did not change + self.failUnlessRaises(ValueError, setattr, head, "commit", head_tree) + assert head.commit == old_commit # and the ref did not change # we allow heds to point to any object head.object = head_tree assert head.object == head_tree # cannot query tree as commit - self.failUnlessRaises(TypeError, getattr, head, 'commit') + self.failUnlessRaises(TypeError, getattr, head, "commit") # set the commit directly using the head. 
This would never detach the head assert not cur_head.is_detached @@ -392,25 +415,25 @@ def test_head_reset(self, rw_repo): # checkout with force as we have a changed a file # clear file - open(new_head.commit.tree.blobs[-1].abspath, 'w').close() + open(new_head.commit.tree.blobs[-1].abspath, "w").close() assert len(new_head.commit.diff(None)) # create a new branch that is likely to touch the file we changed - far_away_head = rw_repo.create_head("far_head", 'HEAD~100') + far_away_head = rw_repo.create_head("far_head", "HEAD~100") self.failUnlessRaises(GitCommandError, far_away_head.checkout) assert active_branch == active_branch.checkout(force=True) assert rw_repo.head.reference != far_away_head # test reference creation - partial_ref = 'sub/ref' - full_ref = 'refs/%s' % partial_ref + partial_ref = "sub/ref" + full_ref = "refs/%s" % partial_ref ref = Reference.create(rw_repo, partial_ref) assert ref.path == full_ref assert ref.object == rw_repo.head.commit - self.failUnlessRaises(OSError, Reference.create, rw_repo, full_ref, 'HEAD~20') + self.failUnlessRaises(OSError, Reference.create, rw_repo, full_ref, "HEAD~20") # it works if it is at the same spot though and points to the same reference - assert Reference.create(rw_repo, full_ref, 'HEAD').path == full_ref + assert Reference.create(rw_repo, full_ref, "HEAD").path == full_ref Reference.delete(rw_repo, full_ref) # recreate the reference using a full_ref @@ -419,13 +442,13 @@ def test_head_reset(self, rw_repo): assert ref.object == rw_repo.head.commit # recreate using force - ref = Reference.create(rw_repo, partial_ref, 'HEAD~1', force=True) + ref = Reference.create(rw_repo, partial_ref, "HEAD~1", force=True) assert ref.path == full_ref assert ref.object == rw_repo.head.commit.parents[0] # rename it orig_obj = ref.object - for name in ('refs/absname', 'rela_name', 'feature/rela_name'): + for name in ("refs/absname", "rela_name", "feature/rela_name"): ref_new_name = ref.rename(name) assert isinstance(ref_new_name, 
Reference) assert name in ref_new_name.path @@ -434,7 +457,9 @@ def test_head_reset(self, rw_repo): # END for each name type # References that don't exist trigger an error if we want to access them - self.failUnlessRaises(ValueError, getattr, Reference(rw_repo, "refs/doesntexist"), 'commit') + self.failUnlessRaises( + ValueError, getattr, Reference(rw_repo, "refs/doesntexist"), "commit" + ) # exists, fail unless we force ex_ref_path = far_away_head.path @@ -451,9 +476,17 @@ def test_head_reset(self, rw_repo): assert symref.path == symref_path assert symref.reference == cur_head.reference - self.failUnlessRaises(OSError, SymbolicReference.create, rw_repo, symref_path, cur_head.reference.commit) + self.failUnlessRaises( + OSError, + SymbolicReference.create, + rw_repo, + symref_path, + cur_head.reference.commit, + ) # it works if the new ref points to the same reference - SymbolicReference.create(rw_repo, symref.path, symref.reference).path == symref.path # @NoEffect + SymbolicReference.create( + rw_repo, symref.path, symref.reference + ).path == symref.path # @NoEffect SymbolicReference.delete(rw_repo, symref) # would raise if the symref wouldn't have been deletedpbl symref = SymbolicReference.create(rw_repo, symref_path, cur_head.reference) @@ -471,7 +504,7 @@ def test_head_reset(self, rw_repo): assert osp.isfile(symbol_ref_abspath) assert symref.commit == new_head.commit - for name in ('absname', 'folder/rela_name'): + for name in ("absname", "folder/rela_name"): symref_new_name = symref.rename(name) assert isinstance(symref_new_name, SymbolicReference) assert name in symref_new_name.path @@ -520,7 +553,7 @@ def test_head_reset(self, rw_repo): rw_repo.head.reference = Head.create(rw_repo, "master") # At least the head should still exist - assert osp.isfile(osp.join(rw_repo.git_dir, 'HEAD')) + assert osp.isfile(osp.join(rw_repo.git_dir, "HEAD")) refs = list(SymbolicReference.iter_items(rw_repo)) assert len(refs) == 1 @@ -541,7 +574,7 @@ def test_head_reset(self, 
rw_repo): # if the assignment raises, the ref doesn't exist Reference.delete(ref.repo, ref.path) assert not ref.is_valid() - self.failUnlessRaises(ValueError, setattr, ref, 'commit', "nonsense") + self.failUnlessRaises(ValueError, setattr, ref, "commit", "nonsense") assert not ref.is_valid() # I am sure I had my reason to make it a class method at first, but @@ -555,14 +588,14 @@ def test_head_reset(self, rw_repo): Reference.delete(ref.repo, ref.path) assert not ref.is_valid() - self.failUnlessRaises(ValueError, setattr, ref, 'object', "nonsense") + self.failUnlessRaises(ValueError, setattr, ref, "object", "nonsense") assert not ref.is_valid() # END for each path def test_dereference_recursive(self): # for now, just test the HEAD - assert SymbolicReference.dereference_recursive(self.rorepo, 'HEAD') + assert SymbolicReference.dereference_recursive(self.rorepo, "HEAD") def test_reflog(self): assert isinstance(self.rorepo.heads.master.log(), RefLog) diff --git a/git/test/test_remote.py b/git/test/test_remote.py index 99949b9ea..ce5abad4f 100644 --- a/git/test/test_remote.py +++ b/git/test/test_remote.py @@ -19,9 +19,15 @@ RemoteReference, TagReference, Remote, - GitCommandError + GitCommandError, ) from git.cmd import Git +try: + from pathlib import Path +except ImportError: + from pathlib2 import Path + +from git.exc import UnsafeOptionError, UnsafeProtocolError from git.compat import string_types from git.test.lib import ( TestBase, @@ -29,7 +35,7 @@ with_rw_and_rw_remote_repo, fixture, GIT_DAEMON_PORT, - assert_raises + assert_raises, ) from git.util import IterableList, rmtree, HIDE_WINDOWS_FREEZE_ERRORS import os.path as osp @@ -40,7 +46,7 @@ class TestRemoteProgress(RemoteProgress): - __slots__ = ("_seen_lines", "_stages_per_op", '_num_progress_messages') + __slots__ = ("_seen_lines", "_stages_per_op", "_num_progress_messages") def __init__(self): super(TestRemoteProgress, self).__init__() @@ -62,21 +68,27 @@ def line_dropped(self, line): except ValueError: pass 
- def update(self, op_code, cur_count, max_count=None, message=''): + def update(self, op_code, cur_count, max_count=None, message=""): # check each stage only comes once op_id = op_code & self.OP_MASK assert op_id in (self.COUNTING, self.COMPRESSING, self.WRITING) if op_code & self.WRITING > 0: if op_code & self.BEGIN > 0: - assert not message, 'should not have message when remote begins writing' + assert not message, "should not have message when remote begins writing" elif op_code & self.END > 0: assert message - assert not message.startswith(', '), "Sanitize progress messages: '%s'" % message - assert not message.endswith(', '), "Sanitize progress messages: '%s'" % message + assert not message.startswith(", "), ( + "Sanitize progress messages: '%s'" % message + ) + assert not message.endswith(", "), ( + "Sanitize progress messages: '%s'" % message + ) self._stages_per_op.setdefault(op_id, 0) - self._stages_per_op[op_id] = self._stages_per_op[op_id] | (op_code & self.STAGE_MASK) + self._stages_per_op[op_id] = self._stages_per_op[op_id] | ( + op_code & self.STAGE_MASK + ) if op_code & (self.WRITING | self.END) == (self.WRITING | self.END): assert message @@ -106,6 +118,7 @@ class TestRemote(TestBase): def tearDown(self): import gc + gc.collect() def _print_fetchhead(self, repo): @@ -139,7 +152,11 @@ def _do_test_push_result(self, results, remote): self.assertIsInstance(info.old_commit, Commit) if info.flags & info.ERROR: has_one = False - for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE): + for bitflag in ( + info.REJECTED, + info.REMOTE_REJECTED, + info.REMOTE_FAILURE, + ): has_one |= bool(info.flags & bitflag) # END for each bitflag self.assertTrue(has_one) @@ -154,15 +171,22 @@ def _do_test_push_result(self, results, remote): # END for each info def _do_test_fetch_info(self, repo): - self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "nonsense", '') + self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "nonsense", 
"") self.failUnlessRaises( - ValueError, FetchInfo._from_line, repo, "? [up to date] 0.1.7RC -> origin/0.1.7RC", '') + ValueError, + FetchInfo._from_line, + repo, + "? [up to date] 0.1.7RC -> origin/0.1.7RC", + "", + ) def _commit_random_file(self, repo): # Create a file with a random name and random data and commit it to repo. # Return the committed absolute file path index = repo.index - new_file = self._make_file(osp.basename(tempfile.mktemp()), str(random.random()), repo) + new_file = self._make_file( + osp.basename(tempfile.mktemp()), str(random.random()), repo + ) index.add([new_file]) index.commit("Committing %s" % new_file) return new_file @@ -173,11 +197,12 @@ def _do_test_fetch(self, remote, rw_repo, remote_repo): def fetch_and_test(remote, **kwargs): progress = TestRemoteProgress() - kwargs['progress'] = progress + kwargs["progress"] = progress res = remote.fetch(**kwargs) progress.make_assertion() self._do_test_fetch_result(res, remote) return res + # END fetch and check def get_info(res, remote, name): @@ -197,7 +222,7 @@ def get_info(res, remote, name): remote_commit = rhead.commit rhead.reset("HEAD~2", index=False) res = fetch_and_test(remote) - mkey = "%s/%s" % (remote, 'master') + mkey = "%s/%s" % (remote, "master") master_info = res[mkey] self.assertTrue(master_info.flags & FetchInfo.FORCED_UPDATE) self.assertIsNotNone(master_info.note) @@ -234,10 +259,10 @@ def get_info(res, remote, name): # test single branch fetch with refspec including target remote res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master" % remote) self.assertEqual(len(res), 1) - self.assertTrue(get_info(res, remote, 'master')) + self.assertTrue(get_info(res, remote, "master")) # ... with respec and no target - res = fetch_and_test(remote, refspec='master') + res = fetch_and_test(remote, refspec="master") self.assertEqual(len(res), 1) # ... multiple refspecs ... 
works, but git command returns with error if one ref is wrong without @@ -279,8 +304,12 @@ def get_info(res, remote, name): # must clone with a local path for the repo implementation not to freak out # as it wants local paths only ( which I can understand ) other_repo = remote_repo.clone(other_repo_dir, shared=False) - remote_repo_url = osp.basename(remote_repo.git_dir) # git-daemon runs with appropriate `--base-path`. - remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, remote_repo_url)) + remote_repo_url = osp.basename( + remote_repo.git_dir + ) # git-daemon runs with appropriate `--base-path`. + remote_repo_url = Git.polish_url( + "git://localhost:%s/%s" % (GIT_DAEMON_PORT, remote_repo_url) + ) # put origin to git-url other_origin = other_repo.remotes.origin @@ -314,7 +343,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): except AttributeError: # if the author is on a non-master branch, the clones might not have # a local master yet. We simply create it - lhead.reference = rw_repo.create_head('master') + lhead.reference = rw_repo.create_head("master") # END master handling lhead.reset(remote.refs.master, working_tree=True) @@ -338,7 +367,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): self._do_test_push_result(res, remote) # force rejected pull - res = remote.push('+%s' % lhead.reference) + res = remote.push("+%s" % lhead.reference) self.assertEqual(res[0].flags & PushInfo.ERROR, 0) self.assertTrue(res[0].flags & PushInfo.FORCED_UPDATE) self._do_test_push_result(res, remote) @@ -350,7 +379,9 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): progress = TestRemoteProgress() to_be_updated = "my_tag.1.0RV" new_tag = TagReference.create(rw_repo, to_be_updated) # @UnusedVariable - other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", message="my message") + other_tag = TagReference.create( + rw_repo, "my_obj_tag.2.1aRV", message="my message" + ) res = remote.push(progress=progress, 
tags=True) self.assertTrue(res[-1].flags & PushInfo.NEW_TAG) progress.make_assertion() @@ -358,7 +389,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): # update push new tags # Rejection is default - new_tag = TagReference.create(rw_repo, to_be_updated, ref='HEAD~1', force=True) + new_tag = TagReference.create(rw_repo, to_be_updated, ref="HEAD~1", force=True) res = remote.push(tags=True) self._do_test_push_result(res, remote) self.assertTrue(res[-1].flags & PushInfo.REJECTED) @@ -396,7 +427,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): res = remote.push(all=True) self._do_test_push_result(res, remote) - remote.pull('master') + remote.pull("master") # cleanup - delete created tags and branches as we are in an innerloop on # the same repository @@ -404,7 +435,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): remote.push(":%s" % other_tag.path) @skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes!") - @with_rw_and_rw_remote_repo('0.1.6') + @with_rw_and_rw_remote_repo("0.1.6") def test_base(self, rw_repo, remote_repo): num_remotes = 0 remote_set = set() @@ -473,7 +504,7 @@ def test_base(self, rw_repo, remote_repo): self.assertTrue(num_remotes) self.assertEqual(num_remotes, len(remote_set)) - origin = rw_repo.remote('origin') + origin = rw_repo.remote("origin") assert origin == rw_repo.remotes.origin # Verify we can handle prunes when fetching @@ -486,15 +517,19 @@ def test_base(self, rw_repo, remote_repo): num_deleted = False for branch in remote_repo.heads: - if branch.name != 'master': + if branch.name != "master": branch.delete(remote_repo, branch, force=True) num_deleted += 1 # end # end for each branch self.assertGreater(num_deleted, 0) - self.assertEqual(len(rw_repo.remotes.origin.fetch(prune=True)), 1, "deleted everything but master") + self.assertEqual( + len(rw_repo.remotes.origin.fetch(prune=True)), + 1, + "deleted everything but master", + ) - @with_rw_repo('HEAD', bare=True) + @with_rw_repo("HEAD", 
bare=True) def test_creation_and_removal(self, bare_rw_repo): new_name = "test_new_one" arg_list = (new_name, "git@server:hello.git") @@ -507,7 +542,9 @@ def test_creation_and_removal(self, bare_rw_repo): self.failUnlessRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list) Remote.remove(bare_rw_repo, new_name) - self.assertTrue(remote.exists()) # We still have a cache that doesn't know we were deleted by name + self.assertTrue( + remote.exists() + ) # We still have a cache that doesn't know we were deleted by name remote._clear_cache() assert not remote.exists() # Cache should be renewed now. This is an issue ... @@ -518,86 +555,108 @@ def test_creation_and_removal(self, bare_rw_repo): # END for each remote # Issue #262 - the next call would fail if bug wasn't fixed - bare_rw_repo.create_remote('bogus', '/bogus/path', mirror='push') + bare_rw_repo.create_remote("bogus", "/bogus/path", mirror="push") def test_fetch_info(self): # assure we can handle remote-tracking branches - fetch_info_line_fmt = "c437ee5deb8d00cf02f03720693e4c802e99f390 not-for-merge %s '0.3' of " + fetch_info_line_fmt = ( + "c437ee5deb8d00cf02f03720693e4c802e99f390 not-for-merge %s '0.3' of " + ) fetch_info_line_fmt += "git://github.com/gitpython-developers/GitPython" remote_info_line_fmt = "* [new branch] nomatter -> %s" - self.failUnlessRaises(ValueError, FetchInfo._from_line, self.rorepo, - remote_info_line_fmt % "refs/something/branch", - "269c498e56feb93e408ed4558c8138d750de8893\t\t/Users/ben/test/foo\n") - - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % "local/master", - fetch_info_line_fmt % 'remote-tracking branch') + self.failUnlessRaises( + ValueError, + FetchInfo._from_line, + self.rorepo, + remote_info_line_fmt % "refs/something/branch", + "269c498e56feb93e408ed4558c8138d750de8893\t\t/Users/ben/test/foo\n", + ) + + fi = FetchInfo._from_line( + self.rorepo, + remote_info_line_fmt % "local/master", + fetch_info_line_fmt % "remote-tracking branch", + ) assert 
not fi.ref.is_valid() self.assertEqual(fi.ref.name, "local/master") # handles non-default refspecs: One can specify a different path in refs/remotes # or a special path just in refs/something for instance - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % "subdir/tagname", - fetch_info_line_fmt % 'tag') + fi = FetchInfo._from_line( + self.rorepo, + remote_info_line_fmt % "subdir/tagname", + fetch_info_line_fmt % "tag", + ) self.assertIsInstance(fi.ref, TagReference) - assert fi.ref.path.startswith('refs/tags'), fi.ref.path + assert fi.ref.path.startswith("refs/tags"), fi.ref.path # it could be in a remote direcftory though - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % "remotename/tags/tagname", - fetch_info_line_fmt % 'tag') + fi = FetchInfo._from_line( + self.rorepo, + remote_info_line_fmt % "remotename/tags/tagname", + fetch_info_line_fmt % "tag", + ) self.assertIsInstance(fi.ref, TagReference) - assert fi.ref.path.startswith('refs/remotes/'), fi.ref.path + assert fi.ref.path.startswith("refs/remotes/"), fi.ref.path # it can also be anywhere ! 
tag_path = "refs/something/remotename/tags/tagname" - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % tag_path, - fetch_info_line_fmt % 'tag') + fi = FetchInfo._from_line( + self.rorepo, remote_info_line_fmt % tag_path, fetch_info_line_fmt % "tag" + ) self.assertIsInstance(fi.ref, TagReference) self.assertEqual(fi.ref.path, tag_path) # branches default to refs/remotes - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % "remotename/branch", - fetch_info_line_fmt % 'branch') + fi = FetchInfo._from_line( + self.rorepo, + remote_info_line_fmt % "remotename/branch", + fetch_info_line_fmt % "branch", + ) self.assertIsInstance(fi.ref, RemoteReference) - self.assertEqual(fi.ref.remote_name, 'remotename') + self.assertEqual(fi.ref.remote_name, "remotename") # but you can force it anywhere, in which case we only have a references - fi = FetchInfo._from_line(self.rorepo, - remote_info_line_fmt % "refs/something/branch", - fetch_info_line_fmt % 'branch') + fi = FetchInfo._from_line( + self.rorepo, + remote_info_line_fmt % "refs/something/branch", + fetch_info_line_fmt % "branch", + ) assert type(fi.ref) is Reference, type(fi.ref) self.assertEqual(fi.ref.path, "refs/something/branch") def test_uncommon_branch_names(self): - stderr_lines = fixture('uncommon_branch_prefix_stderr').decode('ascii').splitlines() - fetch_lines = fixture('uncommon_branch_prefix_FETCH_HEAD').decode('ascii').splitlines() + stderr_lines = ( + fixture("uncommon_branch_prefix_stderr").decode("ascii").splitlines() + ) + fetch_lines = ( + fixture("uncommon_branch_prefix_FETCH_HEAD").decode("ascii").splitlines() + ) # The contents of the files above must be fetched with a custom refspec: # +refs/pull/*:refs/heads/pull/* - res = [FetchInfo._from_line('ShouldntMatterRepo', stderr, fetch_line) - for stderr, fetch_line in zip(stderr_lines, fetch_lines)] + res = [ + FetchInfo._from_line("ShouldntMatterRepo", stderr, fetch_line) + for stderr, fetch_line in zip(stderr_lines, 
fetch_lines) + ] self.assertGreater(len(res), 0) - self.assertEqual(res[0].remote_ref_path, 'refs/pull/1/head') - self.assertEqual(res[0].ref.path, 'refs/heads/pull/1/head') + self.assertEqual(res[0].remote_ref_path, "refs/pull/1/head") + self.assertEqual(res[0].ref.path, "refs/heads/pull/1/head") self.assertIsInstance(res[0].ref, Head) - @with_rw_repo('HEAD', bare=False) + @with_rw_repo("HEAD", bare=False) def test_multiple_urls(self, rw_repo): # test addresses - test1 = 'https://github.com/gitpython-developers/GitPython' - test2 = 'https://github.com/gitpython-developers/gitdb' - test3 = 'https://github.com/gitpython-developers/smmap' + test1 = "https://github.com/gitpython-developers/GitPython" + test2 = "https://github.com/gitpython-developers/gitdb" + test3 = "https://github.com/gitpython-developers/smmap" remote = rw_repo.remotes[0] # Testing setting a single URL @@ -623,7 +682,7 @@ def test_multiple_urls(self, rw_repo): assert_raises(GitCommandError, remote.set_url, test2, add=True, delete=True) # Testing on another remote, with the add/delete URL - remote = rw_repo.create_remote('another', url=test1) + remote = rw_repo.create_remote("another", url=test1) remote.add_url(test2) self.assertEqual(list(remote.urls), [test1, test2]) remote.add_url(test3) @@ -637,12 +696,274 @@ def test_multiple_urls(self, rw_repo): assert_raises(GitCommandError, remote.delete_url, test3) def test_fetch_error(self): - rem = self.rorepo.remote('origin') - with self.assertRaisesRegex(GitCommandError, "[Cc]ouldn't find remote ref __BAD_REF__"): - rem.fetch('__BAD_REF__') + rem = self.rorepo.remote("origin") + with self.assertRaisesRegex( + GitCommandError, "[Cc]ouldn't find remote ref __BAD_REF__" + ): + rem.fetch("__BAD_REF__") - @with_rw_repo('0.1.6', bare=False) + @with_rw_repo("0.1.6", bare=False) def test_push_error(self, repo): - rem = repo.remote('origin') - with self.assertRaisesRegex(GitCommandError, "src refspec __BAD_REF__ does not match any"): - rem.push('__BAD_REF__') + 
rem = repo.remote("origin") + with self.assertRaisesRegex( + GitCommandError, "src refspec __BAD_REF__ does not match any" + ): + rem.push("__BAD_REF__") + + @with_rw_repo("HEAD") + def test_set_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + remote.set_url(url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_set_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + remote.set_url(url, allow_unsafe_protocols=True) + assert list(remote.urls)[-1] == url + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_add_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + remote.add_url(url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_add_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + remote.add_url(url, allow_unsafe_protocols=True) + assert list(remote.urls)[-1] == url + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_create_remote_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + Remote.create(rw_repo, "origin", url) + assert not tmp_file.exists() + + 
@with_rw_repo("HEAD") + def test_create_remote_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for i, url in enumerate(urls): + remote = Remote.create( + rw_repo, "origin"+str(i), url, allow_unsafe_protocols=True + ) + assert remote.url == url + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_fetch_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + remote.fetch(url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_fetch_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. 
+ with self.assertRaises(GitCommandError): + remote.fetch(url, allow_unsafe_protocols=True) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_fetch_unsafe_options(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [{"upload-pack": "touch " + str(tmp_file)}] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + remote.fetch(**unsafe_option) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_fetch_unsafe_options_allowed(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [{"upload-pack": "touch " + str(tmp_file)}] + for unsafe_option in unsafe_options: + # The options will be allowed, but the command will fail. + assert not tmp_file.exists() + with self.assertRaises(GitCommandError): + remote.fetch(allow_unsafe_options=True, **unsafe_option) + assert tmp_file.exists() + + @with_rw_repo("HEAD") + def test_pull_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + remote.pull(url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_pull_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% "+str(tmp_file), + "fd::17/foo", + ] + for url in urls: + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. 
+ with self.assertRaises(GitCommandError): + remote.pull(url, allow_unsafe_protocols=True) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_pull_unsafe_options(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [{"upload-pack": "touch " + str(tmp_file)}] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + remote.pull(**unsafe_option) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_pull_unsafe_options_allowed(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [{"upload-pack": "touch " + str(tmp_file)}] + for unsafe_option in unsafe_options: + # The options will be allowed, but the command will fail. + assert not tmp_file.exists() + with self.assertRaises(GitCommandError): + remote.pull(allow_unsafe_options=True, **unsafe_option) + assert tmp_file.exists() + + @with_rw_repo("HEAD") + def test_push_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + remote.push(url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_push_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + remote = rw_repo.remote("origin") + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::17/foo", + ] + for url in urls: + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. 
+ with self.assertRaises(GitCommandError): + remote.push(url, allow_unsafe_protocols=True) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_push_unsafe_options(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [ + { + "receive-pack": "touch " + str(tmp_file), + "exec": "touch " + str(tmp_file), + } + ] + for unsafe_option in unsafe_options: + assert not tmp_file.exists() + with self.assertRaises(UnsafeOptionError): + remote.push(**unsafe_option) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_push_unsafe_options_allowed(self, rw_repo): + remote = rw_repo.remote("origin") + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [ + { + "receive-pack": "touch " + str(tmp_file), + "exec": "touch " + str(tmp_file), + } + ] + for unsafe_option in unsafe_options: + # The options will be allowed, but the command will fail. + assert not tmp_file.exists() + with self.assertRaises(GitCommandError): + remote.push(allow_unsafe_options=True, **unsafe_option) + assert tmp_file.exists() + tmp_file.unlink() diff --git a/git/test/test_repo.py b/git/test/test_repo.py index 0577bd589..a45d0dce7 100644 --- a/git/test/test_repo.py +++ b/git/test/test_repo.py @@ -34,7 +34,7 @@ GitCmdObjectDB, Remote, BadName, - GitCommandError + GitCommandError, ) from git.compat import ( PY3, @@ -44,6 +44,8 @@ ) from git.exc import ( BadObject, + UnsafeOptionError, + UnsafeProtocolError, ) from git.repo.fun import touch from git.test.lib import ( @@ -54,7 +56,7 @@ assert_false, assert_equal, assert_true, - raises + raises, ) from git.util import HIDE_WINDOWS_KNOWN_ERRORS, cygpath from git.test.lib import with_rw_directory @@ -74,7 +76,7 @@ def flatten(lol): return list(iter_flatten(lol)) -_tc_lock_fpaths = osp.join(osp.dirname(__file__), '../../.git/*.lock') +_tc_lock_fpaths = osp.join(osp.dirname(__file__), "../../.git/*.lock") def _rm_lock_files(): 
@@ -90,8 +92,9 @@ def setUp(self): def tearDown(self): for lfp in glob.glob(_tc_lock_fpaths): if osp.isfile(lfp): - raise AssertionError('Previous TC left hanging git-lock file: %s', lfp) + raise AssertionError("Previous TC left hanging git-lock file: %s", lfp) import gc + gc.collect() @raises(InvalidGitRepositoryError) @@ -102,15 +105,15 @@ def test_new_should_raise_on_invalid_repo_location(self): def test_new_should_raise_on_non_existent_path(self): Repo("repos/foobar") - @with_rw_repo('0.3.2.1') + @with_rw_repo("0.3.2.1") def test_repo_creation_from_different_paths(self, rw_repo): r_from_gitdir = Repo(rw_repo.git_dir) self.assertEqual(r_from_gitdir.git_dir, rw_repo.git_dir) - assert r_from_gitdir.git_dir.endswith('.git') - assert not rw_repo.git.working_dir.endswith('.git') + assert r_from_gitdir.git_dir.endswith(".git") + assert not rw_repo.git.working_dir.endswith(".git") self.assertEqual(r_from_gitdir.git.working_dir, rw_repo.git.working_dir) - @with_rw_repo('0.3.2.1') + @with_rw_repo("0.3.2.1") def test_repo_creation_pathlib(self, rw_repo): if pathlib is None: # pythons bellow 3.4 don't have pathlib raise SkipTest("pathlib was introduced in 3.4") @@ -134,33 +137,35 @@ def test_heads_should_populate_head_data(self): # END for each head self.assertIsInstance(self.rorepo.heads.master, Head) - self.assertIsInstance(self.rorepo.heads['master'], Head) + self.assertIsInstance(self.rorepo.heads["master"], Head) def test_tree_from_revision(self): - tree = self.rorepo.tree('0.1.6') + tree = self.rorepo.tree("0.1.6") self.assertEqual(len(tree.hexsha), 40) self.assertEqual(tree.type, "tree") self.assertEqual(self.rorepo.tree(tree), tree) # try from invalid revision that does not exist - self.failUnlessRaises(BadName, self.rorepo.tree, 'hello world') + self.failUnlessRaises(BadName, self.rorepo.tree, "hello world") def test_pickleable(self): pickle.loads(pickle.dumps(self.rorepo)) def test_commit_from_revision(self): - commit = self.rorepo.commit('0.1.4') - 
self.assertEqual(commit.type, 'commit') + commit = self.rorepo.commit("0.1.4") + self.assertEqual(commit.type, "commit") self.assertEqual(self.rorepo.commit(commit), commit) def test_commits(self): mc = 10 - commits = list(self.rorepo.iter_commits('0.1.6', max_count=mc)) + commits = list(self.rorepo.iter_commits("0.1.6", max_count=mc)) self.assertEqual(len(commits), mc) c = commits[0] - assert_equal('9a4b1d4d11eee3c5362a4152216376e634bd14cf', c.hexsha) - assert_equal(["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents]) + assert_equal("9a4b1d4d11eee3c5362a4152216376e634bd14cf", c.hexsha) + assert_equal( + ["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents] + ) assert_equal("ce41fc29549042f1aa09cc03174896cf23f112e3", c.tree.hexsha) assert_equal("Michael Trier", c.author.name) assert_equal("mtrier@gmail.com", c.author.email) @@ -178,7 +183,7 @@ def test_commits(self): def test_trees(self): mc = 30 num_trees = 0 - for tree in self.rorepo.iter_trees('0.1.5', max_count=mc): + for tree in self.rorepo.iter_trees("0.1.5", max_count=mc): num_trees += 1 self.assertIsInstance(tree, Tree) # END for each tree @@ -197,7 +202,7 @@ def _assert_empty_repo(self, repo): assert not repo.head.is_valid() # we can change the head to some other ref - head_ref = Head.from_path(repo, Head.to_full_path('some_head')) + head_ref = Head.from_path(repo, Head.to_full_path("some_head")) assert not head_ref.is_valid() repo.head.ref = head_ref @@ -216,7 +221,9 @@ def test_clone_from_keeps_env(self, rw_dir): original_repo = Repo.init(osp.join(rw_dir, "repo")) environment = {"entry1": "value", "another_entry": "10"} - cloned = Repo.clone_from(original_repo.git_dir, osp.join(rw_dir, "clone"), env=environment) + cloned = Repo.clone_from( + original_repo.git_dir, osp.join(rw_dir, "clone"), env=environment + ) assert_equal(environment, cloned.git.environment()) @@ -236,16 +243,220 @@ def test_clone_from_pathlib_withConfig(self, rw_dir): original_repo = 
Repo.init(osp.join(rw_dir, "repo")) - cloned = Repo.clone_from(original_repo.git_dir, pathlib.Path(rw_dir) / "clone_pathlib_withConfig", - multi_options=["--recurse-submodules=repo", - "--config core.filemode=false", - "--config submodule.repo.update=checkout"]) + cloned = Repo.clone_from( + original_repo.git_dir, + pathlib.Path(rw_dir) / "clone_pathlib_withConfig", + multi_options=[ + "--recurse-submodules=repo", + "--config core.filemode=false", + "--config submodule.repo.update=checkout", + ], + allow_unsafe_options=True, + ) + + assert_equal(cloned.config_reader().get_value("submodule", "active"), "repo") + assert_equal(cloned.config_reader().get_value("core", "filemode"), False) + assert_equal( + cloned.config_reader().get_value('submodule "repo"', "update"), "checkout" + ) + + @with_rw_repo("HEAD") + def test_clone_unsafe_options(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [ + "--upload-pack='touch " + str(tmp_file) + "'", + "-u 'touch " + str(tmp_file) + "'", + "--config=protocol.ext.allow=always", + "-c protocol.ext.allow=always", + ] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + rw_repo.clone(tmp_dir, multi_options=[unsafe_option]) + assert not tmp_file.exists() + + unsafe_options = [ + {"upload-pack": "touch "+str(tmp_file)}, + {"u": "touch "+str(tmp_file)}, + {"config": "protocol.ext.allow=always"}, + {"c": "protocol.ext.allow=always"}, + ] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + rw_repo.clone(tmp_dir, **unsafe_option) + assert not tmp_file.exists() + + # Gets an error about too many options + # @with_rw_repo("HEAD") + # def test_clone_unsafe_options_allowed(self, rw_repo): + # tmp_dir = pathlib.Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # 
"--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # ] + # for i, unsafe_option in enumerate(unsafe_options): + # destination = tmp_dir / str(i) + # assert not tmp_file.exists() + # # The options will be allowed, but the command will fail. + # with self.assertRaises(GitCommandError): + # rw_repo.clone( + # destination, + # multi_options=[unsafe_option], + # allow_unsafe_options=True, + # ) + # assert tmp_file.exists() + # tmp_file.unlink() + # unsafe_options = [ + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # for i, unsafe_option in enumerate(unsafe_options): + # destination = tmp_dir / str(i) + # assert not destination.exists() + # rw_repo.clone( + # destination, multi_options=[unsafe_option], allow_unsafe_options=True + # ) + # assert destination.exists() + + @with_rw_repo("HEAD") + def test_clone_safe_options(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + options = [ + "--depth=1", + "--single-branch", + "-q", + ] + for option in options: + destination = tmp_dir / option + assert not destination.exists() + rw_repo.clone(destination, multi_options=[option]) + assert destination.exists() + + @with_rw_repo("HEAD") + def test_clone_from_unsafe_options(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + unsafe_options = [ + "--upload-pack='touch " + str(tmp_file) + "'", + "-u 'touch " + str(tmp_file) + "'", + "--config=protocol.ext.allow=always", + "-c protocol.ext.allow=always", + ] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + Repo.clone_from( + rw_repo.working_dir, tmp_dir, multi_options=[unsafe_option] + ) + assert not tmp_file.exists() + + unsafe_options = [ + 
{"upload-pack": "touch " + str(tmp_file)}, + {"u": "touch " + str(tmp_file)}, + {"config": "protocol.ext.allow=always"}, + {"c": "protocol.ext.allow=always"}, + ] + for unsafe_option in unsafe_options: + with self.assertRaises(UnsafeOptionError): + Repo.clone_from(rw_repo.working_dir, tmp_dir, **unsafe_option) + assert not tmp_file.exists() + + # Gets an error about too many arguments + # @with_rw_repo("HEAD") + # def test_clone_from_unsafe_options_allowed(self, rw_repo): + # tmp_dir = pathlib.Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # "--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # ] + # for i, unsafe_option in enumerate(unsafe_options): + # destination = tmp_dir / str(i) + # assert not tmp_file.exists() + # # The options will be allowed, but the command will fail. + # with self.assertRaises(GitCommandError): + # Repo.clone_from( + # rw_repo.working_dir, + # destination, + # multi_options=[unsafe_option], + # allow_unsafe_options=True, + # ) + # assert tmp_file.exists() + # tmp_file.unlink() + # unsafe_options = [ + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # for i, unsafe_option in enumerate(unsafe_options): + # destination = tmp_dir / str(i) + # assert not destination.exists() + # Repo.clone_from( + # rw_repo.working_dir, + # destination, + # multi_options=[unsafe_option], + # allow_unsafe_options=True, + # ) + # assert destination.exists() + + @with_rw_repo("HEAD") + def test_clone_from_safe_options(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + options = [ + "--depth=1", + "--single-branch", + "-q", + ] + for option in options: + destination = tmp_dir / option + assert not destination.exists() + Repo.clone_from(rw_repo.common_dir, destination, multi_options=[option]) + assert destination.exists() + + def 
test_clone_from_unsafe_procol(self): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") - assert_equal(cloned.config_reader().get_value('submodule', 'active'), 'repo') - assert_equal(cloned.config_reader().get_value('core', 'filemode'), False) - assert_equal(cloned.config_reader().get_value('submodule "repo"', 'update'), 'checkout') + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::17/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + Repo.clone_from(url, tmp_dir) + assert not tmp_file.exists() + + def test_clone_from_unsafe_procol_allowed(self): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") - @with_rw_repo('HEAD') + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% /tmp/pwn", + "fd::/foo", + ] + for url in urls: + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. 
+ with self.assertRaises(GitCommandError): + Repo.clone_from(url, tmp_dir, allow_unsafe_protocols=True) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") def test_max_chunk_size(self, repo): class TestOutputStream(object): def __init__(self, max_chunk_size): @@ -255,10 +466,20 @@ def write(self, b): assert_true(len(b) <= self.max_chunk_size) for chunk_size in [16, 128, 1024]: - repo.git.status(output_stream=TestOutputStream(chunk_size), max_chunk_size=chunk_size) - - repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=None) - repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=-10) + repo.git.status( + output_stream=TestOutputStream(chunk_size), max_chunk_size=chunk_size + ) + + repo.git.log( + n=100, + output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), + max_chunk_size=None, + ) + repo.git.log( + n=100, + output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), + max_chunk_size=-10, + ) repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE)) def test_init(self): @@ -342,7 +563,7 @@ def test_alternates(self): self.rorepo.alternates = cur_alternates def test_repr(self): - assert repr(self.rorepo).startswith(' 1) # END for each item to traverse assert c, "Should have executed at least one blame command" - assert nml, "There should at least be one blame commit that contains multiple lines" + assert ( + nml + ), "There should at least be one blame commit that contains multiple lines" - @patch.object(Git, '_call_process') + @patch.object(Git, "_call_process") def test_blame_incremental(self, git): # loop over two fixtures, create a test fixture for 2.11.1+ syntax - for git_fixture in ('blame_incremental', 'blame_incremental_2.11.1_plus'): + for git_fixture in ("blame_incremental", "blame_incremental_2.11.1_plus"): git.return_value = fixture(git_fixture) - blame_output = self.rorepo.blame_incremental('9debf6b0aafb6f7781ea9d1383c86939a1aacde3', 'AUTHORS') + 
blame_output = self.rorepo.blame_incremental( + "9debf6b0aafb6f7781ea9d1383c86939a1aacde3", "AUTHORS" + ) blame_output = list(blame_output) self.assertEqual(len(blame_output), 5) # Check all outputted line numbers ranges = flatten([entry.linenos for entry in blame_output]) - self.assertEqual(ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(3, 14), range(15, 17)])) + self.assertEqual( + ranges, + flatten( + [ + range(2, 3), + range(14, 15), + range(1, 2), + range(3, 14), + range(15, 17), + ] + ), + ) commits = [entry.commit.hexsha[:7] for entry in blame_output] - self.assertEqual(commits, ['82b8902', '82b8902', 'c76852d', 'c76852d', 'c76852d']) + self.assertEqual( + commits, ["82b8902", "82b8902", "c76852d", "c76852d", "c76852d"] + ) # Original filenames - self.assertSequenceEqual([entry.orig_path for entry in blame_output], [u'AUTHORS'] * len(blame_output)) + self.assertSequenceEqual( + [entry.orig_path for entry in blame_output], + ["AUTHORS"] * len(blame_output), + ) # Original line numbers orig_ranges = flatten([entry.orig_linenos for entry in blame_output]) - self.assertEqual(orig_ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(2, 13), range(13, 15)])) # noqa E501 - - @patch.object(Git, '_call_process') + self.assertEqual( + orig_ranges, + flatten( + [ + range(2, 3), + range(14, 15), + range(1, 2), + range(2, 13), + range(13, 15), + ] + ), + ) # noqa E501 + + @patch.object(Git, "_call_process") def test_blame_complex_revision(self, git): - git.return_value = fixture('blame_complex_revision') + git.return_value = fixture("blame_complex_revision") res = self.rorepo.blame("HEAD~10..HEAD", "README.md") self.assertEqual(len(res), 1) self.assertEqual(len(res[0][1]), 83, "Unexpected amount of parsed blame lines") - @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(), - """FIXME: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute + @skipIf( + HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(), + """FIXME: File 
"C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute raise GitCommandError(command, status, stderr_value, stdout_value) GitCommandError: Cmd('git') failed due to: exit code(128) cmdline: git add 1__��ava verb��ten 1_test _myfile 1_test_other_file 1_��ava-----verb��ten stderr: 'fatal: pathspec '"1__çava verböten"' did not match any files' - """) - @with_rw_repo('HEAD', bare=False) + """, + ) + @with_rw_repo("HEAD", bare=False) def test_untracked_files(self, rwrepo): - for run, (repo_add, is_invoking_git) in enumerate(( + for run, (repo_add, is_invoking_git) in enumerate( + ( (rwrepo.index.add, False), (rwrepo.git.add, True), - )): + ) + ): base = rwrepo.working_tree_dir - files = (join_path_native(base, u"%i_test _myfile" % run), - join_path_native(base, "%i_test_other_file" % run), - join_path_native(base, u"%i__çava verböten" % run), - join_path_native(base, u"%i_çava-----verböten" % run)) + files = ( + join_path_native(base, u"%i_test _myfile" % run), + join_path_native(base, u"%i_test_other_file" % run), + join_path_native(base, u"%i__çava verböten" % run), + join_path_native(base, u"%i_çava-----verböten" % run), + ) num_recently_untracked = 0 for fpath in files: @@ -512,13 +779,15 @@ def test_untracked_files(self, rwrepo): repo_add = fnt.partial(repo_add, shell=True) untracked_files = [win_encode(f) for f in untracked_files] repo_add(untracked_files) - self.assertEqual(len(rwrepo.untracked_files), (num_recently_untracked - len(files))) + self.assertEqual( + len(rwrepo.untracked_files), (num_recently_untracked - len(files)) + ) # end for each run def test_config_reader(self): - reader = self.rorepo.config_reader() # all config files + reader = self.rorepo.config_reader() # all config files assert reader.read_only - reader = self.rorepo.config_reader("repository") # single config file + reader = self.rorepo.config_reader("repository") # single config file assert reader.read_only def test_config_writer(self): @@ -560,17 +829,17 @@ def 
test_comparison_and_hash(self): @with_rw_directory def test_tilde_and_env_vars_in_repo_path(self, rw_dir): - ph = os.environ.get('HOME') + ph = os.environ.get("HOME") try: - os.environ['HOME'] = rw_dir - Repo.init(osp.join('~', 'test.git'), bare=True) + os.environ["HOME"] = rw_dir + Repo.init(osp.join("~", "test.git"), bare=True) - os.environ['FOO'] = rw_dir - Repo.init(osp.join('$FOO', 'test.git'), bare=True) + os.environ["FOO"] = rw_dir + Repo.init(osp.join("$FOO", "test.git"), bare=True) finally: if ph: - os.environ['HOME'] = ph - del os.environ['FOO'] + os.environ["HOME"] = ph + del os.environ["FOO"] # end assure HOME gets reset to what it was def test_git_cmd(self): @@ -597,7 +866,7 @@ def mktiny(): s = mkfull() lines = s.readlines() self.assertEqual(len(lines), 3) - self.assertTrue(lines[-1].endswith(b'\n'), lines[-1]) + self.assertTrue(lines[-1].endswith(b"\n"), lines[-1]) self.assertEqual(s._stream.tell(), len(d)) # must have scrubbed to the end # realines line limit @@ -617,7 +886,7 @@ def mktiny(): self.assertEqual(s.readline(), l1) self.assertEqual(s.readline(), l2) self.assertEqual(s.readline(), l3) - self.assertEqual(s.readline(), b'') + self.assertEqual(s.readline(), b"") self.assertEqual(s._stream.tell(), len(d)) # readline limit @@ -628,13 +897,13 @@ def mktiny(): # readline on tiny section s = mktiny() self.assertEqual(s.readline(), l1p) - self.assertEqual(s.readline(), b'') + self.assertEqual(s.readline(), b"") self.assertEqual(s._stream.tell(), ts + 1) # read no limit s = mkfull() self.assertEqual(s.read(), d[:-1]) - self.assertEqual(s.read(), b'') + self.assertEqual(s.read(), b"") self.assertEqual(s._stream.tell(), len(d)) # read limit @@ -653,24 +922,24 @@ def mktiny(): def _assert_rev_parse_types(self, name, rev_obj): rev_parse = self.rorepo.rev_parse - if rev_obj.type == 'tag': + if rev_obj.type == "tag": rev_obj = rev_obj.object # tree and blob type - obj = rev_parse(name + '^{tree}') + obj = rev_parse(name + "^{tree}") self.assertEqual(obj, 
rev_obj.tree) - obj = rev_parse(name + ':CHANGES') - self.assertEqual(obj.type, 'blob') - self.assertEqual(obj.path, 'CHANGES') - self.assertEqual(rev_obj.tree['CHANGES'], obj) + obj = rev_parse(name + ":CHANGES") + self.assertEqual(obj.type, "blob") + self.assertEqual(obj.path, "CHANGES") + self.assertEqual(rev_obj.tree["CHANGES"], obj) def _assert_rev_parse(self, name): """tries multiple different rev-parse syntaxes with the given name :return: parsed object""" rev_parse = self.rorepo.rev_parse orig_obj = rev_parse(name) - if orig_obj.type == 'tag': + if orig_obj.type == "tag": obj = orig_obj.object else: obj = orig_obj @@ -711,17 +980,19 @@ def _assert_rev_parse(self, name): return orig_obj - @with_rw_repo('HEAD', bare=False) + @with_rw_repo("HEAD", bare=False) def test_rw_rev_parse(self, rwrepo): # verify it does not confuse branches with hexsha ids - ahead = rwrepo.create_head('aaaaaaaa') - assert(rwrepo.rev_parse(str(ahead)) == ahead.commit) + ahead = rwrepo.create_head("aaaaaaaa") + assert rwrepo.rev_parse(str(ahead)) == ahead.commit def test_rev_parse(self): rev_parse = self.rorepo.rev_parse # try special case: This one failed at some point, make sure its fixed - self.assertEqual(rev_parse("33ebe").hexsha, "33ebe7acec14b25c5f84f35a664803fcab2f7781") + self.assertEqual( + rev_parse("33ebe").hexsha, "33ebe7acec14b25c5f84f35a664803fcab2f7781" + ) # start from reference num_resolved = 0 @@ -729,7 +1000,7 @@ def test_rev_parse(self): for ref_no, ref in enumerate(Reference.iter_items(self.rorepo)): path_tokens = ref.path.split("/") for pt in range(len(path_tokens)): - path_section = '/'.join(path_tokens[-(pt + 1):]) + path_section = "/".join(path_tokens[-(pt + 1):]) try: obj = self._assert_rev_parse(path_section) self.assertEqual(obj.type, ref.object.type) @@ -746,17 +1017,17 @@ def test_rev_parse(self): assert num_resolved # it works with tags ! 
- tag = self._assert_rev_parse('0.1.4') - self.assertEqual(tag.type, 'tag') + tag = self._assert_rev_parse("0.1.4") + self.assertEqual(tag.type, "tag") # try full sha directly ( including type conversion ) self.assertEqual(tag.object, rev_parse(tag.object.hexsha)) self._assert_rev_parse_types(tag.object.hexsha, tag.object) # multiple tree types result in the same tree: HEAD^{tree}^{tree}:CHANGES - rev = '0.1.4^{tree}^{tree}' + rev = "0.1.4^{tree}^{tree}" self.assertEqual(rev_parse(rev), tag.object.tree) - self.assertEqual(rev_parse(rev + ':CHANGES'), tag.object.tree['CHANGES']) + self.assertEqual(rev_parse(rev + ":CHANGES"), tag.object.tree["CHANGES"]) # try to get parents from first revision - it should fail as no such revision # exists @@ -777,15 +1048,18 @@ def test_rev_parse(self): # needs a tag which points to a blob # ref^0 returns commit being pointed to, same with ref~0, and ^{} - tag = rev_parse('0.1.4') - for token in (('~0', '^0', '^{}')): - self.assertEqual(tag.object, rev_parse('0.1.4%s' % token)) + tag = rev_parse("0.1.4") + for token in ("~0", "^0", "^{}"): + self.assertEqual(tag.object, rev_parse("0.1.4%s" % token)) # END handle multiple tokens # try partial parsing max_items = 40 for i, binsha in enumerate(self.rorepo.odb.sha_iter()): - self.assertEqual(rev_parse(bin_to_hex(binsha)[:8 - (i % 2)].decode('ascii')).binsha, binsha) + self.assertEqual( + rev_parse(bin_to_hex(binsha)[: 8 - (i % 2)].decode("ascii")).binsha, + binsha, + ) if i > max_items: # this is rather slow currently, as rev_parse returns an object # which requires accessing packs, it has some additional overhead @@ -793,10 +1067,10 @@ def test_rev_parse(self): # END for each binsha in repo # missing closing brace commit^{tree - self.failUnlessRaises(ValueError, rev_parse, '0.1.4^{tree') + self.failUnlessRaises(ValueError, rev_parse, "0.1.4^{tree") # missing starting brace - self.failUnlessRaises(ValueError, rev_parse, '0.1.4^tree}') + self.failUnlessRaises(ValueError, rev_parse, 
"0.1.4^tree}") # REVLOG ####### @@ -806,23 +1080,23 @@ def test_rev_parse(self): self.failUnlessRaises(BadObject, rev_parse, "%s@{0}" % head.commit.hexsha) # uses HEAD.ref by default - self.assertEqual(rev_parse('@{0}'), head.commit) + self.assertEqual(rev_parse("@{0}"), head.commit) if not head.is_detached: - refspec = '%s@{0}' % head.ref.name + refspec = "%s@{0}" % head.ref.name self.assertEqual(rev_parse(refspec), head.ref.commit) # all additional specs work as well self.assertEqual(rev_parse(refspec + "^{tree}"), head.commit.tree) - self.assertEqual(rev_parse(refspec + ":CHANGES").type, 'blob') + self.assertEqual(rev_parse(refspec + ":CHANGES").type, "blob") # END operate on non-detached head # position doesn't exist - self.failUnlessRaises(IndexError, rev_parse, '@{10000}') + self.failUnlessRaises(IndexError, rev_parse, "@{10000}") # currently, nothing more is supported self.failUnlessRaises(NotImplementedError, rev_parse, "@{1 week ago}") # the last position - assert rev_parse('@{1}') != head.commit + assert rev_parse("@{1}") != head.commit def test_repo_odbtype(self): target_type = GitCmdObjectDB @@ -835,7 +1109,7 @@ def test_submodules(self): self.assertIsInstance(self.rorepo.submodule("gitdb"), Submodule) self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist") - @with_rw_repo('HEAD', bare=False) + @with_rw_repo("HEAD", bare=False) def test_submodule_update(self, rwrepo): # fails in bare mode rwrepo._bare = True @@ -844,27 +1118,31 @@ def test_submodule_update(self, rwrepo): # test create submodule sm = rwrepo.submodules[0] - sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path)) + sm = rwrepo.create_submodule( + "my_new_sub", + "some_path", + join_path_native(self.rorepo.working_tree_dir, sm.path), + ) self.assertIsInstance(sm, Submodule) # note: the rest of this functionality is tested in test_submodule - @with_rw_repo('HEAD') + @with_rw_repo("HEAD") def test_git_file(self, 
rwrepo): # Move the .git directory to another location and create the .git file. - real_path_abs = osp.abspath(join_path_native(rwrepo.working_tree_dir, '.real')) + real_path_abs = osp.abspath(join_path_native(rwrepo.working_tree_dir, ".real")) os.rename(rwrepo.git_dir, real_path_abs) - git_file_path = join_path_native(rwrepo.working_tree_dir, '.git') - with open(git_file_path, 'wb') as fp: - fp.write(fixture('git_file')) + git_file_path = join_path_native(rwrepo.working_tree_dir, ".git") + with open(git_file_path, "wb") as fp: + fp.write(fixture("git_file")) # Create a repo and make sure it's pointing to the relocated .git directory. git_file_repo = Repo(rwrepo.working_tree_dir) self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs) # Test using an absolute gitdir path in the .git file. - with open(git_file_path, 'wb') as fp: - fp.write(('gitdir: %s\n' % real_path_abs).encode('ascii')) + with open(git_file_path, "wb") as fp: + fp.write(("gitdir: %s\n" % real_path_abs).encode("ascii")) git_file_repo = Repo(rwrepo.working_tree_dir) self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs) @@ -881,13 +1159,13 @@ def last_commit(repo, rev, path): for _ in range(64): for repo_type in (GitCmdObjectDB, GitDB): repo = Repo(self.rorepo.working_tree_dir, odbt=repo_type) - last_commit(repo, 'master', 'git/test/test_base.py') + last_commit(repo, "master", "git/test/test_base.py") # end for each repository type # end for each iteration def test_remote_method(self): - self.failUnlessRaises(ValueError, self.rorepo.remote, 'foo-blue') - self.assertIsInstance(self.rorepo.remote(name='origin'), Remote) + self.failUnlessRaises(ValueError, self.rorepo.remote, "foo-blue") + self.assertIsInstance(self.rorepo.remote(name="origin"), Remote) @with_rw_directory def test_empty_repo(self, rw_dir): @@ -895,13 +1173,13 @@ def test_empty_repo(self, rw_dir): r = Repo.init(rw_dir, mkdir=False) # It's ok not to be able to iterate a commit, as there is none 
self.failUnlessRaises(ValueError, r.iter_commits) - self.assertEqual(r.active_branch.name, 'master') + self.assertEqual(r.active_branch.name, "master") assert not r.active_branch.is_valid(), "Branch is yet to be born" # actually, when trying to create a new branch without a commit, git itself fails # We should, however, not fail ungracefully - self.failUnlessRaises(BadName, r.create_head, 'foo') - self.failUnlessRaises(BadName, r.create_head, 'master') + self.failUnlessRaises(BadName, r.create_head, "foo") + self.failUnlessRaises(BadName, r.create_head, "master") # It's expected to not be able to access a tree self.failUnlessRaises(ValueError, r.tree) @@ -911,43 +1189,43 @@ def test_empty_repo(self, rw_dir): r.index.commit("initial commit\nBAD MESSAGE 1\n") # Now a branch should be creatable - nb = r.create_head('foo') + nb = r.create_head("foo") assert nb.is_valid() - with open(new_file_path, 'w') as f: - f.write('Line 1\n') + with open(new_file_path, "w") as f: + f.write("Line 1\n") r.index.add([new_file_path]) r.index.commit("add line 1\nBAD MESSAGE 2\n") - with open('%s/.git/logs/refs/heads/master' % (rw_dir,), 'r') as f: + with open("%s/.git/logs/refs/heads/master" % (rw_dir,), "r") as f: contents = f.read() - assert 'BAD MESSAGE' not in contents, 'log is corrupt' + assert "BAD MESSAGE" not in contents, "log is corrupt" def test_merge_base(self): repo = self.rorepo - c1 = 'f6aa8d1' - c2 = repo.commit('d46e3fe') - c3 = '763ef75' + c1 = "f6aa8d1" + c2 = repo.commit("d46e3fe") + c3 = "763ef75" self.failUnlessRaises(ValueError, repo.merge_base) - self.failUnlessRaises(ValueError, repo.merge_base, 'foo') + self.failUnlessRaises(ValueError, repo.merge_base, "foo") # two commit merge-base res = repo.merge_base(c1, c2) self.assertIsInstance(res, list) self.assertEqual(len(res), 1) self.assertIsInstance(res[0], Commit) - self.assertTrue(res[0].hexsha.startswith('3936084')) + self.assertTrue(res[0].hexsha.startswith("3936084")) - for kw in ('a', 'all'): + for kw in 
("a", "all"): res = repo.merge_base(c1, c2, c3, **{kw: True}) self.assertIsInstance(res, list) self.assertEqual(len(res), 1) # end for each keyword signalling all merge-bases to be returned # Test for no merge base - can't do as we have - self.failUnlessRaises(GitCommandError, repo.merge_base, c1, 'ffffff') + self.failUnlessRaises(GitCommandError, repo.merge_base, c1, "ffffff") def test_is_ancestor(self): git = self.rorepo.git @@ -955,15 +1233,15 @@ def test_is_ancestor(self): raise SkipTest("git merge-base --is-ancestor feature unsupported") repo = self.rorepo - c1 = 'f6aa8d1' - c2 = '763ef75' + c1 = "f6aa8d1" + c2 = "763ef75" self.assertTrue(repo.is_ancestor(c1, c1)) self.assertTrue(repo.is_ancestor("master", "master")) self.assertTrue(repo.is_ancestor(c1, c2)) self.assertTrue(repo.is_ancestor(c1, "master")) self.assertFalse(repo.is_ancestor(c2, c1)) self.assertFalse(repo.is_ancestor("master", c1)) - for i, j in itertools.permutations([c1, 'ffffff', ''], r=2): + for i, j in itertools.permutations([c1, "ffffff", ""], r=2): self.assertRaises(GitCommandError, repo.is_ancestor, i, j) @with_rw_directory @@ -974,12 +1252,12 @@ def test_git_work_tree_dotgit(self, rw_dir): if git.version_info[:3] < (2, 5, 1): raise SkipTest("worktree feature unsupported") - rw_master = self.rorepo.clone(join_path_native(rw_dir, 'master_repo')) - branch = rw_master.create_head('aaaaaaaa') - worktree_path = join_path_native(rw_dir, 'worktree_repo') + rw_master = self.rorepo.clone(join_path_native(rw_dir, "master_repo")) + branch = rw_master.create_head("aaaaaaaa") + worktree_path = join_path_native(rw_dir, "worktree_repo") if Git.is_cygwin(): worktree_path = cygpath(worktree_path) - rw_master.git.worktree('add', worktree_path, branch.name) + rw_master.git.worktree("add", worktree_path, branch.name) # this ensures that we can read the repo's gitdir correctly repo = Repo(worktree_path) @@ -995,7 +1273,7 @@ def test_git_work_tree_dotgit(self, rw_dir): origin = repo.remotes.origin 
self.assertIsInstance(origin, Remote) - self.assertIsInstance(repo.heads['aaaaaaaa'], Head) + self.assertIsInstance(repo.heads["aaaaaaaa"], Head) @with_rw_directory def test_git_work_tree_env(self, rw_dir): @@ -1004,18 +1282,18 @@ def test_git_work_tree_env(self, rw_dir): # move .git directory to a subdirectory # set GIT_DIR and GIT_WORK_TREE appropriately # check that repo.working_tree_dir == rw_dir - self.rorepo.clone(join_path_native(rw_dir, 'master_repo')) + self.rorepo.clone(join_path_native(rw_dir, "master_repo")) - repo_dir = join_path_native(rw_dir, 'master_repo') - old_git_dir = join_path_native(repo_dir, '.git') - new_subdir = join_path_native(repo_dir, 'gitdir') - new_git_dir = join_path_native(new_subdir, 'git') + repo_dir = join_path_native(rw_dir, "master_repo") + old_git_dir = join_path_native(repo_dir, ".git") + new_subdir = join_path_native(repo_dir, "gitdir") + new_git_dir = join_path_native(new_subdir, "git") os.mkdir(new_subdir) os.rename(old_git_dir, new_git_dir) oldenv = os.environ.copy() - os.environ['GIT_DIR'] = new_git_dir - os.environ['GIT_WORK_TREE'] = repo_dir + os.environ["GIT_DIR"] = new_git_dir + os.environ["GIT_WORK_TREE"] = repo_dir try: r = Repo() @@ -1023,3 +1301,31 @@ def test_git_work_tree_env(self, rw_dir): self.assertEqual(r.working_dir, repo_dir) finally: os.environ = oldenv + + @with_rw_repo("HEAD") + def test_clone_command_injection(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + unexpected_file = tmp_dir / "pwn" + assert not unexpected_file.exists() + payload = "--upload-pack=touch " + str(unexpected_file) + rw_repo.clone(payload) + assert not unexpected_file.exists() + # A repo was cloned with the payload as name + assert pathlib.Path(payload).exists() + + @with_rw_repo("HEAD") + def test_clone_from_command_injection(self, rw_repo): + if pathlib is None: # pythons bellow 3.4 don't have pathlib + 
raise SkipTest("pathlib was introduced in 3.4") + + tmp_dir = pathlib.Path(tempfile.mkdtemp()) + temp_repo = Repo.init(tmp_dir / "repo") + unexpected_file = tmp_dir / "pwn" + assert not unexpected_file.exists() + payload = "--upload-pack=touch " + str(unexpected_file) + with self.assertRaises(GitCommandError): + rw_repo.clone_from(payload, temp_repo.common_dir) + assert not unexpected_file.exists() diff --git a/git/test/test_submodule.py b/git/test/test_submodule.py index 94028d834..9b23470a2 100644 --- a/git/test/test_submodule.py +++ b/git/test/test_submodule.py @@ -3,6 +3,12 @@ # the BSD License: http://www.opensource.org/licenses/bsd-license.php import os import shutil +import tempfile +try: + from pathlib import Path +except ImportError: + from pathlib2 import Path + import sys from unittest import skipIf @@ -10,8 +16,11 @@ from git.cmd import Git from git.compat import string_types, is_win from git.exc import ( + GitCommandError, InvalidGitRepositoryError, - RepositoryDirtyError + RepositoryDirtyError, + UnsafeOptionError, + UnsafeProtocolError, ) from git.objects.submodule.base import Submodule from git.objects.submodule.root import RootModule, RootUpdateProgress @@ -87,7 +96,7 @@ def _do_base_tests(self, rwrepo): assert smold != sm # the name changed # force it to reread its information - del(smold._url) + del (smold._url) smold.url == sm.url # @NoEffect # test config_reader/writer methods @@ -241,7 +250,7 @@ def _do_base_tests(self, rwrepo): for repo in smods: assert repo.head.commit == repo.head.ref.tracking_branch().commit # END for each repo to check - del(smods) + del (smods) # if the head is detached, it still works ( but warns ) smref = sm.module().head.ref @@ -936,3 +945,144 @@ class Repo(object): relative_path = Submodule._to_relative_path(super_repo, submodule_path) msg = '_to_relative_path should be "submodule_path" but was "%s"' % relative_path assert relative_path == 'submodule_path', msg + + @with_rw_repo("HEAD") + def 
test_submodule_add_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::/foo", + ] + for url in urls: + with self.assertRaises(UnsafeProtocolError): + Submodule.add(rw_repo, "new", "new", url) + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_submodule_add_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::/foo", + ] + for url in urls: + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. + with self.assertRaises(GitCommandError): + Submodule.add(rw_repo, "new", "new", url, allow_unsafe_protocols=True) + assert not tmp_file.exists() + + # We don't have clone_multi_options in this version + # @with_rw_repo("HEAD") + # def test_submodule_add_unsafe_options(self, rw_repo): + # tmp_dir = Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # "--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # for unsafe_option in unsafe_options: + # with self.assertRaises(UnsafeOptionError): + # Submodule.add(rw_repo, "new", "new", str(tmp_dir), clone_multi_options=[unsafe_option]) + # assert not tmp_file.exists() + + # We don't have clone_multi_options in this version + # @with_rw_repo("HEAD") + # def test_submodule_add_unsafe_options_allowed(self, rw_repo): + # tmp_dir = Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # "--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # ] + # for unsafe_option in unsafe_options: + # # The options will be allowed, but the command will fail. 
+ # with self.assertRaises(GitCommandError): + # Submodule.add( + # rw_repo, "new", "new", str(tmp_dir), clone_multi_options=[unsafe_option], allow_unsafe_options=True + # ) + # assert not tmp_file.exists() + # unsafe_options = [ + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # for unsafe_option in unsafe_options: + # with self.assertRaises(GitCommandError): + # Submodule.add( + # rw_repo, "new", "new", str(tmp_dir), clone_multi_options=[unsafe_option], allow_unsafe_options=True + # ) + + @with_rw_repo("HEAD") + def test_submodule_update_unsafe_url(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::/foo", + ] + for url in urls: + submodule = Submodule(rw_repo, b"\0" * 20, name="new", path="new", url=url) + with self.assertRaises(UnsafeProtocolError): + submodule.update() + assert not tmp_file.exists() + + @with_rw_repo("HEAD") + def test_submodule_update_unsafe_url_allowed(self, rw_repo): + tmp_dir = Path(tempfile.mkdtemp()) + tmp_file = tmp_dir / "pwn" + urls = [ + "ext::sh -c touch% " + str(tmp_file), + "fd::/foo", + ] + for url in urls: + submodule = Submodule(rw_repo, b"\0" * 20, name="new", path="new", url=url) + # The URL will be allowed into the command, but the command will + # fail since we don't have that protocol enabled in the Git config file. 
+ with self.assertRaises(GitCommandError): + submodule.update(allow_unsafe_protocols=True) + assert not tmp_file.exists() + + # We don't have clone_multi_options in this version + # @with_rw_repo("HEAD") + # def test_submodule_update_unsafe_options(self, rw_repo): + # tmp_dir = Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # "--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # submodule = Submodule(rw_repo, b"\0" * 20, name="new", path="new", url=str(tmp_dir)) + # for unsafe_option in unsafe_options: + # with self.assertRaises(UnsafeOptionError): + # submodule.update(clone_multi_options=[unsafe_option]) + # assert not tmp_file.exists() + + # We don't have clone_multi_options in this version + # @with_rw_repo("HEAD") + # def test_submodule_update_unsafe_options_allowed(self, rw_repo): + # tmp_dir = Path(tempfile.mkdtemp()) + # tmp_file = tmp_dir / "pwn" + # unsafe_options = [ + # "--upload-pack='touch " + str(tmp_file) + "'", + # "-u 'touch " + str(tmp_file) + "'", + # ] + # submodule = Submodule(rw_repo, b"\0" * 20, name="new", path="new", url=str(tmp_dir)) + # for unsafe_option in unsafe_options: + # # The options will be allowed, but the command will fail. 
+ # with self.assertRaises(GitCommandError): + # submodule.update(clone_multi_options=[unsafe_option], allow_unsafe_options=True) + # assert not tmp_file.exists() + # unsafe_options = [ + # "--config=protocol.ext.allow=always", + # "-c protocol.ext.allow=always", + # ] + # submodule = Submodule(rw_repo, b"\0" * 20, name="new", path="new", url=str(tmp_dir)) + # for unsafe_option in unsafe_options: + # with self.assertRaises(GitCommandError): + # submodule.update(clone_multi_options=[unsafe_option], allow_unsafe_options=True) diff --git a/git/test/test_tree.py b/git/test/test_tree.py index dc23f29ca..f8e93b9f7 100644 --- a/git/test/test_tree.py +++ b/git/test/test_tree.py @@ -8,10 +8,7 @@ import sys from unittest import skipIf -from git import ( - Tree, - Blob -) +from git import Tree, Blob from git.test.lib import TestBase from git.util import HIDE_WINDOWS_KNOWN_ERRORS @@ -20,21 +17,24 @@ class TestTree(TestBase): - @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and sys.version_info[:2] == (3, 5), """ + @skipIf( + HIDE_WINDOWS_KNOWN_ERRORS and sys.version_info[:2] == (3, 5), + """ File "C:\\projects\\gitpython\\git\\cmd.py", line 559, in execute raise GitCommandNotFound(command, err) git.exc.GitCommandNotFound: Cmd('git') not found due to: OSError('[WinError 6] The handle is invalid') - cmdline: git cat-file --batch-check""") + cmdline: git cat-file --batch-check""", + ) def test_serializable(self): # tree at the given commit contains a submodule as well - roottree = self.rorepo.tree('6c1faef799095f3990e9970bc2cb10aa0221cf9c') + roottree = self.rorepo.tree("6c1faef799095f3990e9970bc2cb10aa0221cf9c") for item in roottree.traverse(ignore_self=False): if item.type != Tree.type: continue # END skip non-trees tree = item # trees have no dict - self.failUnlessRaises(AttributeError, setattr, tree, 'someattr', 1) + self.failUnlessRaises(AttributeError, setattr, tree, "someattr", 1) orig_data = tree.data_stream.read() orig_cache = tree._cache @@ -44,22 +44,25 @@ def 
test_serializable(self): assert stream.getvalue() == orig_data stream.seek(0) - testtree = Tree(self.rorepo, Tree.NULL_BIN_SHA, 0, '') + testtree = Tree(self.rorepo, Tree.NULL_BIN_SHA, 0, "") testtree._deserialize(stream) assert testtree._cache == orig_cache # replaces cache, but we make sure of it - del(testtree._cache) + del testtree._cache testtree._deserialize(stream) # END for each item in tree - @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and sys.version_info[:2] == (3, 5), """ + @skipIf( + HIDE_WINDOWS_KNOWN_ERRORS and sys.version_info[:2] == (3, 5), + """ File "C:\\projects\\gitpython\\git\\cmd.py", line 559, in execute raise GitCommandNotFound(command, err) git.exc.GitCommandNotFound: Cmd('git') not found due to: OSError('[WinError 6] The handle is invalid') - cmdline: git cat-file --batch-check""") + cmdline: git cat-file --batch-check""", + ) def test_traverse(self): - root = self.rorepo.tree('0.1.6') + root = self.rorepo.tree("0.1.6") num_recursive = 0 all_items = [] for obj in root.traverse(): @@ -73,7 +76,7 @@ def test_traverse(self): # limit recursion level to 0 - should be same as default iteration assert all_items - assert 'CHANGES' in root + assert "CHANGES" in root assert len(list(root)) == len(list(root.traverse(depth=1))) # only choose trees @@ -88,7 +91,9 @@ def test_traverse(self): # trees and blobs assert len(set(trees) | set(root.trees)) == len(trees) - assert len({b for b in root if isinstance(b, Blob)} | set(root.blobs)) == len(root.blobs) + assert len({b for b in root if isinstance(b, Blob)} | set(root.blobs)) == len( + root.blobs + ) subitem = trees[0][0] assert "/" in subitem.path assert subitem.name == osp.basename(subitem.path) @@ -97,7 +102,7 @@ def test_traverse(self): found_slash = False for item in root.traverse(): assert osp.isabs(item.abspath) - if '/' in item.path: + if "/" in item.path: found_slash = True # END check for slash diff --git a/git/test/test_util.py b/git/test/test_util.py index b5f9d2228..37894f992 100644 --- 
a/git/test/test_util.py +++ b/git/test/test_util.py @@ -19,11 +19,9 @@ verify_utctz, parse_date, tzoffset, - from_timestamp) -from git.test.lib import ( - TestBase, - assert_equal + from_timestamp, ) +from git.test.lib import TestBase, assert_equal from git.util import ( LockFile, BlockingLockFile, @@ -31,40 +29,35 @@ Actor, IterableList, cygpath, - decygpath + decygpath, ) _norm_cygpath_pairs = ( - (r'foo\bar', 'foo/bar'), - (r'foo/bar', 'foo/bar'), - - (r'C:\Users', '/cygdrive/c/Users'), - (r'C:\d/e', '/cygdrive/c/d/e'), - - ('C:\\', '/cygdrive/c/'), - - (r'\\server\C$\Users', '//server/C$/Users'), - (r'\\server\C$', '//server/C$'), - ('\\\\server\\c$\\', '//server/c$/'), - (r'\\server\BAR/', '//server/BAR/'), - - (r'D:/Apps', '/cygdrive/d/Apps'), - (r'D:/Apps\fOO', '/cygdrive/d/Apps/fOO'), - (r'D:\Apps/123', '/cygdrive/d/Apps/123'), + (r"foo\bar", "foo/bar"), + (r"foo/bar", "foo/bar"), + (r"C:\Users", "/cygdrive/c/Users"), + (r"C:\d/e", "/cygdrive/c/d/e"), + ("C:\\", "/cygdrive/c/"), + (r"\\server\C$\Users", "//server/C$/Users"), + (r"\\server\C$", "//server/C$"), + ("\\\\server\\c$\\", "//server/c$/"), + (r"\\server\BAR/", "//server/BAR/"), + (r"D:/Apps", "/cygdrive/d/Apps"), + (r"D:/Apps\fOO", "/cygdrive/d/Apps/fOO"), + (r"D:\Apps/123", "/cygdrive/d/Apps/123"), ) _unc_cygpath_pairs = ( - (r'\\?\a:\com', '/cygdrive/a/com'), - (r'\\?\a:/com', '/cygdrive/a/com'), - - (r'\\?\UNC\server\D$\Apps', '//server/D$/Apps'), + (r"\\?\a:\com", "/cygdrive/a/com"), + (r"\\?\a:/com", "/cygdrive/a/com"), + (r"\\?\UNC\server\D$\Apps", "//server/D$/Apps"), ) class TestIterableMember(object): - """A member of an iterable list""" + __slots__ = "name" def __init__(self, name): @@ -93,11 +86,11 @@ def test_cygpath_ok(self, case): @skipIf(not is_win, "Paths specifically for Windows.") @ddt.data( - (r'./bar', 'bar'), - (r'.\bar', 'bar'), - (r'../bar', '../bar'), - (r'..\bar', '../bar'), - (r'../bar/.\foo/../chu', '../bar/chu'), + (r"./bar", "bar"), + (r".\bar", "bar"), + (r"../bar", 
"../bar"), + (r"..\bar", "../bar"), + (r"../bar/.\foo/../chu", "../bar/chu"), ) def test_cygpath_norm_ok(self, case): wpath, cpath = case @@ -106,27 +99,27 @@ def test_cygpath_norm_ok(self, case): @skipIf(not is_win, "Paths specifically for Windows.") @ddt.data( - r'C:', - r'C:Relative', - r'D:Apps\123', - r'D:Apps/123', - r'\\?\a:rel', - r'\\share\a:rel', + r"C:", + r"C:Relative", + r"D:Apps\123", + r"D:Apps/123", + r"\\?\a:rel", + r"\\share\a:rel", ) def test_cygpath_invalids(self, wpath): cwpath = cygpath(wpath) - self.assertEqual(cwpath, wpath.replace('\\', '/'), wpath) + self.assertEqual(cwpath, wpath.replace("\\", "/"), wpath) @skipIf(not is_win, "Paths specifically for Windows.") @ddt.idata(_norm_cygpath_pairs) def test_decygpath(self, case): wpath, cpath = case wcpath = decygpath(cpath) - self.assertEqual(wcpath, wpath.replace('/', '\\'), cpath) + self.assertEqual(wcpath, wpath.replace("/", "\\"), cpath) def test_it_should_dashify(self): - assert_equal('this-is-my-argument', dashify('this_is_my_argument')) - assert_equal('foo', dashify('foo')) + assert_equal("this-is-my-argument", dashify("this_is_my_argument")) + assert_equal("foo", dashify("foo")) def test_lock_file(self): my_file = tempfile.mktemp() @@ -151,7 +144,7 @@ def test_lock_file(self): self.failUnlessRaises(IOError, lock_file._obtain_lock_or_raise) # auto-release on destruction - del(other_lock_file) + del other_lock_file lock_file._obtain_lock_or_raise() lock_file._release_lock() @@ -166,14 +159,16 @@ def test_blocking_lock_file(self): wait_lock = BlockingLockFile(my_file, 0.05, wait_time) self.failUnlessRaises(IOError, wait_lock._obtain_lock) elapsed = time.time() - start - extra_time = 0.02 + # extra_time = 0.02 + # Takes longer on a local machine + extra_time = 0.05 if is_win: # for Appveyor extra_time *= 6 # NOTE: Indeterministic failures here... 
self.assertLess(elapsed, wait_time + extra_time) def test_user_id(self): - self.assertIn('@', get_user_id()) + self.assertIn("@", get_user_id()) def test_parse_date(self): # test all supported formats @@ -188,6 +183,7 @@ def assert_rval(rval, veri_time, offset=0): utctz = altz_to_utctz_str(offset) self.assertIsInstance(utctz, string_types) self.assertEqual(utctz_to_altz(verify_utctz(utctz)), offset) + # END assert rval utility rfc = ("Thu, 07 Apr 2005 22:13:11 +0000", 0) @@ -196,15 +192,15 @@ def assert_rval(rval, veri_time, offset=0): iso3 = ("2005.04.07 22:13:11 -0000", 0) alt = ("04/07/2005 22:13:11", 0) alt2 = ("07.04.2005 22:13:11", 0) - veri_time_utc = 1112911991 # the time this represents, in time since epoch, UTC + veri_time_utc = 1112911991 # the time this represents, in time since epoch, UTC for date, offset in (rfc, iso, iso2, iso3, alt, alt2): assert_rval(parse_date(date), veri_time_utc, offset) # END for each date type # and failure - self.failUnlessRaises(ValueError, parse_date, 'invalid format') - self.failUnlessRaises(ValueError, parse_date, '123456789 -02000') - self.failUnlessRaises(ValueError, parse_date, ' 123456789 -0200') + self.failUnlessRaises(ValueError, parse_date, "invalid format") + self.failUnlessRaises(ValueError, parse_date, "123456789 -02000") + self.failUnlessRaises(ValueError, parse_date, " 123456789 -0200") def test_actor(self): for cr in (None, self.rorepo.config_reader()): @@ -215,10 +211,12 @@ def test_actor(self): def test_actor_from_string(self): self.assertEqual(Actor._from_string("name"), Actor("name", None)) self.assertEqual(Actor._from_string("name <>"), Actor("name", "")) - self.assertEqual(Actor._from_string("name last another "), - Actor("name last another", "some-very-long-email@example.com")) + self.assertEqual( + Actor._from_string("name last another "), + Actor("name last another", "some-very-long-email@example.com"), + ) - @ddt.data(('name', ''), ('name', 'prefix_')) + @ddt.data(("name", ""), ("name", "prefix_")) 
def test_iterable_list(self, case): name, prefix = case ilist = IterableList(name, prefix) @@ -237,7 +235,7 @@ def test_iterable_list(self, case): self.assertIn(name2, ilist) self.assertIn(m2, ilist) self.assertIn(m2, ilist) - self.assertNotIn('invalid', ilist) + self.assertNotIn("invalid", ilist) # with string index self.assertIs(ilist[name1], m1) @@ -252,31 +250,40 @@ def test_iterable_list(self, case): self.assertIs(ilist.two, m2) # test exceptions - self.failUnlessRaises(AttributeError, getattr, ilist, 'something') - self.failUnlessRaises(IndexError, ilist.__getitem__, 'something') + self.failUnlessRaises(AttributeError, getattr, ilist, "something") + self.failUnlessRaises(IndexError, ilist.__getitem__, "something") # delete by name and index - self.failUnlessRaises(IndexError, ilist.__delitem__, 'something') - del(ilist[name2]) + self.failUnlessRaises(IndexError, ilist.__delitem__, "something") + del ilist[name2] self.assertEqual(len(ilist), 1) self.assertNotIn(name2, ilist) self.assertIn(name1, ilist) - del(ilist[0]) + del ilist[0] self.assertNotIn(name1, ilist) self.assertEqual(len(ilist), 0) self.failUnlessRaises(IndexError, ilist.__delitem__, 0) - self.failUnlessRaises(IndexError, ilist.__delitem__, 'something') + self.failUnlessRaises(IndexError, ilist.__delitem__, "something") def test_from_timestamp(self): # Correct offset: UTC+2, should return datetime + tzoffset(+2) - altz = utctz_to_altz('+0200') - self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(altz)), from_timestamp(1522827734, altz)) + altz = utctz_to_altz("+0200") + self.assertEqual( + datetime.fromtimestamp(1522827734, tzoffset(altz)), + from_timestamp(1522827734, altz), + ) # Wrong offset: UTC+58, should return datetime + tzoffset(UTC) - altz = utctz_to_altz('+5800') - self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(0)), from_timestamp(1522827734, altz)) + altz = utctz_to_altz("+5800") + self.assertEqual( + datetime.fromtimestamp(1522827734, tzoffset(0)), + 
from_timestamp(1522827734, altz), + ) # Wrong offset: UTC-9000, should return datetime + tzoffset(UTC) - altz = utctz_to_altz('-9000') - self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(0)), from_timestamp(1522827734, altz)) + altz = utctz_to_altz("-9000") + self.assertEqual( + datetime.fromtimestamp(1522827734, tzoffset(0)), + from_timestamp(1522827734, altz), + ) diff --git a/git/util.py b/git/util.py index 7ca0564eb..0e397045e 100644 --- a/git/util.py +++ b/git/util.py @@ -192,6 +192,17 @@ def _get_exe_extensions(): def py_where(program, path=None): + """Perform a path search to assist :func:`is_cygwin_git`. + This is not robust for general use. It is an implementation detail of + :func:`is_cygwin_git`. When a search following all shell rules is needed, + :func:`shutil.which` can be used instead. + :note: Neither this function nor :func:`shutil.which` will predict the effect of an + executable search on a native Windows system due to a :class:`subprocess.Popen` + call without ``shell=True``, because shell and non-shell executable search on + Windows differ considerably. + """ + + # From: http://stackoverflow.com/a/377028/548792 winprog_exts = _get_exe_extensions() diff --git a/requirements.txt b/requirements.txt index d312d11bb..bf38ff544 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ gitdb2>=2,<3 +mock diff --git a/test-requirements.txt b/test-requirements.txt index ec0e4c561..47e7c99b5 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,12 @@ -ddt>=1.1.1 +ddt>=1.1.1; python_version>'3.0' +ddt>=1.1.1,<=1.6.0; python_version =='2.7' coverage flake8 nose tox mock; python_version=='2.7' +pathlib2; python_version=='2.7' +backports.tempfile +six +virtualenv +