diff --git a/git/cmd.py b/git/cmd.py index a92b2f3ce..f07573017 100644 --- a/git/cmd.py +++ b/git/cmd.py @@ -42,12 +42,12 @@ ) -execute_kwargs = set(('istream', 'with_keep_cwd', 'with_extended_output', +execute_kwargs = set(('istream', 'with_extended_output', 'with_exceptions', 'as_process', 'stdout_as_string', 'output_stream', 'with_stdout', 'kill_after_timeout', 'universal_newlines', 'shell')) -log = logging.getLogger('git.cmd') +log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) __all__ = ('Git',) @@ -59,7 +59,8 @@ # Documentation ## @{ -def handle_process_output(process, stdout_handler, stderr_handler, finalizer, decode_streams=True): +def handle_process_output(process, stdout_handler, stderr_handler, + finalizer=None, decode_streams=True): """Registers for notifications to lean that process output is ready to read, and dispatches lines to the respective line handlers. This function returns once the finalizer returns @@ -108,10 +109,13 @@ def pump_stream(cmdline, name, stream, is_decode, handler): t.start() threads.append(t) + ## FIXME: Why Join?? Will block if `stdin` needs feeding... + # for t in threads: t.join() - return finalizer(process) + if finalizer: + return finalizer(process) def dashify(string): @@ -186,14 +190,18 @@ def __setstate__(self, d): # Override this value using `Git.USE_SHELL = True` USE_SHELL = False - class AutoInterrupt(object): + @classmethod + def polish_url(cls, url): + return url.replace("\\\\", "\\").replace("\\", "/") + class AutoInterrupt(object): """Kill/Interrupt the stored process instance once this instance goes out of scope. It is used to prevent processes piling up in case iterators stop reading. Besides all attributes are wired through to the contained process object. The wait method was overridden to perform automatic status code checking and possibly raise.""" + __slots__ = ("proc", "args") def __init__(self, proc, args): @@ -239,7 +247,7 @@ def __del__(self): def __getattr__(self, attr): return getattr(self.proc, attr) - def wait(self, stderr=b''): + def wait(self, stderr=b''): # TODO: Bad choice to mimic `proc.wait()` but with different args. """Wait for the process and return its status code. :param stderr: Previously read value of stderr, in case stderr is already closed. @@ -418,7 +426,6 @@ def version_info(self): def execute(self, command, istream=None, - with_keep_cwd=False, with_extended_output=False, with_exceptions=True, as_process=False, @@ -441,11 +448,6 @@ def execute(self, command, :param istream: Standard input filehandle passed to subprocess.Popen. - :param with_keep_cwd: - Whether to use the current working directory from os.getcwd(). - The cmd otherwise uses its own working_dir that it has been initialized - with if possible. - :param with_extended_output: Whether to return a (status, stdout, stderr) tuple. @@ -518,10 +520,7 @@ def execute(self, command, log.info(' '.join(command)) # Allow the user to have the command executed in their working dir. 
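# A minimal sketch of what the new Git.polish_url() classmethod above does:
# collapse doubled backslashes, then turn the remaining backslashes into
# forward slashes, so Windows paths survive being written into git config
# files.  The sample paths are illustrative only.
def polish_url(url):
    return url.replace("\\\\", "\\").replace("\\", "/")

assert polish_url("C:\\Users\\dev\\repo") == "C:/Users/dev/repo"
assert polish_url("git://localhost:9418/repo") == "git://localhost:9418/repo"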
- if with_keep_cwd or self._working_dir is None: - cwd = os.getcwd() - else: - cwd = self._working_dir + cwd = self._working_dir or os.getcwd() # Start the process env = os.environ.copy() @@ -544,6 +543,9 @@ def execute(self, command, cmd_not_found_exception = OSError # end handle + stdout_sink = (PIPE + if with_stdout + else getattr(subprocess, 'DEVNULL', open(os.devnull, 'wb'))) log.debug("Popen(%s, cwd=%s, universal_newlines=%s, shell=%s)", command, cwd, universal_newlines, shell) try: @@ -553,9 +555,9 @@ def execute(self, command, bufsize=-1, stdin=istream, stderr=PIPE, - stdout=PIPE if with_stdout else open(os.devnull, 'wb'), + stdout=stdout_sink, shell=shell is not None and shell or self.USE_SHELL, - close_fds=(is_posix), # unsupported on windows + close_fds=is_posix, # unsupported on windows universal_newlines=universal_newlines, creationflags=PROC_CREATIONFLAGS, **subprocess_kwargs @@ -647,10 +649,7 @@ def as_text(stdout_value): # END handle debug printing if with_exceptions and status != 0: - if with_extended_output: - raise GitCommandError(command, status, stderr_value, stdout_value) - else: - raise GitCommandError(command, status, stderr_value) + raise GitCommandError(command, status, stderr_value, stdout_value) if isinstance(stdout_value, bytes) and stdout_as_string: # could also be output_stream stdout_value = safe_decode(stdout_value) diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py index 999c452be..9bb563d7b 100644 --- a/git/objects/submodule/base.py +++ b/git/objects/submodule/base.py @@ -41,6 +41,7 @@ from unittest.case import SkipTest from git.util import HIDE_WINDOWS_KNOWN_ERRORS from git.objects.base import IndexObject, Object +from git.cmd import Git __all__ = ["Submodule", "UpdateProgress"] @@ -394,6 +395,9 @@ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False): mrepo = cls._clone_repo(repo, url, path, name, **kwargs) # END verify url + ## See #525 for ensuring git urls in config-files valid under Windows. + url = Git.polish_url(url) + # It's important to add the URL to the parent config, to let `git submodule` know. 
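# Sketch of the stdout-sink selection added to Git.execute() above: use
# subprocess.DEVNULL where it exists (Python 3.3+) and fall back to an open
# handle on os.devnull for older interpreters.  The helper name is made up;
# `with_stdout` mirrors the execute() keyword of the same name.
import os
import subprocess
from subprocess import PIPE

def select_stdout_sink(with_stdout):
    if with_stdout:
        return PIPE
    return getattr(subprocess, 'DEVNULL', open(os.devnull, 'wb'))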
# otherwise there is a '-' character in front of the submodule listing # a38efa84daef914e4de58d1905a500d8d14aaf45 mymodule (v0.9.0-1-ga38efa8) diff --git a/git/remote.py b/git/remote.py index a7d3fe7e9..71585a41b 100644 --- a/git/remote.py +++ b/git/remote.py @@ -27,9 +27,8 @@ ) from git.util import ( join_path, - finalize_process ) -from git.cmd import handle_process_output +from git.cmd import handle_process_output, Git from gitdb.util import join from git.compat import (defenc, force_text, is_win) import logging @@ -570,7 +569,7 @@ def create(cls, repo, name, url, **kwargs): :raise GitCommandError: in case an origin with that name already exists""" scmd = 'add' kwargs['insert_kwargs_after'] = scmd - repo.git.remote(scmd, name, url, **kwargs) + repo.git.remote(scmd, name, Git.polish_url(url), **kwargs) return cls(repo, name) # add is an alias @@ -630,25 +629,19 @@ def _get_fetch_info_from_stderr(self, proc, progress): cmds = set(PushInfo._flag_map.keys()) & set(FetchInfo._flag_map.keys()) progress_handler = progress.new_message_handler() + handle_process_output(proc, None, progress_handler, finalizer=None, decode_streams=False) - stderr_text = None + stderr_text = progress.error_lines and '\n'.join(progress.error_lines) or '' + proc.wait(stderr=stderr_text) + if stderr_text: + log.warning("Error lines received while fetching: %s", stderr_text) - for line in proc.stderr: + for line in progress.other_lines: line = force_text(line) - for pline in progress_handler(line): - # END handle special messages - for cmd in cmds: - if len(line) > 1 and line[0] == ' ' and line[1] == cmd: - fetch_info_lines.append(line) - continue - # end find command code - # end for each comand code we know - # end for each line progress didn't handle - # end - if progress.error_lines(): - stderr_text = '\n'.join(progress.error_lines()) - - finalize_process(proc, stderr=stderr_text) + for cmd in cmds: + if len(line) > 1 and line[0] == ' ' and line[1] == cmd: + fetch_info_lines.append(line) + continue # read head information with open(join(self.repo.git_dir, 'FETCH_HEAD'), 'rb') as fp: @@ -687,16 +680,19 @@ def stdout_handler(line): try: output.append(PushInfo._from_line(self, line)) except ValueError: - # if an error happens, additional info is given which we cannot parse + # If an error happens, additional info is given which we parse below. 
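# The error-handling pattern the fetch/push code above now follows, assuming
# `proc` is the AutoInterrupt-wrapped git process, `progress` a RemoteProgress
# instance and `log` a module logger: pump output without a finalizer, then
# call proc.wait() with the collected 'error:'/'fatal:' lines so a failing
# command still raises with useful stderr attached.
handle_process_output(proc, None, progress.new_message_handler(),
                      finalizer=None, decode_streams=False)
stderr_text = '\n'.join(progress.error_lines) if progress.error_lines else ''
proc.wait(stderr=stderr_text)
if stderr_text:
    log.warning("Error lines received while fetching: %s", stderr_text)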
pass - # END exception handling - # END for each line + handle_process_output(proc, stdout_handler, progress_handler, finalizer=None, decode_streams=False) + stderr_text = progress.error_lines and '\n'.join(progress.error_lines) or '' try: - handle_process_output(proc, stdout_handler, progress_handler, finalize_process, decode_streams=False) + proc.wait(stderr=stderr_text) except Exception: - if len(output) == 0: + if not output: raise + elif stderr_text: + log.warning("Error lines received while fetching: %s", stderr_text) + return output def _assert_refspec(self): diff --git a/git/repo/base.py b/git/repo/base.py index 8b68b5ff2..c5cdce7c6 100644 --- a/git/repo/base.py +++ b/git/repo/base.py @@ -62,8 +62,11 @@ import os import sys import re +import logging from collections import namedtuple +log = logging.getLogger(__name__) + DefaultDBType = GitCmdObjectDB if sys.version_info[:2] < (2, 5): # python 2.4 compatiblity DefaultDBType = GitCmdObjectDB @@ -871,46 +874,15 @@ def _clone(cls, git, url, path, odb_default_type, progress, **kwargs): if progress is not None: progress = to_progress_instance(progress) - # special handling for windows for path at which the clone should be - # created. - # tilde '~' will be expanded to the HOME no matter where the ~ occours. Hence - # we at least give a proper error instead of letting git fail - prev_cwd = None - prev_path = None odbt = kwargs.pop('odbt', odb_default_type) - if is_win: - if '~' in path: - raise OSError("Git cannot handle the ~ character in path %r correctly" % path) - - # on windows, git will think paths like c: are relative and prepend the - # current working dir ( before it fails ). We temporarily adjust the working - # dir to make this actually work - match = re.match("(\w:[/\\\])(.*)", path) - if match: - prev_cwd = os.getcwd() - prev_path = path - drive, rest_of_path = match.groups() - os.chdir(drive) - path = rest_of_path - kwargs['with_keep_cwd'] = True - # END cwd preparation - # END windows handling - - try: - proc = git.clone(url, path, with_extended_output=True, as_process=True, - v=True, **add_progress(kwargs, git, progress)) - if progress: - handle_process_output(proc, None, progress.new_message_handler(), finalize_process) - else: - (stdout, stderr) = proc.communicate() # FIXME: Will block of outputs are big! - finalize_process(proc, stderr=stderr) - # end handle progress - finally: - if prev_cwd is not None: - os.chdir(prev_cwd) - path = prev_path - # END reset previous working dir - # END bad windows handling + proc = git.clone(url, path, with_extended_output=True, as_process=True, + v=True, **add_progress(kwargs, git, progress)) + if progress: + handle_process_output(proc, None, progress.new_message_handler(), finalize_process) + else: + (stdout, stderr) = proc.communicate() # FIXME: Will block of outputs are big! + log.debug("Cmd(%s)'s unused stdout: %s", getattr(proc, 'args', ''), stdout) + finalize_process(proc, stderr=stderr) # our git command could have a different working dir than our actual # environment, hence we prepend its working dir if required @@ -922,10 +894,10 @@ def _clone(cls, git, url, path, odb_default_type, progress, **kwargs): # that contains the remote from which we were clones, git stops liking it # as it will escape the backslashes. 
Hence we undo the escaping just to be # sure - repo = cls(os.path.abspath(path), odbt=odbt) + repo = cls(path, odbt=odbt) if repo.remotes: with repo.remotes[0].config_writer as writer: - writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/")) + writer.set_value('url', Git.polish_url(repo.remotes[0].url)) # END handle remote repo return repo diff --git a/git/test/lib/helper.py b/git/test/lib/helper.py index 092068b9f..c5a003ea1 100644 --- a/git/test/lib/helper.py +++ b/git/test/lib/helper.py @@ -5,26 +5,29 @@ # the BSD License: http://www.opensource.org/licenses/bsd-license.php from __future__ import print_function -import os -from unittest import TestCase -import time -import tempfile +from functools import wraps import io import logging +import os +import tempfile +import textwrap +import time +from unittest import TestCase +import unittest -from functools import wraps - +from git.compat import string_types, is_win, PY3 from git.util import rmtree -from git.compat import string_types, is_win -import textwrap -osp = os.path.dirname +import os.path as osp + + +ospd = osp.dirname -GIT_REPO = os.environ.get("GIT_PYTHON_TEST_GIT_REPO_BASE", osp(osp(osp(osp(__file__))))) -GIT_DAEMON_PORT = os.environ.get("GIT_PYTHON_TEST_GIT_DAEMON_PORT", "9418") +GIT_REPO = os.environ.get("GIT_PYTHON_TEST_GIT_REPO_BASE", ospd(ospd(ospd(ospd(__file__))))) +GIT_DAEMON_PORT = os.environ.get("GIT_PYTHON_TEST_GIT_DAEMON_PORT", "19418") __all__ = ( - 'fixture_path', 'fixture', 'absolute_project_path', 'StringProcessAdapter', + 'fixture_path', 'fixture', 'StringProcessAdapter', 'with_rw_directory', 'with_rw_repo', 'with_rw_and_rw_remote_repo', 'TestBase', 'TestCase', 'GIT_REPO', 'GIT_DAEMON_PORT' ) @@ -35,18 +38,13 @@ def fixture_path(name): - test_dir = osp(osp(__file__)) - return os.path.join(test_dir, "fixtures", name) + return osp.join(ospd(ospd(__file__)), 'fixtures', name) def fixture(name): with open(fixture_path(name), 'rb') as fd: return fd.read() - -def absolute_project_path(): - return os.path.abspath(os.path.join(osp(__file__), "..", "..")) - #} END routines #{ Adapters @@ -71,18 +69,6 @@ def wait(self): #{ Decorators -def _mktemp(*args): - """Wrapper around default tempfile.mktemp to fix an osx issue - :note: the OSX special case was removed as it was unclear why that was needed in the first place. It seems - to be just fine without it. However, if we leave this special case, and if TMPDIR is set to something custom, - prefixing /private/ will lead to incorrect paths on OSX.""" - tdir = tempfile.mktemp(*args) - # See :note: above to learn why this is comented out. - # if is_darwin: - # tdir = '/private' + tdir - return tdir - - def with_rw_directory(func): """Create a temporary directory which can be written to, remove it if the test succeeds, but leave it otherwise to aid additional debugging""" @@ -132,7 +118,7 @@ def repo_creator(self): if bare: prefix = '' # END handle prefix - repo_dir = _mktemp("%sbare_%s" % (prefix, func.__name__)) + repo_dir = tempfile.mktemp("%sbare_%s" % (prefix, func.__name__)) rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=bare, n=True) rw_repo.head.commit = rw_repo.commit(working_tree_ref) @@ -165,26 +151,31 @@ def repo_creator(self): return argument_passer -def launch_git_daemon(temp_dir, ip, port): +def launch_git_daemon(base_path, ip, port): from git import Git if is_win: ## On MINGW-git, daemon exists in .\Git\mingw64\libexec\git-core\, # but if invoked as 'git daemon', it detaches from parent `git` cmd, # and then CANNOT DIE! 
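# Recap of the post-clone fix-up above (a sketch; `cls`, `path` and `odbt` as
# in Repo._clone): the first remote's URL is rewritten through
# Git.polish_url() so its backslashes are not re-escaped in .git/config.
repo = cls(path, odbt=odbt)
if repo.remotes:
    with repo.remotes[0].config_writer as writer:
        writer.set_value('url', Git.polish_url(repo.remotes[0].url))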
# So, invoke it as a single command. - ## Cygwin-git has no daemon. + ## Cygwin-git has no daemon. But it can use MINGW's. # - daemon_cmd = ['git-daemon', temp_dir, + daemon_cmd = ['git-daemon', '--enable=receive-pack', '--listen=%s' % ip, - '--port=%s' % port] + '--port=%s' % port, + '--base-path=%s' % base_path, + base_path] gd = Git().execute(daemon_cmd, as_process=True) else: - gd = Git().daemon(temp_dir, + gd = Git().daemon(base_path, enable='receive-pack', listen=ip, port=port, + base_path=base_path, as_process=True) + # yes, I know ... fortunately, this is always going to work if sleep time is just large enough + time.sleep(0.5) return gd @@ -212,15 +203,16 @@ def case(self, rw_repo, rw_remote_repo) See working dir info in with_rw_repo :note: We attempt to launch our own invocation of git-daemon, which will be shutdown at the end of the test. """ - from git import Remote, GitCommandError + from git import Git, Remote # To avoid circular deps. + assert isinstance(working_tree_ref, string_types), "Decorator requires ref name for working tree checkout" def argument_passer(func): @wraps(func) def remote_repo_creator(self): - remote_repo_dir = _mktemp("remote_repo_%s" % func.__name__) - repo_dir = _mktemp("remote_clone_non_bare_repo") + remote_repo_dir = tempfile.mktemp("remote_repo_%s" % func.__name__) + repo_dir = tempfile.mktemp("remote_clone_non_bare_repo") rw_remote_repo = self.rorepo.clone(remote_repo_dir, shared=True, bare=True) # recursive alternates info ? @@ -240,54 +232,38 @@ def remote_repo_creator(self): pass crw.set(section, "receivepack", True) - # initialize the remote - first do it as local remote and pull, then - # we change the url to point to the daemon. The daemon should be started - # by the user, not by us + # Initialize the remote - first do it as local remote and pull, then + # we change the url to point to the daemon. d_remote = Remote.create(rw_repo, "daemon_origin", remote_repo_dir) d_remote.fetch() - remote_repo_url = "git://localhost:%s%s" % (GIT_DAEMON_PORT, remote_repo_dir) + base_path, rel_repo_dir = osp.split(remote_repo_dir) + + remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir)) with d_remote.config_writer as cw: cw.set('url', remote_repo_url) - temp_dir = osp(_mktemp()) - gd = launch_git_daemon(temp_dir, '127.0.0.1', GIT_DAEMON_PORT) try: - # yes, I know ... fortunately, this is always going to work if sleep time is just large enough - time.sleep(0.5) - # end - - # try to list remotes to diagnoes whether the server is up - try: - rw_repo.git.ls_remote(d_remote) - except GitCommandError as e: - # We assume in good faith that we didn't start the daemon - but make sure we kill it anyway - # Of course we expect it to work here already, but maybe there are timing constraints - # on some platforms ? 
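# How the reworked test helpers above address repositories: git-daemon now
# runs with --base-path, so clients name a repository by its path relative to
# that base.  `remote_repo_dir` and `GIT_DAEMON_PORT` are the helper's own
# names; the flow below mirrors with_rw_and_rw_remote_repo.
base_path, rel_repo_dir = osp.split(remote_repo_dir)
gd = launch_git_daemon(Git.polish_url(base_path), '127.0.0.1', GIT_DAEMON_PORT)
remote_repo_url = Git.polish_url(
    "git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir))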
- try: - gd.proc.terminate() - except Exception as ex: - log.debug("Ignoring %r while terminating proc after %r.", ex, e) - log.warning('git(%s) ls-remote failed due to:%s', - rw_repo.git_dir, e) - if is_win: - msg = textwrap.dedent(""" - MINGW yet has problems with paths, and `git-daemon.exe` must be in PATH - (look into .\Git\mingw64\libexec\git-core\); - CYGWIN has no daemon, but if one exists, it gets along fine (has also paths problems) - Anyhow, alternatively try starting `git-daemon` manually:""") - else: - msg = "Please try starting `git-daemon` manually:" - - msg += textwrap.dedent(""" - git daemon --enable=receive-pack '%s' - You can also run the daemon on a different port by passing --port=" - and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to - """ % temp_dir) - from unittest import SkipTest - raise SkipTest(msg) if is_win else AssertionError(msg) - # END make assertion - # END catch ls remote error + gd = launch_git_daemon(Git.polish_url(base_path), '127.0.0.1', GIT_DAEMON_PORT) + except Exception as ex: + if is_win: + msg = textwrap.dedent(""" + The `git-daemon.exe` must be in PATH. + For MINGW, look into .\Git\mingw64\libexec\git-core\), but problems with paths might appear. + CYGWIN has no daemon, but if one exists, it gets along fine (has also paths problems) + Anyhow, alternatively try starting `git-daemon` manually:""") + else: + msg = "Please try starting `git-daemon` manually:" + msg += textwrap.dedent(""" + git daemon --enable=receive-pack --base-path=%s %s + You can also run the daemon on a different port by passing --port=" + and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to + """ % (base_path, base_path)) + raise AssertionError(ex, msg) + # END make assertion + else: + # Try listing remotes, to diagnose whether the daemon is up. + rw_repo.git.ls_remote(d_remote) # adjust working dir prev_cwd = os.getcwd() @@ -305,6 +281,7 @@ def remote_repo_creator(self): finally: try: + log.debug("Killing git-daemon...") gd.proc.kill() except: ## Either it has died (and we're here), or it won't die, again here... @@ -352,9 +329,13 @@ class TestBase(TestCase): of the project history ( to assure tests don't fail for others ). """ + if not PY3: + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + def _small_repo_url(self): """:return" a path to a small, clonable repository""" - return os.path.join(self.rorepo.working_tree_dir, 'git/ext/gitdb/gitdb/ext/smmap') + from git.cmd import Git + return Git.polish_url(osp.join(self.rorepo.working_tree_dir, 'git/ext/gitdb/gitdb/ext/smmap')) @classmethod def setUpClass(cls): @@ -378,7 +359,7 @@ def _make_file(self, rela_path, data, repo=None): with the given data. Returns absolute path to created file. 
""" repo = repo or self.rorepo - abs_path = os.path.join(repo.working_tree_dir, rela_path) + abs_path = osp.join(repo.working_tree_dir, rela_path) with open(abs_path, "w") as fp: fp.write(data) return abs_path diff --git a/git/test/test_base.py b/git/test/test_base.py index 2956f3d48..7fc3096f3 100644 --- a/git/test/test_base.py +++ b/git/test/test_base.py @@ -41,7 +41,7 @@ def tearDown(self): def test_base_object(self): # test interface of base object classes types = (Blob, Tree, Commit, TagObject) - assert len(types) == len(self.type_tuples) + self.assertEqual(len(types), len(self.type_tuples)) s = set() num_objs = 0 @@ -55,12 +55,12 @@ def test_base_object(self): item = obj_type(self.rorepo, binsha, 0, path) # END handle index objects num_objs += 1 - assert item.hexsha == hexsha - assert item.type == typename + self.assertEqual(item.hexsha, hexsha) + self.assertEqual(item.type, typename) assert item.size - assert item == item - assert not item != item - assert str(item) == item.hexsha + self.assertEqual(item, item) + self.assertNotEqual(not item, item) + self.assertEqual(str(item), item.hexsha) assert repr(item) s.add(item) @@ -78,16 +78,16 @@ def test_base_object(self): tmpfilename = tempfile.mktemp(suffix='test-stream') with open(tmpfilename, 'wb+') as tmpfile: - assert item == item.stream_data(tmpfile) + self.assertEqual(item, item.stream_data(tmpfile)) tmpfile.seek(0) - assert tmpfile.read() == data + self.assertEqual(tmpfile.read(), data) os.remove(tmpfilename) # END for each object type to create # each has a unique sha - assert len(s) == num_objs - assert len(s | s) == num_objs - assert num_index_objs == 2 + self.assertEqual(len(s), num_objs) + self.assertEqual(len(s | s), num_objs) + self.assertEqual(num_index_objs, 2) def test_get_object_type_by_name(self): for tname in base.Object.TYPES: @@ -98,7 +98,7 @@ def test_get_object_type_by_name(self): def test_object_resolution(self): # objects must be resolved to shas so they compare equal - assert self.rorepo.head.reference.object == self.rorepo.active_branch.object + self.assertEqual(self.rorepo.head.reference.object, self.rorepo.active_branch.object) @with_rw_repo('HEAD', bare=True) def test_with_bare_rw_repo(self, bare_rw_repo): @@ -110,17 +110,7 @@ def test_with_rw_repo(self, rw_repo): assert not rw_repo.config_reader("repository").getboolean("core", "bare") assert os.path.isdir(os.path.join(rw_repo.working_tree_dir, 'lib')) - # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, """ - # FIXME: helper.wrapper fails with: - # PermissionError: [WinError 5] Access is denied: - # 'C:\\Users\\appveyor\\AppData\\Local\\Temp\\1\\test_work_tree_unsupportedryfa60di\\ - # master_repo\\.git\\objects\\pack\\pack-bc9e0787aef9f69e1591ef38ea0a6f566ec66fe3.idx' - # AND - # FIXME: git-daemon failing with: - # git.exc.GitCommandError: Cmd('git') failed due to: exit code(128) - # cmdline: git ls-remote daemon_origin - # stderr: 'fatal: bad config line 15 in file .git/config' - # """) + #@skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes! sometimes...") @with_rw_and_rw_remote_repo('0.1.6') def test_with_rw_remote_and_rw_repo(self, rw_repo, rw_remote_repo): assert not rw_repo.config_reader("repository").getboolean("core", "bare") diff --git a/git/test/test_docs.py b/git/test/test_docs.py index e2bfcb21f..f3c75f79f 100644 --- a/git/test/test_docs.py +++ b/git/test/test_docs.py @@ -16,7 +16,7 @@ def tearDown(self): import gc gc.collect() - # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, + # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, ## ACTUALLY skipped by `git.submodule.base#L869`. 
# "FIXME: helper.wrapper fails with: PermissionError: [WinError 5] Access is denied: " # "'C:\\Users\\appveyor\\AppData\\Local\\Temp\\1\\test_work_tree_unsupportedryfa60di\\master_repo\\.git\\objects\\pack\\pack-bc9e0787aef9f69e1591ef38ea0a6f566ec66fe3.idx") # noqa E501 @with_rw_directory diff --git a/git/test/test_git.py b/git/test/test_git.py index 58ee8e9c4..bd8ebee2c 100644 --- a/git/test/test_git.py +++ b/git/test/test_git.py @@ -207,18 +207,15 @@ def test_environment(self, rw_dir): rw_repo = Repo.init(os.path.join(rw_dir, 'repo')) remote = rw_repo.create_remote('ssh-origin', "ssh://git@server/foo") - # This only works if we are not evaluating git-push/pull output in a thread ! - import select - if hasattr(select, 'poll'): - with rw_repo.git.custom_environment(GIT_SSH=path): - try: - remote.fetch() - except GitCommandError as err: - if sys.version_info[0] < 3 and is_darwin: - self.assertIn('ssh-orig, ' in str(err)) - self.assertEqual(err.status, 128) - else: - self.assertIn('FOO', str(err)) + with rw_repo.git.custom_environment(GIT_SSH=path): + try: + remote.fetch() + except GitCommandError as err: + if sys.version_info[0] < 3 and is_darwin: + self.assertIn('ssh-orig, ' in str(err)) + self.assertEqual(err.status, 128) + else: + self.assertIn('FOO', str(err)) def test_handle_process_output(self): from git.cmd import handle_process_output diff --git a/git/test/test_remote.py b/git/test/test_remote.py index e0b00e0c5..8b50ea35c 100644 --- a/git/test/test_remote.py +++ b/git/test/test_remote.py @@ -4,14 +4,10 @@ # This module is part of GitPython and is released under # the BSD License: http://www.opensource.org/licenses/bsd-license.php -from git.test.lib import ( - TestBase, - with_rw_repo, - with_rw_and_rw_remote_repo, - fixture, - GIT_DAEMON_PORT, - assert_raises -) +import random +import tempfile +from unittest.case import skipIf + from git import ( RemoteProgress, FetchInfo, @@ -25,11 +21,19 @@ Remote, GitCommandError ) -from git.util import IterableList, rmtree +from git.cmd import Git from git.compat import string_types -import tempfile -import os -import random +from git.test.lib import ( + TestBase, + with_rw_repo, + with_rw_and_rw_remote_repo, + fixture, + GIT_DAEMON_PORT, + assert_raises +) +from git.util import IterableList, rmtree, HIDE_WINDOWS_FREEZE_ERRORS +import os.path as osp + # assure we have repeatable results random.seed(0) @@ -86,7 +90,7 @@ def make_assertion(self): return # sometimes objects are not compressed which is okay - assert len(self._seen_ops) in (2, 3) + assert len(self._seen_ops) in (2, 3), len(self._seen_ops) assert self._stages_per_op # must have seen all stages @@ -105,45 +109,47 @@ def tearDown(self): gc.collect() def _print_fetchhead(self, repo): - with open(os.path.join(repo.git_dir, "FETCH_HEAD")): + with open(osp.join(repo.git_dir, "FETCH_HEAD")): pass def _do_test_fetch_result(self, results, remote): # self._print_fetchhead(remote.repo) - assert len(results) > 0 and isinstance(results[0], FetchInfo) + self.assertGreater(len(results), 0) + self.assertIsInstance(results[0], FetchInfo) for info in results: - assert isinstance(info.note, string_types) + self.assertIsInstance(info.note, string_types) if isinstance(info.ref, Reference): - assert info.flags != 0 + self.assertTrue(info.flags) # END reference type flags handling - assert isinstance(info.ref, (SymbolicReference, Reference)) + self.assertIsInstance(info.ref, (SymbolicReference, Reference)) if info.flags & (info.FORCED_UPDATE | info.FAST_FORWARD): - assert isinstance(info.old_commit, 
Commit) + self.assertIsInstance(info.old_commit, Commit) else: - assert info.old_commit is None + self.assertIsNone(info.old_commit) # END forced update checking # END for each info def _do_test_push_result(self, results, remote): - assert len(results) > 0 and isinstance(results[0], PushInfo) + self.assertGreater(len(results), 0) + self.assertIsInstance(results[0], PushInfo) for info in results: - assert info.flags - assert isinstance(info.summary, string_types) + self.assertTrue(info.flags) + self.assertIsInstance(info.summary, string_types) if info.old_commit is not None: - assert isinstance(info.old_commit, Commit) + self.assertIsInstance(info.old_commit, Commit) if info.flags & info.ERROR: has_one = False for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE): has_one |= bool(info.flags & bitflag) # END for each bitflag - assert has_one + self.assertTrue(has_one) else: # there must be a remote commit if info.flags & info.DELETED == 0: - assert isinstance(info.local_ref, Reference) + self.assertIsInstance(info.local_ref, Reference) else: - assert info.local_ref is None - assert type(info.remote_ref) in (TagReference, RemoteReference) + self.assertIsNone(info.local_ref) + self.assertIn(type(info.remote_ref), (TagReference, RemoteReference)) # END error checking # END for each info @@ -156,7 +162,7 @@ def _commit_random_file(self, repo): # Create a file with a random name and random data and commit it to repo. # Return the commited absolute file path index = repo.index - new_file = self._make_file(os.path.basename(tempfile.mktemp()), str(random.random()), repo) + new_file = self._make_file(osp.basename(tempfile.mktemp()), str(random.random()), repo) index.add([new_file]) index.commit("Committing %s" % new_file) return new_file @@ -183,7 +189,7 @@ def get_info(res, remote, name): res = fetch_and_test(remote) # all up to date for info in res: - assert info.flags & info.HEAD_UPTODATE + self.assertTrue(info.flags & info.HEAD_UPTODATE) # rewind remote head to trigger rejection # index must be false as remote is a bare repo @@ -193,24 +199,25 @@ def get_info(res, remote, name): res = fetch_and_test(remote) mkey = "%s/%s" % (remote, 'master') master_info = res[mkey] - assert master_info.flags & FetchInfo.FORCED_UPDATE and master_info.note is not None + self.assertTrue(master_info.flags & FetchInfo.FORCED_UPDATE) + self.assertIsNotNone(master_info.note) # normal fast forward - set head back to previous one rhead.commit = remote_commit res = fetch_and_test(remote) - assert res[mkey].flags & FetchInfo.FAST_FORWARD + self.assertTrue(res[mkey].flags & FetchInfo.FAST_FORWARD) # new remote branch new_remote_branch = Head.create(remote_repo, "new_branch") res = fetch_and_test(remote) new_branch_info = get_info(res, remote, new_remote_branch) - assert new_branch_info.flags & FetchInfo.NEW_HEAD + self.assertTrue(new_branch_info.flags & FetchInfo.NEW_HEAD) # remote branch rename ( causes creation of a new one locally ) new_remote_branch.rename("other_branch_name") res = fetch_and_test(remote) other_branch_info = get_info(res, remote, new_remote_branch) - assert other_branch_info.ref.commit == new_branch_info.ref.commit + self.assertEqual(other_branch_info.ref.commit, new_branch_info.ref.commit) # remove new branch Head.delete(new_remote_branch.repo, new_remote_branch) @@ -220,35 +227,38 @@ def get_info(res, remote, name): # prune stale tracking branches stale_refs = remote.stale_refs - assert len(stale_refs) == 2 and isinstance(stale_refs[0], RemoteReference) + 
self.assertEqual(len(stale_refs), 2) + self.assertIsInstance(stale_refs[0], RemoteReference) RemoteReference.delete(rw_repo, *stale_refs) # test single branch fetch with refspec including target remote res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master" % remote) - assert len(res) == 1 and get_info(res, remote, 'master') + self.assertEqual(len(res), 1) + self.assertTrue(get_info(res, remote, 'master')) # ... with respec and no target res = fetch_and_test(remote, refspec='master') - assert len(res) == 1 + self.assertEqual(len(res), 1) # ... multiple refspecs ... works, but git command returns with error if one ref is wrong without # doing anything. This is new in later binaries # res = fetch_and_test(remote, refspec=['master', 'fred']) - # assert len(res) == 1 + # self.assertEqual(len(res), 1) # add new tag reference rtag = TagReference.create(remote_repo, "1.0-RV_hello.there") res = fetch_and_test(remote, tags=True) tinfo = res[str(rtag)] - assert isinstance(tinfo.ref, TagReference) and tinfo.ref.commit == rtag.commit - assert tinfo.flags & tinfo.NEW_TAG + self.assertIsInstance(tinfo.ref, TagReference) + self.assertEqual(tinfo.ref.commit, rtag.commit) + self.assertTrue(tinfo.flags & tinfo.NEW_TAG) # adjust tag commit Reference.set_object(rtag, rhead.commit.parents[0].parents[0]) res = fetch_and_test(remote, tags=True) tinfo = res[str(rtag)] - assert tinfo.commit == rtag.commit - assert tinfo.flags & tinfo.TAG_UPDATE + self.assertEqual(tinfo.commit, rtag.commit) + self.assertTrue(tinfo.flags & tinfo.TAG_UPDATE) # delete remote tag - local one will stay TagReference.delete(remote_repo, rtag) @@ -263,7 +273,8 @@ def get_info(res, remote, name): # must clone with a local path for the repo implementation not to freak out # as it wants local paths only ( which I can understand ) other_repo = remote_repo.clone(other_repo_dir, shared=False) - remote_repo_url = "git://localhost:%s%s" % (GIT_DAEMON_PORT, remote_repo.git_dir) + remote_repo_url = osp.basename(remote_repo.git_dir) # git-daemon runs with appropriate `--base-path`. 
+ remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, remote_repo_url)) # put origin to git-url other_origin = other_repo.remotes.origin @@ -309,21 +320,21 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): self._commit_random_file(rw_repo) progress = TestRemoteProgress() res = remote.push(lhead.reference, progress) - assert isinstance(res, IterableList) + self.assertIsInstance(res, IterableList) self._do_test_push_result(res, remote) progress.make_assertion() # rejected - undo last commit lhead.reset("HEAD~1") res = remote.push(lhead.reference) - assert res[0].flags & PushInfo.ERROR - assert res[0].flags & PushInfo.REJECTED + self.assertTrue(res[0].flags & PushInfo.ERROR) + self.assertTrue(res[0].flags & PushInfo.REJECTED) self._do_test_push_result(res, remote) # force rejected pull res = remote.push('+%s' % lhead.reference) - assert res[0].flags & PushInfo.ERROR == 0 - assert res[0].flags & PushInfo.FORCED_UPDATE + self.assertEqual(res[0].flags & PushInfo.ERROR, 0) + self.assertTrue(res[0].flags & PushInfo.FORCED_UPDATE) self._do_test_push_result(res, remote) # invalid refspec @@ -335,7 +346,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): new_tag = TagReference.create(rw_repo, to_be_updated) # @UnusedVariable other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", message="my message") res = remote.push(progress=progress, tags=True) - assert res[-1].flags & PushInfo.NEW_TAG + self.assertTrue(res[-1].flags & PushInfo.NEW_TAG) progress.make_assertion() self._do_test_push_result(res, remote) @@ -344,16 +355,18 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): new_tag = TagReference.create(rw_repo, to_be_updated, ref='HEAD~1', force=True) res = remote.push(tags=True) self._do_test_push_result(res, remote) - assert res[-1].flags & PushInfo.REJECTED and res[-1].flags & PushInfo.ERROR + self.assertTrue(res[-1].flags & PushInfo.REJECTED) + self.assertTrue(res[-1].flags & PushInfo.ERROR) # push force this tag res = remote.push("+%s" % new_tag.path) - assert res[-1].flags & PushInfo.ERROR == 0 and res[-1].flags & PushInfo.FORCED_UPDATE + self.assertEqual(res[-1].flags & PushInfo.ERROR, 0) + self.assertTrue(res[-1].flags & PushInfo.FORCED_UPDATE) # delete tag - have to do it using refspec res = remote.push(":%s" % new_tag.path) self._do_test_push_result(res, remote) - assert res[0].flags & PushInfo.DELETED + self.assertTrue(res[0].flags & PushInfo.DELETED) # Currently progress is not properly transferred, especially not using # the git daemon # progress.assert_received_message() @@ -362,8 +375,8 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): new_head = Head.create(rw_repo, "my_new_branch") progress = TestRemoteProgress() res = remote.push(new_head, progress) - assert len(res) > 0 - assert res[0].flags & PushInfo.NEW_HEAD + self.assertGreater(len(res), 0) + self.assertTrue(res[0].flags & PushInfo.NEW_HEAD) progress.make_assertion() self._do_test_push_result(res, remote) @@ -371,7 +384,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): res = remote.push(":%s" % new_head.path) self._do_test_push_result(res, remote) Head.delete(rw_repo, new_head) - assert res[-1].flags & PushInfo.DELETED + self.assertTrue(res[-1].flags & PushInfo.DELETED) # --all res = remote.push(all=True) @@ -384,12 +397,7 @@ def _assert_push_and_pull(self, remote, rw_repo, remote_repo): TagReference.delete(rw_repo, new_tag, other_tag) remote.push(":%s" % other_tag.path) - # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, """ - # 
FIXME: git-daemon failing with: - # git.exc.GitCommandError: Cmd('git') failed due to: exit code(128) - # cmdline: git ls-remote daemon_origin - # stderr: 'fatal: bad config line 15 in file .git/config' - # """) + @skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes!") @with_rw_and_rw_remote_repo('0.1.6') def test_base(self, rw_repo, remote_repo): num_remotes = 0 @@ -398,17 +406,16 @@ def test_base(self, rw_repo, remote_repo): for remote in rw_repo.remotes: num_remotes += 1 - assert remote == remote - assert str(remote) != repr(remote) + self.assertEqual(remote, remote) + self.assertNotEqual(str(remote), repr(remote)) remote_set.add(remote) remote_set.add(remote) # should already exist - # REFS refs = remote.refs - assert refs + self.assertTrue(refs) for ref in refs: - assert ref.remote_name == remote.name - assert ref.remote_head + self.assertEqual(ref.remote_name, remote.name) + self.assertTrue(ref.remote_head) # END for each ref # OPTIONS @@ -435,11 +442,11 @@ def test_base(self, rw_repo, remote_repo): # RENAME other_name = "totally_other_name" prev_name = remote.name - assert remote.rename(other_name) == remote - assert prev_name != remote.name + self.assertEqual(remote.rename(other_name), remote) + self.assertNotEqual(prev_name, remote.name) # multiple times for _ in range(2): - assert remote.rename(prev_name).name == prev_name + self.assertEqual(remote.rename(prev_name).name, prev_name) # END for each rename ( back to prev_name ) # PUSH/PULL TESTING @@ -456,9 +463,9 @@ def test_base(self, rw_repo, remote_repo): remote.update() # END for each remote - assert ran_fetch_test - assert num_remotes - assert num_remotes == len(remote_set) + self.assertTrue(ran_fetch_test) + self.assertTrue(num_remotes) + self.assertEqual(num_remotes, len(remote_set)) origin = rw_repo.remote('origin') assert origin == rw_repo.remotes.origin @@ -478,23 +485,23 @@ def test_base(self, rw_repo, remote_repo): num_deleted += 1 # end # end for each branch - assert num_deleted > 0 - assert len(rw_repo.remotes.origin.fetch(prune=True)) == 1, "deleted everything but master" + self.assertGreater(num_deleted, 0) + self.assertEqual(len(rw_repo.remotes.origin.fetch(prune=True)), 1, "deleted everything but master") @with_rw_repo('HEAD', bare=True) def test_creation_and_removal(self, bare_rw_repo): new_name = "test_new_one" arg_list = (new_name, "git@server:hello.git") remote = Remote.create(bare_rw_repo, *arg_list) - assert remote.name == "test_new_one" - assert remote in bare_rw_repo.remotes - assert remote.exists() + self.assertEqual(remote.name, "test_new_one") + self.assertIn(remote, bare_rw_repo.remotes) + self.assertTrue(remote.exists()) # create same one again self.failUnlessRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list) Remote.remove(bare_rw_repo, new_name) - assert remote.exists() # We still have a cache that doesn't know we were deleted by name + self.assertTrue(remote.exists()) # We still have a cache that doesn't know we were deleted by name remote._clear_cache() assert not remote.exists() # Cache should be renewed now. This is an issue ... 
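# Sketch of the new skip flag used above: HIDE_WINDOWS_FREEZE_ERRORS (defined
# in git.util later in this diff) defaults to HIDE_WINDOWS_KNOWN_ERRORS, is
# only ever truthy on Windows, and can be overridden via the environment.
# The test name below is made up.
from unittest.case import skipIf
from git.test.lib import TestBase
from git.util import HIDE_WINDOWS_FREEZE_ERRORS

class ExampleTest(TestBase):

    @skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes! sometimes...")
    def test_something_that_freezes_on_windows(self):
        pass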
@@ -521,7 +528,7 @@ def test_fetch_info(self): remote_info_line_fmt % "local/master", fetch_info_line_fmt % 'remote-tracking branch') assert not fi.ref.is_valid() - assert fi.ref.name == "local/master" + self.assertEqual(fi.ref.name, "local/master") # handles non-default refspecs: One can specify a different path in refs/remotes # or a special path just in refs/something for instance @@ -530,16 +537,16 @@ def test_fetch_info(self): remote_info_line_fmt % "subdir/tagname", fetch_info_line_fmt % 'tag') - assert isinstance(fi.ref, TagReference) - assert fi.ref.path.startswith('refs/tags') + self.assertIsInstance(fi.ref, TagReference) + assert fi.ref.path.startswith('refs/tags'), fi.ref.path # it could be in a remote direcftory though fi = FetchInfo._from_line(self.rorepo, remote_info_line_fmt % "remotename/tags/tagname", fetch_info_line_fmt % 'tag') - assert isinstance(fi.ref, TagReference) - assert fi.ref.path.startswith('refs/remotes/') + self.assertIsInstance(fi.ref, TagReference) + assert fi.ref.path.startswith('refs/remotes/'), fi.ref.path # it can also be anywhere ! tag_path = "refs/something/remotename/tags/tagname" @@ -547,24 +554,24 @@ def test_fetch_info(self): remote_info_line_fmt % tag_path, fetch_info_line_fmt % 'tag') - assert isinstance(fi.ref, TagReference) - assert fi.ref.path == tag_path + self.assertIsInstance(fi.ref, TagReference) + self.assertEqual(fi.ref.path, tag_path) # branches default to refs/remotes fi = FetchInfo._from_line(self.rorepo, remote_info_line_fmt % "remotename/branch", fetch_info_line_fmt % 'branch') - assert isinstance(fi.ref, RemoteReference) - assert fi.ref.remote_name == 'remotename' + self.assertIsInstance(fi.ref, RemoteReference) + self.assertEqual(fi.ref.remote_name, 'remotename') # but you can force it anywhere, in which case we only have a references fi = FetchInfo._from_line(self.rorepo, remote_info_line_fmt % "refs/something/branch", fetch_info_line_fmt % 'branch') - assert type(fi.ref) is Reference - assert fi.ref.path == "refs/something/branch" + assert type(fi.ref) is Reference, type(fi.ref) + self.assertEqual(fi.ref.path, "refs/something/branch") def test_uncommon_branch_names(self): stderr_lines = fixture('uncommon_branch_prefix_stderr').decode('ascii').splitlines() @@ -574,10 +581,10 @@ def test_uncommon_branch_names(self): # +refs/pull/*:refs/heads/pull/* res = [FetchInfo._from_line('ShouldntMatterRepo', stderr, fetch_line) for stderr, fetch_line in zip(stderr_lines, fetch_lines)] - assert len(res) - assert res[0].remote_ref_path == 'refs/pull/1/head' - assert res[0].ref.path == 'refs/heads/pull/1/head' - assert isinstance(res[0].ref, Head) + self.assertGreater(len(res), 0) + self.assertEqual(res[0].remote_ref_path, 'refs/pull/1/head') + self.assertEqual(res[0].ref.path, 'refs/heads/pull/1/head') + self.assertIsInstance(res[0].ref, Head) @with_rw_repo('HEAD', bare=False) def test_multiple_urls(self, rw_repo): @@ -589,22 +596,22 @@ def test_multiple_urls(self, rw_repo): remote = rw_repo.remotes[0] # Testing setting a single URL remote.set_url(test1) - assert list(remote.urls) == [test1] + self.assertEqual(list(remote.urls), [test1]) # Testing replacing that single URL remote.set_url(test1) - assert list(remote.urls) == [test1] + self.assertEqual(list(remote.urls), [test1]) # Testing adding new URLs remote.set_url(test2, add=True) - assert list(remote.urls) == [test1, test2] + self.assertEqual(list(remote.urls), [test1, test2]) remote.set_url(test3, add=True) - assert list(remote.urls) == [test1, test2, test3] + 
self.assertEqual(list(remote.urls), [test1, test2, test3]) # Testing removing an URL remote.set_url(test2, delete=True) - assert list(remote.urls) == [test1, test3] + self.assertEqual(list(remote.urls), [test1, test3]) # Testing changing an URL remote.set_url(test3, test2) - assert list(remote.urls) == [test1, test2] + self.assertEqual(list(remote.urls), [test1, test2]) # will raise: fatal: --add --delete doesn't make sense assert_raises(GitCommandError, remote.set_url, test2, add=True, delete=True) @@ -612,13 +619,24 @@ def test_multiple_urls(self, rw_repo): # Testing on another remote, with the add/delete URL remote = rw_repo.create_remote('another', url=test1) remote.add_url(test2) - assert list(remote.urls) == [test1, test2] + self.assertEqual(list(remote.urls), [test1, test2]) remote.add_url(test3) - assert list(remote.urls) == [test1, test2, test3] + self.assertEqual(list(remote.urls), [test1, test2, test3]) # Testing removing all the URLs remote.delete_url(test2) - assert list(remote.urls) == [test1, test3] + self.assertEqual(list(remote.urls), [test1, test3]) remote.delete_url(test1) - assert list(remote.urls) == [test3] + self.assertEqual(list(remote.urls), [test3]) # will raise fatal: Will not delete all non-push URLs assert_raises(GitCommandError, remote.delete_url, test3) + + def test_fetch_error(self): + rem = self.rorepo.remote('origin') + with self.assertRaisesRegex(GitCommandError, "Couldn't find remote ref __BAD_REF__"): + rem.fetch('__BAD_REF__') + + @with_rw_repo('0.1.6', bare=False) + def test_push_error(self, repo): + rem = repo.remote('origin') + with self.assertRaisesRegex(GitCommandError, "src refspec __BAD_REF__ does not match any"): + rem.push('__BAD_REF__') diff --git a/git/test/test_repo.py b/git/test/test_repo.py index 2c0847e19..a0a6a5b00 100644 --- a/git/test/test_repo.py +++ b/git/test/test_repo.py @@ -472,12 +472,16 @@ def test_creation_deletion(self): head = self.rorepo.create_head("new_head", "HEAD~1") self.rorepo.delete_head(head) - tag = self.rorepo.create_tag("new_tag", "HEAD~2") - self.rorepo.delete_tag(tag) + try: + tag = self.rorepo.create_tag("new_tag", "HEAD~2") + finally: + self.rorepo.delete_tag(tag) with self.rorepo.config_writer(): pass - remote = self.rorepo.create_remote("new_remote", "git@server:repo.git") - self.rorepo.delete_remote(remote) + try: + remote = self.rorepo.create_remote("new_remote", "git@server:repo.git") + finally: + self.rorepo.delete_remote(remote) def test_comparison_and_hash(self): # this is only a preliminary test, more testing done in test_index diff --git a/git/test/test_submodule.py b/git/test/test_submodule.py index e935017fb..9db4f9c90 100644 --- a/git/test/test_submodule.py +++ b/git/test/test_submodule.py @@ -5,6 +5,7 @@ from unittest.case import skipIf import git +from git.cmd import Git from git.compat import string_types, is_win from git.exc import ( InvalidGitRepositoryError, @@ -23,6 +24,7 @@ from git.test.lib import with_rw_directory from git.util import HIDE_WINDOWS_KNOWN_ERRORS from git.util import to_native_path_linux, join_path_native +import os.path as osp # Change the configuration if possible to prevent the underlying memory manager @@ -111,7 +113,7 @@ def _do_base_tests(self, rwrepo): else: with sm.config_writer() as writer: # for faster checkout, set the url to the local path - new_smclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path)) + new_smclone_path = Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path)) writer.set_value('url', new_smclone_path) 
writer.release() assert sm.config_reader().get_value('url') == new_smclone_path @@ -168,7 +170,7 @@ def _do_base_tests(self, rwrepo): ################# # lets update it - its a recursive one too - newdir = os.path.join(sm.abspath, 'dir') + newdir = osp.join(sm.abspath, 'dir') os.makedirs(newdir) # update fails if the path already exists non-empty @@ -213,7 +215,7 @@ def _do_base_tests(self, rwrepo): csm_repopath = csm.path # adjust the path of the submodules module to point to the local destination - new_csmclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path, csm.path)) + new_csmclone_path = Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path, csm.path)) with csm.config_writer() as writer: writer.set_value('url', new_csmclone_path) assert csm.url == new_csmclone_path @@ -301,7 +303,7 @@ def _do_base_tests(self, rwrepo): csm.update() assert csm.module_exists() assert csm.exists() - assert os.path.isdir(csm.module().working_tree_dir) + assert osp.isdir(csm.module().working_tree_dir) # this would work assert sm.remove(force=True, dry_run=True) is sm @@ -354,7 +356,7 @@ def _do_base_tests(self, rwrepo): assert nsm.module_exists() assert nsm.exists() # its not checked out - assert not os.path.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file)) + assert not osp.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file)) assert len(rwrepo.submodules) == 1 # add another submodule, but into the root, not as submodule @@ -362,7 +364,7 @@ def _do_base_tests(self, rwrepo): assert osm != nsm assert osm.module_exists() assert osm.exists() - assert os.path.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py')) + assert osp.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py')) assert len(rwrepo.submodules) == 2 @@ -418,7 +420,7 @@ def _do_base_tests(self, rwrepo): # Error if there is no submodule file here self.failUnlessRaises(IOError, Submodule._config_parser, rwrepo, rwrepo.commit(self.k_no_subm_tag), True) - # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, + # @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, ## ACTUALLY skipped by `git.submodule.base#L869`. 
# "FIXME: fails with: PermissionError: [WinError 32] The process cannot access the file because" # "it is being used by another process: " # "'C:\\Users\\ankostis\\AppData\\Local\\Temp\\tmp95c3z83bnon_bare_test_base_rw\\git\\ext\\gitdb\\gitdb\\ext\\smmap'") # noqa E501 @@ -479,7 +481,7 @@ def test_root_module(self, rwrepo): # assure we clone from a local source with sm.config_writer() as writer: - writer.set_value('url', to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path))) + writer.set_value('url', Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path))) # dry-run does nothing sm.update(recursive=False, dry_run=True, progress=prog) @@ -513,7 +515,7 @@ def test_root_module(self, rwrepo): #================ nsmn = "newsubmodule" nsmp = "submrepo" - subrepo_url = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1])) + subrepo_url = Git.polish_url(osp.join(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1])) nsm = Submodule.add(rwrepo, nsmn, nsmp, url=subrepo_url) csmadded = rwrepo.index.commit("Added submodule").hexsha # make sure we don't keep the repo reference nsm.set_parent_commit(csmadded) @@ -535,24 +537,24 @@ def test_root_module(self, rwrepo): sm.set_parent_commit(csmadded) smp = sm.abspath assert not sm.remove(module=False).exists() - assert os.path.isdir(smp) # module still exists + assert osp.isdir(smp) # module still exists csmremoved = rwrepo.index.commit("Removed submodule") # an update will remove the module # not in dry_run rm.update(recursive=False, dry_run=True, force_remove=True) - assert os.path.isdir(smp) + assert osp.isdir(smp) # when removing submodules, we may get new commits as nested submodules are auto-committing changes # to allow deletions without force, as the index would be dirty otherwise. # QUESTION: Why does this seem to work in test_git_submodule_compatibility() ? self.failUnlessRaises(InvalidGitRepositoryError, rm.update, recursive=False, force_remove=False) rm.update(recursive=False, force_remove=True) - assert not os.path.isdir(smp) + assert not osp.isdir(smp) # 'apply work' to the nested submodule and assure this is not removed/altered during updates # Need to commit first, otherwise submodule.update wouldn't have a reason to change the head - touch(os.path.join(nsm.module().working_tree_dir, 'new-file')) + touch(osp.join(nsm.module().working_tree_dir, 'new-file')) # We cannot expect is_dirty to even run as we wouldn't reset a head to the same location assert nsm.module().head.commit.hexsha == nsm.hexsha nsm.module().index.add([nsm]) @@ -574,7 +576,7 @@ def test_root_module(self, rwrepo): # ... 
to the first repository, this way we have a fast checkout, and a completely different # repository at the different url nsm.set_parent_commit(csmremoved) - nsmurl = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0])) + nsmurl = Git.polish_url(osp.join(self.rorepo.working_tree_dir, rsmsp[0])) with nsm.config_writer() as writer: writer.set_value('url', nsmurl) csmpathchange = rwrepo.index.commit("changed url") @@ -648,21 +650,21 @@ def test_first_submodule(self, rwrepo): assert len(list(rwrepo.iter_submodules())) == 0 for sm_name, sm_path in (('first', 'submodules/first'), - ('second', os.path.join(rwrepo.working_tree_dir, 'submodules/second'))): + ('second', osp.join(rwrepo.working_tree_dir, 'submodules/second'))): sm = rwrepo.create_submodule(sm_name, sm_path, rwrepo.git_dir, no_checkout=True) assert sm.exists() and sm.module_exists() rwrepo.index.commit("Added submodule " + sm_name) # end for each submodule path to add - self.failUnlessRaises(ValueError, rwrepo.create_submodule, 'fail', os.path.expanduser('~')) + self.failUnlessRaises(ValueError, rwrepo.create_submodule, 'fail', osp.expanduser('~')) self.failUnlessRaises(ValueError, rwrepo.create_submodule, 'fail-too', - rwrepo.working_tree_dir + os.path.sep) + rwrepo.working_tree_dir + osp.sep) @with_rw_directory def test_add_empty_repo(self, rwdir): - empty_repo_dir = os.path.join(rwdir, 'empty-repo') + empty_repo_dir = osp.join(rwdir, 'empty-repo') - parent = git.Repo.init(os.path.join(rwdir, 'parent')) + parent = git.Repo.init(osp.join(rwdir, 'parent')) git.Repo.init(empty_repo_dir) for checkout_mode in range(2): @@ -673,7 +675,7 @@ def test_add_empty_repo(self, rwdir): @with_rw_directory def test_git_submodules_and_add_sm_with_new_commit(self, rwdir): - parent = git.Repo.init(os.path.join(rwdir, 'parent')) + parent = git.Repo.init(osp.join(rwdir, 'parent')) parent.git.submodule('add', self._small_repo_url(), 'module') parent.index.commit("added submodule") @@ -683,7 +685,7 @@ def test_git_submodules_and_add_sm_with_new_commit(self, rwdir): assert sm.exists() and sm.module_exists() clone = git.Repo.clone_from(self._small_repo_url(), - os.path.join(parent.working_tree_dir, 'existing-subrepository')) + osp.join(parent.working_tree_dir, 'existing-subrepository')) sm2 = parent.create_submodule('nongit-file-submodule', clone.working_tree_dir) assert len(parent.submodules) == 2 @@ -700,7 +702,7 @@ def test_git_submodules_and_add_sm_with_new_commit(self, rwdir): parent.index.commit("moved submodules") smm = sm.module() - fp = os.path.join(smm.working_tree_dir, 'empty-file') + fp = osp.join(smm.working_tree_dir, 'empty-file') with open(fp, 'w'): pass smm.git.add(fp) @@ -733,7 +735,7 @@ def test_git_submodules_and_add_sm_with_new_commit(self, rwdir): # "'C:\\Users\\appveyor\\AppData\\Local\\Temp\\1\\test_work_tree_unsupportedryfa60di\\master_repo\\.git\\objects\\pack\\pack-bc9e0787aef9f69e1591ef38ea0a6f566ec66fe3.idx") # noqa E501 @with_rw_directory def test_git_submodule_compatibility(self, rwdir): - parent = git.Repo.init(os.path.join(rwdir, 'parent')) + parent = git.Repo.init(osp.join(rwdir, 'parent')) sm_path = join_path_native('submodules', 'intermediate', 'one') sm = parent.create_submodule('mymodules/myname', sm_path, url=self._small_repo_url()) parent.index.commit("added submodule") @@ -747,13 +749,13 @@ def assert_exists(sm, value=True): # muss it up. That's the only reason why the test is still here ... . 
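# Pattern used throughout the submodule tests above, shown standalone: point a
# submodule's URL at a local checkout, run through Git.polish_url(), so
# updates stay fast and offline.  `sm` and `self.rorepo` are the fixtures
# these tests already use.
new_smclone_path = Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path))
with sm.config_writer() as writer:
    writer.set_value('url', new_smclone_path)
assert sm.config_reader().get_value('url') == new_smclone_path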
assert len(parent.git.submodule().splitlines()) == 1 - module_repo_path = os.path.join(sm.module().working_tree_dir, '.git') - assert module_repo_path.startswith(os.path.join(parent.working_tree_dir, sm_path)) + module_repo_path = osp.join(sm.module().working_tree_dir, '.git') + assert module_repo_path.startswith(osp.join(parent.working_tree_dir, sm_path)) if not sm._need_gitfile_submodules(parent.git): - assert os.path.isdir(module_repo_path) + assert osp.isdir(module_repo_path) assert not sm.module().has_separate_working_tree() else: - assert os.path.isfile(module_repo_path) + assert osp.isfile(module_repo_path) assert sm.module().has_separate_working_tree() assert find_git_dir(module_repo_path) is not None, "module pointed to by .git file must be valid" # end verify submodule 'style' @@ -803,12 +805,12 @@ def assert_exists(sm, value=True): for dry_run in (True, False): sm.remove(dry_run=dry_run, force=True) assert_exists(sm, value=dry_run) - assert os.path.isdir(sm_module_path) == dry_run + assert osp.isdir(sm_module_path) == dry_run # end for each dry-run mode @with_rw_directory def test_remove_norefs(self, rwdir): - parent = git.Repo.init(os.path.join(rwdir, 'parent')) + parent = git.Repo.init(osp.join(rwdir, 'parent')) sm_name = 'mymodules/myname' sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url()) assert sm.exists() @@ -817,7 +819,7 @@ def test_remove_norefs(self, rwdir): assert sm.repo is parent # yoh was surprised since expected sm repo!! # so created a new instance for submodule - smrepo = git.Repo(os.path.join(rwdir, 'parent', sm.path)) + smrepo = git.Repo(osp.join(rwdir, 'parent', sm.path)) # Adding a remote without fetching so would have no references smrepo.create_remote('special', 'git@server-shouldnotmatter:repo.git') # And we should be able to remove it just fine @@ -826,7 +828,7 @@ def test_remove_norefs(self, rwdir): @with_rw_directory def test_rename(self, rwdir): - parent = git.Repo.init(os.path.join(rwdir, 'parent')) + parent = git.Repo.init(osp.join(rwdir, 'parent')) sm_name = 'mymodules/myname' sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url()) parent.index.commit("Added submodule") @@ -843,7 +845,7 @@ def test_rename(self, rwdir): assert sm.exists() sm_mod = sm.module() - if os.path.isfile(os.path.join(sm_mod.working_tree_dir, '.git')) == sm._need_gitfile_submodules(parent.git): + if osp.isfile(osp.join(sm_mod.working_tree_dir, '.git')) == sm._need_gitfile_submodules(parent.git): assert sm_mod.git_dir.endswith(join_path_native('.git', 'modules', new_sm_name)) # end @@ -852,8 +854,8 @@ def test_branch_renames(self, rw_dir): # Setup initial sandbox: # parent repo has one submodule, which has all the latest changes source_url = self._small_repo_url() - sm_source_repo = git.Repo.clone_from(source_url, os.path.join(rw_dir, 'sm-source'), b='master') - parent_repo = git.Repo.init(os.path.join(rw_dir, 'parent')) + sm_source_repo = git.Repo.clone_from(source_url, osp.join(rw_dir, 'sm-source'), b='master') + parent_repo = git.Repo.init(osp.join(rw_dir, 'parent')) sm = parent_repo.create_submodule('mysubmodule', 'subdir/submodule', sm_source_repo.working_tree_dir, branch='master') parent_repo.index.commit('added submodule') @@ -862,7 +864,7 @@ def test_branch_renames(self, rw_dir): # Create feature branch with one new commit in submodule source sm_fb = sm_source_repo.create_head('feature') sm_fb.checkout() - new_file = touch(os.path.join(sm_source_repo.working_tree_dir, 'new-file')) + new_file = 
touch(osp.join(sm_source_repo.working_tree_dir, 'new-file')) sm_source_repo.index.add([new_file]) sm.repo.index.commit("added new file") @@ -888,7 +890,7 @@ def test_branch_renames(self, rw_dir): # To make it even 'harder', we shall fork and create a new commit sm_pfb = sm_source_repo.create_head('past-feature', commit='HEAD~20') sm_pfb.checkout() - sm_source_repo.index.add([touch(os.path.join(sm_source_repo.working_tree_dir, 'new-file'))]) + sm_source_repo.index.add([touch(osp.join(sm_source_repo.working_tree_dir, 'new-file'))]) sm_source_repo.index.commit("new file added, to past of '%r'" % sm_fb) # Change designated submodule checkout branch to a new commit in its own past @@ -897,7 +899,7 @@ def test_branch_renames(self, rw_dir): sm.repo.index.commit("changed submodule branch to '%s'" % sm_pfb) # Test submodule updates - must fail if submodule is dirty - touch(os.path.join(sm_mod.working_tree_dir, 'unstaged file')) + touch(osp.join(sm_mod.working_tree_dir, 'unstaged file')) # This doesn't fail as our own submodule binsha didn't change, and the reset is only triggered if # to latest revision is True. parent_repo.submodule_update(to_latest_revision=False) diff --git a/git/util.py b/git/util.py index 57e056c3a..d00de1e4b 100644 --- a/git/util.py +++ b/git/util.py @@ -17,7 +17,7 @@ from functools import wraps from git.compat import is_win -from gitdb.util import (# NOQA +from gitdb.util import (# NOQA @IgnorePep8 make_sha, LockedFD, # @UnusedImport file_contents_ro, # @UnusedImport @@ -51,6 +51,7 @@ #: so the errors marked with this var are considered "acknowledged" ones, awaiting remedy, #: till then, we wish to hide them. HIDE_WINDOWS_KNOWN_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_KNOWN_ERRORS', True) +HIDE_WINDOWS_FREEZE_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_FREEZE_ERRORS', HIDE_WINDOWS_KNOWN_ERRORS) #{ Utility Methods @@ -198,33 +199,34 @@ class RemoteProgress(object): DONE_TOKEN = 'done.' TOKEN_SEPARATOR = ', ' - __slots__ = ("_cur_line", "_seen_ops", "_error_lines") + __slots__ = ('_cur_line', + '_seen_ops', + 'error_lines', # Lines that started with 'error:' or 'fatal:'. + 'other_lines') # Lines not denoting progress (i.e.g. push-infos). re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)") re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)") def __init__(self): self._seen_ops = list() self._cur_line = None - self._error_lines = [] - - def error_lines(self): - """Returns all lines that started with error: or fatal:""" - return self._error_lines + self.error_lines = [] + self.other_lines = [] def _parse_progress_line(self, line): """Parse progress information from the given line as retrieved by git-push or git-fetch. - Lines that seem to contain an error (i.e. start with error: or fatal:) are stored - separately and can be queried using `error_lines()`. + - Lines that do not contain progress info are stored in :attr:`other_lines`. + - Lines that seem to contain an error (i.e. start with error: or fatal:) are stored + in :attr:`error_lines`. :return: list(line, ...) list of lines that could not be processed""" # handle # Counting objects: 4, done. # Compressing objects: 50% (1/2) \rCompressing objects: 100% (2/2) \rCompressing objects: 100% (2/2), done. 
self._cur_line = line - if len(self._error_lines) > 0 or self._cur_line.startswith(('error:', 'fatal:')): - self._error_lines.append(self._cur_line) + if len(self.error_lines) > 0 or self._cur_line.startswith(('error:', 'fatal:')): + self.error_lines.append(self._cur_line) return [] sub_lines = line.split('\r') @@ -283,6 +285,7 @@ def _parse_progress_line(self, line): self.line_dropped(sline) # Note: Don't add this line to the failed lines, as we have to silently # drop it + self.other_lines.extend(failed_lines) return failed_lines # END handle op code @@ -308,6 +311,7 @@ def _parse_progress_line(self, line): max_count and float(max_count), message) # END for each sub line + self.other_lines.extend(failed_lines) return failed_lines def new_message_handler(self):
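# Quick illustration of the RemoteProgress bookkeeping introduced above:
# error_lines and other_lines are now plain list attributes (the old
# error_lines() method is gone), and once an 'error:'/'fatal:' line is seen,
# every following line is collected as well.  Input lines are made up.
from git.util import RemoteProgress

p = RemoteProgress()
p._parse_progress_line('Counting objects: 4, done.')
p._parse_progress_line('error: something went wrong')
assert p.error_lines == ['error: something went wrong']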