Diffstat (limited to 'git')
-rw-r--r-- | git/compat.py          |  4
-rw-r--r-- | git/config.py          |  9
-rw-r--r-- | git/index/base.py      | 23
-rw-r--r-- | git/index/fun.py       |  2
-rw-r--r-- | git/index/typ.py       |  2
-rw-r--r-- | git/repo/base.py       |  5
-rw-r--r-- | git/test/test_index.py | 37
-rw-r--r-- | git/util.py            |  9
8 files changed, 52 insertions, 39 deletions
diff --git a/git/compat.py b/git/compat.py
index b9205418..5c330e5b 100644
--- a/git/compat.py
+++ b/git/compat.py
@@ -31,6 +31,8 @@ if PY3:
         return b
     def bchr(n):
         return bytes([n])
+    def mviter(d):
+        return d.values()
 else:
     FileType = file
     # usually, this is just ascii, which might not enough for our encoding needs
@@ -39,6 +41,8 @@ else:
         defenc = 'utf-8'
     byte_ord = ord
     bchr = chr
+    def mviter(d):
+        return d.itervalues()
 
 
 def with_metaclass(meta, *bases):
diff --git a/git/config.py b/git/config.py
index 7917bc5a..96991b84 100644
--- a/git/config.py
+++ b/git/config.py
@@ -99,6 +99,13 @@ class SectionConstraint(object):
         self._config = config
         self._section_name = section
 
+    def __del__(self):
+        # Yes, for some reason, we have to call it explicitly for it to work in PY3 !
+        # Apparently __del__ doesn't get call anymore if refcount becomes 0
+        # Ridiculous ... .
+        self._config.__del__()
+        # del self._config
+
     def __getattr__(self, attr):
         if attr in self._valid_attrs_:
             return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
@@ -193,7 +200,7 @@ class GitConfigParser(with_metaclass(MetaParserBuilder, cp.RawConfigParser, obje
         """Write pending changes if required and release locks"""
         # checking for the lock here makes sure we do not raise during write()
         # in case an invalid parser was created who could not get a lock
-        if self.read_only or not self._lock._has_lock():
+        if self.read_only or (self._lock and not self._lock._has_lock()):
             return
 
         try:
diff --git a/git/index/base.py b/git/index/base.py
index a994e7b6..cc883469 100644
--- a/git/index/base.py
+++ b/git/index/base.py
@@ -43,7 +43,9 @@ from git.compat import (
     izip,
     xrange,
     string_types,
-    force_bytes
+    force_bytes,
+    defenc,
+    mviter
 )
 
 from git.util import (
@@ -105,7 +107,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
         repository's index on demand."""
         self.repo = repo
         self.version = self._VERSION
-        self._extension_data = ''
+        self._extension_data = b''
         self._file_path = file_path or self._index_path()
 
     def _set_cache_(self, attr):
@@ -165,9 +167,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
     def _entries_sorted(self):
         """:return: list of entries, in a sorted fashion, first by path, then by
         stage"""
-        entries_sorted = self.entries.values()
-        entries_sorted.sort(key=lambda e: (e.path, e.stage))  # use path/stage as sort key
-        return entries_sorted
+        return sorted(self.entries.values(), key=lambda e: (e.path, e.stage))
 
     def _serialize(self, stream, ignore_tree_extension_data=False):
         entries = self._entries_sorted()
@@ -399,7 +399,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
         fprogress(filepath, False, item)
         rval = None
         try:
-            proc.stdin.write("%s\n" % filepath)
+            proc.stdin.write(("%s\n" % filepath).encode(defenc))
         except IOError:
             # pipe broke, usually because some error happend
             raise fmakeexc()
@@ -418,7 +418,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
             Function(t) returning True if tuple(stage, Blob) should be yielded by the
            iterator. A default filter, the BlobFilter, allows you to yield blobs only if
            they match a given list of paths. """
-        for entry in self.entries.itervalues():
+        for entry in mviter(self.entries):
             blob = entry.to_blob(self.repo)
             blob.size = entry.size
             output = (entry.stage, blob)
@@ -443,7 +443,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
         for stage, blob in self.iter_blobs(is_unmerged_blob):
             path_map.setdefault(blob.path, list()).append((stage, blob))
         # END for each unmerged blob
-        for l in path_map.itervalues():
+        for l in mviter(path_map):
             l.sort()
         return path_map
 
@@ -860,7 +860,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
 
         # parse result - first 0:n/2 lines are 'checking ', the remaining ones
         # are the 'renaming' ones which we parse
-        for ln in xrange(len(mvlines) / 2, len(mvlines)):
+        for ln in xrange(int(len(mvlines) / 2), len(mvlines)):
             tokens = mvlines[ln].split(' to ')
             assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln]
 
@@ -958,6 +958,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
             if not stderr:
                 return
             # line contents:
+            stderr = stderr.decode(defenc)
             # git-checkout-index: this already exists
             failed_files = list()
             failed_reasons = list()
@@ -1006,7 +1007,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
             proc = self.repo.git.checkout_index(*args, **kwargs)
             proc.wait()
             fprogress(None, True, None)
-            rval_iter = (e.path for e in self.entries.itervalues())
+            rval_iter = (e.path for e in mviter(self.entries))
             handle_stderr(proc, rval_iter)
             return rval_iter
         else:
@@ -1036,7 +1037,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
                     dir = co_path
                     if not dir.endswith('/'):
                         dir += '/'
-                    for entry in self.entries.itervalues():
+                    for entry in mviter(self.entries):
                         if entry.path.startswith(dir):
                             p = entry.path
                             self._write_path_to_stdin(proc, p, p, make_exc,
diff --git a/git/index/fun.py b/git/index/fun.py
index 3e66a7ba..f0dee961 100644
--- a/git/index/fun.py
+++ b/git/index/fun.py
@@ -73,7 +73,7 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
 
     # header
     version = 2
-    write("DIRC")
+    write(b"DIRC")
     write(pack(">LL", version, len(entries)))
 
     # body
diff --git a/git/index/typ.py b/git/index/typ.py
index 692e1e18..0998ecb0 100644
--- a/git/index/typ.py
+++ b/git/index/typ.py
@@ -75,7 +75,7 @@ class BaseIndexEntry(tuple):
     @property
     def hexsha(self):
         """hex version of our sha"""
-        return b2a_hex(self[1])
+        return b2a_hex(self[1]).decode('ascii')
 
     @property
     def stage(self):
diff --git a/git/repo/base.py b/git/repo/base.py
index 27c640ff..3e0e51cc 100644
--- a/git/repo/base.py
+++ b/git/repo/base.py
@@ -732,7 +732,10 @@ class Repo(object):
         # sure
         repo = cls(os.path.abspath(path), odbt=odbt)
         if repo.remotes:
-            repo.remotes[0].config_writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/"))
+            writer = repo.remotes[0].config_writer
+            writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/"))
+            # PY3: be sure cleanup is performed and lock is released
+            del writer
         # END handle remote repo
         return repo
 
diff --git a/git/test/test_index.py b/git/test/test_index.py
index 38cc3563..d81d08ef 100644
--- a/git/test/test_index.py
+++ b/git/test/test_index.py
@@ -48,7 +48,7 @@ class TestIndex(TestBase):
 
     def _assert_fprogress(self, entries):
         assert len(entries) == len(self._fprogress_map)
-        for path, call_count in self._fprogress_map.iteritems():
+        for path, call_count in self._fprogress_map.items():
             assert call_count == 2
         # END for each item in progress map
         self._reset_progress()
@@ -86,7 +86,7 @@ class TestIndex(TestBase):
         assert index.version > 0
 
         # test entry
-        entry = index.entries.itervalues().next()
+        entry = next(iter(index.entries.values()))
         for attr in ("path", "ctime", "mtime", "dev", "inode", "mode",
                      "uid", "gid", "size", "binsha", "hexsha", "stage"):
             getattr(entry, attr)
@@ -100,7 +100,7 @@ class TestIndex(TestBase):
         # test stage
         index_merge = IndexFile(self.rorepo, fixture_path("index_merge"))
         assert len(index_merge.entries) == 106
-        assert len(list(e for e in index_merge.entries.itervalues() if e.stage != 0))
+        assert len(list(e for e in index_merge.entries.values() if e.stage != 0))
 
         # write the data - it must match the original
         tmpfile = tempfile.mktemp()
@@ -167,7 +167,7 @@ class TestIndex(TestBase):
         assert unmerged_blob_map
 
         # pick the first blob at the first stage we find and use it as resolved version
-        three_way_index.resolve_blobs(l[0][1] for l in unmerged_blob_map.itervalues())
+        three_way_index.resolve_blobs(l[0][1] for l in unmerged_blob_map.values())
         tree = three_way_index.write_tree()
         assert isinstance(tree, Tree)
         num_blobs = 0
@@ -201,7 +201,7 @@ class TestIndex(TestBase):
         # Add a change with a NULL sha that should conflict with next_commit. We
         # pretend there was a change, but we do not even bother adding a proper
         # sha for it ( which makes things faster of course )
-        manifest_fake_entry = BaseIndexEntry((manifest_entry[0], "\0" * 20, 0, manifest_entry[3]))
+        manifest_fake_entry = BaseIndexEntry((manifest_entry[0], b"\0" * 20, 0, manifest_entry[3]))
         # try write flag
         self._assert_entries(rw_repo.index.add([manifest_fake_entry], write=False))
         # add actually resolves the null-hex-sha for us as a feature, but we can
@@ -236,7 +236,7 @@ class TestIndex(TestBase):
         # now make a proper three way merge with unmerged entries
         unmerged_tree = IndexFile.from_tree(rw_repo, parent_commit, tree, next_commit)
         unmerged_blobs = unmerged_tree.unmerged_blobs()
-        assert len(unmerged_blobs) == 1 and unmerged_blobs.keys()[0] == manifest_key[0]
+        assert len(unmerged_blobs) == 1 and list(unmerged_blobs.keys())[0] == manifest_key[0]
 
     @with_rw_repo('0.1.6')
     def test_index_file_diffing(self, rw_repo):
@@ -295,7 +295,7 @@ class TestIndex(TestBase):
         assert index.diff(None)
 
         # reset the working copy as well to current head,to pull 'back' as well
-        new_data = "will be reverted"
+        new_data = b"will be reverted"
         file_path = os.path.join(rw_repo.working_tree_dir, "CHANGES")
         fp = open(file_path, "wb")
         fp.write(new_data)
@@ -312,7 +312,7 @@ class TestIndex(TestBase):
 
         # test full checkout
         test_file = os.path.join(rw_repo.working_tree_dir, "CHANGES")
-        open(test_file, 'ab').write("some data")
+        open(test_file, 'ab').write(b"some data")
         rval = index.checkout(None, force=True, fprogress=self._fprogress)
         assert 'CHANGES' in list(rval)
         self._assert_fprogress([None])
@@ -336,7 +336,7 @@ class TestIndex(TestBase):
         self.failUnlessRaises(CheckoutError, index.checkout, paths=["doesnt/exist"])
 
         # checkout file with modifications
-        append_data = "hello"
+        append_data = b"hello"
         fp = open(test_file, "ab")
         fp.write(append_data)
         fp.close()
@@ -346,13 +346,13 @@ class TestIndex(TestBase):
             assert len(e.failed_files) == 1 and e.failed_files[0] == os.path.basename(test_file)
             assert (len(e.failed_files) == len(e.failed_reasons)) and isinstance(e.failed_reasons[0], string_types)
             assert len(e.valid_files) == 0
-            assert open(test_file).read().endswith(append_data)
+            assert open(test_file, 'rb').read().endswith(append_data)
         else:
             raise AssertionError("Exception CheckoutError not thrown")
 
         # if we force it it should work
         index.checkout(test_file, force=True)
-        assert not open(test_file).read().endswith(append_data)
+        assert not open(test_file, 'rb').read().endswith(append_data)
 
         # checkout directory
         shutil.rmtree(os.path.join(rw_repo.working_tree_dir, "lib"))
@@ -379,14 +379,15 @@ class TestIndex(TestBase):
 
         uname = "Some Developer"
         umail = "sd@company.com"
-        rw_repo.config_writer().set_value("user", "name", uname)
-        rw_repo.config_writer().set_value("user", "email", umail)
+        writer = rw_repo.config_writer()
+        writer.set_value("user", "name", uname)
+        writer.set_value("user", "email", umail)
 
         # remove all of the files, provide a wild mix of paths, BaseIndexEntries,
         # IndexEntries
         def mixed_iterator():
             count = 0
-            for entry in index.entries.itervalues():
+            for entry in index.entries.values():
                 type_id = count % 4
                 if type_id == 0:   # path
                     yield entry.path
@@ -500,7 +501,7 @@ class TestIndex(TestBase):
 
         # mode 0 not allowed
         null_hex_sha = Diff.NULL_HEX_SHA
-        null_bin_sha = "\0" * 20
+        null_bin_sha = b"\0" * 20
         self.failUnlessRaises(ValueError, index.reset(
             new_commit).add, [BaseIndexEntry((0, null_bin_sha, 0, "doesntmatter"))])
 
@@ -526,7 +527,7 @@ class TestIndex(TestBase):
             assert S_ISLNK(index.entries[index.entry_key("my_real_symlink", 0)].mode)
 
             # we expect only the target to be written
-            assert index.repo.odb.stream(entries[0].binsha).read() == target
+            assert index.repo.odb.stream(entries[0].binsha).read().decode('ascii') == target
         # END real symlink test
 
         # add fake symlink and assure it checks-our as symlink
@@ -618,7 +619,7 @@ class TestIndex(TestBase):
 
             for fid in range(3):
                 fname = 'newfile%i' % fid
-                open(fname, 'wb').write("abcd")
+                open(fname, 'wb').write(b"abcd")
                 yield Blob(rw_repo, Blob.NULL_BIN_SHA, 0o100644, fname)
             # END for each new file
         # END path producer
@@ -716,5 +717,5 @@ class TestIndex(TestBase):
         try:
             rw_bare_repo.index.add([path])
         except Exception as e:
-            asserted = "does not have a working tree" in e.message
+            asserted = "does not have a working tree" in str(e)
         assert asserted, "Adding using a filename is not correctly asserted."
diff --git a/git/util.py b/git/util.py
index b3a22883..4de736d3 100644
--- a/git/util.py
+++ b/git/util.py
@@ -446,7 +446,7 @@ class IndexFileSHA1Writer(object):
 
     def __init__(self, f):
         self.f = f
-        self.sha1 = make_sha("")
+        self.sha1 = make_sha(b"")
 
     def write(self, data):
         self.sha1.update(data)
@@ -490,10 +490,7 @@ class LockFile(object):
     def _has_lock(self):
         """:return: True if we have a lock and if the lockfile still exists
         :raise AssertionError: if our lock-file does not exist"""
-        if not self._owns_lock:
-            return False
-
-        return True
+        return self._owns_lock
 
     def _obtain_lock_or_raise(self):
         """Create a lock file as flag for other instances, mark our instance as lock-holder
@@ -531,7 +528,7 @@ class LockFile(object):
             # on bloody windows, the file needs write permissions to be removable.
            # Why ...
             if os.name == 'nt':
-                os.chmod(lfp, int("0777", 8))
+                os.chmod(lfp, 0o777)
             # END handle win32
             os.remove(lfp)
         except OSError:
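
The recurring change above swaps direct dict.itervalues() calls for the new mviter() helper in git/compat.py, so the same call site works on both Python 2 and Python 3. The following standalone sketch (not part of the commit; the entries dict here is only a stand-in for GitPython's index entry mapping) shows the pattern in isolation:

import sys

PY3 = sys.version_info[0] >= 3

if PY3:
    def mviter(d):
        # Python 3: values() already returns a lazy view object
        return d.values()
else:
    def mviter(d):
        # Python 2: itervalues() avoids building an intermediate list
        return d.itervalues()

# usage mirrors IndexFile.iter_blobs() / unmerged_blobs(): iterate without copying
entries = {("lib/a.py", 0): "entry-a", ("lib/b.py", 0): "entry-b"}
for entry in mviter(entries):
    print(entry)

The rewrite of _entries_sorted() follows the same reasoning: on Python 3, values() no longer returns a list, so the in-place sort() becomes a call to sorted().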
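The other theme is keeping bytes and text apart: data written to a subprocess pipe or fed to the SHA1 writer is encoded first (write(b"DIRC"), make_sha(b"")), while bytes read back from git's stderr are decoded with defenc before any string handling. A rough illustration of that convention, assuming a UTF-8 default encoding like git.compat.defenc; write_path and split_stderr are illustrative names only, not GitPython API:

import io
from hashlib import sha1

defenc = 'utf-8'  # stand-in for git.compat.defenc

def write_path(stream, filepath):
    # binary streams (e.g. a subprocess stdin opened in bytes mode) need encoded input
    stream.write(("%s\n" % filepath).encode(defenc))

def split_stderr(stderr_bytes):
    # pipe output arrives as bytes; decode before doing any text processing
    return stderr_bytes.decode(defenc).splitlines()

stdin = io.BytesIO()
write_path(stdin, "path/to/file.txt")
assert stdin.getvalue() == b"path/to/file.txt\n"

lines = split_stderr(b"some stderr message\n")

# hashing likewise starts from an empty bytes object on both Python versions
digest = sha1(b"").hexdigest()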