diff options
author | Sebastian Thiel <byronimo@gmail.com> | 2010-06-21 20:16:22 +0200 |
---|---|---|
committer | Sebastian Thiel <byronimo@gmail.com> | 2010-06-21 20:16:22 +0200 |
commit | 1044116d25f0311033e0951d2ab30579bba4b051 (patch) | |
tree | 98832affe9aaf5df77ee11c6a8e5ed2293828e15 /lib/git/index/fun.py | |
parent | 91725f0fc59aa05ef68ab96e9b29009ce84668a5 (diff) | |
download | gitpython-1044116d25f0311033e0951d2ab30579bba4b051.tar.gz |
index: put serialization methods into new 'fun' module, this makes the calls faster as it removes one level of indirection, and makes the main file smaller, improving maintainability
Diffstat (limited to 'lib/git/index/fun.py')
-rw-r--r-- | lib/git/index/fun.py | 138 |
1 file changed, 138 insertions, 0 deletions
"""
Contains standalone functions to accompany the index implementation and make it
more versatile
"""
from git.utils import (
    IndexFileSHA1Writer,
    )

from typ import (
    IndexEntry,
    CE_NAMEMASK
    )

from util import (
    pack,
    unpack
    )

from binascii import (
    hexlify,
    unhexlify
    )

__all__ = ('write_cache', 'read_cache')


def write_cache_entry(entry, stream):
    """Write the given entry to the stream

    :param entry: IndexEntry-like tuple with the layout
        (mode, hexsha, flags, path, ctime, mtime, dev, ino, uid, gid, size)
        where ctime and mtime are raw 8-byte strings and hexsha is a 40 byte
        hex sha
    :param stream: binary stream to write the serialized entry to
    :raise AssertionError: if the path is too long to fit into an index entry"""
    beginoffset = stream.tell()
    write = stream.write        # bind once - called repeatedly below
    write(entry[4])             # ctime, raw 8 bytes
    write(entry[5])             # mtime, raw 8 bytes
    path = entry[3]
    plen = len(path) & CE_NAMEMASK      # path length, limited to the name bits of flags
    # NOTE: a plain assert would be stripped under 'python -O' and we would
    # silently serialize a corrupt entry - hence we raise explicitly with the
    # same exception type and message.
    if plen != len(path):
        raise AssertionError("Path %s too long to fit into index" % entry[3])
    flags = plen | entry[2]             # merge path length into the flags short
    write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
               entry[8], entry[9], entry[10], unhexlify(entry[1]), flags))
    write(path)
    # entries are null-padded up to the next multiple of 8 bytes, which also
    # guarantees at least one terminating null byte after the path
    real_size = ((stream.tell() - beginoffset + 8) & ~7)
    write("\0" * ((beginoffset + real_size) - stream.tell()))

def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer):
    """Write the cache represented by entries to a stream

    :param entries: **sorted** list of entries
    :param stream: stream to wrap into the AdapterStreamCls - it is used for
        final output.
    :param ShaStreamCls: Type to use when writing to the stream. It produces a sha
        while writing to it, before the data is passed on to the wrapped stream
    :param extension_data: any kind of data to write as a trailer, it must begin
        a 4 byte identifier, followed by its size ( 4 bytes )"""
    # wrap the stream into a writer that computes the sha over everything written
    stream = ShaStreamCls(stream)

    # header: signature, version, number of entries
    version = 2
    stream.write("DIRC")
    stream.write(pack(">LL", version, len(entries)))

    # body
    for entry in entries:
        write_cache_entry(entry, stream)
    # END for each entry

    # write previously cached extensions data
    if extension_data is not None:
        stream.write(extension_data)

    # write the sha over the content
    stream.write_sha()

def read_entry(stream):
    """:return: one IndexEntry parsed from the given stream"""
    beginoffset = stream.tell()
    ctime = unpack(">8s", stream.read(8))[0]
    mtime = unpack(">8s", stream.read(8))[0]
    (dev, ino, mode, uid, gid, size, sha, flags) = \
        unpack(">LLLLLL20sH", stream.read(20 + 4 * 6 + 2))
    path_size = flags & CE_NAMEMASK     # name bits of flags hold the path length
    path = stream.read(path_size)

    # consume the null padding up to the next 8 byte boundary, mirroring
    # write_cache_entry; the padding bytes themselves are discarded
    real_size = ((stream.tell() - beginoffset + 8) & ~7)
    data = stream.read((beginoffset + real_size) - stream.tell())
    return IndexEntry((mode, hexlify(sha), flags, path, ctime, mtime, dev, ino, uid, gid, size))

def read_header(stream):
    """:return: tuple(version_long, num_entries) from the given stream
    :raise AssertionError: if the stream does not point at a valid index file
        or the version is unsupported"""
    type_id = stream.read(4)
    if type_id != "DIRC":
        raise AssertionError("Invalid index file header: %r" % type_id)
    version, num_entries = unpack(">LL", stream.read(4 * 2))

    # TODO: handle version 3: extended data, see read-cache.c
    # Raise explicitly instead of asserting - assert is a no-op under
    # 'python -O' and we would silently misparse an unsupported index.
    if version not in (1, 2):
        raise AssertionError("Unsupported index version: %i" % version)
    return version, num_entries

def entry_key(*entry):
    """:return: Key suitable to be used for the index.entries dictionary
    :param *entry: One instance of type BaseIndexEntry or the path and the stage"""
    if len(entry) == 1:
        return (entry[0].path, entry[0].stage)
    else:
        return tuple(entry)
    # END handle entry

def read_cache(stream):
    """Read a cache file from the given stream

    :return: tuple(version, entries_dict, extension_data, content_sha)
        * version is the integer version number
        * entries dict is a dictionary which maps IndexEntry instances to a path
          at a stage
        * extension_data is '' or 4 bytes of type + 4 bytes of size + size bytes
        * content_sha is a 20 byte sha on all cache file contents
    :raise AssertionError: on an invalid header or a truncated footer"""
    version, num_entries = read_header(stream)
    count = 0
    entries = dict()
    while count < num_entries:
        entry = read_entry(stream)
        # entry_key would be the method to use, but we save the effort of the call
        entries[(entry.path, entry.stage)] = entry
        count += 1
    # END for each entry

    # the footer contains extension data and a sha on the content so far
    # Keep the extension footer, and verify we have a sha in the end
    # Extension data format is:
    #   4 bytes ID
    #   4 bytes length of chunk
    #   repeated 0 - N times
    extension_data = stream.read(~0)    # ~0 == -1: read everything that is left
    # explicit raise instead of assert - must not vanish under 'python -O'
    if len(extension_data) <= 19:
        raise AssertionError("Index Footer was not at least a sha on content as it was only %i bytes in size" % len(extension_data))

    content_sha = extension_data[-20:]

    # truncate the sha in the end as we will dynamically create it anyway
    extension_data = extension_data[:-20]

    return (version, entries, extension_data, content_sha)