Diffstat (limited to 'git/objects')
-rw-r--r--  git/objects/__init__.py        |   2
-rw-r--r--  git/objects/base.py            |  47
-rw-r--r--  git/objects/blob.py            |   2
-rw-r--r--  git/objects/commit.py          |  98
-rw-r--r--  git/objects/fun.py             |  44
-rw-r--r--  git/objects/submodule/base.py  | 213
-rw-r--r--  git/objects/submodule/root.py  |  74
-rw-r--r--  git/objects/submodule/util.py  |  18
-rw-r--r--  git/objects/tag.py             |  17
-rw-r--r--  git/objects/tree.py            |  66
-rw-r--r--  git/objects/util.py            |  86
11 files changed, 331 insertions(+), 336 deletions(-)
diff --git a/git/objects/__init__.py b/git/objects/__init__.py
index 77f69d29..90fe81a8 100644
--- a/git/objects/__init__.py
+++ b/git/objects/__init__.py
@@ -18,4 +18,4 @@ from commit import *
from tree import *
__all__ = [ name for name, obj in locals().items()
- if not (name.startswith('_') or inspect.ismodule(obj)) ]
\ No newline at end of file
+ if not (name.startswith('_') or inspect.ismodule(obj)) ]
diff --git a/git/objects/base.py b/git/objects/base.py
index 03b22863..9e73e2f3 100644
--- a/git/objects/base.py
+++ b/git/objects/base.py
@@ -12,7 +12,7 @@ from gitdb.util import (
)
import gitdb.typ as dbtyp
-
+
_assertion_msg_format = "Created object %r whose python type %r disagrees with the acutal git object type %r"
__all__ = ("Object", "IndexObject")
@@ -21,17 +21,17 @@ class Object(LazyMixin):
"""Implements an Object which may be Blobs, Trees, Commits and Tags"""
NULL_HEX_SHA = '0'*40
NULL_BIN_SHA = '\0'*20
-
+
TYPES = (dbtyp.str_blob_type, dbtyp.str_tree_type, dbtyp.str_commit_type, dbtyp.str_tag_type)
__slots__ = ("repo", "binsha", "size" )
type = None # to be set by subclass
-
+
def __init__(self, repo, binsha):
"""Initialize an object by identifying it by its binary sha.
All keyword arguments will be set on demand if None.
-
+
:param repo: repository this object is located in
-
+
:param binsha: 20 byte SHA1"""
super(Object,self).__init__()
self.repo = repo
@@ -44,13 +44,13 @@ class Object(LazyMixin):
:return: New Object instance of a type appropriate to the object type behind
id. The id of the newly created object will be a binsha even though
the input id may have been a Reference or Rev-Spec
-
+
:param id: reference, rev-spec, or hexsha
-
+
:note: This cannot be a __new__ method as it would always call __init__
with the input id which is not necessarily a binsha."""
return repo.rev_parse(str(id))
-
+
@classmethod
def new_from_sha(cls, repo, sha1):
"""
@@ -65,36 +65,36 @@ class Object(LazyMixin):
inst = get_object_type_by_name(oinfo.type)(repo, oinfo.binsha)
inst.size = oinfo.size
return inst
-
+
def _set_cache_(self, attr):
"""Retrieve object information"""
- if attr == "size":
+ if attr == "size":
oinfo = self.repo.odb.info(self.binsha)
self.size = oinfo.size
# assert oinfo.type == self.type, _assertion_msg_format % (self.binsha, oinfo.type, self.type)
else:
super(Object,self)._set_cache_(attr)
-
+
def __eq__(self, other):
""":return: True if the objects have the same SHA1"""
if not hasattr(other, 'binsha'):
return False
return self.binsha == other.binsha
-
+
def __ne__(self, other):
""":return: True if the objects do not have the same SHA1 """
if not hasattr(other, 'binsha'):
return True
return self.binsha != other.binsha
-
+
def __hash__(self):
""":return: Hash of our id allowing objects to be used in dicts and sets"""
return hash(self.binsha)
-
+
def __str__(self):
""":return: string of our SHA1 as understood by all git commands"""
return bin_to_hex(self.binsha)
-
+
def __repr__(self):
""":return: string with pythonic representation of our object"""
return '<git.%s "%s">' % (self.__class__.__name__, self.hexsha)
@@ -117,16 +117,16 @@ class Object(LazyMixin):
istream = self.repo.odb.stream(self.binsha)
stream_copy(istream, ostream)
return self
-
+
class IndexObject(Object):
"""Base for all objects that can be part of the index file , namely Tree, Blob and
SubModule objects"""
__slots__ = ("path", "mode")
-
+
# for compatability with iterable lists
_id_attribute_ = 'path'
-
+
def __init__(self, repo, binsha, mode=None, path=None):
"""Initialize a newly instanced IndexObject
:param repo: is the Repo we are located in
@@ -144,13 +144,13 @@ class IndexObject(Object):
self.mode = mode
if path is not None:
self.path = path
-
+
def __hash__(self):
""":return:
Hash of our path as index items are uniquely identifyable by path, not
by their data !"""
return hash(self.path)
-
+
def _set_cache_(self, attr):
if attr in IndexObject.__slots__:
# they cannot be retrieved lateron ( not without searching for them )
@@ -158,19 +158,18 @@ class IndexObject(Object):
else:
super(IndexObject, self)._set_cache_(attr)
# END hanlde slot attribute
-
+
@property
def name(self):
""":return: Name portion of the path, effectively being the basename"""
return basename(self.path)
-
+
@property
def abspath(self):
"""
:return:
Absolute path to this index object in the file system ( as opposed to the
.path field which is a path relative to the git repository ).
-
+
The returned path will be native to the system and contains '\' on windows. """
return join_path_native(self.repo.working_tree_dir, self.path)
-
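The Object/IndexObject base classes above identify every object by its 20-byte binary SHA and compare/hash on it (IndexObject hashes on its path instead). A minimal usage sketch, assuming GitPython is importable and '/path/to/repo' is a placeholder for any existing repository:

    import git

    repo = git.Repo('/path/to/repo')        # placeholder path
    obj = repo.head.commit                  # a Commit, hence an Object
    print(obj.hexsha)                       # str(obj) renders binsha as hex
    print(obj == repo.commit('HEAD'))       # equality compares binsha -> True
    blob = obj.tree.blobs[0]                # an IndexObject subclass
    print(blob.name)                        # basename of .path
    print(blob.abspath)                     # joined onto repo.working_tree_dir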
diff --git a/git/objects/blob.py b/git/objects/blob.py
index e96555c6..fd748537 100644
--- a/git/objects/blob.py
+++ b/git/objects/blob.py
@@ -13,7 +13,7 @@ class Blob(base.IndexObject):
"""A Blob encapsulates a git blob object"""
DEFAULT_MIME_TYPE = "text/plain"
type = "blob"
-
+
# valid blob modes
executable_mode = 0100755
file_mode = 0100644
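Blob only adds a MIME type and the valid file/link modes on top of IndexObject; content comes from the object database. A short sketch under the same placeholder-path assumption, with 'README' standing in for any tracked file:

    import git

    repo = git.Repo('/path/to/repo')            # placeholder path
    blob = repo.head.commit.tree['README']      # KeyError if no such entry
    print(blob.mime_type)                       # guessed from the name, default text/plain
    print(oct(blob.mode))                       # e.g. 0100644 for a regular file
    data = blob.data_stream.read()              # raw blob bytes from the odb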
diff --git a/git/objects/commit.py b/git/objects/commit.py
index bc437e8b..db1493d5 100644
--- a/git/objects/commit.py
+++ b/git/objects/commit.py
@@ -4,7 +4,7 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.util import (
+from git.util import (
Actor,
Iterable,
Stats,
@@ -37,22 +37,22 @@ __all__ = ('Commit', )
class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
"""Wraps a git Commit object.
-
+
This class will act lazily on some of its attributes and will query the
value on demand only if it involves calling the git binary."""
-
+
# ENVIRONMENT VARIABLES
# read when creating new commits
env_author_date = "GIT_AUTHOR_DATE"
env_committer_date = "GIT_COMMITTER_DATE"
-
+
# CONFIGURATION KEYS
conf_encoding = 'i18n.commitencoding'
-
+
# INVARIANTS
default_encoding = "UTF-8"
-
-
+
+
# object configuration
type = "commit"
__slots__ = ("tree",
@@ -60,7 +60,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
"committer", "committed_date", "committer_tz_offset",
"message", "parents", "encoding", "gpgsig")
_id_attribute_ = "binsha"
-
+
def __init__(self, repo, binsha, tree=None, author=None, authored_date=None, author_tz_offset=None,
committer=None, committed_date=None, committer_tz_offset=None,
message=None, parents=None, encoding=None, gpgsig=None):
@@ -93,7 +93,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
List or tuple of Commit objects which are our parent(s) in the commit
dependency graph
:return: git.Commit
-
+
:note: Timezone information is in the same format and in the same sign
as what time.altzone returns. The sign is inverted compared to git's
UTC timezone."""
@@ -139,7 +139,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
def summary(self):
""":return: First line of the commit message"""
return self.message.split('\n', 1)[0]
-
+
def count(self, paths='', **kwargs):
"""Count the number of commits reachable from this commit
@@ -157,7 +157,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
return len(self.repo.git.rev_list(self.hexsha, '--', paths, **kwargs).splitlines())
else:
return len(self.repo.git.rev_list(self.hexsha, **kwargs).splitlines())
-
+
@property
def name_rev(self):
@@ -192,10 +192,10 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
proc = repo.git.rev_list(rev, args, as_process=True, **kwargs)
return cls._iter_from_process_or_stream(repo, proc)
-
+
def iter_parents(self, paths='', **kwargs):
"""Iterate _all_ parents of this commit.
-
+
:param paths:
Optional path or list of paths limiting the Commits to those that
contain at least one of the paths
@@ -206,14 +206,14 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
if skip == 0: # skip ourselves
skip = 1
kwargs['skip'] = skip
-
+
return self.iter_items(self.repo, self, paths, **kwargs)
@property
def stats(self):
"""Create a git stat from changes between this commit and its first parent
or from all changes done if this is the very first commit.
-
+
:return: git.Stats"""
if not self.parents:
text = self.repo.git.diff_tree(self.hexsha, '--', numstat=True, root=True)
@@ -237,7 +237,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
stream = proc_or_stream
if not hasattr(stream,'readline'):
stream = proc_or_stream.stdout
-
+
readline = stream.readline
while True:
line = readline()
@@ -248,7 +248,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
# split additional information, as returned by bisect for instance
hexsha, rest = line.split(None, 1)
# END handle extra info
-
+
assert len(hexsha) == 40, "Invalid line: %s" % hexsha
yield Commit(repo, hex_to_bin(hexsha))
# END for each line in stream
@@ -256,12 +256,12 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
# due to many developers trying to fix the open file handles issue
if hasattr(proc_or_stream, 'wait'):
finalize_process(proc_or_stream)
-
-
+
+
@classmethod
def create_from_tree(cls, repo, tree, message, parent_commits=None, head=False):
"""Commit the given tree, creating a commit object.
-
+
:param repo: Repo object the commit should be part of
:param tree: Tree object or hex or bin sha
the tree of the new commit
@@ -277,9 +277,9 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
If True, the HEAD will be advanced to the new commit automatically.
Else the HEAD will remain pointing on the previous commit. This could
lead to undesired results when diffing files.
-
+
:return: Commit object representing the new commit
-
+
:note:
Additional information about the committer and Author are taken from the
environment or from the git configuration, see git-commit-tree for
@@ -293,63 +293,63 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
parent_commits = list()
# END handle parent commits
# END if parent commits are unset
-
+
# retrieve all additional information, create a commit object, and
# serialize it
# Generally:
# * Environment variables override configuration values
# * Sensible defaults are set according to the git documentation
-
+
# COMMITER AND AUTHOR INFO
cr = repo.config_reader()
env = os.environ
-
+
committer = Actor.committer(cr)
author = Actor.author(cr)
-
+
# PARSE THE DATES
unix_time = int(time())
offset = altzone
-
+
author_date_str = env.get(cls.env_author_date, '')
if author_date_str:
author_time, author_offset = parse_date(author_date_str)
else:
author_time, author_offset = unix_time, offset
# END set author time
-
+
committer_date_str = env.get(cls.env_committer_date, '')
if committer_date_str:
committer_time, committer_offset = parse_date(committer_date_str)
else:
committer_time, committer_offset = unix_time, offset
# END set committer time
-
+
# assume utf8 encoding
enc_section, enc_option = cls.conf_encoding.split('.')
conf_encoding = cr.get_value(enc_section, enc_option, cls.default_encoding)
-
-
+
+
# if the tree is no object, make sure we create one - otherwise
# the created commit object is invalid
if isinstance(tree, str):
tree = repo.tree(tree)
# END tree conversion
-
+
# CREATE NEW COMMIT
new_commit = cls(repo, cls.NULL_BIN_SHA, tree,
author, author_time, author_offset,
committer, committer_time, committer_offset,
message, parent_commits, conf_encoding)
-
+
stream = StringIO()
new_commit._serialize(stream)
streamlen = stream.tell()
stream.seek(0)
-
+
istream = repo.odb.store(IStream(cls.type, streamlen, stream))
new_commit.binsha = istream.binsha
-
+
if head:
# need late import here, importing git at the very beginning throws
# as well ...
@@ -364,29 +364,29 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
repo.head.set_reference(master, logmsg='commit: Switching to %s' % master)
# END handle empty repositories
# END advance head handling
-
+
return new_commit
-
+
#{ Serializable Implementation
-
+
def _serialize(self, stream):
write = stream.write
write("tree %s\n" % self.tree)
for p in self.parents:
write("parent %s\n" % p)
-
+
a = self.author
aname = a.name
if isinstance(aname, unicode):
aname = aname.encode(self.encoding)
# END handle unicode in name
-
+
c = self.committer
fmt = "%s %s <%s> %s %s\n"
write(fmt % ("author", aname, a.email,
self.authored_date,
altz_to_utctz_str(self.author_tz_offset)))
-
+
# encode committer
aname = c.name
if isinstance(aname, unicode):
@@ -395,7 +395,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
write(fmt % ("committer", aname, c.email,
self.committed_date,
altz_to_utctz_str(self.committer_tz_offset)))
-
+
if self.encoding != self.default_encoding:
write("encoding %s\n" % self.encoding)
@@ -405,7 +405,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
write(" "+sigline+"\n")
write("\n")
-
+
# write plain bytes, be sure its encoded according to our encoding
if isinstance(self.message, unicode):
write(self.message.encode(self.encoding))
@@ -413,7 +413,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
write(self.message)
# END handle encoding
return self
-
+
def _deserialize(self, stream):
""":param from_rev_list: if true, the stream format is coming from the rev-list command
Otherwise it is assumed to be a plain data stream from our object"""
@@ -431,7 +431,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
self.parents.append(type(self)(self.repo, hex_to_bin(parent_line.split()[-1])))
# END for each parent line
self.parents = tuple(self.parents)
-
+
self.author, self.authored_date, self.author_tz_offset = parse_actor_and_date(next_line)
self.committer, self.committed_date, self.committer_tz_offset = parse_actor_and_date(readline())
@@ -441,7 +441,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
next_line = readline()
while next_line.startswith(' '):
next_line = readline()
-
+
# now we can have the encoding line, or an empty line followed by the optional
# message.
self.encoding = self.default_encoding
@@ -474,14 +474,14 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
except UnicodeDecodeError:
print >> sys.stderr, "Failed to decode author name '%s' using encoding %s" % (self.author.name, self.encoding)
# END handle author's encoding
-
+
# decode committer name
try:
self.committer.name = self.committer.name.decode(self.encoding)
except UnicodeDecodeError:
print >> sys.stderr, "Failed to decode committer name '%s' using encoding %s" % (self.committer.name, self.encoding)
# END handle author's encoding
-
+
# a stream from our data simply gives us the plain message
# The end of our message stream is marked with a newline that we strip
self.message = stream.read()
@@ -491,5 +491,5 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
print >> sys.stderr, "Failed to decode message '%s' using encoding %s" % (self.message, self.encoding)
# END exception handling
return self
-
+
#} END serializable implementation
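Commit combines lazy header parsing (_deserialize) with helpers for history traversal and commit creation. A hedged sketch of the public entry points touched by this diff, again with a placeholder repository path and message:

    import git

    repo = git.Repo('/path/to/repo')                          # placeholder path
    head = repo.head.commit
    print(head.summary)                                       # first line of the message
    print(head.stats.total)                                   # insertions/deletions/lines/files
    for c in git.Commit.iter_items(repo, 'HEAD', max_count=5):
        print('%s %s' % (c.hexsha[:8], c.summary))            # walks git rev-list output
    # writes a new commit object to the odb without moving HEAD (head=False)
    new = git.Commit.create_from_tree(repo, head.tree, 'placeholder message',
                                      parent_commits=[head], head=False)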
diff --git a/git/objects/fun.py b/git/objects/fun.py
index 66b7998e..a046d6d5 100644
--- a/git/objects/fun.py
+++ b/git/objects/fun.py
@@ -5,7 +5,7 @@ __all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive
'traverse_tree_recursive')
-
+
def tree_to_stream(entries, write):
"""Write the give list of entries into a stream using its write method
@@ -13,13 +13,13 @@ def tree_to_stream(entries, write):
:param write: write method which takes a data string"""
ord_zero = ord('0')
bit_mask = 7 # 3 bits set
-
+
for binsha, mode, name in entries:
mode_str = ''
for i in xrange(6):
mode_str = chr(((mode >> (i*3)) & bit_mask) + ord_zero) + mode_str
# END for each 8 octal value
-
+
# git slices away the first octal if its zero
if mode_str[0] == '0':
mode_str = mode_str[1:]
@@ -46,7 +46,7 @@ def tree_entries_from_data(data):
out = list()
while i < len_data:
mode = 0
-
+
# read mode
# Some git versions truncate the leading 0, some don't
# The type will be extracted from the mode later
@@ -56,17 +56,17 @@ def tree_entries_from_data(data):
mode = (mode << 3) + (ord(data[i]) - ord_zero)
i += 1
# END while reading mode
-
+
# byte is space now, skip it
i += 1
-
+
# parse name, it is NULL separated
-
+
ns = i
while data[i] != '\0':
i += 1
# END while not reached NULL
-
+
# default encoding for strings in git is utf8
# Only use the respective unicode object if the byte stream was encoded
name = data[ns:i]
@@ -78,7 +78,7 @@ def tree_entries_from_data(data):
if len(name) > len(name_enc):
name = name_enc
# END handle encoding
-
+
# byte is NULL, get next 20
i += 1
sha = data[i:i+20]
@@ -86,8 +86,8 @@ def tree_entries_from_data(data):
out.append((sha, mode, name))
# END for each byte in data stream
return out
-
-
+
+
def _find_by_name(tree_data, name, is_dir, start_at):
"""return data entry matching the given name and tree mode
@@ -96,7 +96,7 @@ def _find_by_name(tree_data, name, is_dir, start_at):
None in the tree_data list to mark it done"""
try:
item = tree_data[start_at]
- if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
+ if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
tree_data[start_at] = None
return item
except IndexError:
@@ -115,7 +115,7 @@ def _to_full_path(item, path_prefix):
if not item:
return item
return (item[0], item[1], path_prefix+item[2])
-
+
def traverse_trees_recursive(odb, tree_shas, path_prefix):
"""
:return: list with entries according to the given binary tree-shas.
@@ -141,10 +141,10 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix):
# END handle muted trees
trees_data.append(data)
# END for each sha to get data for
-
+
out = list()
out_append = out.append
-
+
# find all matching entries and recursively process them together if the match
# is a tree. If the match is a non-tree item, put it into the result.
# Processed items will be set None
@@ -157,7 +157,7 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix):
entries[ti] = item
sha, mode, name = item # its faster to unpack
is_dir = S_ISDIR(mode) # type mode bits
-
+
# find this item in all other tree data items
# wrap around, but stop one before our current index, hence
# ti+nt, not ti+1+nt
@@ -165,23 +165,23 @@ def traverse_trees_recursive(odb, tree_shas, path_prefix):
tio = tio % nt
entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii)
# END for each other item data
-
+
# if we are a directory, enter recursion
if is_dir:
out.extend(traverse_trees_recursive(odb, [((ei and ei[0]) or None) for ei in entries], path_prefix+name+'/'))
else:
out_append(tuple(_to_full_path(e, path_prefix) for e in entries))
# END handle recursion
-
+
# finally mark it done
tree_data[ii] = None
# END for each item
-
+
# we are done with one tree, set all its data empty
del(tree_data[:])
# END for each tree_data chunk
return out
-
+
def traverse_tree_recursive(odb, tree_sha, path_prefix):
"""
:return: list of entries of the tree pointed to by the binary tree_sha. An entry
@@ -192,7 +192,7 @@ def traverse_tree_recursive(odb, tree_sha, path_prefix):
:param path_prefix: prefix to prepend to the front of all returned paths"""
entries = list()
data = tree_entries_from_data(odb.stream(tree_sha).read())
-
+
# unpacking/packing is faster than accessing individual items
for sha, mode, name in data:
if S_ISDIR(mode):
@@ -200,5 +200,5 @@ def traverse_tree_recursive(odb, tree_sha, path_prefix):
else:
entries.append((sha, mode, path_prefix+name))
# END for each item
-
+
return entries
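The functions above operate on raw tree data: tree_entries_from_data() parses a tree object into (binsha, mode, name) tuples and traverse_tree_recursive() flattens a whole tree into repo-relative paths. A sketch driving them through the repository's object database, path assumed as before:

    import git
    from git.objects.fun import tree_entries_from_data, traverse_tree_recursive

    repo = git.Repo('/path/to/repo')                          # placeholder path
    tree = repo.head.commit.tree
    raw = repo.odb.stream(tree.binsha).read()                 # raw tree object data
    for binsha, mode, name in tree_entries_from_data(raw):    # direct children only
        print('%06o %s' % (mode, name))
    for binsha, mode, path in traverse_tree_recursive(repo.odb, tree.binsha, ''):
        print(path)                                           # every blob/gitlink below the tree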
diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py
index 99d54076..730642ed 100644
--- a/git/objects/submodule/base.py
+++ b/git/objects/submodule/base.py
@@ -38,10 +38,10 @@ class UpdateProgress(RemoteProgress):
derive from it and implement the ``update(...)`` message"""
CLONE, FETCH, UPDWKTREE = [1 << x for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes+3)]
_num_op_codes = RemoteProgress._num_op_codes + 3
-
+
__slots__ = tuple()
-
-
+
+
BEGIN = UpdateProgress.BEGIN
END = UpdateProgress.END
CLONE = UpdateProgress.CLONE
@@ -58,25 +58,25 @@ class Submodule(util.IndexObject, Iterable, Traversable):
at the path of this instance.
The submodule type does not have a string type associated with it, as it exists
solely as a marker in the tree and index.
-
+
All methods work in bare and non-bare repositories."""
-
+
_id_attribute_ = "name"
k_modules_file = '.gitmodules'
k_head_option = 'branch'
k_head_default = 'master'
k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status
-
+
# this is a bogus type for base class compatability
type = 'submodule'
-
+
__slots__ = ('_parent_commit', '_url', '_branch_path', '_name', '__weakref__')
_cache_attrs = ('path', '_url', '_branch_path')
-
+
def __init__(self, repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, branch_path=None):
"""Initialize this instance with its attributes. We only document the ones
that differ from ``IndexObject``
-
+
:param repo: Our parent repository
:param binsha: binary sha referring to a commit in the remote repository, see url parameter
:param parent_commit: see set_parent_commit()
@@ -93,7 +93,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
self._branch_path = branch_path
if name is not None:
self._name = name
-
+
def _set_cache_(self, attr):
if attr == '_parent_commit':
# set a default value, which is the root tree of the current head
@@ -110,7 +110,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
else:
super(Submodule, self)._set_cache_(attr)
# END handle attribute name
-
+
def _get_intermediate_items(self, item):
""":return: all the submodules of our module repository"""
try:
@@ -118,28 +118,28 @@ class Submodule(util.IndexObject, Iterable, Traversable):
except InvalidGitRepositoryError:
return list()
# END handle intermeditate items
-
+
def __eq__(self, other):
"""Compare with another submodule"""
# we may only compare by name as this should be the ID they are hashed with
# Otherwise this type wouldn't be hashable
# return self.path == other.path and self.url == other.url and super(Submodule, self).__eq__(other)
return self._name == other._name
-
+
def __ne__(self, other):
"""Compare with another submodule for inequality"""
return not (self == other)
-
+
def __hash__(self):
"""Hash this instance using its logical id, not the sha"""
return hash(self._name)
-
+
def __str__(self):
return self._name
-
+
def __repr__(self):
return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)" % (type(self).__name__, self._name, self.path, self.url, self.branch_path)
-
+
@classmethod
def _config_parser(cls, repo, parent_commit, read_only):
""":return: Config Parser constrained to our submodule in read or write mode
@@ -161,11 +161,11 @@ class Submodule(util.IndexObject, Iterable, Traversable):
raise IOError("Could not find %s file in the tree of parent commit %s" % (cls.k_modules_file, parent_commit))
# END handle exceptions
# END handle non-bare working tree
-
+
if not read_only and (repo.bare or not parent_matches_head):
raise ValueError("Cannot write blobs of 'historical' submodule configurations")
# END handle writes of historical submodules
-
+
return SubmoduleConfigParser(fp_module, read_only = read_only)
def _clear_cache(self):
@@ -177,29 +177,29 @@ class Submodule(util.IndexObject, Iterable, Traversable):
pass
# END try attr deletion
# END for each name to delete
-
+
@classmethod
def _sio_modules(cls, parent_commit):
""":return: Configuration file as StringIO - we only access it through the respective blob's data"""
sio = StringIO(parent_commit.tree[cls.k_modules_file].data_stream.read())
sio.name = cls.k_modules_file
return sio
-
+
def _config_parser_constrained(self, read_only):
""":return: Config Parser constrained to our submodule in read or write mode"""
parser = self._config_parser(self.repo, self._parent_commit, read_only)
parser.set_submodule(self)
return SectionConstraint(parser, sm_section(self.name))
-
+
#{ Edit Interface
-
+
@classmethod
def add(cls, repo, name, path, url=None, branch=None, no_checkout=False):
"""Add a new submodule to the given repository. This will alter the index
as well as the .gitmodules file, but will not create a new commit.
If the submodule already exists, no matter if the configuration differs
from the one provided, the existing submodule will be returned.
-
+
:param repo: Repository instance which should receive the submodule
:param name: The name/identifier for the submodule
:param path: repository-relative or absolute path at which the submodule
@@ -225,18 +225,18 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if repo.bare:
raise InvalidGitRepositoryError("Cannot add submodules to bare repositories")
# END handle bare repos
-
+
path = to_native_path_linux(path)
if path.endswith('/'):
path = path[:-1]
# END handle trailing slash
-
+
# assure we never put backslashes into the url, as some operating systems
# like it ...
if url != None:
url = to_native_path_linux(url)
#END assure url correctness
-
+
# INSTANTIATE INTERMEDIATE SM
sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name)
if sm.exists():
@@ -251,7 +251,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
return sm
# END handle exceptions
# END handle existing
-
+
# fake-repo - we only need the functionality on the branch instance
br = git.Head(repo, git.Head.to_full_path(str(branch) or cls.k_head_default))
has_module = sm.module_exists()
@@ -261,7 +261,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
raise ValueError("Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath))
# END check url
# END verify urls match
-
+
mrepo = None
if url is None:
if not has_module:
@@ -281,13 +281,13 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END setup checkout-branch
mrepo = git.Repo.clone_from(url, path, **kwargs)
# END verify url
-
+
# update configuration and index
index = sm.repo.index
writer = sm.config_writer(index=index, write=False)
writer.set_value('url', url)
writer.set_value('path', path)
-
+
sm._url = url
if not branch_is_default:
# store full path
@@ -295,20 +295,20 @@ class Submodule(util.IndexObject, Iterable, Traversable):
sm._branch_path = br.path
# END handle path
del(writer)
-
+
# we deliberatly assume that our head matches our index !
pcommit = repo.head.commit
sm._parent_commit = pcommit
sm.binsha = mrepo.head.commit.binsha
index.add([sm], write=True)
-
+
return sm
-
+
def update(self, recursive=False, init=True, to_latest_revision=False, progress=None,
dry_run=False):
"""Update the repository of this submodule to point to the checkout
we point at with the binsha of this instance.
-
+
:param recursive: if True, we will operate recursively and update child-
modules as well.
:param init: if True, the module repository will be cloned into place if necessary
@@ -326,7 +326,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if self.repo.bare:
return self
#END pass in bare mode
-
+
if progress is None:
progress = UpdateProgress()
#END handle progress
@@ -334,12 +334,12 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if dry_run:
prefix = "DRY-RUN: "
#END handle prefix
-
+
# to keep things plausible in dry-run mode
if dry_run:
mrepo = None
#END init mrepo
-
+
# ASSURE REPO IS PRESENT AND UPTODATE
#####################################
try:
@@ -351,7 +351,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if i == 0:
op |= BEGIN
#END handle start
-
+
progress.update(op, i, len_rmts, prefix+"Fetching remote %s of submodule %r" % (remote, self.name))
#===============================
if not dry_run:
@@ -368,7 +368,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
return self
# END early abort if init is not allowed
import git
-
+
# there is no git-repository yet - but delete empty paths
module_path = join_path_native(self.repo.working_tree_dir, self.path)
if not dry_run and os.path.isdir(module_path):
@@ -378,7 +378,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
raise OSError("Module directory at %r does already exist and is non-empty" % module_path)
# END handle OSError
# END handle directory removal
-
+
# don't check it out at first - nonetheless it will create a local
# branch according to the remote-HEAD if possible
progress.update(BEGIN|CLONE, 0, 1, prefix+"Cloning %s to %s in submodule %r" % (self.url, module_path, self.name))
@@ -386,27 +386,27 @@ class Submodule(util.IndexObject, Iterable, Traversable):
mrepo = git.Repo.clone_from(self.url, module_path, n=True)
#END handle dry-run
progress.update(END|CLONE, 0, 1, prefix+"Done cloning to %s" % module_path)
-
-
+
+
if not dry_run:
# see whether we have a valid branch to checkout
try:
# find a remote which has our branch - we try to be flexible
remote_branch = find_first_remote_branch(mrepo.remotes, self.branch_name)
local_branch = mkhead(mrepo, self.branch_path)
-
+
# have a valid branch, but no checkout - make sure we can figure
# that out by marking the commit with a null_sha
local_branch.set_object(util.Object(mrepo, self.NULL_BIN_SHA))
# END initial checkout + branch creation
-
+
# make sure HEAD is not detached
mrepo.head.set_reference(local_branch, logmsg="submodule: attaching head to %s" % local_branch)
mrepo.head.ref.set_tracking_branch(remote_branch)
except IndexError:
print >> sys.stderr, "Warning: Failed to checkout tracking branch %s" % self.branch_path
#END handle tracking branch
-
+
# NOTE: Have to write the repo config file as well, otherwise
# the default implementation will be offended and not update the repository
# Maybe this is a good way to assure it doesn't get into our way, but
@@ -414,8 +414,8 @@ class Submodule(util.IndexObject, Iterable, Traversable):
self.repo.config_writer().set_value(sm_section(self.name), 'url', self.url)
#END handle dry_run
#END handle initalization
-
-
+
+
# DETERMINE SHAS TO CHECKOUT
############################
binsha = self.binsha
@@ -424,7 +424,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# mrepo is only set if we are not in dry-run mode or if the module existed
is_detached = mrepo.head.is_detached
#END handle dry_run
-
+
if mrepo is not None and to_latest_revision:
msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir
if not is_detached:
@@ -440,7 +440,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
print >> sys.stderr, "%s there was no local tracking branch" % msg_base
# END handle detached head
# END handle to_latest_revision option
-
+
# update the working tree
# handles dry_run
if mrepo is not None and mrepo.head.commit.binsha != binsha:
@@ -461,7 +461,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
#END handle dry_run
progress.update(END|UPDWKTREE, 0, 1, prefix+"Done updating working tree for submodule %r" % self.name)
# END update to new commit only if needed
-
+
# HANDLE RECURSION
##################
if recursive:
@@ -472,15 +472,15 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END handle recursive update
#END handle dry run
# END for each submodule
-
+
return self
-
+
@unbare_repo
def move(self, module_path, configuration=True, module=True):
"""Move the submodule to a another module path. This involves physically moving
the repository at our current path, changing the configuration, as well as
adjusting our index entry accordingly.
-
+
:param module_path: the path to which to move our module, given as
repository-relative path. Intermediate directories will be created
accordingly. If the path already exists, it must be empty.
@@ -499,36 +499,36 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if module + configuration < 1:
raise ValueError("You must specify to move at least the module or the configuration of the submodule")
#END handle input
-
+
module_path = to_native_path_linux(module_path)
if module_path.endswith('/'):
module_path = module_path[:-1]
# END handle trailing slash
-
+
# VERIFY DESTINATION
if module_path == self.path:
return self
#END handle no change
-
+
dest_path = join_path_native(self.repo.working_tree_dir, module_path)
if os.path.isfile(dest_path):
raise ValueError("Cannot move repository onto a file: %s" % dest_path)
# END handle target files
-
+
index = self.repo.index
tekey = index.entry_key(module_path, 0)
# if the target item already exists, fail
if configuration and tekey in index.entries:
raise ValueError("Index entry for target path did alredy exist")
#END handle index key already there
-
+
# remove existing destination
if module:
if os.path.exists(dest_path):
if len(os.listdir(dest_path)):
raise ValueError("Destination module directory was not empty")
#END handle non-emptyness
-
+
if os.path.islink(dest_path):
os.remove(dest_path)
else:
@@ -540,7 +540,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
pass
#END handle existance
# END handle module
-
+
# move the module into place if possible
cur_path = self.abspath
renamed_module = False
@@ -548,8 +548,8 @@ class Submodule(util.IndexObject, Iterable, Traversable):
os.renames(cur_path, dest_path)
renamed_module = True
#END move physical module
-
-
+
+
# rename the index entry - have to manipulate the index directly as
# git-mv cannot be used on submodules ... yeah
try:
@@ -563,7 +563,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
except KeyError:
raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path))
#END handle submodule doesn't exist
-
+
# update configuration
writer = self.config_writer(index=index) # auto-write
writer.set_value('path', module_path)
@@ -576,14 +576,14 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END undo module renaming
raise
#END handle undo rename
-
+
return self
-
+
@unbare_repo
def remove(self, module=True, force=False, configuration=True, dry_run=False):
"""Remove this submodule from the repository. This will remove our entry
from the .gitmodules file and the entry in the .git/config file.
-
+
:param module: If True, the module we point to will be deleted
as well. If the module is currently on a commit which is not part
of any branch in the remote, if the currently checked out branch
@@ -608,7 +608,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if not (module + configuration):
raise ValueError("Need to specify to delete at least the module, or the configuration")
# END handle params
-
+
# DELETE MODULE REPOSITORY
##########################
if module and self.module_exists():
@@ -635,7 +635,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if mod.is_dirty(untracked_files=True):
raise InvalidGitRepositoryError("Cannot delete module at %s with any modifications, unless force is specified" % mod.working_tree_dir)
# END check for dirt
-
+
# figure out whether we have new commits compared to the remotes
# NOTE: If the user pulled all the time, the remote heads might
# not have been updated, so commits coming from the remote look
@@ -659,13 +659,13 @@ class Submodule(util.IndexObject, Iterable, Traversable):
del(rrefs)
del(remote)
# END for each remote
-
+
# gently remove all submodule repositories
for sm in self.children():
sm.remove(module=True, force=False, configuration=False, dry_run=dry_run)
del(sm)
# END for each child-submodule
-
+
# finally delete our own submodule
if not dry_run:
wtd = mod.working_tree_dir
@@ -674,7 +674,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END delete tree if possible
# END handle force
# END handle module deletion
-
+
# DELETE CONFIGURATION
######################
if configuration and not dry_run:
@@ -686,7 +686,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
pass
#END delete entry
index.write()
-
+
# now git config - need the config intact, otherwise we can't query
# inforamtion anymore
self.repo.config_writer().remove_section(sm_section(self.name))
@@ -695,13 +695,13 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# void our data not to delay invalid access
self._clear_cache()
-
+
return self
-
+
def set_parent_commit(self, commit, check=True):
"""Set this instance to use the given commit whose tree is supposed to
contain the .gitmodules blob.
-
+
:param commit: Commit'ish reference pointing at the root_tree
:param check: if True, relatively expensive checks will be performed to verify
validity of the submodule.
@@ -714,10 +714,10 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if self.k_modules_file not in pctree:
raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file))
# END handle exceptions
-
+
prev_pc = self._parent_commit
self._parent_commit = pcommit
-
+
if check:
parser = self._config_parser(self.repo, self._parent_commit, read_only=True)
if not parser.has_section(sm_section(self.name)):
@@ -725,19 +725,19 @@ class Submodule(util.IndexObject, Iterable, Traversable):
raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit))
# END handle submodule did not exist
# END handle checking mode
-
+
# update our sha, it could have changed
self.binsha = pctree[self.path].binsha
-
+
self._clear_cache()
-
+
return self
-
+
@unbare_repo
def config_writer(self, index=None, write=True):
""":return: a config writer instance allowing you to read and write the data
belonging to this submodule into the .gitmodules file.
-
+
:param index: if not None, an IndexFile instance which should be written.
defaults to the index of the Submodule's parent repository.
:param write: if True, the index will be written each time a configuration
@@ -753,11 +753,11 @@ class Submodule(util.IndexObject, Iterable, Traversable):
writer.config._index = index
writer.config._auto_write = write
return writer
-
+
#} END edit interface
-
+
#{ Query Interface
-
+
@unbare_repo
def module(self):
""":return: Repo instance initialized from the repository at our submodule path
@@ -775,7 +775,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
else:
raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_path)
# END handle exceptions
-
+
def module_exists(self):
""":return: True if our module exists and is a valid git repository. See module() method"""
try:
@@ -784,7 +784,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
except Exception:
return False
# END handle exception
-
+
def exists(self):
"""
:return: True if the submodule exists, False otherwise. Please note that
@@ -799,7 +799,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END if we have the attribute cache
#END for each attr
self._clear_cache()
-
+
try:
try:
self.path
@@ -814,38 +814,38 @@ class Submodule(util.IndexObject, Iterable, Traversable):
# END if we have a cache
# END reapply each attribute
# END handle object state consistency
-
+
@property
def branch(self):
""":return: The branch instance that we are to checkout
:raise InvalidGitRepositoryError: if our module is not yet checked out"""
return mkhead(self.module(), self._branch_path)
-
+
@property
def branch_path(self):
"""
:return: full (relative) path as string to the branch we would checkout
from the remote and track"""
return self._branch_path
-
+
@property
def branch_name(self):
""":return: the name of the branch, which is the shortest possible branch name"""
# use an instance method, for this we create a temporary Head instance
# which uses a repository that is available at least ( it makes no difference )
return git.Head(self.repo, self._branch_path).name
-
+
@property
def url(self):
""":return: The url to the repository which our module-repository refers to"""
return self._url
-
+
@property
def parent_commit(self):
""":return: Commit instance with the tree containing the .gitmodules file
:note: will always point to the current head's commit if it was not set explicitly"""
return self._parent_commit
-
+
@property
def name(self):
""":return: The name of this submodule. It is used to identify it within the
@@ -856,7 +856,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
easily
"""
return self._name
-
+
def config_reader(self):
"""
:return: ConfigReader instance which allows you to qurey the configuration values
@@ -866,17 +866,17 @@ class Submodule(util.IndexObject, Iterable, Traversable):
:note: Should be cached by the caller and only kept as long as needed
:raise IOError: If the .gitmodules file/blob could not be read"""
return self._config_parser_constrained(read_only=True)
-
+
def children(self):
"""
:return: IterableList(Submodule, ...) an iterable list of submodules instances
which are children of this submodule or 0 if the submodule is not checked out"""
return self._get_intermediate_items(self)
-
+
#} END query interface
-
+
#{ Iterable Interface
-
+
@classmethod
def iter_items(cls, repo, parent_commit='HEAD'):
""":return: iterator yielding Submodule instances available in the given repository"""
@@ -886,9 +886,9 @@ class Submodule(util.IndexObject, Iterable, Traversable):
except IOError:
raise StopIteration
# END handle empty iterator
-
+
rt = pc.tree # root tree
-
+
for sms in parser.sections():
n = sm_name(sms)
p = parser.get_value(sms, 'path')
@@ -897,7 +897,7 @@ class Submodule(util.IndexObject, Iterable, Traversable):
if parser.has_option(sms, cls.k_head_option):
b = str(parser.get_value(sms, cls.k_head_option))
# END handle optional information
-
+
# get the binsha
index = repo.index
try:
@@ -911,15 +911,14 @@ class Submodule(util.IndexObject, Iterable, Traversable):
raise InvalidGitRepositoryError("Gitmodule path %r did not exist in revision of parent commit %s" % (p, parent_commit))
# END handle keyerror
# END handle critical error
-
+
# fill in remaining info - saves time as it doesn't have to be parsed again
sm._name = n
sm._parent_commit = pc
sm._branch_path = git.Head.to_full_path(b)
sm._url = u
-
+
yield sm
# END for each section
-
- #} END iterable interface
+ #} END iterable interface
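Submodule models one .gitmodules entry plus the gitlink recorded in the tree; add(), update(), move() and remove() keep the configuration, the index and the checked-out module in sync. A hedged sketch with name, path and url as placeholders:

    import git

    repo = git.Repo('/path/to/repo')                               # placeholder path
    # register and clone a new submodule (placeholder name/path/url)
    sm = git.Submodule.add(repo, 'lib', 'ext/lib', url='git://example.com/lib.git')
    sm.update(recursive=True, init=True)                           # check out the recorded commit
    print(sm.name)
    print(sm.url)
    print(sm.branch_path)                                          # full ref path that would be tracked
    for sub in repo.submodules:                                    # existing submodules of the repo
        print(sub)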
diff --git a/git/objects/submodule/root.py b/git/objects/submodule/root.py
index d9764b36..fb0a65c3 100644
--- a/git/objects/submodule/root.py
+++ b/git/objects/submodule/root.py
@@ -14,7 +14,7 @@ class RootUpdateProgress(UpdateProgress):
"""Utility class which adds more opcodes to the UpdateProgress"""
REMOVE, PATHCHANGE, BRANCHCHANGE, URLCHANGE = [1 << x for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes+4)]
_num_op_codes = UpdateProgress._num_op_codes+4
-
+
__slots__ = tuple()
BEGIN = RootUpdateProgress.BEGIN
@@ -27,11 +27,11 @@ PATHCHANGE = RootUpdateProgress.PATHCHANGE
class RootModule(Submodule):
"""A (virtual) Root of all submodules in the given repository. It can be used
to more easily traverse all submodules of the master repository"""
-
+
__slots__ = tuple()
-
+
k_root_name = '__ROOT__'
-
+
def __init__(self, repo):
# repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
super(RootModule, self).__init__(
@@ -44,14 +44,14 @@ class RootModule(Submodule):
url = '',
branch_path = git.Head.to_full_path(self.k_head_default)
)
-
-
+
+
def _clear_cache(self):
"""May not do anything"""
pass
-
+
#{ Interface
-
+
def update(self, previous_commit=None, recursive=True, force_remove=False, init=True,
to_latest_revision=False, progress=None, dry_run=False):
"""Update the submodules of this repository to the current HEAD commit.
@@ -60,7 +60,7 @@ class RootModule(Submodule):
checked out. This works if the submodules ID does not change.
Additionally it will detect addition and removal of submodules, which will be handled
gracefully.
-
+
:param previous_commit: If set to a commit'ish, the commit we should use
as the previous commit the HEAD pointed to before it was set to the commit it points to now.
If None, it defaults to HEAD@{1} otherwise
@@ -79,17 +79,17 @@ class RootModule(Submodule):
if self.repo.bare:
raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
# END handle bare
-
+
if progress is None:
progress = RootUpdateProgress()
#END assure progress is set
-
+
prefix = ''
if dry_run:
prefix = 'DRY-RUN: '
-
+
repo = self.repo
-
+
# SETUP BASE COMMIT
###################
cur_commit = repo.head.commit
@@ -106,13 +106,13 @@ class RootModule(Submodule):
else:
previous_commit = repo.commit(previous_commit) # obtain commit object
# END handle previous commit
-
-
+
+
psms = self.list_items(repo, parent_commit=previous_commit)
sms = self.list_items(repo)
spsms = set(psms)
ssms = set(sms)
-
+
# HANDLE REMOVALS
###################
rrsm = (spsms - ssms)
@@ -122,7 +122,7 @@ class RootModule(Submodule):
if i == 0:
op |= BEGIN
#END handle begin
-
+
# fake it into thinking its at the current commit to allow deletion
# of previous module. Trigger the cache to be updated before that
progress.update(op, i, len_rrsm, prefix+"Removing submodule %r at %s" % (rsm.name, rsm.abspath))
@@ -130,13 +130,13 @@ class RootModule(Submodule):
if not dry_run:
rsm.remove(configuration=False, module=True, force=force_remove)
#END handle dry-run
-
+
if i == len_rrsm-1:
op |= END
#END handle end
progress.update(op, i, len_rrsm, prefix+"Done removing submodule %r" % rsm.name)
# END for each removed submodule
-
+
# HANDLE PATH RENAMES
#####################
# url changes + branch changes
@@ -145,7 +145,7 @@ class RootModule(Submodule):
for i, csm in enumerate(csms):
psm = psms[csm.name]
sm = sms[csm.name]
-
+
#PATH CHANGES
##############
if sm.path != psm.path and psm.module_exists():
@@ -156,7 +156,7 @@ class RootModule(Submodule):
#END handle dry_run
progress.update(END|PATHCHANGE, i, len_csms, prefix+"Done moving repository of submodule %r" % sm.name)
# END handle path changes
-
+
if sm.module_exists():
# HANDLE URL CHANGE
###################
@@ -167,22 +167,22 @@ class RootModule(Submodule):
nn = '__new_origin__'
smm = sm.module()
rmts = smm.remotes
-
+
# don't do anything if we already have the url we search in place
if len([r for r in rmts if r.url == sm.url]) == 0:
progress.update(BEGIN|URLCHANGE, i, len_csms, prefix+"Changing url of submodule %r from %s to %s" % (sm.name, psm.url, sm.url))
-
+
if not dry_run:
assert nn not in [r.name for r in rmts]
smr = smm.create_remote(nn, sm.url)
smr.fetch(progress=progress)
-
+
# If we have a tracking branch, it should be available
# in the new remote as well.
if len([r for r in smr.refs if r.remote_head == sm.branch_name]) == 0:
raise ValueError("Submodule branch named %r was not available in new submodule remote at %r" % (sm.branch_name, sm.url))
# END head is not detached
-
+
# now delete the changed one
rmt_for_deletion = None
for remote in rmts:
@@ -191,7 +191,7 @@ class RootModule(Submodule):
break
# END if urls match
# END for each remote
-
+
# if we didn't find a matching remote, but have exactly one,
# we can safely use this one
if rmt_for_deletion is None:
@@ -206,7 +206,7 @@ class RootModule(Submodule):
raise InvalidGitRepositoryError("Couldn't find original remote-repo at url %r" % psm.url)
#END handle one single remote
# END handle check we found a remote
-
+
orig_name = rmt_for_deletion.name
smm.delete_remote(rmt_for_deletion)
# NOTE: Currently we leave tags from the deleted remotes
@@ -215,10 +215,10 @@ class RootModule(Submodule):
# another project ). At some point, one might want to clean
# it up, but the danger is high to remove stuff the user
# has added explicitly
-
+
# rename the new remote back to what it was
smr.rename(orig_name)
-
+
# early on, we verified that the our current tracking branch
# exists in the remote. Now we have to assure that the
# sha we point to is still contained in the new remote
@@ -232,7 +232,7 @@ class RootModule(Submodule):
break
# END traverse all commits in search for sha
# END for each commit
-
+
if not found:
# adjust our internal binsha to use the one of the remote
# this way, it will be checked out in the next step
@@ -241,13 +241,13 @@ class RootModule(Submodule):
print >> sys.stderr, "WARNING: Current sha %s was not contained in the tracking branch at the new remote, setting it the the remote's tracking branch" % sm.hexsha
sm.binsha = rref.commit.binsha
#END reset binsha
-
+
#NOTE: All checkout is performed by the base implementation of update
#END handle dry_run
progress.update(END|URLCHANGE, i, len_csms, prefix+"Done adjusting url of submodule %r" % (sm.name))
# END skip remote handling if new url already exists in module
# END handle url
-
+
# HANDLE PATH CHANGES
#####################
if sm.branch_path != psm.branch_path:
@@ -263,7 +263,7 @@ class RootModule(Submodule):
# ... or reuse the existing one
tbr = git.Head(smm, sm.branch_path)
#END assure tracking branch exists
-
+
tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch_name))
# figure out whether the previous tracking branch contains
# new commits compared to the other one, if not we can
@@ -278,22 +278,22 @@ class RootModule(Submodule):
# current remotes, this just means we can't handle it
pass
# END exception handling
-
+
#NOTE: All checkout is done in the base implementation of update
#END handle dry_run
-
+
progress.update(END|BRANCHCHANGE, i, len_csms, prefix+"Done changing branch of submodule %r" % sm.name)
#END handle branch
#END handle
# END for each common submodule
-
+
# FINALLY UPDATE ALL ACTUAL SUBMODULES
######################################
for sm in sms:
# update the submodule using the default method
sm.update(recursive=False, init=init, to_latest_revision=to_latest_revision,
progress=progress, dry_run=dry_run)
-
+
# update recursively depth first - question is which inconsitent
# state will be better in case it fails somewhere. Defective branch
# or defective depth. The RootSubmodule type will never process itself,
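RootModule wraps the superproject as a virtual submodule so that a single update() call can bring every real submodule in line with the current HEAD, handling added, removed, moved and re-pointed submodules. A sketch, with the superproject path assumed and dry_run enabled so nothing is actually changed:

    import git
    from git.objects.submodule.root import RootModule

    repo = git.Repo('/path/to/superproject')      # placeholder path
    RootModule(repo).update(recursive=True, init=True,
                            to_latest_revision=False, dry_run=True)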
diff --git a/git/objects/submodule/util.py b/git/objects/submodule/util.py
index 492d9dbe..237321e2 100644
--- a/git/objects/submodule/util.py
+++ b/git/objects/submodule/util.py
@@ -17,11 +17,11 @@ def sm_name(section):
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1]
-
+
def mkhead(repo, path):
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
-
+
def unbare_repo(func):
"""Methods with this decorator raise InvalidGitRepositoryError if they
encounter a bare repository"""
@@ -33,7 +33,7 @@ def unbare_repo(func):
# END wrapper
wrapper.__name__ = func.__name__
return wrapper
-
+
def find_first_remote_branch(remotes, branch_name):
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
@@ -44,7 +44,7 @@ def find_first_remote_branch(remotes, branch_name):
# END exception handling
#END for remote
raise InvalidGitRepositoryError("Didn't find remote branch %r in any of the given remotes", branch_name)
-
+
#} END utilities
@@ -56,16 +56,16 @@ class SubmoduleConfigParser(GitConfigParser):
with the new data, if we have written into a stream. Otherwise it will
add the local file to the index to make it correspond with the working tree.
Additionally, the cache must be cleared
-
+
Please note that no mutating method will work in bare mode
"""
-
+
def __init__(self, *args, **kwargs):
self._smref = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
-
+
#{ Interface
def set_submodule(self, submodule):
"""Set this instance's submodule. It must be called before
@@ -77,7 +77,7 @@ class SubmoduleConfigParser(GitConfigParser):
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, StringIO)
-
+
sm = self._smref()
if sm is not None:
index = self._index
@@ -89,7 +89,7 @@ class SubmoduleConfigParser(GitConfigParser):
# END handle weakref
#} END interface
-
+
#{ Overridden Methods
def write(self):
rval = super(SubmoduleConfigParser, self).write()
diff --git a/git/objects/tag.py b/git/objects/tag.py
index d0b5a11a..b34c5945 100644
--- a/git/objects/tag.py
+++ b/git/objects/tag.py
@@ -17,11 +17,11 @@ class TagObject(base.Object):
"""Non-Lightweight tag carrying additional information about an object we are pointing to."""
type = "tag"
__slots__ = ( "object", "tag", "tagger", "tagged_date", "tagger_tz_offset", "message" )
-
+
def __init__(self, repo, binsha, object=None, tag=None,
tagger=None, tagged_date=None, tagger_tz_offset=None, message=None):
"""Initialize a tag object with additional data
-
+
:param repo: repository this object is located in
:param binsha: 20 byte SHA1
:param object: Object instance of object we are pointing to
@@ -45,22 +45,22 @@ class TagObject(base.Object):
self.tagger_tz_offset = tagger_tz_offset
if message is not None:
self.message = message
-
+
def _set_cache_(self, attr):
"""Cache all our attributes at once"""
if attr in TagObject.__slots__:
ostream = self.repo.odb.stream(self.binsha)
lines = ostream.read().splitlines()
-
+
obj, hexsha = lines[0].split(" ") # object <hexsha>
type_token, type_name = lines[1].split(" ") # type <type_name>
self.object = get_object_type_by_name(type_name)(self.repo, hex_to_bin(hexsha))
-
+
self.tag = lines[2][4:] # tag <tag name>
-
+
tagger_info = lines[3]# tagger <actor> <date>
self.tagger, self.tagged_date, self.tagger_tz_offset = parse_actor_and_date(tagger_info)
-
+
# line 4 empty - it could mark the beginning of the next header
# in case there really is no message, it would not exist. Otherwise
# a newline separates header from message
@@ -71,6 +71,3 @@ class TagObject(base.Object):
# END check our attributes
else:
super(TagObject, self)._set_cache_(attr)
-
-
-
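TagObject only exists for annotated tags and is parsed lazily from the raw tag object (object, type, tag and tagger lines plus the message). A sketch assuming a tag named 'v1.0' exists in the placeholder repository:

    import git

    repo = git.Repo('/path/to/repo')        # placeholder path
    ref = repo.tags['v1.0']                 # placeholder tag name
    tagobj = ref.tag                        # None for lightweight tags
    if tagobj is not None:
        print(tagobj.tag)                   # tag name as stored in the object
        print(tagobj.tagger.name)
        print(tagobj.tagged_date)           # seconds since epoch, UTC
        print(tagobj.object)                # the tagged Commit (or other object)
        print(tagobj.message)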
diff --git a/git/objects/tree.py b/git/objects/tree.py
index d1e827f5..92b0feca 100644
--- a/git/objects/tree.py
+++ b/git/objects/tree.py
@@ -23,14 +23,14 @@ __all__ = ("TreeModifier", "Tree")
class TreeModifier(object):
"""A utility class providing methods to alter the underlying cache in a list-like fashion.
-
+
Once all adjustments are complete, the _cache, which really is a refernce to
the cache of a tree, will be sorted. Assuring it will be in a serializable state"""
__slots__ = '_cache'
-
+
def __init__(self, cache):
self._cache = cache
-
+
def _index_by_name(self, name):
""":return: index of an item with name, or -1 if not found"""
for i, t in enumerate(self._cache):
@@ -39,7 +39,7 @@ class TreeModifier(object):
# END found item
# END for each item in cache
return -1
-
+
#{ Interface
def set_done(self):
"""Call this method once you are done modifying the tree information.
@@ -49,14 +49,14 @@ class TreeModifier(object):
self._cache.sort(key=lambda t: t[2]) # sort by name
return self
#} END interface
-
+
#{ Mutators
def add(self, sha, mode, name, force=False):
"""Add the given item to the tree. If an item with the given name already
exists, nothing will be done, but a ValueError will be raised if the
sha and mode of the existing item do not match the one you add, unless
force is True
-
+
:param sha: The 20 or 40 byte sha of the item to add
:param mode: int representing the stat compatible mode of the item
:param force: If True, an item with your name and information will overwrite
@@ -66,7 +66,7 @@ class TreeModifier(object):
raise ValueError("Name must not contain '/' characters")
if (mode >> 12) not in Tree._map_id_to_type:
raise ValueError("Invalid object type according to mode %o" % mode)
-
+
sha = to_bin_sha(sha)
index = self._index_by_name(name)
item = (sha, mode, name)
@@ -83,52 +83,52 @@ class TreeModifier(object):
# END handle force
# END handle name exists
return self
-
+
def add_unchecked(self, binsha, mode, name):
"""Add the given item to the tree, its correctness is assumed, which
puts the caller into responsibility to assure the input is correct.
For more information on the parameters, see ``add``
:param binsha: 20 byte binary sha"""
self._cache.append((binsha, mode, name))
-
+
def __delitem__(self, name):
"""Deletes an item with the given name if it exists"""
index = self._index_by_name(name)
if index > -1:
del(self._cache[index])
-
+
#} END mutators
class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
"""Tree objects represent an ordered list of Blobs and other Trees.
-
+
``Tree as a list``::
-
+
Access a specific blob using the
tree['filename'] notation.
-
+
You may as well access by index
blob = tree[0]
"""
-
+
type = "tree"
__slots__ = "_cache"
-
+
# actual integer ids for comparison
commit_id = 016 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link
blob_id = 010
symlink_id = 012
tree_id = 004
-
+
_map_id_to_type = {
commit_id : Submodule,
blob_id : Blob,
symlink_id : Blob
# tree id added once Tree is defined
}
-
-
+
+
def __init__(self, repo, binsha, mode=tree_id<<12, path=None):
super(Tree, self).__init__(repo, binsha, mode, path)
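
Taken together, the mutators above edit the tree's raw cache of (binsha, mode, name) tuples, and set_done() restores the name ordering the serializer expects. A hedged usage sketch with dummy shas (add converts a hex sha via to_bin_sha, add_unchecked takes the 20-byte form as-is; the entry names are illustrative):

    from git.objects.tree import TreeModifier

    cache = []                                   # normally an existing Tree's _cache
    tm = TreeModifier(cache)
    tm.add("0" * 40, 0100644, "README")          # validates mode and name
    tm.add_unchecked("\0" * 20, 0100644, "setup.py")
    del tm["README"]                             # __delitem__ removes by name
    tm.set_done()                                # sort by name before serializing
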
@@ -161,7 +161,7 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
def __div__(self, file):
"""Find the named object in this tree's contents
:return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``
-
+
:raise KeyError: if given file or tree does not exist in tree"""
msg = "Blob or Tree named %r not found"
if '/' in file:
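
As the docstring above states, __div__ resolves a single name, or a '/'-separated path, relative to this tree and raises KeyError when nothing matches. A hedged usage sketch, assuming a Repo instance 'repo' and illustrative paths:

    tree = repo.head.commit.tree
    subdir = tree / "git"                  # a git.Tree
    blob = tree / "git/objects/tree.py"    # slash paths descend into subtrees
    try:
        tree / "does-not-exist"
    except KeyError:
        pass                               # "Blob or Tree named ... not found"
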
@@ -195,7 +195,7 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
def trees(self):
""":return: list(Tree, ...) list of trees directly below this tree"""
return [ i for i in self if i.type == "tree" ]
-
+
@property
def blobs(self):
""":return: list(Blob, ...) list of blobs directly below this tree"""
@@ -220,26 +220,26 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
# List protocol
def __getslice__(self, i, j):
return list(self._iter_convert_to_object(self._cache[i:j]))
-
+
def __iter__(self):
return self._iter_convert_to_object(self._cache)
-
+
def __len__(self):
return len(self._cache)
-
+
def __getitem__(self, item):
if isinstance(item, int):
info = self._cache[item]
return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
-
+
if isinstance(item, basestring):
# compatibility
return self.__div__(item)
# END index is basestring
-
+
raise TypeError( "Invalid index type: %r" % item )
-
-
+
+
def __contains__(self, item):
if isinstance(item, IndexObject):
for info in self._cache:
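
Through the list protocol above a Tree behaves like a read-only sequence: integers and slices address the cached entries and wrap them into Blob/Tree/Submodule instances, while string indices fall back to __div__. A short sketch (again assuming a Repo instance 'repo'; entry names are illustrative):

    tree = repo.head.commit.tree
    count = len(tree)                      # number of direct entries
    first = tree[0]                        # Blob, Tree or Submodule
    head = tree[0:2]                       # __getslice__ wraps the selected entries
    paths = [item.path for item in tree]   # __iter__
    readme = tree["README"]                # basestring index delegates to __div__
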
@@ -249,7 +249,7 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
# END for each entry
# END handle item is index object
# compatibility
-
+
# treat item as repo-relative path
path = self.path
for info in self._cache:
@@ -257,10 +257,10 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
return True
# END for each item
return False
-
+
def __reversed__(self):
return reversed(self._iter_convert_to_object(self._cache))
-
+
def _serialize(self, stream):
"""Serialize this tree into the stream. Please note that we will assume
our tree data to be in a sorted state. If this is not the case, serialization
@@ -268,12 +268,12 @@ class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
by algorithms"""
tree_to_stream(self._cache, stream.write)
return self
-
+
def _deserialize(self, stream):
self._cache = tree_entries_from_data(stream.read())
return self
-
-
+
+
# END tree
# finalize map definition
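
_serialize and _deserialize stream the raw entry cache through tree_to_stream and tree_entries_from_data, so a tree whose cache is sorted can be round-tripped through any file-like object. A hedged sketch using an in-memory stream (assuming a Repo instance 'repo'; instantiating the copy this way is for illustration only):

    from cStringIO import StringIO

    tree = repo.head.commit.tree
    stream = StringIO()
    tree._serialize(stream)        # assumes tree._cache is in sorted state (see note above)
    stream.seek(0)

    copy = Tree(repo, tree.binsha)
    copy._deserialize(stream)      # rebuilds the entry cache from the raw bytes
    assert copy._cache == tree._cache
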
diff --git a/git/objects/util.py b/git/objects/util.py
index cdf72bed..f9dffd81 100644
--- a/git/objects/util.py
+++ b/git/objects/util.py
@@ -40,9 +40,9 @@ def get_object_type_by_name(object_type_name):
"""
:return: type suitable to handle the given object type name.
Use the type to create new instances.
-
+
:param object_type_name: Member of TYPES
-
+
:raise ValueError: In case object_type_name is unknown"""
if object_type_name == "commit":
import commit
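
get_object_type_by_name maps git's textual object type to the implementing class and raises ValueError for anything else, for example:

    from git.objects.util import get_object_type_by_name

    assert get_object_type_by_name("commit").__name__ == "Commit"
    assert get_object_type_by_name("tree").__name__ == "Tree"
    try:
        get_object_type_by_name("banana")
    except ValueError:
        pass    # "Cannot handle unknown object type: banana"
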
@@ -58,14 +58,14 @@ def get_object_type_by_name(object_type_name):
return tree.Tree
else:
raise ValueError("Cannot handle unknown object type: %s" % object_type_name)
-
+
def utctz_to_altz(utctz):
"""we convert utctz to the timezone in seconds, it is the format time.altzone
returns. Git stores it as UTC timezone which has the opposite sign as well,
which explains the -1 * ( that was made explicit here )
:param utctz: git utc timezone string, i.e. +0200"""
return -1 * int(float(utctz)/100*3600)
-
+
def altz_to_utctz_str(altz):
"""As above, but inverses the operation, returning a string that can be used
in commit objects"""
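
utctz_to_altz turns git's '+HHMM' offset string into seconds in the time.altzone convention (positive west of UTC), and altz_to_utctz_str is its inverse. Worked through for a +0200 offset:

    from git.objects.util import utctz_to_altz, altz_to_utctz_str

    # +0200 is two hours east of UTC; time.altzone counts seconds west of UTC,
    # hence the sign flip: -1 * int(200.0 / 100 * 3600) == -7200
    assert utctz_to_altz("+0200") == -7200
    assert utctz_to_altz("-0700") == 25200
    assert altz_to_utctz_str(-7200) == "+0200"
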
@@ -74,7 +74,7 @@ def altz_to_utctz_str(altz):
utcs = "0"*(4-len(utcs)) + utcs
prefix = (utci < 0 and '-') or '+'
return prefix + utcs
-
+
def verify_utctz(offset):
""":raise ValueError: if offset is incorrect
@@ -95,12 +95,12 @@ def verify_utctz(offset):
def parse_date(string_date):
"""
Parse the given date as one of the following
-
+
* Git internal format: timestamp offset
* RFC 2822: Thu, 07 Apr 2005 22:13:13 +0200.
* ISO 8601 2005-04-07T22:13:13
The T can be a space as well
-
+
:return: Tuple(int(timestamp), int(offset)), both in seconds since epoch
:raise ValueError: If the format could not be understood
:note: Date can also be YYYY.MM.DD, MM/DD/YYYY and DD.MM.YYYY"""
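
A hedged usage sketch of parse_date for the formats listed above; the offset value follows the time.altzone convention of utctz_to_altz:

    from git.objects.util import parse_date

    ts, offset = parse_date("1191999972 -0700")     # git internal format
    # ts == 1191999972, offset == 25200 (seconds west of UTC)

    parse_date("Thu, 07 Apr 2005 22:13:13 +0200")   # RFC 2822
    parse_date("2005-04-07T22:13:13")               # ISO 8601, no explicit offset
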
@@ -116,7 +116,7 @@ def parse_date(string_date):
offset = verify_utctz(string_date[-5:])
string_date = string_date[:-6] # skip space as well
# END split timezone info
-
+
# now figure out the date and time portion - split time
date_formats = list()
splitter = -1
@@ -129,22 +129,22 @@ def parse_date(string_date):
date_formats.append("%Y.%m.%d")
date_formats.append("%m/%d/%Y")
date_formats.append("%d.%m.%Y")
-
+
splitter = string_date.rfind('T')
if splitter == -1:
splitter = string_date.rfind(' ')
# END handle 'T' and ' '
# END handle rfc or iso
-
+
assert splitter > -1
-
+
# split date and time
time_part = string_date[splitter+1:] # skip space
date_part = string_date[:splitter]
-
+
# parse time
tstruct = time.strptime(time_part, "%H:%M:%S")
-
+
for fmt in date_formats:
try:
dtstruct = time.strptime(date_part, fmt)
@@ -156,7 +156,7 @@ def parse_date(string_date):
continue
# END exception handling
# END for each fmt
-
+
# still here ? fail
raise ValueError("no format matched")
# END handle format
@@ -164,16 +164,16 @@ def parse_date(string_date):
raise ValueError("Unsupported date format: %s" % string_date)
# END handle exceptions
-
+
# precompiled regex
_re_actor_epoch = re.compile(r'^.+? (.*) (\d+) ([+-]\d+).*$')
_re_only_actor = re.compile(r'^.+? (.*)$')
def parse_actor_and_date(line):
"""Parse out the actor (author or committer) info from a line like::
-
+
author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700
-
+
:return: [Actor, int_seconds_since_epoch, int_timezone_offset]"""
actor, epoch, offset = '', 0, 0
m = _re_actor_epoch.search(line)
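
The precompiled _re_actor_epoch expression above splits such a line into actor, epoch and timezone groups; the example from the docstring decomposes as follows:

    import re
    _re_actor_epoch = re.compile(r'^.+? (.*) (\d+) ([+-]\d+).*$')

    line = "author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700"
    actor, epoch, offset = _re_actor_epoch.search(line).groups()
    assert actor == "Tom Preston-Werner <tom@mojombo.com>"
    assert (epoch, offset) == ("1191999972", "-0700")
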
@@ -188,10 +188,10 @@ def parse_actor_and_date(line):
#{ Classes
-
+
class ProcessStreamAdapter(object):
"""Class wireing all calls to the contained Process instance.
-
+
Use this type to hide the underlying process, providing access only to a specified
stream. The process is usually wrapped into an AutoInterrupt class to kill
it if the instance goes out of scope."""
@@ -199,18 +199,18 @@ class ProcessStreamAdapter(object):
def __init__(self, process, stream_name):
self._proc = process
self._stream = getattr(process, stream_name)
-
+
def __getattr__(self, attr):
return getattr(self._stream, attr)
-
-
+
+
class Traversable(object):
"""Simple interface to perforam depth-first or breadth-first traversals
into one direction.
Subclasses only need to implement one function.
Instances of the Subclass must be hashable"""
__slots__ = tuple()
-
+
@classmethod
def _get_intermediate_items(cls, item):
"""
@@ -219,7 +219,7 @@ class Traversable(object):
Must be implemented in subclass
"""
raise NotImplementedError("To be implemented in subclass")
-
+
def list_traverse(self, *args, **kwargs):
"""
:return: IterableList with the results of the traversal as produced by
@@ -227,36 +227,36 @@ class Traversable(object):
out = IterableList(self._id_attribute_)
out.extend(self.traverse(*args, **kwargs))
return out
-
+
def traverse( self, predicate = lambda i,d: True,
prune = lambda i,d: False, depth = -1, branch_first=True,
visit_once = True, ignore_self=1, as_edge = False ):
""":return: iterator yieling of items found when traversing self
-
+
:param predicate: f(i,d) returns False if item i at depth d should not be included in the result
-
+
:param prune:
f(i,d) return True if the search should stop at item i at depth d.
Item i will not be returned.
-
+
:param depth:
define at which level the iteration should not go deeper
if -1, there is no limit
if 0, you would effectively only get self, the root of the iteration
i.e. if 1, you would only get the first level of predecessors/successors
-
+
:param branch_first:
if True, items will be returned branch first, otherwise depth first
-
+
:param visit_once:
if True, items will only be returned once, although they might be encountered
several times. Loops are prevented that way.
-
+
:param ignore_self:
if True, self will be ignored and automatically pruned from
the result. Otherwise it will be the first item to be returned.
If as_edge is True, the source of the first edge is None
-
+
:param as_edge:
if True, return a pair of items, first being the source, second the
destination, i.e. tuple(src, dest) with the edge spanning from
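
In practice the parameters above combine naturally, e.g. to filter a tree traversal to blobs, prune whole subtrees, or obtain (source, destination) edges. A hedged sketch, again assuming a Repo instance 'repo' and illustrative path names:

    tree = repo.head.commit.tree

    # blobs only, at most two levels deep
    blobs = list(tree.traverse(predicate=lambda i, d: i.type == "blob", depth=2))

    # skip everything below a 'test' directory
    no_tests = list(tree.traverse(prune=lambda i, d: i.path.startswith("test")))

    # commits are Traversable too: walk the ancestry as (child, parent) edges
    edges = list(repo.head.commit.traverse(as_edge=True))
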
@@ -264,7 +264,7 @@ class Traversable(object):
visited = set()
stack = Deque()
stack.append( ( 0 ,self, None ) ) # self is always depth level 0
-
+
def addToStack( stack, item, branch_first, depth ):
lst = self._get_intermediate_items( item )
if not lst:
@@ -275,44 +275,44 @@ class Traversable(object):
reviter = ( ( depth , lst[i], item ) for i in range( len( lst )-1,-1,-1) )
stack.extend( reviter )
# END addToStack local method
-
+
while stack:
d, item, src = stack.pop() # depth of item, item, item_source
-
+
if visit_once and item in visited:
continue
-
+
if visit_once:
visited.add(item)
-
+
rval = ( as_edge and (src, item) ) or item
if prune( rval, d ):
continue
-
+
skipStartItem = ignore_self and ( item is self )
if not skipStartItem and predicate( rval, d ):
yield rval
-
+
# only continue to next level if this is appropriate !
nd = d + 1
if depth > -1 and nd > depth:
continue
-
+
addToStack( stack, item, branch_first, nd )
# END for each item on work stack
-
+
class Serializable(object):
"""Defines methods to serialize and deserialize objects from and into a data stream"""
__slots__ = tuple()
-
+
def _serialize(self, stream):
"""Serialize the data of this object into the given data stream
:note: a serialized object would ``_deserialize`` into the same object
:param stream: a file-like object
:return: self"""
raise NotImplementedError("To be implemented in subclass")
-
+
def _deserialize(self, stream):
"""Deserialize all information regarding this object from the stream
:param stream: a file-like object