summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--lib/git/config.py72
m---------lib/git/ext/gitdb0
-rw-r--r--lib/git/index/base.py12
-rw-r--r--lib/git/index/fun.py69
-rw-r--r--lib/git/objects/__init__.py11
-rw-r--r--lib/git/objects/base.py21
-rw-r--r--lib/git/objects/commit.py28
-rw-r--r--lib/git/objects/submodule.py16
-rw-r--r--lib/git/objects/submodule/__init__.py1
-rw-r--r--lib/git/objects/submodule/base.py816
-rw-r--r--lib/git/objects/submodule/root.py263
-rw-r--r--lib/git/objects/submodule/util.py101
-rw-r--r--lib/git/objects/tag.py13
-rw-r--r--lib/git/objects/tree.py2
-rw-r--r--lib/git/objects/util.py10
-rw-r--r--lib/git/refs.py133
-rw-r--r--lib/git/remote.py41
-rw-r--r--lib/git/repo/base.py39
-rw-r--r--lib/git/util.py5
-rw-r--r--test/git/test_base.py2
-rw-r--r--test/git/test_commit.py2
-rw-r--r--test/git/test_config.py4
-rw-r--r--test/git/test_index.py2
-rw-r--r--test/git/test_refs.py55
-rw-r--r--test/git/test_remote.py2
-rw-r--r--test/git/test_repo.py26
-rw-r--r--test/git/test_submodule.py478
-rw-r--r--test/git/test_tree.py2
-rw-r--r--test/testlib/helper.py53
29 files changed, 2093 insertions, 186 deletions
diff --git a/lib/git/config.py b/lib/git/config.py
index 09bad0b6..f1a8832e 100644
--- a/lib/git/config.py
+++ b/lib/git/config.py
@@ -15,7 +15,7 @@ import cStringIO
from git.odict import OrderedDict
from git.util import LockFile
-__all__ = ('GitConfigParser', )
+__all__ = ('GitConfigParser', 'SectionConstraint')
class MetaParserBuilder(type):
"""Utlity class wrapping base-class methods into decorators that assure read-only properties"""
@@ -23,19 +23,23 @@ class MetaParserBuilder(type):
"""
Equip all base-class methods with a needs_values decorator, and all non-const methods
with a set_dirty_and_flush_changes decorator in addition to that."""
- mutating_methods = clsdict['_mutating_methods_']
- for base in bases:
- methods = ( t for t in inspect.getmembers(base, inspect.ismethod) if not t[0].startswith("_") )
- for name, method in methods:
- if name in clsdict:
- continue
- method_with_values = needs_values(method)
- if name in mutating_methods:
- method_with_values = set_dirty_and_flush_changes(method_with_values)
- # END mutating methods handling
-
- clsdict[name] = method_with_values
- # END for each base
+ kmm = '_mutating_methods_'
+ if kmm in clsdict:
+ mutating_methods = clsdict[kmm]
+ for base in bases:
+ methods = ( t for t in inspect.getmembers(base, inspect.ismethod) if not t[0].startswith("_") )
+ for name, method in methods:
+ if name in clsdict:
+ continue
+ method_with_values = needs_values(method)
+ if name in mutating_methods:
+ method_with_values = set_dirty_and_flush_changes(method_with_values)
+ # END mutating methods handling
+
+ clsdict[name] = method_with_values
+ # END for each name/method pair
+ # END for each base
+ # END if mutating methods configuration is set
new_type = super(MetaParserBuilder, metacls).__new__(metacls, name, bases, clsdict)
return new_type
@@ -63,7 +67,35 @@ def set_dirty_and_flush_changes(non_const_func):
flush_changes.__name__ = non_const_func.__name__
return flush_changes
+
+class SectionConstraint(object):
+ """Constrains a ConfigParser to only option commands which are constrained to
+ always use the section we have been initialized with.
+
+ It supports all ConfigParser methods that operate on an option"""
+ __slots__ = ("_config", "_section_name")
+ _valid_attrs_ = ("get_value", "set_value", "get", "set", "getint", "getfloat", "getboolean", "has_option",
+ "remove_section", "remove_option", "options")
+ def __init__(self, config, section):
+ self._config = config
+ self._section_name = section
+
+ def __getattr__(self, attr):
+ if attr in self._valid_attrs_:
+ return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
+ return super(SectionConstraint,self).__getattribute__(attr)
+
+ def _call_config(self, method, *args, **kwargs):
+ """Call the configuration at the given method which must take a section name
+ as first argument"""
+ return getattr(self._config, method)(self._section_name, *args, **kwargs)
+
+ @property
+ def config(self):
+ """return: Configparser instance we constrain"""
+ return self._config
+
class GitConfigParser(cp.RawConfigParser, object):
"""Implements specifics required to read git style configuration files.
@@ -249,9 +281,9 @@ class GitConfigParser(cp.RawConfigParser, object):
if not hasattr(file_object, "seek"):
try:
fp = open(file_object)
+ close_fp = True
except IOError,e:
continue
- close_fp = True
# END fp handling
try:
@@ -286,17 +318,21 @@ class GitConfigParser(cp.RawConfigParser, object):
:raise IOError: if this is a read-only writer instance or if we could not obtain
a file lock"""
self._assure_writable("write")
- self._lock._obtain_lock()
-
fp = self._file_or_files
close_fp = False
+ # we have a physical file on disk, so get a lock
+ if isinstance(fp, (basestring, file)):
+ self._lock._obtain_lock()
+ # END get lock for physical files
+
if not hasattr(fp, "seek"):
fp = open(self._file_or_files, "w")
close_fp = True
else:
fp.seek(0)
+ # END handle stream or file
# WRITE DATA
try:
@@ -368,7 +404,7 @@ class GitConfigParser(cp.RawConfigParser, object):
return valuestr
@needs_values
- @set_dirty_and_flush_changes
+ @set_dirty_and_flush_changes
def set_value(self, section, option, value):
"""Sets the given option in section to the given value.
It will create the section if required, and will not throw as opposed to the default
diff --git a/lib/git/ext/gitdb b/lib/git/ext/gitdb
-Subproject 78665b13ff4125f4ce3e5311d040c027bdc92a9
+Subproject 2ddc5bad224d8f545ef3bb2ab3df98dfe063c5b
diff --git a/lib/git/index/base.py b/lib/git/index/base.py
index 05501ba1..05caa06d 100644
--- a/lib/git/index/base.py
+++ b/lib/git/index/base.py
@@ -35,7 +35,8 @@ from git.exc import (
)
from git.objects import (
- Blob,
+ Blob,
+ Submodule,
Tree,
Object,
Commit,
@@ -553,7 +554,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
for item in items:
if isinstance(item, basestring):
paths.append(self._to_relative_path(item))
- elif isinstance(item, Blob):
+ elif isinstance(item, (Blob, Submodule)):
entries.append(BaseIndexEntry.from_blob(item))
elif isinstance(item, BaseIndexEntry):
entries.append(item)
@@ -588,7 +589,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
They are added at stage 0
- - Blob object
+ - Blob or Submodule object
Blobs are added as they are assuming a valid mode is set.
The file they refer to may or may not exist in the file system, but
must be a path relative to our repository.
@@ -612,6 +613,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
explicitly set. Please note that Index Entries require binary sha's.
:param force:
+ **CURRENTLY INEFFECTIVE**
If True, otherwise ignored or excluded files will be
added anyway.
As opposed to the git-add command, we enable this flag by default
@@ -748,7 +750,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
may be absolute or relative paths, entries or blobs"""
paths = list()
for item in items:
- if isinstance(item, (BaseIndexEntry,Blob)):
+ if isinstance(item, (BaseIndexEntry,(Blob, Submodule))):
paths.append(self._to_relative_path(item.path))
elif isinstance(item, basestring):
paths.append(self._to_relative_path(item))
@@ -775,7 +777,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
The path string may include globs, such as *.c.
- - Blob object
+ - Blob Object
Only the path portion is used in this case.
- BaseIndexEntry or compatible type
diff --git a/lib/git/index/fun.py b/lib/git/index/fun.py
index 48c4fa74..87fdf1a9 100644
--- a/lib/git/index/fun.py
+++ b/lib/git/index/fun.py
@@ -30,6 +30,7 @@ from typ import (
CE_NAMEMASK,
CE_STAGESHIFT
)
+CE_NAMEMASK_INV = ~CE_NAMEMASK
from util import (
pack,
@@ -53,22 +54,6 @@ def stat_mode_to_index_mode(mode):
return S_IFREG | 0644 | (mode & 0100) # blobs with or without executable bit
-def write_cache_entry(entry, stream):
- """Write the given entry to the stream"""
- beginoffset = stream.tell()
- write = stream.write
- write(entry[4]) # ctime
- write(entry[5]) # mtime
- path = entry[3]
- plen = len(path) & CE_NAMEMASK # path length
- assert plen == len(path), "Path %s too long to fit into index" % entry[3]
- flags = plen | entry[2]
- write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
- entry[8], entry[9], entry[10], entry[1], flags))
- write(path)
- real_size = ((stream.tell() - beginoffset + 8) & ~7)
- write("\0" * ((beginoffset + real_size) - stream.tell()))
-
def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer):
"""Write the cache represented by entries to a stream
@@ -83,15 +68,29 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
a 4 byte identifier, followed by its size ( 4 bytes )"""
# wrap the stream into a compatible writer
stream = ShaStreamCls(stream)
+
+ tell = stream.tell
+ write = stream.write
# header
version = 2
- stream.write("DIRC")
- stream.write(pack(">LL", version, len(entries)))
+ write("DIRC")
+ write(pack(">LL", version, len(entries)))
# body
for entry in entries:
- write_cache_entry(entry, stream)
+ beginoffset = tell()
+ write(entry[4]) # ctime
+ write(entry[5]) # mtime
+ path = entry[3]
+ plen = len(path) & CE_NAMEMASK # path length
+ assert plen == len(path), "Path %s too long to fit into index" % entry[3]
+ flags = plen | (entry[2] & CE_NAMEMASK_INV) # clear possible previous values
+ write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
+ entry[8], entry[9], entry[10], entry[1], flags))
+ write(path)
+ real_size = ((tell() - beginoffset + 8) & ~7)
+ write("\0" * ((beginoffset + real_size) - tell()))
# END for each entry
# write previously cached extensions data
@@ -101,21 +100,6 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
# write the sha over the content
stream.write_sha()
-def read_entry(stream):
- """Return: One entry of the given stream"""
- beginoffset = stream.tell()
- read = stream.read
- ctime = unpack(">8s", read(8))[0]
- mtime = unpack(">8s", read(8))[0]
- (dev, ino, mode, uid, gid, size, sha, flags) = \
- unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
- path_size = flags & CE_NAMEMASK
- path = read(path_size)
-
- real_size = ((stream.tell() - beginoffset + 8) & ~7)
- data = read((beginoffset + real_size) - stream.tell())
- return IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
-
def read_header(stream):
"""Return tuple(version_long, num_entries) from the given stream"""
type_id = stream.read(4)
@@ -147,10 +131,23 @@ def read_cache(stream):
version, num_entries = read_header(stream)
count = 0
entries = dict()
+
+ read = stream.read
+ tell = stream.tell
while count < num_entries:
- entry = read_entry(stream)
+ beginoffset = tell()
+ ctime = unpack(">8s", read(8))[0]
+ mtime = unpack(">8s", read(8))[0]
+ (dev, ino, mode, uid, gid, size, sha, flags) = \
+ unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
+ path_size = flags & CE_NAMEMASK
+ path = read(path_size)
+
+ real_size = ((tell() - beginoffset + 8) & ~7)
+ data = read((beginoffset + real_size) - tell())
+ entry = IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
# entry_key would be the method to use, but we safe the effort
- entries[(entry.path, entry.stage)] = entry
+ entries[(path, entry.stage)] = entry
count += 1
# END for each entry
diff --git a/lib/git/objects/__init__.py b/lib/git/objects/__init__.py
index 85c7e38c..e8e0ef39 100644
--- a/lib/git/objects/__init__.py
+++ b/lib/git/objects/__init__.py
@@ -3,11 +3,18 @@ Import all submodules main classes into the package space
"""
import inspect
from base import *
+# Fix import dependency - add IndexObject to the util module, so that it can be
+# imported by the submodule.base
+import submodule.util
+submodule.util.IndexObject = IndexObject
+from submodule.base import *
+from submodule.root import *
+
+# must come after submodule was made available
from tag import *
from blob import *
-from tree import *
from commit import *
-from submodule import *
+from tree import *
from util import Actor
__all__ = [ name for name, obj in locals().items()
diff --git a/lib/git/objects/base.py b/lib/git/objects/base.py
index 41862ac2..b8cec47f 100644
--- a/lib/git/objects/base.py
+++ b/lib/git/objects/base.py
@@ -62,17 +62,6 @@ class Object(LazyMixin):
inst.size = oinfo.size
return inst
- def _set_self_from_args_(self, args_dict):
- """Initialize attributes on self from the given dict that was retrieved
- from locals() in the calling method.
-
- Will only set an attribute on self if the corresponding value in args_dict
- is not None"""
- for attr, val in args_dict.items():
- if attr != "self" and val is not None:
- setattr( self, attr, val )
- # END set all non-None attributes
-
def _set_cache_(self, attr):
"""Retrieve object information"""
if attr == "size":
@@ -125,7 +114,10 @@ class Object(LazyMixin):
class IndexObject(Object):
"""Base for all objects that can be part of the index file , namely Tree, Blob and
SubModule objects"""
- __slots__ = ("path", "mode")
+ __slots__ = ("path", "mode")
+
+ # for compatability with iterable lists
+ _id_attribute_ = 'path'
def __init__(self, repo, binsha, mode=None, path=None):
"""Initialize a newly instanced IndexObject
@@ -140,7 +132,10 @@ class IndexObject(Object):
Path may not be set of the index object has been created directly as it cannot
be retrieved without knowing the parent tree."""
super(IndexObject, self).__init__(repo, binsha)
- self._set_self_from_args_(locals())
+ if mode is not None:
+ self.mode = mode
+ if path is not None:
+ self.path = path
def __hash__(self):
""":return:
diff --git a/lib/git/objects/commit.py b/lib/git/objects/commit.py
index 58c82da2..a2b6c554 100644
--- a/lib/git/objects/commit.py
+++ b/lib/git/objects/commit.py
@@ -108,7 +108,26 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
super(Commit,self).__init__(repo, binsha)
if tree is not None:
assert isinstance(tree, Tree), "Tree needs to be a Tree instance, was %s" % type(tree)
- self._set_self_from_args_(locals())
+ if tree is not None:
+ self.tree = tree
+ if author is not None:
+ self.author = author
+ if authored_date is not None:
+ self.authored_date = authored_date
+ if author_tz_offset is not None:
+ self.author_tz_offset = author_tz_offset
+ if committer is not None:
+ self.committer = committer
+ if committed_date is not None:
+ self.committed_date = committed_date
+ if committer_tz_offset is not None:
+ self.committer_tz_offset = committer_tz_offset
+ if message is not None:
+ self.message = message
+ if parents is not None:
+ self.parents = parents
+ if encoding is not None:
+ self.encoding = encoding
@classmethod
def _get_intermediate_items(cls, commit):
@@ -346,6 +365,9 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
new_commit.binsha = istream.binsha
if head:
+ # need late import here, importing git at the very beginning throws
+ # as well ...
+ import git.refs
try:
repo.head.commit = new_commit
except ValueError:
@@ -434,7 +456,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
try:
self.author.name = self.author.name.decode(self.encoding)
except UnicodeDecodeError:
- print >> sys.stderr, "Failed to decode author name: %s" % self.author.name
+ print >> sys.stderr, "Failed to decode author name '%s' using encoding %s" % (self.author.name, self.encoding)
# END handle author's encoding
# a stream from our data simply gives us the plain message
@@ -443,7 +465,7 @@ class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
try:
self.message = self.message.decode(self.encoding)
except UnicodeDecodeError:
- print >> sys.stderr, "Failed to decode message: %s" % self.message
+ print >> sys.stderr, "Failed to decode message '%s' using encoding %s" % (self.message, self.encoding)
# END exception handling
return self
diff --git a/lib/git/objects/submodule.py b/lib/git/objects/submodule.py
deleted file mode 100644
index 1f571a48..00000000
--- a/lib/git/objects/submodule.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import base
-
-__all__ = ("Submodule", )
-
-class Submodule(base.IndexObject):
- """Implements access to a git submodule. They are special in that their sha
- represents a commit in the submodule's repository which is to be checked out
- at the path of this instance.
- The submodule type does not have a string type associated with it, as it exists
- solely as a marker in the tree and index"""
-
- # this is a bogus type for base class compatability
- type = 'submodule'
-
- # TODO: Add functions to retrieve a repo for the submodule, to allow
- # its initiailization and handling
diff --git a/lib/git/objects/submodule/__init__.py b/lib/git/objects/submodule/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/lib/git/objects/submodule/__init__.py
@@ -0,0 +1 @@
+
diff --git a/lib/git/objects/submodule/base.py b/lib/git/objects/submodule/base.py
new file mode 100644
index 00000000..347af58e
--- /dev/null
+++ b/lib/git/objects/submodule/base.py
@@ -0,0 +1,816 @@
+import util
+from util import (
+ mkhead,
+ sm_name,
+ sm_section,
+ unbare_repo,
+ SubmoduleConfigParser,
+ find_first_remote_branch
+ )
+from git.objects.util import Traversable
+from StringIO import StringIO # need a dict to set bloody .name field
+from git.util import (
+ Iterable,
+ join_path_native,
+ to_native_path_linux
+ )
+from git.config import SectionConstraint
+from git.exc import (
+ InvalidGitRepositoryError,
+ NoSuchPathError
+ )
+import stat
+import git
+
+import os
+import sys
+
+import shutil
+
+__all__ = ["Submodule"]
+
+
+# IndexObject comes via util module, its a 'hacky' fix thanks to pythons import
+# mechanism which cause plenty of trouble of the only reason for packages and
+# modules is refactoring - subpackages shoudn't depend on parent packages
+class Submodule(util.IndexObject, Iterable, Traversable):
+ """Implements access to a git submodule. They are special in that their sha
+ represents a commit in the submodule's repository which is to be checked out
+ at the path of this instance.
+ The submodule type does not have a string type associated with it, as it exists
+ solely as a marker in the tree and index.
+
+ All methods work in bare and non-bare repositories."""
+
+ _id_attribute_ = "name"
+ k_modules_file = '.gitmodules'
+ k_head_option = 'branch'
+ k_head_default = 'master'
+ k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status
+
+ # this is a bogus type for base class compatability
+ type = 'submodule'
+
+ __slots__ = ('_parent_commit', '_url', '_branch', '_name', '__weakref__')
+ _cache_attrs = ('path', '_url', '_branch')
+
+ def __init__(self, repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, branch=None):
+ """Initialize this instance with its attributes. We only document the ones
+ that differ from ``IndexObject``
+
+ :param repo: Our parent repository
+ :param binsha: binary sha referring to a commit in the remote repository, see url parameter
+ :param parent_commit: see set_parent_commit()
+ :param url: The url to the remote repository which is the submodule
+ :param branch: Head instance to checkout when cloning the remote repository"""
+ super(Submodule, self).__init__(repo, binsha, mode, path)
+ self.size = 0
+ if parent_commit is not None:
+ self._parent_commit = parent_commit
+ if url is not None:
+ self._url = url
+ if branch is not None:
+ assert isinstance(branch, git.Head)
+ self._branch = branch
+ if name is not None:
+ self._name = name
+
+ def _set_cache_(self, attr):
+ if attr == '_parent_commit':
+ # set a default value, which is the root tree of the current head
+ self._parent_commit = self.repo.commit()
+ elif attr in ('path', '_url', '_branch'):
+ reader = self.config_reader()
+ # default submodule values
+ self.path = reader.get_value('path')
+ self._url = reader.get_value('url')
+ # git-python extension values - optional
+ self._branch = mkhead(self.repo, reader.get_value(self.k_head_option, self.k_head_default))
+ elif attr == '_name':
+ raise AttributeError("Cannot retrieve the name of a submodule if it was not set initially")
+ else:
+ super(Submodule, self)._set_cache_(attr)
+ # END handle attribute name
+
+ def _get_intermediate_items(self, item):
+ """:return: all the submodules of our module repository"""
+ try:
+ return type(self).list_items(item.module())
+ except InvalidGitRepositoryError:
+ return list()
+ # END handle intermeditate items
+
+ def __eq__(self, other):
+ """Compare with another submodule"""
+ # we may only compare by name as this should be the ID they are hashed with
+ # Otherwise this type wouldn't be hashable
+ # return self.path == other.path and self.url == other.url and super(Submodule, self).__eq__(other)
+ return self._name == other._name
+
+ def __ne__(self, other):
+ """Compare with another submodule for inequality"""
+ return not (self == other)
+
+ def __hash__(self):
+ """Hash this instance using its logical id, not the sha"""
+ return hash(self._name)
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return "git.%s(name=%s, path=%s, url=%s, branch=%s)" % (type(self).__name__, self._name, self.path, self.url, self.branch)
+
+ @classmethod
+ def _config_parser(cls, repo, parent_commit, read_only):
+ """:return: Config Parser constrained to our submodule in read or write mode
+ :raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
+ at the given parent commit. Otherwise the exception would be delayed until the first
+ access of the config parser"""
+ parent_matches_head = repo.head.commit == parent_commit
+ if not repo.bare and parent_matches_head:
+ fp_module = cls.k_modules_file
+ fp_module_path = os.path.join(repo.working_tree_dir, fp_module)
+ if not os.path.isfile(fp_module_path):
+ raise IOError("%s file was not accessible" % fp_module_path)
+ # END handle existance
+ fp_module = fp_module_path
+ else:
+ try:
+ fp_module = cls._sio_modules(parent_commit)
+ except KeyError:
+ raise IOError("Could not find %s file in the tree of parent commit %s" % (cls.k_modules_file, parent_commit))
+ # END handle exceptions
+ # END handle non-bare working tree
+
+ if not read_only and (repo.bare or not parent_matches_head):
+ raise ValueError("Cannot write blobs of 'historical' submodule configurations")
+ # END handle writes of historical submodules
+
+ return SubmoduleConfigParser(fp_module, read_only = read_only)
+
+ def _clear_cache(self):
+ # clear the possibly changed values
+ for name in self._cache_attrs:
+ try:
+ delattr(self, name)
+ except AttributeError:
+ pass
+ # END try attr deletion
+ # END for each name to delete
+
+ @classmethod
+ def _sio_modules(cls, parent_commit):
+ """:return: Configuration file as StringIO - we only access it through the respective blob's data"""
+ sio = StringIO(parent_commit.tree[cls.k_modules_file].data_stream.read())
+ sio.name = cls.k_modules_file
+ return sio
+
+ def _config_parser_constrained(self, read_only):
+ """:return: Config Parser constrained to our submodule in read or write mode"""
+ parser = self._config_parser(self.repo, self._parent_commit, read_only)
+ parser.set_submodule(self)
+ return SectionConstraint(parser, sm_section(self.name))
+
+ #{ Edit Interface
+
+ @classmethod
+ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False):
+ """Add a new submodule to the given repository. This will alter the index
+ as well as the .gitmodules file, but will not create a new commit.
+ If the submodule already exists, no matter if the configuration differs
+ from the one provided, the existing submodule will be returned.
+
+ :param repo: Repository instance which should receive the submodule
+ :param name: The name/identifier for the submodule
+ :param path: repository-relative or absolute path at which the submodule
+ should be located
+ It will be created as required during the repository initialization.
+ :param url: git-clone compatible URL, see git-clone reference for more information
+ If None, the repository is assumed to exist, and the url of the first
+ remote is taken instead. This is useful if you want to make an existing
+ repository a submodule of anotherone.
+ :param branch: branch at which the submodule should (later) be checked out.
+ The given branch must exist in the remote repository, and will be checked
+ out locally as a tracking branch.
+ It will only be written into the configuration if it not None, which is
+ when the checked out branch will be the one the remote HEAD pointed to.
+ The result you get in these situation is somewhat fuzzy, and it is recommended
+ to specify at least 'master' here
+ :param no_checkout: if True, and if the repository has to be cloned manually,
+ no checkout will be performed
+ :return: The newly created submodule instance
+ :note: works atomically, such that no change will be done if the repository
+ update fails for instance"""
+ if repo.bare:
+ raise InvalidGitRepositoryError("Cannot add submodules to bare repositories")
+ # END handle bare repos
+
+ path = to_native_path_linux(path)
+ if path.endswith('/'):
+ path = path[:-1]
+ # END handle trailing slash
+
+ # INSTANTIATE INTERMEDIATE SM
+ sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name)
+ if sm.exists():
+ # reretrieve submodule from tree
+ return repo.head.commit.tree[path]
+ # END handle existing
+
+ br = mkhead(repo, branch or cls.k_head_default)
+ has_module = sm.module_exists()
+ branch_is_default = branch is None
+ if has_module and url is not None:
+ if url not in [r.url for r in sm.module().remotes]:
+ raise ValueError("Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath))
+ # END check url
+ # END verify urls match
+
+ mrepo = None
+ if url is None:
+ if not has_module:
+ raise ValueError("A URL was not given and existing repository did not exsit at %s" % path)
+ # END check url
+ mrepo = sm.module()
+ urls = [r.url for r in mrepo.remotes]
+ if not urls:
+ raise ValueError("Didn't find any remote url in repository at %s" % sm.abspath)
+ # END verify we have url
+ url = urls[0]
+ else:
+ # clone new repo
+ kwargs = {'n' : no_checkout}
+ if not branch_is_default:
+ kwargs['b'] = str(br)
+ # END setup checkout-branch
+ mrepo = git.Repo.clone_from(url, path, **kwargs)
+ # END verify url
+
+ # update configuration and index
+ index = sm.repo.index
+ writer = sm.config_writer(index=index, write=False)
+ writer.set_value('url', url)
+ writer.set_value('path', path)
+
+ sm._url = url
+ if not branch_is_default:
+ # store full path
+ writer.set_value(cls.k_head_option, br.path)
+ sm._branch = br.path
+ # END handle path
+ del(writer)
+
+ # NOTE: Have to write the repo config file as well, otherwise
+ # the default implementation will be offended and not update the repository
+ # Maybe this is a good way to assure it doesn't get into our way, but
+ # we want to stay backwards compatible too ... . Its so redundant !
+ repo.config_writer().set_value(sm_section(sm.name), 'url', url)
+
+ # we deliberatly assume that our head matches our index !
+ pcommit = repo.head.commit
+ sm._parent_commit = pcommit
+ sm.binsha = mrepo.head.commit.binsha
+ index.add([sm], write=True)
+
+ return sm
+
+ def update(self, recursive=False, init=True, to_latest_revision=False):
+ """Update the repository of this submodule to point to the checkout
+ we point at with the binsha of this instance.
+
+ :param recursive: if True, we will operate recursively and update child-
+ modules as well.
+ :param init: if True, the module repository will be cloned into place if necessary
+ :param to_latest_revision: if True, the submodule's sha will be ignored during checkout.
+ Instead, the remote will be fetched, and the local tracking branch updated.
+ This only works if we have a local tracking branch, which is the case
+ if the remote repository had a master branch, or of the 'branch' option
+ was specified for this submodule and the branch existed remotely
+ :note: does nothing in bare repositories
+ :note: method is definitely not atomic if recurisve is True
+ :return: self"""
+ if self.repo.bare:
+ return self
+ #END pass in bare mode
+
+
+ # ASSURE REPO IS PRESENT AND UPTODATE
+ #####################################
+ try:
+ mrepo = self.module()
+ for remote in mrepo.remotes:
+ remote.fetch()
+ #END fetch new data
+ except InvalidGitRepositoryError:
+ if not init:
+ return self
+ # END early abort if init is not allowed
+ import git
+
+ # there is no git-repository yet - but delete empty paths
+ module_path = join_path_native(self.repo.working_tree_dir, self.path)
+ if os.path.isdir(module_path):
+ try:
+ os.rmdir(module_path)
+ except OSError:
+ raise OSError("Module directory at %r does already exist and is non-empty" % module_path)
+ # END handle OSError
+ # END handle directory removal
+
+ # don't check it out at first - nonetheless it will create a local
+ # branch according to the remote-HEAD if possible
+ mrepo = git.Repo.clone_from(self.url, module_path, n=True)
+
+ # see whether we have a valid branch to checkout
+ try:
+ # find a remote which has our branch - we try to be flexible
+ remote_branch = find_first_remote_branch(mrepo.remotes, self.branch)
+ local_branch = self.branch
+ if not local_branch.is_valid():
+ # Setup a tracking configuration - branch doesn't need to
+ # exist to do that
+ local_branch.set_tracking_branch(remote_branch)
+ #END handle local branch
+
+ # have a valid branch, but no checkout - make sure we can figure
+ # that out by marking the commit with a null_sha
+ # have to write it directly as .commit = NULLSHA tries to resolve the sha
+ # This will bring the branch into existance
+ refpath = join_path_native(mrepo.git_dir, local_branch.path)
+ refdir = os.path.dirname(refpath)
+ if not os.path.isdir(refdir):
+ os.makedirs(refdir)
+ #END handle directory
+ open(refpath, 'w').write(self.NULL_HEX_SHA)
+ # END initial checkout + branch creation
+
+ # make sure HEAD is not detached
+ mrepo.head.ref = local_branch
+ except IndexError:
+ print >> sys.stderr, "Warning: Failed to checkout tracking branch %s" % self.branch
+ #END handle tracking branch
+ #END handle initalization
+
+
+ # DETERMINE SHAS TO CHECKOUT
+ ############################
+ binsha = self.binsha
+ hexsha = self.hexsha
+ is_detached = mrepo.head.is_detached
+ if to_latest_revision:
+ msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir
+ if not is_detached:
+ rref = mrepo.head.ref.tracking_branch()
+ if rref is not None:
+ rcommit = rref.commit
+ binsha = rcommit.binsha
+ hexsha = rcommit.hexsha
+ else:
+ print >> sys.stderr, "%s a tracking branch was not set for local branch '%s'" % (msg_base, mrepo.head.ref)
+ # END handle remote ref
+ else:
+ print >> sys.stderr, "%s there was no local tracking branch" % msg_base
+ # END handle detached head
+ # END handle to_latest_revision option
+
+ # update the working tree
+ if mrepo.head.commit.binsha != binsha:
+ if is_detached:
+ mrepo.git.checkout(hexsha)
+ else:
+ # TODO: allow to specify a rebase, merge, or reset
+ # TODO: Warn if the hexsha forces the tracking branch off the remote
+ # branch - this should be prevented when setting the branch option
+ mrepo.head.reset(hexsha, index=True, working_tree=True)
+ # END handle checkout
+ # END update to new commit only if needed
+
+ # HANDLE RECURSION
+ ##################
+ if recursive:
+ for submodule in self.iter_items(self.module()):
+ submodule.update(recursive, init, to_latest_revision)
+ # END handle recursive update
+ # END for each submodule
+
+ return self
+
    @unbare_repo
    def move(self, module_path, configuration=True, module=True):
        """Move the submodule to another module path. This involves physically moving
        the repository at our current path, changing the configuration, as well as
        adjusting our index entry accordingly.

        :param module_path: the path to which to move our module, given as
            repository-relative path. Intermediate directories will be created
            accordingly. If the path already exists, it must be empty.
            Trailing (back)slashes are removed automatically
        :param configuration: if True, the configuration will be adjusted to let
            the submodule point to the given path.
        :param module: if True, the repository managed by this submodule
            will be moved, not the configuration. This will effectively
            leave your repository in an inconsistent state unless the configuration
            and index already point to the target location.
        :return: self
        :raise ValueError: if the module path existed and was not empty, or was a file
        :note: Currently the method is not atomic, and it could leave the repository
            in an inconsistent state if a sub-step fails for some reason
        """
        # at least one of the two aspects must be moved
        if module + configuration < 1:
            raise ValueError("You must specify to move at least the module or the configuration of the submodule")
        #END handle input

        module_path = to_native_path_linux(module_path)
        if module_path.endswith('/'):
            module_path = module_path[:-1]
        # END handle trailing slash

        # VERIFY DESTINATION
        if module_path == self.path:
            # nothing to do - moving onto ourselves
            return self
        #END handle no change

        dest_path = join_path_native(self.repo.working_tree_dir, module_path)
        if os.path.isfile(dest_path):
            raise ValueError("Cannot move repository onto a file: %s" % dest_path)
        # END handle target files

        index = self.repo.index
        tekey = index.entry_key(module_path, 0)
        # if the target item already exists, fail
        if configuration and tekey in index.entries:
            raise ValueError("Index entry for target path did alredy exist")
        #END handle index key already there

        # remove existing destination
        if module:
            if os.path.exists(dest_path):
                # only an empty directory (or link to one) may be replaced
                if len(os.listdir(dest_path)):
                    raise ValueError("Destination module directory was not empty")
                #END handle non-emptyness

                if os.path.islink(dest_path):
                    os.remove(dest_path)
                else:
                    os.rmdir(dest_path)
                #END handle link
            else:
                # recreate parent directories
                # NOTE: renames() does that now
                pass
            #END handle existence
        # END handle module

        # move the module into place if possible
        cur_path = self.abspath
        renamed_module = False
        if module and os.path.exists(cur_path):
            # renames() creates intermediate directories as required
            os.renames(cur_path, dest_path)
            renamed_module = True
        #END move physical module


        # rename the index entry - have to manipulate the index directly as
        # git-mv cannot be used on submodules ... yeah
        try:
            if configuration:
                try:
                    ekey = index.entry_key(self.path, 0)
                    entry = index.entries[ekey]
                    del(index.entries[ekey])
                    # keep all entry fields, only swap in the new path (index 3)
                    nentry = git.IndexEntry(entry[:3]+(module_path,)+entry[4:])
                    index.entries[tekey] = nentry
                except KeyError:
                    raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path))
                #END handle submodule doesn't exist

                # update configuration
                writer = self.config_writer(index=index)        # auto-write
                writer.set_value('path', module_path)
                self.path = module_path
                del(writer)
            # END handle configuration flag
        except Exception:
            if renamed_module:
                # best-effort rollback of the physical move before re-raising
                os.renames(dest_path, cur_path)
            # END undo module renaming
            raise
        #END handle undo rename

        return self
+
+ @unbare_repo
+ def remove(self, module=True, force=False, configuration=True, dry_run=False):
+ """Remove this submodule from the repository. This will remove our entry
+ from the .gitmodules file and the entry in the .git/config file.
+
+ :param module: If True, the module we point to will be deleted
+ as well. If the module is currently on a commit which is not part
+ of any branch in the remote, if the currently checked out branch
+ is ahead of its tracking branch, if you have modifications in the
+ working tree, or untracked files,
+ In case the removal of the repository fails for these reasons, the
+ submodule status will not have been altered.
+ If this submodule has child-modules on its own, these will be deleted
+ prior to touching the own module.
+ :param force: Enforces the deletion of the module even though it contains
+ modifications. This basically enforces a brute-force file system based
+ deletion.
+ :param configuration: if True, the submodule is deleted from the configuration,
+ otherwise it isn't. Although this should be enabled most of the times,
+ this flag enables you to safely delete the repository of your submodule.
+ :param dry_run: if True, we will not actually do anything, but throw the errors
+ we would usually throw
+ :return: self
+ :note: doesn't work in bare repositories
+ :raise InvalidGitRepositoryError: thrown if the repository cannot be deleted
+ :raise OSError: if directories or files could not be removed"""
+ if not (module + configuration):
+ raise ValueError("Need to specify to delete at least the module, or the configuration")
+ # END handle params
+
+ # DELETE MODULE REPOSITORY
+ ##########################
+ if module and self.module_exists():
+ if force:
+ # take the fast lane and just delete everything in our module path
+ # TODO: If we run into permission problems, we have a highly inconsistent
+ # state. Delete the .git folders last, start with the submodules first
+ mp = self.abspath
+ method = None
+ if os.path.islink(mp):
+ method = os.remove
+ elif os.path.isdir(mp):
+ method = shutil.rmtree
+ elif os.path.exists(mp):
+ raise AssertionError("Cannot forcibly delete repository as it was neither a link, nor a directory")
+ #END handle brutal deletion
+ if not dry_run:
+ assert method
+ method(mp)
+ #END apply deletion method
+ else:
+ # verify we may delete our module
+ mod = self.module()
+ if mod.is_dirty(untracked_files=True):
+ raise InvalidGitRepositoryError("Cannot delete module at %s with any modifications, unless force is specified" % mod.working_tree_dir)
+ # END check for dirt
+
+ # figure out whether we have new commits compared to the remotes
+ # NOTE: If the user pulled all the time, the remote heads might
+ # not have been updated, so commits coming from the remote look
+ # as if they come from us. But we stay strictly read-only and
+ # don't fetch beforhand.
+ for remote in mod.remotes:
+ num_branches_with_new_commits = 0
+ rrefs = remote.refs
+ for rref in rrefs:
+ num_branches_with_new_commits = len(mod.git.cherry(rref)) != 0
+ # END for each remote ref
+ # not a single remote branch contained all our commits
+ if num_branches_with_new_commits == len(rrefs):
+ raise InvalidGitRepositoryError("Cannot delete module at %s as there are new commits" % mod.working_tree_dir)
+ # END handle new commits
+ # END for each remote
+
+ # gently remove all submodule repositories
+ for sm in self.children():
+ sm.remove(module=True, force=False, configuration=False, dry_run=dry_run)
+ # END for each child-submodule
+
+ # finally delete our own submodule
+ if not dry_run:
+ shutil.rmtree(mod.working_tree_dir)
+ # END delete tree if possible
+ # END handle force
+ # END handle module deletion
+
+ # DELETE CONFIGURATION
+ ######################
+ if configuration and not dry_run:
+ # first the index-entry
+ index = self.repo.index
+ try:
+ del(index.entries[index.entry_key(self.path, 0)])
+ except KeyError:
+ pass
+ #END delete entry
+ index.write()
+
+ # now git config - need the config intact, otherwise we can't query
+ # inforamtion anymore
+ self.repo.config_writer().remove_section(sm_section(self.name))
+ self.config_writer().remove_section()
+ # END delete configuration
+
+ return self
+
    def set_parent_commit(self, commit, check=True):
        """Set this instance to use the given commit whose tree is supposed to
        contain the .gitmodules blob.

        :param commit: Commit'ish reference pointing at the root_tree
        :param check: if True, relatively expensive checks will be performed to verify
            validity of the submodule.
        :raise ValueError: if the commit's tree didn't contain the .gitmodules blob.
        :raise ValueError: if the parent commit didn't store this submodule under the
            current path
        :return: self"""
        pcommit = self.repo.commit(commit)
        pctree = pcommit.tree
        if self.k_modules_file not in pctree:
            raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file))
        # END handle exceptions

        # remember the previous parent commit so we can roll back if the check fails
        prev_pc = self._parent_commit
        self._parent_commit = pcommit

        if check:
            parser = self._config_parser(self.repo, self._parent_commit, read_only=True)
            if not parser.has_section(sm_section(self.name)):
                # restore the previous state before signalling the error
                self._parent_commit = prev_pc
                raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit))
            # END handle submodule did not exist
        # END handle checking mode

        # update our sha, it could have changed
        self.binsha = pctree[self.path].binsha

        # cached attributes derive from the parent commit - drop them
        self._clear_cache()

        return self
+
+ @unbare_repo
+ def config_writer(self, index=None, write=True):
+ """:return: a config writer instance allowing you to read and write the data
+ belonging to this submodule into the .gitmodules file.
+
+ :param index: if not None, an IndexFile instance which should be written.
+ defaults to the index of the Submodule's parent repository.
+ :param write: if True, the index will be written each time a configuration
+ value changes.
+ :note: the parameters allow for a more efficient writing of the index,
+ as you can pass in a modified index on your own, prevent automatic writing,
+ and write yourself once the whole operation is complete
+ :raise ValueError: if trying to get a writer on a parent_commit which does not
+ match the current head commit
+ :raise IOError: If the .gitmodules file/blob could not be read"""
+ writer = self._config_parser_constrained(read_only=False)
+ if index is not None:
+ writer.config._index = index
+ writer.config._auto_write = write
+ return writer
+
+ #} END edit interface
+
+ #{ Query Interface
+
    @unbare_repo
    def module(self):
        """:return: Repo instance initialized from the repository at our submodule path
        :raise InvalidGitRepositoryError: if a repository was not available. This could
            also mean that it was not yet initialized"""
        module_path = self.abspath
        try:
            repo = git.Repo(module_path)
            if repo != self.repo:
                return repo
            # END handle repo uninitialized
        except (InvalidGitRepositoryError, NoSuchPathError):
            raise InvalidGitRepositoryError("No valid repository at %s" % self.path)
        else:
            # no exception, but git.Repo resolved to our parent repository:
            # the submodule directory exists but was not yet checked out
            raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_path)
        # END handle exceptions
+
+ def module_exists(self):
+ """:return: True if our module exists and is a valid git repository. See module() method"""
+ try:
+ self.module()
+ return True
+ except Exception:
+ return False
+ # END handle exception
+
+ def exists(self):
+ """:return: True if the submodule exists, False otherwise. Please note that
+ a submodule may exist (in the .gitmodules file) even though its module
+ doesn't exist"""
+ # keep attributes for later, and restore them if we have no valid data
+ # this way we do not actually alter the state of the object
+ loc = locals()
+ for attr in self._cache_attrs:
+ if hasattr(self, attr):
+ loc[attr] = getattr(self, attr)
+ # END if we have the attribute cache
+ #END for each attr
+ self._clear_cache()
+
+ try:
+ try:
+ self.path
+ return True
+ except Exception:
+ return False
+ # END handle exceptions
+ finally:
+ for attr in self._cache_attrs:
+ if attr in loc:
+ setattr(self, attr, loc[attr])
+ # END if we have a cache
+ # END reapply each attribute
+ # END handle object state consistency
+
    @property
    def branch(self):
        """:return: The branch instance that we are to checkout"""
        # cached at instantiation / by iter_items
        return self._branch

    @property
    def url(self):
        """:return: The url to the repository which our module-repository refers to"""
        # cached at instantiation / by iter_items
        return self._url

    @property
    def parent_commit(self):
        """:return: Commit instance with the tree containing the .gitmodules file
        :note: will always point to the current head's commit if it was not set explicitly"""
        return self._parent_commit

    @property
    def name(self):
        """:return: The name of this submodule. It is used to identify it within the
            .gitmodules file.
        :note: by default, the name is the path at which to find the submodule, but
            in git-python it should be a unique identifier similar to the identifiers
            used for remotes, which allows to change the path of the submodule
            easily
        """
        return self._name
+
    def config_reader(self):
        """:return: ConfigReader instance which allows you to query the configuration values
        of this submodule, as provided by the .gitmodules file
        :note: The config reader will actually read the data directly from the repository
            and thus does not need nor care about your working tree.
        :note: Should be cached by the caller and only kept as long as needed
        :raise IOError: If the .gitmodules file/blob could not be read"""
        return self._config_parser_constrained(read_only=True)
+
    def children(self):
        """:return: IterableList(Submodule, ...) an iterable list of submodule instances
            which are children of this submodule
        :raise InvalidGitRepositoryError: if the submodule is not checked-out"""
        return self._get_intermediate_items(self)
+
+ #} END query interface
+
+ #{ Iterable Interface
+
+ @classmethod
+ def iter_items(cls, repo, parent_commit='HEAD'):
+ """:return: iterator yielding Submodule instances available in the given repository"""
+ pc = repo.commit(parent_commit) # parent commit instance
+ try:
+ parser = cls._config_parser(repo, pc, read_only=True)
+ except IOError:
+ raise StopIteration
+ # END handle empty iterator
+
+ rt = pc.tree # root tree
+
+ for sms in parser.sections():
+ n = sm_name(sms)
+ p = parser.get_value(sms, 'path')
+ u = parser.get_value(sms, 'url')
+ b = cls.k_head_default
+ if parser.has_option(sms, cls.k_head_option):
+ b = parser.get_value(sms, cls.k_head_option)
+ # END handle optional information
+
+ # get the binsha
+ index = repo.index
+ try:
+ sm = rt[p]
+ except KeyError:
+ # try the index, maybe it was just added
+ try:
+ entry = index.entries[index.entry_key(p, 0)]
+ sm = cls(repo, entry.binsha, entry.mode, entry.path)
+ except KeyError:
+ raise InvalidGitRepositoryError("Gitmodule path %r did not exist in revision of parent commit %s" % (p, parent_commit))
+ # END handle keyerror
+ # END handle critical error
+
+ # fill in remaining info - saves time as it doesn't have to be parsed again
+ sm._name = n
+ sm._parent_commit = pc
+ sm._branch = mkhead(repo, b)
+ sm._url = u
+
+ yield sm
+ # END for each section
+
+ #} END iterable interface
+
diff --git a/lib/git/objects/submodule/root.py b/lib/git/objects/submodule/root.py
new file mode 100644
index 00000000..82b8b271
--- /dev/null
+++ b/lib/git/objects/submodule/root.py
@@ -0,0 +1,263 @@
+from base import Submodule
+from util import (
+ mkhead,
+ find_first_remote_branch
+ )
+from git.exc import InvalidGitRepositoryError
+import git
+
+import sys
+
+__all__ = ["RootModule"]
+
+
class RootModule(Submodule):
    """A (virtual) Root of all submodules in the given repository. It can be used
    to more easily traverse all submodules of the master repository"""

    __slots__ = tuple()

    # name used for the virtual root module - it has no entry in .gitmodules
    k_root_name = '__ROOT__'

    def __init__(self, repo):
        """Initialize the virtual root with placeholder values - only the
        repository itself carries real information.

        :param repo: Repo instance whose submodules are to be traversed"""
        # repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
        super(RootModule, self).__init__(
            repo,
            binsha = self.NULL_BIN_SHA,
            mode = self.k_default_mode,
            path = '',
            name = self.k_root_name,
            parent_commit = repo.head.commit,
            url = '',
            branch = mkhead(repo, self.k_head_default)
        )
+
+
    def _clear_cache(self):
        """May not do anything - the root module has no cached values of its own,
        all its attributes are fixed placeholders assigned in __init__"""
        pass
+
+ #{ Interface
+
+ def update(self, previous_commit=None, recursive=True, force_remove=False, init=True, to_latest_revision=False):
+ """Update the submodules of this repository to the current HEAD commit.
+ This method behaves smartly by determining changes of the path of a submodules
+ repository, next to changes to the to-be-checked-out commit or the branch to be
+ checked out. This works if the submodules ID does not change.
+ Additionally it will detect addition and removal of submodules, which will be handled
+ gracefully.
+
+ :param previous_commit: If set to a commit'ish, the commit we should use
+ as the previous commit the HEAD pointed to before it was set to the commit it points to now.
+ If None, it defaults to ORIG_HEAD otherwise, or the parent of the current
+ commit if it is not given
+ :param recursive: if True, the children of submodules will be updated as well
+ using the same technique
+ :param force_remove: If submodules have been deleted, they will be forcibly removed.
+ Otherwise the update may fail if a submodule's repository cannot be deleted as
+ changes have been made to it (see Submodule.update() for more information)
+ :param init: If we encounter a new module which would need to be initialized, then do it.
+ :param to_latest_revision: If True, instead of checking out the revision pointed to
+ by this submodule's sha, the checked out tracking branch will be merged with the
+ newest remote branch fetched from the repository's origin"""
+ if self.repo.bare:
+ raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
+ # END handle bare
+
+ repo = self.repo
+
+ # HANDLE COMMITS
+ ##################
+ cur_commit = repo.head.commit
+ if previous_commit is None:
+ symref = repo.head.orig_head()
+ try:
+ previous_commit = symref.commit
+ except Exception:
+ pcommits = cur_commit.parents
+ if pcommits:
+ previous_commit = pcommits[0]
+ else:
+ # in this special case, we just diff against ourselve, which
+ # means exactly no change
+ previous_commit = cur_commit
+ # END handle initial commit
+ # END no ORIG_HEAD
+ else:
+ previous_commit = repo.commit(previous_commit) # obtain commit object
+ # END handle previous commit
+
+
+ psms = self.list_items(repo, parent_commit=previous_commit)
+ sms = self.list_items(self.module())
+ spsms = set(psms)
+ ssms = set(sms)
+
+ # HANDLE REMOVALS
+ ###################
+ for rsm in (spsms - ssms):
+ # fake it into thinking its at the current commit to allow deletion
+ # of previous module. Trigger the cache to be updated before that
+ #rsm.url
+ rsm._parent_commit = repo.head.commit
+ rsm.remove(configuration=False, module=True, force=force_remove)
+ # END for each removed submodule
+
+ # HANDLE PATH RENAMES
+ #####################
+ # url changes + branch changes
+ for csm in (spsms & ssms):
+ psm = psms[csm.name]
+ sm = sms[csm.name]
+
+ if sm.path != psm.path and psm.module_exists():
+ # move the module to the new path
+ psm.move(sm.path, module=True, configuration=False)
+ # END handle path changes
+
+ if sm.module_exists():
+ # handle url change
+ if sm.url != psm.url:
+ # Add the new remote, remove the old one
+ # This way, if the url just changes, the commits will not
+ # have to be re-retrieved
+ nn = '__new_origin__'
+ smm = sm.module()
+ rmts = smm.remotes
+
+ # don't do anything if we already have the url we search in place
+ if len([r for r in rmts if r.url == sm.url]) == 0:
+
+
+ assert nn not in [r.name for r in rmts]
+ smr = smm.create_remote(nn, sm.url)
+ smr.fetch()
+
+ # If we have a tracking branch, it should be available
+ # in the new remote as well.
+ if len([r for r in smr.refs if r.remote_head == sm.branch.name]) == 0:
+ raise ValueError("Submodule branch named %r was not available in new submodule remote at %r" % (sm.branch.name, sm.url))
+ # END head is not detached
+
+ # now delete the changed one
+ rmt_for_deletion = None
+ for remote in rmts:
+ if remote.url == psm.url:
+ rmt_for_deletion = remote
+ break
+ # END if urls match
+ # END for each remote
+
+ # if we didn't find a matching remote, but have exactly one,
+ # we can safely use this one
+ if rmt_for_deletion is None:
+ if len(rmts) == 1:
+ rmt_for_deletion = rmts[0]
+ else:
+ # if we have not found any remote with the original url
+ # we may not have a name. This is a special case,
+ # and its okay to fail here
+ # Alternatively we could just generate a unique name and leave all
+ # existing ones in place
+ raise InvalidGitRepositoryError("Couldn't find original remote-repo at url %r" % psm.url)
+ #END handle one single remote
+ # END handle check we found a remote
+
+ orig_name = rmt_for_deletion.name
+ smm.delete_remote(rmt_for_deletion)
+ # NOTE: Currently we leave tags from the deleted remotes
+ # as well as separate tracking branches in the possibly totally
+ # changed repository ( someone could have changed the url to
+ # another project ). At some point, one might want to clean
+ # it up, but the danger is high to remove stuff the user
+ # has added explicitly
+
+ # rename the new remote back to what it was
+ smr.rename(orig_name)
+
+ # early on, we verified that the our current tracking branch
+ # exists in the remote. Now we have to assure that the
+ # sha we point to is still contained in the new remote
+ # tracking branch.
+ smsha = sm.binsha
+ found = False
+ rref = smr.refs[self.branch.name]
+ for c in rref.commit.traverse():
+ if c.binsha == smsha:
+ found = True
+ break
+ # END traverse all commits in search for sha
+ # END for each commit
+
+ if not found:
+ # adjust our internal binsha to use the one of the remote
+ # this way, it will be checked out in the next step
+ # This will change the submodule relative to us, so
+ # the user will be able to commit the change easily
+ print >> sys.stderr, "WARNING: Current sha %s was not contained in the tracking branch at the new remote, setting it the the remote's tracking branch" % sm.hexsha
+ sm.binsha = rref.commit.binsha
+ #END reset binsha
+
+ #NOTE: All checkout is performed by the base implementation of update
+
+ # END skip remote handling if new url already exists in module
+ # END handle url
+
+ if sm.branch != psm.branch:
+ # finally, create a new tracking branch which tracks the
+ # new remote branch
+ smm = sm.module()
+ smmr = smm.remotes
+ try:
+ tbr = git.Head.create(smm, sm.branch.name)
+ except git.GitCommandError, e:
+ if e.status != 128:
+ raise
+ #END handle something unexpected
+
+ # ... or reuse the existing one
+ tbr = git.Head(smm, git.Head.to_full_path(sm.branch.name))
+ #END assure tracking branch exists
+
+ tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch))
+ # figure out whether the previous tracking branch contains
+ # new commits compared to the other one, if not we can
+ # delete it.
+ try:
+ tbr = find_first_remote_branch(smmr, psm.branch)
+ if len(smm.git.cherry(tbr, psm.branch)) == 0:
+ psm.branch.delete(smm, psm.branch)
+ #END delete original tracking branch if there are no changes
+ except InvalidGitRepositoryError:
+ # ignore it if the previous branch couldn't be found in the
+ # current remotes, this just means we can't handle it
+ pass
+ # END exception handling
+
+ #NOTE: All checkout is done in the base implementation of update
+
+ #END handle branch
+ #END handle
+ # END for each common submodule
+
+ # FINALLY UPDATE ALL ACTUAL SUBMODULES
+ ######################################
+ for sm in sms:
+ # update the submodule using the default method
+ sm.update(recursive=True, init=init, to_latest_revision=to_latest_revision)
+
+ # update recursively depth first - question is which inconsitent
+ # state will be better in case it fails somewhere. Defective branch
+ # or defective depth. The RootSubmodule type will never process itself,
+ # which was done in the previous expression
+ if recursive:
+ type(self)(sm.module()).update(recursive=True, force_remove=force_remove,
+ init=init, to_latest_revision=to_latest_revision)
+ #END handle recursive
+ # END for each submodule to update
+
    def module(self):
        """:return: the actual repository containing the submodules"""
        # the root module is virtual - its 'module' is the parent repository itself
        return self.repo
+ #} END interface
+#} END classes
diff --git a/lib/git/objects/submodule/util.py b/lib/git/objects/submodule/util.py
new file mode 100644
index 00000000..ab5e345a
--- /dev/null
+++ b/lib/git/objects/submodule/util.py
@@ -0,0 +1,101 @@
+import git
+from git.exc import InvalidGitRepositoryError
+from git.config import GitConfigParser
+from StringIO import StringIO
+import weakref
+
+__all__ = ( 'sm_section', 'sm_name', 'mkhead', 'unbare_repo', 'find_first_remote_branch',
+ 'SubmoduleConfigParser')
+
+#{ Utilities
+
def sm_section(name):
    """:return: section title used in .gitmodules configuration file"""
    # section names have the shape: submodule "<name>"
    return 'submodule "' + name + '"'
+
def sm_name(section):
    """:return: name of the submodule as parsed from the section name"""
    # strip the leading 'submodule "' prefix and the trailing quote
    prefix_len = len('submodule "')
    return section.strip()[prefix_len:-1]
+
def mkhead(repo, path):
    """:return: New branch/head instance
    :param repo: Repo instance the head will belong to
    :param path: partial or full head path, normalized via Head.to_full_path"""
    return git.Head(repo, git.Head.to_full_path(path))
+
def unbare_repo(func):
    """Methods with this decorator raise InvalidGitRepositoryError if they
    encounter a bare repository"""
    def wrapper(self, *args, **kwargs):
        if self.repo.bare:
            raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)
        #END bare method
        return func(self, *args, **kwargs)
    # END wrapper
    # preserve the wrapped function's metadata - previously only __name__
    # was copied, which left help() without the original docstring
    wrapper.__name__ = func.__name__
    wrapper.__doc__ = func.__doc__
    return wrapper
+
def find_first_remote_branch(remotes, branch):
    """Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError

    :param remotes: iterable of Remote instances to search
    :param branch: Head whose name is looked up in each remote's refs
    :return: the first matching RemoteReference"""
    for remote in remotes:
        try:
            return remote.refs[branch.name]
        except IndexError:
            # this remote doesn't have a ref of that name - try the next one
            continue
        # END exception handling
    #END for remote
    # BUGFIX: the message was never interpolated - a comma passed 'branch' as a
    # second constructor argument instead of formatting it into the string
    raise InvalidGitRepositoryError("Didn't find remote branch %r in any of the given remotes" % branch)
+
+#} END utilities
+
+
+#{ Classes
+
class SubmoduleConfigParser(GitConfigParser):
    """
    Catches calls to _write, and updates the .gitmodules blob in the index
    with the new data, if we have written into a stream. Otherwise it will
    add the local file to the index to make it correspond with the working tree.
    Additionally, the cache must be cleared

    Please note that no mutating method will work in bare mode
    """

    def __init__(self, *args, **kwargs):
        # weak reference to the owning Submodule, set via set_submodule()
        self._smref = None
        # optional IndexFile to stage into instead of the submodule's repo index
        self._index = None
        # if True, the index is written on every flush
        self._auto_write = True
        super(SubmoduleConfigParser, self).__init__(*args, **kwargs)

    #{ Interface
    def set_submodule(self, submodule):
        """Set this instance's submodule. It must be called before
        the first write operation begins"""
        # a weakref avoids keeping the submodule alive through this parser
        self._smref = weakref.ref(submodule)

    def flush_to_index(self):
        """Flush changes in our configuration file to the index"""
        assert self._smref is not None
        # should always have a file here
        assert not isinstance(self._file_or_files, StringIO)

        sm = self._smref()
        if sm is not None:
            index = self._index
            if index is None:
                index = sm.repo.index
            # END handle index
            index.add([sm.k_modules_file], write=self._auto_write)
            # cached submodule values were read from the old configuration
            sm._clear_cache()
        # END handle weakref

    #} END interface

    #{ Overridden Methods
    def write(self):
        """Write the configuration as usual, then stage the changed
        .gitmodules file in the index"""
        rval = super(SubmoduleConfigParser, self).write()
        self.flush_to_index()
        return rval
    # END overridden methods
+
+
+#} END classes
diff --git a/lib/git/objects/tag.py b/lib/git/objects/tag.py
index ea480fc2..c7d02abe 100644
--- a/lib/git/objects/tag.py
+++ b/lib/git/objects/tag.py
@@ -33,7 +33,18 @@ class TagObject(base.Object):
:param tagged_tz_offset: int_seconds_west_of_utc is the timezone that the
authored_date is in, in a format similar to time.altzone"""
super(TagObject, self).__init__(repo, binsha )
- self._set_self_from_args_(locals())
+ if object is not None:
+ self.object = object
+ if tag is not None:
+ self.tag = tag
+ if tagger is not None:
+ self.tagger = tagger
+ if tagged_date is not None:
+ self.tagged_date = tagged_date
+ if tagger_tz_offset is not None:
+ self.tagger_tz_offset = tagger_tz_offset
+ if message is not None:
+ self.message = message
def _set_cache_(self, attr):
"""Cache all our attributes at once"""
diff --git a/lib/git/objects/tree.py b/lib/git/objects/tree.py
index 68c1ef2d..67431686 100644
--- a/lib/git/objects/tree.py
+++ b/lib/git/objects/tree.py
@@ -7,7 +7,7 @@ import util
from base import IndexObject
from git.util import join_path
from blob import Blob
-from submodule import Submodule
+from submodule.base import Submodule
import git.diff as diff
from fun import (
diff --git a/lib/git/objects/util.py b/lib/git/objects/util.py
index 21833080..81544e26 100644
--- a/lib/git/objects/util.py
+++ b/lib/git/objects/util.py
@@ -4,6 +4,8 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
"""Module for general utility functions"""
+from git.util import IterableList
+
import re
from collections import deque as Deque
import platform
@@ -273,6 +275,12 @@ class Traversable(object):
"""
raise NotImplementedError("To be implemented in subclass")
    def list_traverse(self, *args, **kwargs):
        """:return: IterableList with the results of the traversal as produced by
            traverse()
        :note: all arguments are passed through to traverse() unchanged"""
        out = IterableList(self._id_attribute_)
        out.extend(self.traverse(*args, **kwargs))
        return out
def traverse( self, predicate = lambda i,d: True,
prune = lambda i,d: False, depth = -1, branch_first=True,
@@ -335,7 +343,7 @@ class Traversable(object):
if prune( rval, d ):
continue
- skipStartItem = ignore_self and ( item == self )
+ skipStartItem = ignore_self and ( item is self )
if not skipStartItem and predicate( rval, d ):
yield rval
diff --git a/lib/git/refs.py b/lib/git/refs.py
index af7284ff..fcf5fd10 100644
--- a/lib/git/refs.py
+++ b/lib/git/refs.py
@@ -29,6 +29,11 @@ from gitdb.util import (
hex_to_bin
)
+from config import (
+ GitConfigParser,
+ SectionConstraint
+ )
+
from exc import GitCommandError
__all__ = ("SymbolicReference", "Reference", "HEAD", "Head", "TagReference",
@@ -219,12 +224,30 @@ class SymbolicReference(object):
# END end try string
# END try commit attribute
+ # maintain the orig-head if we are currently checked-out
+ head = HEAD(self.repo)
+ try:
+ if head.ref == self:
+ try:
+ # TODO: implement this atomically, if we fail below, orig_head is at an incorrect spot
+ # Enforce the creation of ORIG_HEAD
+ SymbolicReference.create(self.repo, head.orig_head().name, self.commit, force=True)
+ except ValueError:
+ pass
+ #END exception handling
+ # END if we are checked-out
+ except TypeError:
+ pass
+ # END handle detached heads
+
# if we are writing a ref, use symbolic ref to get the reflog and more
# checking
- # Otherwise we detach it and have to do it manually
+ # Otherwise we detach it and have to do it manually. Besides, this works
+ # recursively automaitcally, but should be replaced with a python implementation
+ # soon
if write_value.startswith('ref:'):
self.repo.git.symbolic_ref(self.path, write_value[5:])
- return
+ return
# END non-detached handling
path = self._abs_path()
@@ -238,10 +261,10 @@ class SymbolicReference(object):
finally:
fp.close()
# END writing
-
- reference = property(_get_reference, _set_reference, doc="Returns the Reference we point to")
- # alias
+
+ # aliased reference
+ reference = property(_get_reference, _set_reference, doc="Returns the Reference we point to")
ref = reference
def is_valid(self):
@@ -272,7 +295,7 @@ class SymbolicReference(object):
@classmethod
def to_full_path(cls, path):
"""
- :return: string with a full path name which can be used to initialize
+ :return: string with a full repository-relative path which can be used to initialize
a Reference instance, for instance by using ``Reference.from_path``"""
if isinstance(path, SymbolicReference):
path = path.path
@@ -489,6 +512,8 @@ class SymbolicReference(object):
@classmethod
def from_path(cls, repo, path):
"""
+ :param path: full .git-directory-relative path name to the Reference to instantiate
+ :note: use to_full_path() if you only have a partial path of a known Reference Type
:return:
Instance of type Reference, Head, or Tag
depending on the given path"""
@@ -546,7 +571,6 @@ class Reference(SymbolicReference, LazyMixin, Iterable):
:note:
TypeChecking is done by the git command"""
- # check for existence, touch it if required
abs_path = self._abs_path()
existed = True
if not isfile(abs_path):
@@ -611,6 +635,7 @@ class HEAD(SymbolicReference):
"""Special case of a Symbolic Reference as it represents the repository's
HEAD reference."""
_HEAD_NAME = 'HEAD'
+ _ORIG_HEAD_NAME = 'ORIG_HEAD'
__slots__ = tuple()
def __init__(self, repo, path=_HEAD_NAME):
@@ -618,6 +643,27 @@ class HEAD(SymbolicReference):
raise ValueError("HEAD instance must point to %r, got %r" % (self._HEAD_NAME, path))
super(HEAD, self).__init__(repo, path)
+ def orig_head(self):
+ """:return: SymbolicReference pointing at the ORIG_HEAD, which is maintained
+ to contain the previous value of HEAD"""
+ return SymbolicReference(self.repo, self._ORIG_HEAD_NAME)
+
+ def _set_reference(self, ref):
+ """If someone changes the reference through us, we must manually update
+ the ORIG_HEAD if we are detached. The underlying implementation can only
+ handle un-detached heads as it has to check whether the current head
+ is the checked-out one"""
+ if self.is_detached:
+ prev_commit = self.commit
+ super(HEAD, self)._set_reference(ref)
+ SymbolicReference.create(self.repo, self._ORIG_HEAD_NAME, prev_commit, force=True)
+ else:
+ super(HEAD, self)._set_reference(ref)
+ # END handle detached mode
+
+ # aliased reference
+ reference = property(SymbolicReference._get_reference, _set_reference, doc="Returns the Reference we point to")
+ ref = reference
def reset(self, commit='HEAD', index=True, working_tree = False,
paths=None, **kwargs):
@@ -699,6 +745,8 @@ class Head(Reference):
>>> head.commit.hexsha
'1c09f116cbc2cb4100fb6935bb162daa4723f455'"""
_common_path_default = "refs/heads"
+ k_config_remote = "remote"
+ k_config_remote_ref = "merge" # branch to merge from remote
@classmethod
def create(cls, repo, path, commit='HEAD', force=False, **kwargs):
@@ -745,6 +793,44 @@ class Head(Reference):
flag = "-D"
repo.git.branch(flag, *heads)
+
+ def set_tracking_branch(self, remote_reference):
+ """Configure this branch to track the given remote reference. This will alter
+ this branch's configuration accordingly.
+ :param remote_reference: The remote reference to track or None to untrack
+ any references
+ :return: self"""
+ if remote_reference is not None and not isinstance(remote_reference, RemoteReference):
+ raise ValueError("Incorrect parameter type: %r" % remote_reference)
+ # END handle type
+
+ writer = self.config_writer()
+ if remote_reference is None:
+ writer.remove_option(self.k_config_remote)
+ writer.remove_option(self.k_config_remote_ref)
+ if len(writer.options()) == 0:
+ writer.remove_section()
+ # END handle remove section
+ else:
+ writer.set_value(self.k_config_remote, remote_reference.remote_name)
+ writer.set_value(self.k_config_remote_ref, Head.to_full_path(remote_reference.remote_head))
+ # END handle ref value
+
+ return self
+
+
+ def tracking_branch(self):
+ """:return: The remote_reference we are tracking, or None if we are
+ not a tracking branch"""
+ reader = self.config_reader()
+ if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
+ ref = Head(self.repo, Head.to_full_path(reader.get_value(self.k_config_remote_ref)))
+ remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
+ return RemoteReference(self.repo, remote_refpath)
+ # END handle have tracking branch
+
+ # we are not a tracking branch
+ return None
def rename(self, new_path, force=False):
"""Rename self to a new path
@@ -798,6 +884,29 @@ class Head(Reference):
self.repo.git.checkout(self, **kwargs)
return self.repo.active_branch
+	#{ Configuration
+
+ def _config_parser(self, read_only):
+ if read_only:
+ parser = self.repo.config_reader()
+ else:
+ parser = self.repo.config_writer()
+ # END handle parser instance
+
+ return SectionConstraint(parser, 'branch "%s"' % self.name)
+
+ def config_reader(self):
+ """:return: A configuration parser instance constrained to only read
+ this instance's values"""
+ return self._config_parser(read_only=True)
+
+ def config_writer(self):
+		""":return: A configuration writer instance with read- and write access
+ to options of this head"""
+ return self._config_parser(read_only=False)
+
+ #} END configuration
+
class TagReference(Reference):
"""Class representing a lightweight tag reference which either points to a commit
@@ -891,6 +1000,16 @@ class RemoteReference(Head):
"""Represents a reference pointing to a remote head."""
_common_path_default = "refs/remotes"
+
+ @classmethod
+ def iter_items(cls, repo, common_path = None, remote=None):
+ """Iterate remote references, and if given, constrain them to the given remote"""
+ common_path = common_path or cls._common_path_default
+ if remote is not None:
+ common_path = join_path(common_path, str(remote))
+ # END handle remote constraint
+ return super(RemoteReference, cls).iter_items(repo, common_path)
+
@property
def remote_name(self):
"""
diff --git a/lib/git/remote.py b/lib/git/remote.py
index 52dd787d..a06da222 100644
--- a/lib/git/remote.py
+++ b/lib/git/remote.py
@@ -7,7 +7,8 @@
from exc import GitCommandError
from objects import Commit
-from ConfigParser import NoOptionError
+from ConfigParser import NoOptionError
+from config import SectionConstraint
from git.util import (
LazyMixin,
@@ -27,32 +28,10 @@ from gitdb.util import join
import re
import os
+import sys
__all__ = ('RemoteProgress', 'PushInfo', 'FetchInfo', 'Remote')
-class _SectionConstraint(object):
- """Constrains a ConfigParser to only option commands which are constrained to
- always use the section we have been initialized with.
-
- It supports all ConfigParser methods that operate on an option"""
- __slots__ = ("_config", "_section_name")
- _valid_attrs_ = ("get_value", "set_value", "get", "set", "getint", "getfloat", "getboolean", "has_option")
-
- def __init__(self, config, section):
- self._config = config
- self._section_name = section
-
- def __getattr__(self, attr):
- if attr in self._valid_attrs_:
- return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
- return super(_SectionConstraint,self).__getattribute__(attr)
-
- def _call_config(self, method, *args, **kwargs):
- """Call the configuration at the given method which must take a section name
- as first argument"""
- return getattr(self._config, method)(self._section_name, *args, **kwargs)
-
-
class RemoteProgress(object):
"""
Handler providing an interface to parse progress information emitted by git-push
@@ -449,7 +428,7 @@ class Remote(LazyMixin, Iterable):
def _set_cache_(self, attr):
if attr == "_config_reader":
- self._config_reader = _SectionConstraint(self.repo.config_reader(), self._config_section_name())
+ self._config_reader = SectionConstraint(self.repo.config_reader(), self._config_section_name())
else:
super(Remote, self)._set_cache_(attr)
@@ -490,11 +469,7 @@ class Remote(LazyMixin, Iterable):
you to omit the remote path portion, i.e.::
remote.refs.master # yields RemoteReference('/refs/remotes/origin/master')"""
out_refs = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
- for ref in RemoteReference.list_items(self.repo):
- if ref.remote_name == self.name:
- out_refs.append(ref)
- # END if names match
- # END for each ref
+ out_refs.extend(RemoteReference.list_items(self.repo, remote=self.name))
assert out_refs, "Remote %s did not have any references" % self.name
return out_refs
@@ -617,6 +592,10 @@ class Remote(LazyMixin, Iterable):
for line in self._digest_process_messages(proc.stderr, progress):
if line.startswith('From') or line.startswith('remote: Total'):
continue
+ elif line.startswith('warning:'):
+ print >> sys.stderr, line
+ continue
+ # END handle special messages
fetch_info_lines.append(line)
# END for each line
@@ -735,4 +714,4 @@ class Remote(LazyMixin, Iterable):
# clear our cache to assure we re-read the possibly changed configuration
del(self._config_reader)
- return _SectionConstraint(writer, self._config_section_name())
+ return SectionConstraint(writer, self._config_section_name())
diff --git a/lib/git/repo/base.py b/lib/git/repo/base.py
index 790b1283..aa00d028 100644
--- a/lib/git/repo/base.py
+++ b/lib/git/repo/base.py
@@ -6,7 +6,6 @@
from git.exc import InvalidGitRepositoryError, NoSuchPathError
from git.cmd import Git
-from git.objects import Actor
from git.refs import *
from git.index import IndexFile
from git.objects import *
@@ -222,6 +221,44 @@ class Repo(object):
""":return: Remote with the specified name
:raise ValueError: if no remote with such a name exists"""
return Remote(self, name)
+
+ #{ Submodules
+
+ @property
+ def submodules(self):
+ """:return: git.IterableList(Submodule, ...) of direct submodules
+ available from the current head"""
+ return Submodule.list_items(self)
+
+ def submodule(self, name):
+ """:return: Submodule with the given name
+ :raise ValueError: If no such submodule exists"""
+ try:
+ return self.submodules[name]
+ except IndexError:
+ raise ValueError("Didn't find submodule named %r" % name)
+ # END exception handling
+
+ def create_submodule(self, *args, **kwargs):
+ """Create a new submodule
+ :note: See the documentation of Submodule.add for a description of the
+ applicable parameters
+ :return: created submodules"""
+ return Submodule.add(self, *args, **kwargs)
+
+ def iter_submodules(self, *args, **kwargs):
+ """An iterator yielding Submodule instances, see Traversable interface
+ for a description of args and kwargs
+ :return: Iterator"""
+ return RootModule(self).traverse(*args, **kwargs)
+
+ def submodule_update(self, *args, **kwargs):
+ """Update the submodules, keeping the repository consistent as it will
+ take the previous state into consideration. For more information, please
+ see the documentation of RootModule.update"""
+ return RootModule(self).update(*args, **kwargs)
+
+ #}END submodules
@property
def tags(self):
diff --git a/lib/git/util.py b/lib/git/util.py
index fcb50585..c945e6a3 100644
--- a/lib/git/util.py
+++ b/lib/git/util.py
@@ -296,6 +296,9 @@ class IterableList(list):
def __init__(self, id_attr, prefix=''):
self._id_attr = id_attr
self._prefix = prefix
+ if not isinstance(id_attr, basestring):
+ raise ValueError("First parameter must be a string identifying the name-property. Extend the list after initialization")
+ # END help debugging !
def __getattr__(self, attr):
attr = self._prefix + attr
@@ -313,7 +316,7 @@ class IterableList(list):
return getattr(self, index)
except AttributeError:
raise IndexError( "No item found with id %r" % (self._prefix + index) )
-
+
class Iterable(object):
"""Defines an interface for iterable items which is to assure a uniform
diff --git a/test/git/test_base.py b/test/git/test_base.py
index db13feae..25d1e4e9 100644
--- a/test/git/test_base.py
+++ b/test/git/test_base.py
@@ -83,7 +83,7 @@ class TestBase(TestBase):
# objects must be resolved to shas so they compare equal
assert self.rorepo.head.reference.object == self.rorepo.active_branch.object
- @with_bare_rw_repo
+ @with_rw_repo('HEAD', bare=True)
def test_with_bare_rw_repo(self, bare_rw_repo):
assert bare_rw_repo.config_reader("repository").getboolean("core", "bare")
assert os.path.isfile(os.path.join(bare_rw_repo.git_dir,'HEAD'))
diff --git a/test/git/test_commit.py b/test/git/test_commit.py
index 2692938f..c3ce5c92 100644
--- a/test/git/test_commit.py
+++ b/test/git/test_commit.py
@@ -237,7 +237,7 @@ class TestCommit(TestBase):
name_rev = self.rorepo.head.commit.name_rev
assert isinstance(name_rev, basestring)
- @with_bare_rw_repo
+ @with_rw_repo('HEAD', bare=True)
def test_serialization(self, rwrepo):
# create all commits of our repo
assert_commit_serialization(rwrepo, '0.1.6')
diff --git a/test/git/test_config.py b/test/git/test_config.py
index 604a25f4..8c846b99 100644
--- a/test/git/test_config.py
+++ b/test/git/test_config.py
@@ -14,9 +14,7 @@ class TestBase(TestCase):
def _to_memcache(self, file_path):
fp = open(file_path, "r")
- sio = StringIO.StringIO()
- sio.write(fp.read())
- sio.seek(0)
+ sio = StringIO.StringIO(fp.read())
sio.name = file_path
return sio
diff --git a/test/git/test_index.py b/test/git/test_index.py
index b5600eeb..29a7404d 100644
--- a/test/git/test_index.py
+++ b/test/git/test_index.py
@@ -409,6 +409,7 @@ class TestIndex(TestBase):
commit_message = "commit default head"
new_commit = index.commit(commit_message, head=False)
+ assert cur_commit != new_commit
assert new_commit.author.name == uname
assert new_commit.author.email == umail
assert new_commit.committer.name == uname
@@ -421,6 +422,7 @@ class TestIndex(TestBase):
# same index, no parents
commit_message = "index without parents"
commit_no_parents = index.commit(commit_message, parent_commits=list(), head=True)
+ assert SymbolicReference(rw_repo, 'ORIG_HEAD').commit == cur_commit
assert commit_no_parents.message == commit_message
assert len(commit_no_parents.parents) == 0
assert cur_head.commit == commit_no_parents
diff --git a/test/git/test_refs.py b/test/git/test_refs.py
index 5f13d0b7..fa26bae9 100644
--- a/test/git/test_refs.py
+++ b/test/git/test_refs.py
@@ -63,8 +63,9 @@ class TestRefs(TestBase):
assert len(s) == ref_count
assert len(s|s) == ref_count
- def test_heads(self):
- for head in self.rorepo.heads:
+ @with_rw_repo('HEAD', bare=False)
+ def test_heads(self, rwrepo):
+ for head in rwrepo.heads:
assert head.name
assert head.path
assert "refs/heads" in head.path
@@ -72,8 +73,56 @@ class TestRefs(TestBase):
cur_object = head.object
assert prev_object == cur_object # represent the same git object
assert prev_object is not cur_object # but are different instances
+
+ writer = head.config_writer()
+ tv = "testopt"
+ writer.set_value(tv, 1)
+ assert writer.get_value(tv) == 1
+ del(writer)
+ assert head.config_reader().get_value(tv) == 1
+ head.config_writer().remove_option(tv)
+
+ # after the clone, we might still have a tracking branch setup
+ head.set_tracking_branch(None)
+ assert head.tracking_branch() is None
+ remote_ref = rwrepo.remotes[0].refs[0]
+ assert head.set_tracking_branch(remote_ref) is head
+ assert head.tracking_branch() == remote_ref
+ head.set_tracking_branch(None)
+ assert head.tracking_branch() is None
# END for each head
+ # verify ORIG_HEAD gets set for detached heads
+ head = rwrepo.head
+ orig_head = head.orig_head()
+ cur_head = head.ref
+ cur_commit = cur_head.commit
+ pcommit = cur_head.commit.parents[0].parents[0]
+ head.ref = pcommit # detach head
+ assert orig_head.commit == cur_commit
+
+		# even if we set it through its reference - changing the ref
+ # will adjust the orig_head, which still points to cur_commit
+ head.ref = cur_head
+ assert orig_head.commit == pcommit
+ assert head.commit == cur_commit == cur_head.commit
+
+ cur_head.commit = pcommit
+ assert head.commit == pcommit
+ assert orig_head.commit == cur_commit
+
+ # with automatic dereferencing
+ head.commit = cur_commit
+ assert orig_head.commit == pcommit
+
+ # changing branches which are not checked out doesn't affect the ORIG_HEAD
+ other_head = Head.create(rwrepo, 'mynewhead', pcommit)
+ assert other_head.commit == pcommit
+ assert orig_head.commit == pcommit
+ other_head.commit = pcommit.parents[0]
+ assert orig_head.commit == pcommit
+
+
def test_refs(self):
types_found = set()
for ref in self.rorepo.refs:
@@ -208,6 +257,8 @@ class TestRefs(TestBase):
refs = remote.refs
RemoteReference.delete(rw_repo, *refs)
remote_refs_so_far += len(refs)
+ for ref in refs:
+ assert ref.remote_name == remote.name
# END for each ref to delete
assert remote_refs_so_far
diff --git a/test/git/test_remote.py b/test/git/test_remote.py
index 1db4bc32..c52f907e 100644
--- a/test/git/test_remote.py
+++ b/test/git/test_remote.py
@@ -422,7 +422,7 @@ class TestRemote(TestBase):
origin = rw_repo.remote('origin')
assert origin == rw_repo.remotes.origin
- @with_bare_rw_repo
+ @with_rw_repo('HEAD', bare=True)
def test_creation_and_removal(self, bare_rw_repo):
new_name = "test_new_one"
arg_list = (new_name, "git@server:hello.git")
diff --git a/test/git/test_repo.py b/test/git/test_repo.py
index 65dce590..a6047bf5 100644
--- a/test/git/test_repo.py
+++ b/test/git/test_repo.py
@@ -208,8 +208,10 @@ class TestRepo(TestBase):
assert_equal('<git.Repo "%s">' % path, repr(self.rorepo))
def test_is_dirty_with_bare_repository(self):
+ orig_value = self.rorepo._bare
self.rorepo._bare = True
assert_false(self.rorepo.is_dirty())
+ self.rorepo._bare = orig_value
def test_is_dirty(self):
self.rorepo._bare = False
@@ -220,8 +222,10 @@ class TestRepo(TestBase):
# END untracked files
# END working tree
# END index
+ orig_val = self.rorepo._bare
self.rorepo._bare = True
assert self.rorepo.is_dirty() == False
+ self.rorepo._bare = orig_val
def test_head(self):
assert self.rorepo.head.reference.object == self.rorepo.active_branch.object
@@ -552,3 +556,25 @@ class TestRepo(TestBase):
target_type = GitCmdObjectDB
assert isinstance(self.rorepo.odb, target_type)
+ def test_submodules(self):
+ assert len(self.rorepo.submodules) == 1 # non-recursive
+ assert len(list(self.rorepo.iter_submodules())) == 2
+
+ assert isinstance(self.rorepo.submodule("lib/git/ext/gitdb"), Submodule)
+ self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist")
+
+ @with_rw_repo('HEAD', bare=False)
+ def test_submodule_update(self, rwrepo):
+ # fails in bare mode
+ rwrepo._bare = True
+ self.failUnlessRaises(InvalidGitRepositoryError, rwrepo.submodule_update)
+ rwrepo._bare = False
+
+ # test create submodule
+ sm = rwrepo.submodules[0]
+ sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path))
+ assert isinstance(sm, Submodule)
+
+ # note: the rest of this functionality is tested in test_submodule
+
+
diff --git a/test/git/test_submodule.py b/test/git/test_submodule.py
index 5f78b6e8..e7807dcd 100644
--- a/test/git/test_submodule.py
+++ b/test/git/test_submodule.py
@@ -2,11 +2,481 @@
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from test.testlib import *
-from git import *
+from git.exc import *
+from git.objects.submodule.base import Submodule
+from git.objects.submodule.root import RootModule
+from git.util import to_native_path_linux, join_path_native
+import shutil
+import git
+import os
class TestSubmodule(TestBase):
- def test_base(self):
- # TODO
- pass
+ k_subm_current = "00ce31ad308ff4c7ef874d2fa64374f47980c85c"
+ k_subm_changed = "394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3"
+ k_no_subm_tag = "0.1.6"
+
+ def _do_base_tests(self, rwrepo):
+ """Perform all tests in the given repository, it may be bare or nonbare"""
+ # manual instantiation
+ smm = Submodule(rwrepo, "\0"*20)
+ # name needs to be set in advance
+ self.failUnlessRaises(AttributeError, getattr, smm, 'name')
+
+ # iterate - 1 submodule
+ sms = Submodule.list_items(rwrepo, self.k_subm_current)
+ assert len(sms) == 1
+ sm = sms[0]
+
+ # at a different time, there is None
+ assert len(Submodule.list_items(rwrepo, self.k_no_subm_tag)) == 0
+
+ assert sm.path == 'lib/git/ext/gitdb'
+ assert sm.path == sm.name # for now, this is True
+ assert sm.url == 'git://gitorious.org/git-python/gitdb.git'
+ assert sm.branch.name == 'master' # its unset in this case
+ assert sm.parent_commit == rwrepo.head.commit
+ # size is always 0
+ assert sm.size == 0
+
+ # some commits earlier we still have a submodule, but its at a different commit
+ smold = Submodule.iter_items(rwrepo, self.k_subm_changed).next()
+ assert smold.binsha != sm.binsha
+ assert smold == sm # the name is still the same
+
+ # force it to reread its information
+ del(smold._url)
+ smold.url == sm.url
+
+ # test config_reader/writer methods
+ sm.config_reader()
+ new_smclone_path = None # keep custom paths for later
+ new_csmclone_path = None #
+ if rwrepo.bare:
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.config_writer)
+ else:
+ writer = sm.config_writer()
+ # for faster checkout, set the url to the local path
+ new_smclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path))
+ writer.set_value('url', new_smclone_path)
+ del(writer)
+ assert sm.config_reader().get_value('url') == new_smclone_path
+ assert sm.url == new_smclone_path
+ # END handle bare repo
+ smold.config_reader()
+
+ # cannot get a writer on historical submodules
+ if not rwrepo.bare:
+ self.failUnlessRaises(ValueError, smold.config_writer)
+ # END handle bare repo
+
+ # make the old into a new
+ prev_parent_commit = smold.parent_commit
+ assert smold.set_parent_commit(self.k_subm_current) is smold
+ assert smold.parent_commit != prev_parent_commit
+ assert smold.binsha == sm.binsha
+ smold.set_parent_commit(prev_parent_commit)
+ assert smold.binsha != sm.binsha
+
+ # raises if the sm didn't exist in new parent - it keeps its
+ # parent_commit unchanged
+ self.failUnlessRaises(ValueError, smold.set_parent_commit, self.k_no_subm_tag)
+
+ # TEST TODO: if a path in the gitmodules file, but not in the index, it raises
+
+ # TEST UPDATE
+ ##############
+ # module retrieval is not always possible
+ if rwrepo.bare:
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.add, rwrepo, 'here', 'there')
+ else:
+ # its not checked out in our case
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
+ assert not sm.module_exists()
+
+ # currently there is only one submodule
+ assert len(list(rwrepo.iter_submodules())) == 1
+
+ # TEST ADD
+ ###########
+ # preliminary tests
+ # adding existing returns exactly the existing
+ sma = Submodule.add(rwrepo, sm.name, sm.path)
+ assert sma.path == sm.path
+
+ # no url and no module at path fails
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", "pathtorepo", url=None)
+
+ # CONTINUE UPDATE
+ #################
+
+ # lets update it - its a recursive one too
+ newdir = os.path.join(sm.abspath, 'dir')
+ os.makedirs(newdir)
+
+ # update fails if the path already exists non-empty
+ self.failUnlessRaises(OSError, sm.update)
+ os.rmdir(newdir)
+
+ assert sm.update() is sm
+ sm_repopath = sm.path # cache for later
+ assert sm.module_exists()
+ assert isinstance(sm.module(), git.Repo)
+ assert sm.module().working_tree_dir == sm.abspath
+
+ # INTERLEAVE ADD TEST
+ #####################
+ # url must match the one in the existing repository ( if submodule name suggests a new one )
+ # or we raise
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", sm.path, "git://someurl/repo.git")
+
+
+ # CONTINUE UPDATE
+ #################
+ # we should have setup a tracking branch, which is also active
+ assert sm.module().head.ref.tracking_branch() is not None
+
+ # delete the whole directory and re-initialize
+ shutil.rmtree(sm.abspath)
+ sm.update(recursive=False)
+ assert len(list(rwrepo.iter_submodules())) == 2
+ assert len(sm.children()) == 1 # its not checked out yet
+ csm = sm.children()[0]
+ assert not csm.module_exists()
+ csm_repopath = csm.path
+
+ # adjust the path of the submodules module to point to the local destination
+ new_csmclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path, csm.path))
+ csm.config_writer().set_value('url', new_csmclone_path)
+ assert csm.url == new_csmclone_path
+
+			# update recursively again
+ sm.update(recursive=True)
+
+ # tracking branch once again
+ csm.module().head.ref.tracking_branch() is not None
+
+ # this flushed in a sub-submodule
+ assert len(list(rwrepo.iter_submodules())) == 2
+
+
+ # reset both heads to the previous version, verify that to_latest_revision works
+ for repo in (csm.module(), sm.module()):
+ repo.head.reset('HEAD~1', working_tree=1)
+ # END for each repo to reset
+
+ sm.update(recursive=True, to_latest_revision=True)
+ for repo in (sm.module(), csm.module()):
+ assert repo.head.commit == repo.head.ref.tracking_branch().commit
+ # END for each repo to check
+
+ # if the head is detached, it still works ( but warns )
+ smref = sm.module().head.ref
+ sm.module().head.ref = 'HEAD~1'
+ # if there is no tracking branch, we get a warning as well
+ csm_tracking_branch = csm.module().head.ref.tracking_branch()
+ csm.module().head.ref.set_tracking_branch(None)
+ sm.update(recursive=True, to_latest_revision=True)
+
+ # undo the changes
+ sm.module().head.ref = smref
+ csm.module().head.ref.set_tracking_branch(csm_tracking_branch)
+
+			# REMOVAL OF REPOSITORY
+ ########################
+ # must delete something
+ self.failUnlessRaises(ValueError, csm.remove, module=False, configuration=False)
+ # We have modified the configuration, hence the index is dirty, and the
+ # deletion will fail
+ # NOTE: As we did a few updates in the meanwhile, the indices where reset
+ # Hence we restore some changes
+ sm.config_writer().set_value("somekey", "somevalue")
+ csm.config_writer().set_value("okey", "ovalue")
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+ # if we remove the dirty index, it would work
+ sm.module().index.reset()
+ # still, we have the file modified
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove, dry_run=True)
+ sm.module().index.reset(working_tree=True)
+
+ # this would work
+ assert sm.remove(dry_run=True) is sm
+ assert sm.module_exists()
+ sm.remove(force=True, dry_run=True)
+ assert sm.module_exists()
+
+ # but ... we have untracked files in the child submodule
+ fn = join_path_native(csm.module().working_tree_dir, "newfile")
+ open(fn, 'w').write("hi")
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+
+ # forcibly delete the child repository
+ assert csm.remove(force=True) is csm
+ assert not csm.exists()
+ assert not csm.module_exists()
+ assert len(sm.children()) == 0
+ # now we have a changed index, as configuration was altered.
+ # fix this
+ sm.module().index.reset(working_tree=True)
+
+ # now delete only the module of the main submodule
+ assert sm.module_exists()
+ sm.remove(configuration=False)
+ assert sm.exists()
+ assert not sm.module_exists()
+ assert sm.config_reader().get_value('url')
+
+ # delete the rest
+ sm.remove()
+ assert not sm.exists()
+ assert not sm.module_exists()
+
+ assert len(rwrepo.submodules) == 0
+
+ # ADD NEW SUBMODULE
+ ###################
+ # add a simple remote repo - trailing slashes are no problem
+ smid = "newsub"
+ osmid = "othersub"
+ nsm = Submodule.add(rwrepo, smid, sm_repopath, new_smclone_path+"/", None, no_checkout=True)
+ assert nsm.name == smid
+ assert nsm.module_exists()
+ assert nsm.exists()
+ # its not checked out
+ assert not os.path.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file))
+ assert len(rwrepo.submodules) == 1
+
+ # add another submodule, but into the root, not as submodule
+ osm = Submodule.add(rwrepo, osmid, csm_repopath, new_csmclone_path, Submodule.k_head_default)
+ assert osm != nsm
+ assert osm.module_exists()
+ assert osm.exists()
+ assert os.path.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py'))
+
+ assert len(rwrepo.submodules) == 2
+
+ # commit the changes, just to finalize the operation
+ rwrepo.index.commit("my submod commit")
+ assert len(rwrepo.submodules) == 2
+
+ # needs update as the head changed, it thinks its in the history
+ # of the repo otherwise
+ nsm.set_parent_commit(rwrepo.head.commit)
+ osm.set_parent_commit(rwrepo.head.commit)
+
+ # MOVE MODULE
+ #############
+		# invalid input
+ self.failUnlessRaises(ValueError, nsm.move, 'doesntmatter', module=False, configuration=False)
+
+ # renaming to the same path does nothing
+ assert nsm.move(sm.path) is nsm
+
+ # rename a module
+ nmp = join_path_native("new", "module", "dir") + "/" # new module path
+ pmp = nsm.path
+ abspmp = nsm.abspath
+ assert nsm.move(nmp) is nsm
+ nmp = nmp[:-1] # cut last /
+ assert nsm.path == nmp
+ assert rwrepo.submodules[0].path == nmp
+
+ # move it back - but there is a file now - this doesn't work
+ # as the empty directories where removed.
+ self.failUnlessRaises(IOError, open, abspmp, 'w')
+
+ mpath = 'newsubmodule'
+ absmpath = join_path_native(rwrepo.working_tree_dir, mpath)
+ open(absmpath, 'w').write('')
+ self.failUnlessRaises(ValueError, nsm.move, mpath)
+ os.remove(absmpath)
+
+ # now it works, as we just move it back
+ nsm.move(pmp)
+ assert nsm.path == pmp
+ assert rwrepo.submodules[0].path == pmp
+
+ # TODO lowprio: test remaining exceptions ... for now its okay, the code looks right
+
+ # REMOVE 'EM ALL
+ ################
+ # if a submodule's repo has no remotes, it can't be added without an explicit url
+ osmod = osm.module()
+
+ osm.remove(module=False)
+ for remote in osmod.remotes:
+ remote.remove(osmod, remote.name)
+ assert not osm.exists()
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, osmid, csm_repopath, url=None)
+ # END handle bare mode
+
+ # Error if there is no submodule file here
+ self.failUnlessRaises(IOError, Submodule._config_parser, rwrepo, rwrepo.commit(self.k_no_subm_tag), True)
+
+ @with_rw_repo(k_subm_current)
+ def test_base_rw(self, rwrepo):
+ self._do_base_tests(rwrepo)
+
+ @with_rw_repo(k_subm_current, bare=True)
+ def test_base_bare(self, rwrepo):
+ self._do_base_tests(rwrepo)
+
+ @with_rw_repo(k_subm_current, bare=False)
+ def test_root_module(self, rwrepo):
+ # Can query everything without problems
+ rm = RootModule(self.rorepo)
+ assert rm.module() is self.rorepo
+
+ # try attributes
+ rm.binsha
+ rm.mode
+ rm.path
+ assert rm.name == rm.k_root_name
+ assert rm.parent_commit == self.rorepo.head.commit
+ rm.url
+ rm.branch
+
+ assert len(rm.list_items(rm.module())) == 1
+ rm.config_reader()
+ rm.config_writer()
+
+ # deep traversal gitdb / async
+ rsms = list(rm.traverse())
+ assert len(rsms) == 2 # gitdb and async, async being a child of gitdb
+
+ # cannot set the parent commit as root module's path didn't exist
+ self.failUnlessRaises(ValueError, rm.set_parent_commit, 'HEAD')
+
+ # TEST UPDATE
+ #############
+ # setup commit which remove existing, add new and modify existing submodules
+ rm = RootModule(rwrepo)
+ assert len(rm.children()) == 1
+
+ # modify path without modifying the index entry
+ # ( which is what the move method would do properly )
+ #==================================================
+ sm = rm.children()[0]
+ pp = "path/prefix"
+ fp = join_path_native(pp, sm.path)
+ prep = sm.path
+ assert not sm.module_exists() # was never updated after rwrepo's clone
+
+ # assure we clone from a local source
+ sm.config_writer().set_value('url', join_path_native(self.rorepo.working_tree_dir, sm.path))
+ sm.update(recursive=False)
+ assert sm.module_exists()
+ sm.config_writer().set_value('path', fp) # change path to something with prefix AFTER url change
+
+ # update fails as list_items in such a situations cannot work, as it cannot
+ # find the entry at the changed path
+ self.failUnlessRaises(InvalidGitRepositoryError, rm.update, recursive=False)
+
+ # move it properly - doesn't work as it its path currently points to an indexentry
+ # which doesn't exist ( move it to some path, it doesn't matter here )
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.move, pp)
+ # reset the path(cache) to where it was, now it works
+ sm.path = prep
+ sm.move(fp, module=False) # leave it at the old location
+
+ assert not sm.module_exists()
+ cpathchange = rwrepo.index.commit("changed sm path") # finally we can commit
+
+ # update puts the module into place
+ rm.update(recursive=False)
+ sm.set_parent_commit(cpathchange)
+ assert sm.module_exists()
+
+ # add submodule
+ #================
+ nsmn = "newsubmodule"
+ nsmp = "submrepo"
+ async_url = join_path_native(self.rorepo.working_tree_dir, rsms[0].path, rsms[1].path)
+ nsm = Submodule.add(rwrepo, nsmn, nsmp, url=async_url)
+ csmadded = rwrepo.index.commit("Added submodule")
+ nsm.set_parent_commit(csmadded)
+ assert nsm.module_exists()
+ # in our case, the module should not exist, which happens if we update a parent
+ # repo and a new submodule comes into life
+ nsm.remove(configuration=False, module=True)
+ assert not nsm.module_exists() and nsm.exists()
+
+ rm.update(recursive=False)
+ assert nsm.module_exists()
+
+
+
+ # remove submodule - the previous one
+ #====================================
+ sm.set_parent_commit(csmadded)
+ smp = sm.abspath
+ assert not sm.remove(module=False).exists()
+ assert os.path.isdir(smp) # module still exists
+ csmremoved = rwrepo.index.commit("Removed submodule")
+
+ # an update will remove the module
+ rm.update(recursive=False)
+ assert not os.path.isdir(smp)
+
+
+ # change url
+ #=============
+ # to the first repository, this way we have a fast checkout, and a completely different
+ # repository at the different url
+ nsm.set_parent_commit(csmremoved)
+ nsmurl = join_path_native(self.rorepo.working_tree_dir, rsms[0].path)
+ nsm.config_writer().set_value('url', nsmurl)
+ csmpathchange = rwrepo.index.commit("changed url")
+ nsm.set_parent_commit(csmpathchange)
+
+ prev_commit = nsm.module().head.commit
+ rm.update(recursive=False)
+ assert nsm.module().remotes.origin.url == nsmurl
+ # head changed, as the remote url and its commit changed
+ assert prev_commit != nsm.module().head.commit
+
+ # add the submodule's changed commit to the index, which is what the
+ # user would do
+ # beforehand, update our instance's binsha with the new one
+ nsm.binsha = nsm.module().head.commit.binsha
+ rwrepo.index.add([nsm])
+
+ # change branch
+ #=================
+ # we only have one branch, so we switch to a virtual one, and back
+ # to the current one to trigger the difference
+ cur_branch = nsm.branch
+ nsmm = nsm.module()
+ prev_commit = nsmm.head.commit
+ for branch in ("some_virtual_branch", cur_branch.name):
+ nsm.config_writer().set_value(Submodule.k_head_option, branch)
+ csmbranchchange = rwrepo.index.commit("changed branch to %s" % branch)
+ nsm.set_parent_commit(csmbranchchange)
+ # END for each branch to change
+
+ # Lets remove our tracking branch to simulate some changes
+ nsmmh = nsmm.head
+ assert nsmmh.ref.tracking_branch() is None # never set it up until now
+ assert not nsmmh.is_detached
+
+ rm.update(recursive=False)
+
+ assert nsmmh.ref.tracking_branch() is not None
+ assert not nsmmh.is_detached
+
+ # recursive update
+ # =================
+ # finally we recursively update a module, just to run the code at least once
+ # remove the module so that it has more work
+ assert len(nsm.children()) == 1
+ assert nsm.exists() and nsm.module_exists() and len(nsm.children()) == 1
+ # assure we pull locally only
+ nsmc = nsm.children()[0]
+ nsmc.config_writer().set_value('url', async_url)
+ rm.update(recursive=True)
+
+ assert len(nsm.children()) == 1 and nsmc.module_exists()
+
diff --git a/test/git/test_tree.py b/test/git/test_tree.py
index d08999bd..18688424 100644
--- a/test/git/test_tree.py
+++ b/test/git/test_tree.py
@@ -102,6 +102,8 @@ class TestTree(TestBase):
assert isinstance(obj, (Blob, Tree))
all_items.append(obj)
# END for each object
+ assert all_items == root.list_traverse()
+
# limit recursion level to 0 - should be same as default iteration
assert all_items
assert 'CHANGES' in root
diff --git a/test/testlib/helper.py b/test/testlib/helper.py
index b5b6fad7..c79ecaa1 100644
--- a/test/testlib/helper.py
+++ b/test/testlib/helper.py
@@ -14,6 +14,11 @@ import cStringIO
GIT_REPO = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+__all__ = (
+ 'fixture_path', 'fixture', 'absolute_project_path', 'StringProcessAdapter',
+ 'with_rw_repo', 'with_rw_and_rw_remote_repo', 'TestBase', 'TestCase', 'GIT_REPO'
+ )
+
#{ Routines
def fixture_path(name):
@@ -58,41 +63,7 @@ def _rmtree_onerror(osremove, fullpath, exec_info):
os.chmod(fullpath, 0777)
os.remove(fullpath)
-def with_bare_rw_repo(func):
- """
- Decorator providing a specially made read-write repository to the test case
- decorated with it. The test case requires the following signature::
- def case(self, rw_repo)
-
- The rwrepo will be a bare clone or the types rorepo. Once the method finishes,
- it will be removed completely.
-
- Use this if you want to make purely index based adjustments, change refs, create
- heads, generally operations that do not need a working tree."""
- def bare_repo_creator(self):
- repo_dir = tempfile.mktemp("bare_repo_%s" % func.__name__)
- rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=True)
- prev_cwd = os.getcwd()
- try:
- try:
- return func(self, rw_repo)
- except:
- # assure we keep the repo for debugging
- print >> sys.stderr, "Keeping bare repo after failure: %s" % repo_dir
- repo_dir = None
- raise
- # END handle exceptions
- finally:
- rw_repo.git.clear_cache()
- if repo_dir is not None:
- shutil.rmtree(repo_dir, onerror=_rmtree_onerror)
- # END remove repo dir
- # END cleanup
- # END bare repo creator
- bare_repo_creator.__name__ = func.__name__
- return bare_repo_creator
-
-def with_rw_repo(working_tree_ref):
+def with_rw_repo(working_tree_ref, bare=False):
"""
Same as with_bare_repo, but clones the rorepo as non-bare repository, checking
out the working tree at the given working_tree_ref.
@@ -105,11 +76,17 @@ def with_rw_repo(working_tree_ref):
assert isinstance(working_tree_ref, basestring), "Decorator requires ref name for working tree checkout"
def argument_passer(func):
def repo_creator(self):
- repo_dir = tempfile.mktemp("non_bare_%s" % func.__name__)
- rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=False, n=True)
+ prefix = 'non_'
+ if bare:
+ prefix = ''
+ #END handle prefix
+ repo_dir = tempfile.mktemp("%sbare_%s" % (prefix, func.__name__))
+ rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=bare, n=True)
rw_repo.head.commit = rw_repo.commit(working_tree_ref)
- rw_repo.head.reference.checkout()
+ if not bare:
+ rw_repo.head.reference.checkout()
+ # END handle checkout
prev_cwd = os.getcwd()
os.chdir(rw_repo.working_dir)