Diffstat (limited to 'git')
-rw-r--r--  git/__init__.py  48
-rw-r--r--  git/cmd.py  515
-rw-r--r--  git/config.py  420
-rw-r--r--  git/db.py  61
-rw-r--r--  git/diff.py  346
-rw-r--r--  git/exc.py  58
m---------  git/ext/gitdb  0
-rw-r--r--  git/ez_setup.py  222
-rw-r--r--  git/index/__init__.py  4
-rw-r--r--  git/index/base.py  1153
-rw-r--r--  git/index/fun.py  322
-rw-r--r--  git/index/typ.py  173
-rw-r--r--  git/index/util.py  86
-rw-r--r--  git/objects/__init__.py  21
-rw-r--r--  git/objects/base.py  172
-rw-r--r--  git/objects/blob.py  27
-rw-r--r--  git/objects/commit.py  465
-rw-r--r--  git/objects/fun.py  199
-rw-r--r--  git/objects/submodule/__init__.py  2
-rw-r--r--  git/objects/submodule/base.py  924
-rw-r--r--  git/objects/submodule/root.py  315
-rw-r--r--  git/objects/submodule/util.py  101
-rw-r--r--  git/objects/tag.py  76
-rw-r--r--  git/objects/tree.py  280
-rw-r--r--  git/objects/util.py  315
-rw-r--r--  git/odict.py  1399
-rw-r--r--  git/refs/__init__.py  21
-rw-r--r--  git/refs/head.py  246
-rw-r--r--  git/refs/log.py  282
-rw-r--r--  git/refs/reference.py  84
-rw-r--r--  git/refs/remote.py  63
-rw-r--r--  git/refs/symbolic.py  618
-rw-r--r--  git/refs/tag.py  91
-rw-r--r--  git/remote.py  603
-rw-r--r--  git/repo/__init__.py  3
-rw-r--r--  git/repo/base.py  753
-rw-r--r--  git/repo/fun.py  280
-rwxr-xr-x  git/setup.py  86
-rw-r--r--  git/test/__init__.py  5
-rw-r--r--  git/test/fixtures/blame  131
-rw-r--r--  git/test/fixtures/cat_file_blob  1
-rw-r--r--  git/test/fixtures/cat_file_blob_nl  1
-rw-r--r--  git/test/fixtures/cat_file_blob_size  1
-rw-r--r--  git/test/fixtures/diff_2  54
-rw-r--r--  git/test/fixtures/diff_2f  19
-rw-r--r--  git/test/fixtures/diff_f  15
-rw-r--r--  git/test/fixtures/diff_i  201
-rwxr-xr-x  git/test/fixtures/diff_mode_only  1152
-rw-r--r--  git/test/fixtures/diff_new_mode  14
-rw-r--r--  git/test/fixtures/diff_numstat  2
-rw-r--r--  git/test/fixtures/diff_p  610
-rw-r--r--  git/test/fixtures/diff_rename  12
-rw-r--r--  git/test/fixtures/diff_tree_numstat_root  3
-rw-r--r--  git/test/fixtures/for_each_ref_with_path_component  bin 0 -> 84 bytes
-rw-r--r--  git/test/fixtures/git_config  23
-rw-r--r--  git/test/fixtures/git_config_global  24
-rw-r--r--  git/test/fixtures/index  bin 0 -> 163616 bytes
-rw-r--r--  git/test/fixtures/index_merge  bin 0 -> 9192 bytes
-rw-r--r--  git/test/fixtures/ls_tree_a  7
-rw-r--r--  git/test/fixtures/ls_tree_b  2
-rw-r--r--  git/test/fixtures/ls_tree_commit  3
-rw-r--r--  git/test/fixtures/reflog_HEAD  460
-rw-r--r--  git/test/fixtures/reflog_invalid_date  2
-rw-r--r--  git/test/fixtures/reflog_invalid_email  2
-rw-r--r--  git/test/fixtures/reflog_invalid_newsha  2
-rw-r--r--  git/test/fixtures/reflog_invalid_oldsha  2
-rw-r--r--  git/test/fixtures/reflog_invalid_sep  2
-rw-r--r--  git/test/fixtures/reflog_master  124
-rw-r--r--  git/test/fixtures/rev_list  3
-rw-r--r--  git/test/fixtures/rev_list_bisect_all  51
-rw-r--r--  git/test/fixtures/rev_list_commit_diffs  8
-rw-r--r--  git/test/fixtures/rev_list_commit_idabbrev  8
-rw-r--r--  git/test/fixtures/rev_list_commit_stats  7
-rw-r--r--  git/test/fixtures/rev_list_count  655
-rw-r--r--  git/test/fixtures/rev_list_delta_a  8
-rw-r--r--  git/test/fixtures/rev_list_delta_b  11
-rw-r--r--  git/test/fixtures/rev_list_single  7
-rw-r--r--  git/test/fixtures/rev_parse  1
-rw-r--r--  git/test/fixtures/show_empty_commit  6
-rw-r--r--  git/test/lib/__init__.py  13
-rw-r--r--  git/test/lib/asserts.py  50
-rw-r--r--  git/test/lib/helper.py  245
-rw-r--r--  git/test/performance/lib.py  78
-rw-r--r--  git/test/performance/test_commit.py  99
-rw-r--r--  git/test/performance/test_odb.py  70
-rw-r--r--  git/test/performance/test_streams.py  131
-rw-r--r--  git/test/performance/test_utils.py  174
-rw-r--r--  git/test/test_actor.py  36
-rw-r--r--  git/test/test_base.py  100
-rw-r--r--  git/test/test_blob.py  23
-rw-r--r--  git/test/test_commit.py  275
-rw-r--r--  git/test/test_config.py  102
-rw-r--r--  git/test/test_db.py  25
-rw-r--r--  git/test/test_diff.py  108
-rw-r--r--  git/test/test_fun.py  251
-rw-r--r--  git/test/test_git.py  84
-rw-r--r--  git/test/test_index.py  669
-rw-r--r--  git/test/test_reflog.py  102
-rw-r--r--  git/test/test_refs.py  521
-rw-r--r--  git/test/test_remote.py  445
-rw-r--r--  git/test/test_repo.py  604
-rw-r--r--  git/test/test_stats.py  25
-rw-r--r--  git/test/test_submodule.py  546
-rw-r--r--  git/test/test_tree.py  144
-rw-r--r--  git/test/test_util.py  109
-rw-r--r--  git/util.py  602
106 files changed, 20104 insertions, 0 deletions
diff --git a/git/__init__.py b/git/__init__.py
new file mode 100644
index 00000000..483ac091
--- /dev/null
+++ b/git/__init__.py
@@ -0,0 +1,48 @@
+# __init__.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+import sys
+import inspect
+
+__version__ = 'git'
+
+
+#{ Initialization
+def _init_externals():
+ """Initialize external projects by putting them into the path"""
+ sys.path.append(os.path.join(os.path.dirname(__file__), 'ext'))
+
+#} END initialization
+
+#################
+_init_externals()
+#################
+
+#{ Imports
+
+from git.config import GitConfigParser
+from git.objects import *
+from git.refs import *
+from git.diff import *
+from git.exc import *
+from git.db import *
+from git.cmd import Git
+from git.repo import Repo
+from git.remote import *
+from git.index import *
+from git.util import (
+ LockFile,
+ BlockingLockFile,
+ Stats,
+ Actor
+ )
+
+#} END imports
+
+__all__ = [ name for name, obj in locals().items()
+ if not (name.startswith('_') or inspect.ismodule(obj)) ]
+
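A minimal sketch of how the package initialization above behaves once imported (assuming GitPython and its gitdb submodule are installed; the repository path is hypothetical)::

    import git

    # _init_externals() has already appended git/ext to sys.path, so the
    # gitdb submodule is importable by the time the imports above run
    repo = git.Repo("/path/to/repo")   # re-exported from git.repo
    config = git.GitConfigParser       # re-exported from git.config

    # __all__ excludes modules and underscore-prefixed names
    assert "os" not in git.__all__
    assert "Repo" in git.__all__
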
diff --git a/git/cmd.py b/git/cmd.py
new file mode 100644
index 00000000..60887f5d
--- /dev/null
+++ b/git/cmd.py
@@ -0,0 +1,515 @@
+# cmd.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os, sys
+from util import *
+from exc import GitCommandError
+
+from subprocess import (
+ call,
+ Popen,
+ PIPE
+ )
+
+# Enables debugging of GitPython's git commands
+GIT_PYTHON_TRACE = os.environ.get("GIT_PYTHON_TRACE", False)
+
+execute_kwargs = ('istream', 'with_keep_cwd', 'with_extended_output',
+ 'with_exceptions', 'as_process',
+ 'output_stream' )
+
+__all__ = ('Git', )
+
+def dashify(string):
+ return string.replace('_', '-')
+
+class Git(object):
+ """
+ The Git class manages communication with the Git binary.
+
+ It provides a convenient interface to calling the Git binary, such as in::
+
+ g = Git( git_dir )
+ g.init() # calls 'git init' program
+ rval = g.ls_files() # calls 'git ls-files' program
+
+ ``Debugging``
+ Set the GIT_PYTHON_TRACE environment variable to print each invocation
+ of the command to stdout.
+ Set its value to 'full' to see details about the returned values.
+ """
+ __slots__ = ("_working_dir", "cat_file_all", "cat_file_header")
+
+ # CONFIGURATION
+ # The size in bytes read from stdout when copying git's output to another stream
+ max_chunk_size = 1024*64
+
+ class AutoInterrupt(object):
+ """Kill/Interrupt the stored process instance once this instance goes out of scope. It is
+ used to prevent processes piling up in case iterators stop reading.
+ Besides that, all attributes are wired through to the contained process object.
+
+ The wait method was overridden to perform automatic status code checking
+ and possibly raise."""
+ __slots__= ("proc", "args")
+
+ def __init__(self, proc, args ):
+ self.proc = proc
+ self.args = args
+
+ def __del__(self):
+ # did the process finish already so we have a return code ?
+ if self.proc.poll() is not None:
+ return
+
+ # can be that nothing really exists anymore ...
+ if os is None:
+ return
+
+ # try to kill it
+ try:
+ os.kill(self.proc.pid, 2) # interrupt signal
+ except AttributeError:
+ # try windows
+ # for some reason, providing None for stdout/stderr still prints something. This is why
+ # we simply use the shell and redirect to nul. It's slower than CreateProcess; the
+ # question is whether we really want to see all these messages. It's annoying no matter what.
+ call(("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(self.proc.pid)), shell=True)
+ # END exception handling
+
+ def __getattr__(self, attr):
+ return getattr(self.proc, attr)
+
+ def wait(self):
+ """Wait for the process and return its status code.
+
+ :raise GitCommandError: if the return status is not 0"""
+ status = self.proc.wait()
+ if status != 0:
+ raise GitCommandError(self.args, status, self.proc.stderr.read())
+ # END status handling
+ return status
+ # END auto interrupt
+
+ class CatFileContentStream(object):
+ """Object representing a sized read-only stream returning the contents of
+ an object.
+ It behaves like a stream, but counts the data read and simulates an empty
+ stream once our sized content region is empty.
+ If not all data is read to the end of the object's lifetime, we read the
+ rest to assure the underlying stream continues to work"""
+
+ __slots__ = ('_stream', '_nbr', '_size')
+
+ def __init__(self, size, stream):
+ self._stream = stream
+ self._size = size
+ self._nbr = 0 # num bytes read
+
+ # special case: if the object is empty, there are no content bytes,
+ # so consume the terminating newline right away.
+ if size == 0:
+ stream.read(1)
+ # END handle empty streams
+
+ def read(self, size=-1):
+ bytes_left = self._size - self._nbr
+ if bytes_left == 0:
+ return ''
+ if size > -1:
+ # assure we don't try to read past our limit
+ size = min(bytes_left, size)
+ else:
+ # they try to read all, make sure it's not more than what remains
+ size = bytes_left
+ # END check early depletion
+ data = self._stream.read(size)
+ self._nbr += len(data)
+
+ # check for depletion, read our final byte to make the stream usable by others
+ if self._size - self._nbr == 0:
+ self._stream.read(1) # final newline
+ # END finish reading
+ return data
+
+ def readline(self, size=-1):
+ if self._nbr == self._size:
+ return ''
+
+ # clamp size to lowest allowed value
+ bytes_left = self._size - self._nbr
+ if size > -1:
+ size = min(bytes_left, size)
+ else:
+ size = bytes_left
+ # END handle size
+
+ data = self._stream.readline(size)
+ self._nbr += len(data)
+
+ # handle final byte
+ if self._size - self._nbr == 0:
+ self._stream.read(1)
+ # END finish reading
+
+ return data
+
+ def readlines(self, size=-1):
+ if self._nbr == self._size:
+ return list()
+
+ # leave all additional logic to our readline method, we just check the size
+ out = list()
+ nbr = 0
+ while True:
+ line = self.readline()
+ if not line:
+ break
+ out.append(line)
+ if size > -1:
+ nbr += len(line)
+ if nbr > size:
+ break
+ # END handle size constraint
+ # END readline loop
+ return out
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ line = self.readline()
+ if not line:
+ raise StopIteration
+ return line
+
+ def __del__(self):
+ bytes_left = self._size - self._nbr
+ if bytes_left:
+ # read and discard - seeking is impossible within a stream
+ # includes terminating newline
+ self._stream.read(bytes_left + 1)
+ # END handle incomplete read
+
+
+ def __init__(self, working_dir=None):
+ """Initialize this instance with:
+
+ :param working_dir:
+ Git directory we should work in. If None, we always work in the current
+ directory as returned by os.getcwd().
+ It is meant to be the working tree directory if available, or the
+ .git directory in case of bare repositories."""
+ super(Git, self).__init__()
+ self._working_dir = working_dir
+
+ # cached command slots
+ self.cat_file_header = None
+ self.cat_file_all = None
+
+ def __getattr__(self, name):
+ """A convenience method as it allows to call the command as if it was
+ an object.
+ :return: Callable object that will execute call _call_process with your arguments."""
+ if name[:1] == '_':
+ raise AttributeError(name)
+ return lambda *args, **kwargs: self._call_process(name, *args, **kwargs)
+
+ @property
+ def working_dir(self):
+ """:return: Git directory we are working on"""
+ return self._working_dir
+
+ def execute(self, command,
+ istream=None,
+ with_keep_cwd=False,
+ with_extended_output=False,
+ with_exceptions=True,
+ as_process=False,
+ output_stream=None,
+ **subprocess_kwargs
+ ):
+ """Handles executing the command on the shell and consumes and returns
+ the returned information (stdout)
+
+ :param command:
+ The command argument list to execute.
+ It should be a string, or a sequence of program arguments. The
+ program to execute is the first item in the args sequence or string.
+
+ :param istream:
+ Standard input filehandle passed to subprocess.Popen.
+
+ :param with_keep_cwd:
+ Whether to use the current working directory from os.getcwd().
+ The cmd otherwise uses its own working_dir that it has been initialized
+ with if possible.
+
+ :param with_extended_output:
+ Whether to return a (status, stdout, stderr) tuple.
+
+ :param with_exceptions:
+ Whether to raise an exception when git returns a non-zero status.
+
+ :param as_process:
+ Whether to return the created process instance directly from which
+ streams can be read on demand. This will render with_extended_output and
+ with_exceptions ineffective - the caller will have
+ to deal with the details himself.
+ It is important to note that the process will be placed into an AutoInterrupt
+ wrapper that will interrupt the process once it goes out of scope. If you
+ use the command in iterators, you should pass the whole process instance
+ instead of a single stream.
+
+ :param output_stream:
+ If set to a file-like object, data produced by the git command will be
+ output to the given stream directly.
+ This feature only has any effect if as_process is False. Processes will
+ always be created with a pipe due to issues with subprocess.
+ This merely is a workaround as data will be copied from the
+ output pipe to the given output stream directly.
+
+ :param subprocess_kwargs:
+ Keyword arguments to be passed to subprocess.Popen. Please note that
+ some of the valid kwargs are already set by this method; the ones you
+ specify must not conflict with them.
+
+ :return:
+ * str(output) if extended_output = False (Default)
+ * tuple(int(status), str(stdout), str(stderr)) if extended_output = True
+
+ if output_stream is set, the stdout value will be your output stream:
+ * output_stream if extended_output = False
+ * tuple(int(status), output_stream, str(stderr)) if extended_output = True
+
+ :raise GitCommandError:
+
+ :note:
+ If you add additional keyword arguments to the signature of this method,
+ you must update the execute_kwargs tuple housed in this module."""
+ if GIT_PYTHON_TRACE and not GIT_PYTHON_TRACE == 'full':
+ print ' '.join(command)
+
+ # Allow the user to have the command executed in their working dir.
+ if with_keep_cwd or self._working_dir is None:
+ cwd = os.getcwd()
+ else:
+ cwd=self._working_dir
+
+ # Start the process
+ proc = Popen(command,
+ cwd=cwd,
+ stdin=istream,
+ stderr=PIPE,
+ stdout=PIPE,
+ close_fds=(os.name=='posix'),# unsupported on windows
+ **subprocess_kwargs
+ )
+ if as_process:
+ return self.AutoInterrupt(proc, command)
+
+ # Wait for the process to return
+ status = 0
+ stdout_value = ''
+ stderr_value = ''
+ try:
+ if output_stream is None:
+ stdout_value, stderr_value = proc.communicate()
+ # strip trailing "\n"
+ if stdout_value.endswith("\n"):
+ stdout_value = stdout_value[:-1]
+ if stderr_value.endswith("\n"):
+ stderr_value = stderr_value[:-1]
+ status = proc.returncode
+ else:
+ stream_copy(proc.stdout, output_stream, self.max_chunk_size)
+ stdout_value = output_stream
+ stderr_value = proc.stderr.read()
+ # strip trailing "\n"
+ if stderr_value.endswith("\n"):
+ stderr_value = stderr_value[:-1]
+ status = proc.wait()
+ # END stdout handling
+ finally:
+ proc.stdout.close()
+ proc.stderr.close()
+
+ if GIT_PYTHON_TRACE == 'full':
+ cmdstr = " ".join(command)
+ if stderr_value:
+ print "%s -> %d; stdout: '%s'; stderr: '%s'" % (cmdstr, status, stdout_value, stderr_value)
+ elif stdout_value:
+ print "%s -> %d; stdout: '%s'" % (cmdstr, status, stdout_value)
+ else:
+ print "%s -> %d" % (cmdstr, status)
+ # END handle debug printing
+
+ if with_exceptions and status != 0:
+ raise GitCommandError(command, status, stderr_value)
+
+ # Allow access to the command's status code
+ if with_extended_output:
+ return (status, stdout_value, stderr_value)
+ else:
+ return stdout_value
+
+ def transform_kwargs(self, **kwargs):
+ """Transforms Python style kwargs into git command line options."""
+ args = list()
+ for k, v in kwargs.items():
+ if len(k) == 1:
+ if v is True:
+ args.append("-%s" % k)
+ elif type(v) is not bool:
+ args.append("-%s%s" % (k, v))
+ else:
+ if v is True:
+ args.append("--%s" % dashify(k))
+ elif type(v) is not bool:
+ args.append("--%s=%s" % (dashify(k), v))
+ return args
+
+ @classmethod
+ def __unpack_args(cls, arg_list):
+ if not isinstance(arg_list, (list,tuple)):
+ return [ str(arg_list) ]
+
+ outlist = list()
+ for arg in arg_list:
+ if isinstance(arg, (list, tuple)):
+ outlist.extend(cls.__unpack_args( arg ))
+ # END recursion
+ else:
+ outlist.append(str(arg))
+ # END for each arg
+ return outlist
+
+ def _call_process(self, method, *args, **kwargs):
+ """Run the given git command with the specified arguments and return
+ the result as a String
+
+ :param method:
+ is the command. Contained "_" characters will be converted to dashes,
+ such as in 'ls_files' to call 'ls-files'.
+
+ :param args:
+ is the list of arguments. If None is included, it will be pruned.
+ This allows your commands to call git more conveniently, as None
+ is treated as if the argument did not exist.
+
+ :param kwargs:
+ is a dict of keyword arguments.
+ This function accepts the same optional keyword arguments
+ as execute().
+
+ ``Examples``::
+ git.rev_list('master', max_count=10, header=True)
+
+ :return: Same as ``execute``"""
+ # Handle optional arguments prior to calling transform_kwargs
+ # otherwise these'll end up in args, which is bad.
+ _kwargs = dict()
+ for kwarg in execute_kwargs:
+ try:
+ _kwargs[kwarg] = kwargs.pop(kwarg)
+ except KeyError:
+ pass
+
+ # Prepare the argument list
+ opt_args = self.transform_kwargs(**kwargs)
+
+ ext_args = self.__unpack_args([a for a in args if a is not None])
+ args = opt_args + ext_args
+
+ call = ["git", dashify(method)]
+ call.extend(args)
+
+ return self.execute(call, **_kwargs)
+
+ def _parse_object_header(self, header_line):
+ """
+ :param header_line:
+ <hex_sha> type_string size_as_int
+
+ :return: (hex_sha, type_string, size_as_int)
+
+ :raise ValueError: if the header contains indication for an error due to
+ incorrect input sha"""
+ tokens = header_line.split()
+ if len(tokens) != 3:
+ if not tokens:
+ raise ValueError("SHA could not be resolved, git returned: %r" % (header_line.strip()))
+ else:
+ raise ValueError("SHA %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip()))
+ # END handle actual return value
+ # END error handling
+
+ if len(tokens[0]) != 40:
+ raise ValueError("Failed to parse header: %r" % header_line)
+ return (tokens[0], tokens[1], int(tokens[2]))
+
+ def __prepare_ref(self, ref):
+ # required for command to separate refs on stdin
+ refstr = str(ref) # could be ref-object
+ if refstr.endswith("\n"):
+ return refstr
+ return refstr + "\n"
+
+ def __get_persistent_cmd(self, attr_name, cmd_name, *args,**kwargs):
+ cur_val = getattr(self, attr_name)
+ if cur_val is not None:
+ return cur_val
+
+ options = { "istream" : PIPE, "as_process" : True }
+ options.update( kwargs )
+
+ cmd = self._call_process( cmd_name, *args, **options )
+ setattr(self, attr_name, cmd )
+ return cmd
+
+ def __get_object_header(self, cmd, ref):
+ cmd.stdin.write(self.__prepare_ref(ref))
+ cmd.stdin.flush()
+ return self._parse_object_header(cmd.stdout.readline())
+
+ def get_object_header(self, ref):
+ """ Use this method to quickly examine the type and size of the object behind
+ the given ref.
+
+ :note: The method will only suffer from the costs of command invocation
+ once and reuses the command in subsequent calls.
+
+ :return: (hexsha, type_string, size_as_int)"""
+ cmd = self.__get_persistent_cmd("cat_file_header", "cat_file", batch_check=True)
+ return self.__get_object_header(cmd, ref)
+
+ def get_object_data(self, ref):
+ """ As get_object_header, but returns object data as well
+ :return: (hexsha, type_string, size_as_int, data_string)
+ :note: not threadsafe"""
+ hexsha, typename, size, stream = self.stream_object_data(ref)
+ data = stream.read(size)
+ del(stream)
+ return (hexsha, typename, size, data)
+
+ def stream_object_data(self, ref):
+ """As get_object_header, but returns the data as a stream
+ :return: (hexsha, type_string, size_as_int, stream)
+ :note: This method is not threadsafe, you need one independent Command instance
+ per thread to be safe !"""
+ cmd = self.__get_persistent_cmd("cat_file_all", "cat_file", batch=True)
+ hexsha, typename, size = self.__get_object_header(cmd, ref)
+ return (hexsha, typename, size, self.CatFileContentStream(size, cmd.stdout))
+
+ def clear_cache(self):
+ """Clear all kinds of internal caches to release resources.
+
+ Currently persistent commands will be interrupted.
+
+ :return: self"""
+ self.cat_file_all = None
+ self.cat_file_header = None
+ return self
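A short usage sketch tying the pieces of cmd.py together: __getattr__ turns attribute access into git invocations via _call_process and transform_kwargs, while get_object_header and stream_object_data reuse persistent cat-file processes (the repository path is hypothetical)::

    from git.cmd import Git

    g = Git("/path/to/repo")                  # hypothetical working directory

    # becomes: git ls-files --others
    untracked = g.ls_files(others=True)

    # reuses a persistent 'git cat-file --batch-check' process
    hexsha, typename, size = g.get_object_header("HEAD")

    # streamed access through CatFileContentStream
    hexsha, typename, size, stream = g.stream_object_data("HEAD")
    data = stream.read(size)

    g.clear_cache()                           # drop the persistent processes
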
diff --git a/git/config.py b/git/config.py
new file mode 100644
index 00000000..f1a8832e
--- /dev/null
+++ b/git/config.py
@@ -0,0 +1,420 @@
+# config.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Module containing module parser implementation able to properly read and write
+configuration files"""
+
+import re
+import os
+import ConfigParser as cp
+import inspect
+import cStringIO
+
+from git.odict import OrderedDict
+from git.util import LockFile
+
+__all__ = ('GitConfigParser', 'SectionConstraint')
+
+class MetaParserBuilder(type):
+ """Utlity class wrapping base-class methods into decorators that assure read-only properties"""
+ def __new__(metacls, name, bases, clsdict):
+ """
+ Equip all base-class methods with a needs_values decorator, and all non-const methods
+ with a set_dirty_and_flush_changes decorator in addition to that."""
+ kmm = '_mutating_methods_'
+ if kmm in clsdict:
+ mutating_methods = clsdict[kmm]
+ for base in bases:
+ methods = ( t for t in inspect.getmembers(base, inspect.ismethod) if not t[0].startswith("_") )
+ for name, method in methods:
+ if name in clsdict:
+ continue
+ method_with_values = needs_values(method)
+ if name in mutating_methods:
+ method_with_values = set_dirty_and_flush_changes(method_with_values)
+ # END mutating methods handling
+
+ clsdict[name] = method_with_values
+ # END for each name/method pair
+ # END for each base
+ # END if mutating methods configuration is set
+
+ new_type = super(MetaParserBuilder, metacls).__new__(metacls, name, bases, clsdict)
+ return new_type
+
+
+
+def needs_values(func):
+ """Returns method assuring we read values (on demand) before we try to access them"""
+ def assure_data_present(self, *args, **kwargs):
+ self.read()
+ return func(self, *args, **kwargs)
+ # END wrapper method
+ assure_data_present.__name__ = func.__name__
+ return assure_data_present
+
+def set_dirty_and_flush_changes(non_const_func):
+ """Return method that checks whether given non constant function may be called.
+ If so, the instance will be set dirty.
+ Additionally, we flush the changes right to disk"""
+ def flush_changes(self, *args, **kwargs):
+ rval = non_const_func(self, *args, **kwargs)
+ self.write()
+ return rval
+ # END wrapper method
+ flush_changes.__name__ = non_const_func.__name__
+ return flush_changes
+
+
+class SectionConstraint(object):
+ """Constrains a ConfigParser to only option commands which are constrained to
+ always use the section we have been initialized with.
+
+ It supports all ConfigParser methods that operate on an option"""
+ __slots__ = ("_config", "_section_name")
+ _valid_attrs_ = ("get_value", "set_value", "get", "set", "getint", "getfloat", "getboolean", "has_option",
+ "remove_section", "remove_option", "options")
+
+ def __init__(self, config, section):
+ self._config = config
+ self._section_name = section
+
+ def __getattr__(self, attr):
+ if attr in self._valid_attrs_:
+ return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
+ return super(SectionConstraint,self).__getattribute__(attr)
+
+ def _call_config(self, method, *args, **kwargs):
+ """Call the configuration at the given method which must take a section name
+ as first argument"""
+ return getattr(self._config, method)(self._section_name, *args, **kwargs)
+
+ @property
+ def config(self):
+ """return: Configparser instance we constrain"""
+ return self._config
+
+
+class GitConfigParser(cp.RawConfigParser, object):
+ """Implements specifics required to read git style configuration files.
+
+ This variation behaves much like the git.config command such that the configuration
+ will be read on demand based on the filepath given during initialization.
+
+ The changes will automatically be written once the instance goes out of scope, but
+ can be triggered manually as well.
+
+ The configuration file will be locked if you intend to change values, preventing
+ other instances from writing concurrently.
+
+ :note:
+ The config is case-sensitive even when queried, hence section and option names
+ must match perfectly."""
+ __metaclass__ = MetaParserBuilder
+
+
+ #{ Configuration
+ # The lock type determines the type of lock to use in new configuration readers.
+ # They must be compatible to the LockFile interface.
+ # A suitable alternative would be the BlockingLockFile
+ t_lock = LockFile
+
+ #} END configuration
+
+ OPTCRE = re.compile(
+ r'\s?(?P<option>[^:=\s][^:=]*)' # very permissive, including leading whitespace
+ r'\s*(?P<vi>[:=])\s*' # any number of space/tab,
+ # followed by separator
+ # (either : or =), followed
+ # by any # space/tab
+ r'(?P<value>.*)$' # everything up to eol
+ )
+
+ # list of RawConfigParser methods able to change the instance
+ _mutating_methods_ = ("add_section", "remove_section", "remove_option", "set")
+ __slots__ = ("_sections", "_defaults", "_file_or_files", "_read_only","_is_initialized", '_lock')
+
+ def __init__(self, file_or_files, read_only=True):
+ """Initialize a configuration reader to read the given file_or_files and to
+ possibly allow changes to it by setting read_only False
+
+ :param file_or_files:
+ A single file path or file objects or multiple of these
+
+ :param read_only:
+ If True, the ConfigParser may only read the data, but not change it.
+ If False, only a single file path or file object may be given."""
+ super(GitConfigParser, self).__init__()
+ # initialize base with ordered dictionaries to be sure we write the same
+ # file back
+ self._sections = OrderedDict()
+ self._defaults = OrderedDict()
+
+ self._file_or_files = file_or_files
+ self._read_only = read_only
+ self._is_initialized = False
+ self._lock = None
+
+ if not read_only:
+ if isinstance(file_or_files, (tuple, list)):
+ raise ValueError("Write-ConfigParsers can operate on a single file only, multiple files have been passed")
+ # END single file check
+
+ if not isinstance(file_or_files, basestring):
+ file_or_files = file_or_files.name
+ # END get filename from handle/stream
+ # initialize lock base - we want to write
+ self._lock = self.t_lock(file_or_files)
+
+ self._lock._obtain_lock()
+ # END read-only check
+
+
+ def __del__(self):
+ """Write pending changes if required and release locks"""
+ # checking for the lock here makes sure we do not raise during write()
+ # in case an invalid parser was created who could not get a lock
+ if self.read_only or not self._lock._has_lock():
+ return
+
+ try:
+ try:
+ self.write()
+ except IOError,e:
+ print "Exception during destruction of GitConfigParser: %s" % str(e)
+ finally:
+ self._lock._release_lock()
+
+ def optionxform(self, optionstr):
+ """Do not transform options in any way when writing"""
+ return optionstr
+
+ def _read(self, fp, fpname):
+ """A direct copy of the py2.4 version of the super class's _read method
+ to assure it uses ordered dicts. Had to change one line to make it work.
+
+ Future versions have this fixed, but in fact it's quite embarrassing for the
+ guys not to have done it right in the first place!
+
+ Removed big comments to make it more compact.
+
+ Made sure it ignores initial whitespace as git uses tabs"""
+ cursect = None # None, or a dictionary
+ optname = None
+ lineno = 0
+ e = None # None, or an exception
+ while True:
+ line = fp.readline()
+ if not line:
+ break
+ lineno = lineno + 1
+ # comment or blank line?
+ if line.strip() == '' or line[0] in '#;':
+ continue
+ if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR":
+ # no leading whitespace
+ continue
+ else:
+ # is it a section header?
+ mo = self.SECTCRE.match(line)
+ if mo:
+ sectname = mo.group('header')
+ if sectname in self._sections:
+ cursect = self._sections[sectname]
+ elif sectname == cp.DEFAULTSECT:
+ cursect = self._defaults
+ else:
+ # THE ONLY LINE WE CHANGED !
+ cursect = OrderedDict((('__name__', sectname),))
+ self._sections[sectname] = cursect
+ # So sections can't start with a continuation line
+ optname = None
+ # no section header in the file?
+ elif cursect is None:
+ raise cp.MissingSectionHeaderError(fpname, lineno, line)
+ # an option line?
+ else:
+ mo = self.OPTCRE.match(line)
+ if mo:
+ optname, vi, optval = mo.group('option', 'vi', 'value')
+ if vi in ('=', ':') and ';' in optval:
+ pos = optval.find(';')
+ if pos != -1 and optval[pos-1].isspace():
+ optval = optval[:pos]
+ optval = optval.strip()
+ if optval == '""':
+ optval = ''
+ optname = self.optionxform(optname.rstrip())
+ cursect[optname] = optval
+ else:
+ if not e:
+ e = cp.ParsingError(fpname)
+ e.append(lineno, repr(line))
+ # END
+ # END ?
+ # END ?
+ # END while reading
+ # if any parsing errors occurred, raise an exception
+ if e:
+ raise e
+
+
+ def read(self):
+ """Reads the data stored in the files we have been initialized with. It will
+ ignore files that cannot be read, possibly leaving an empty configuration
+
+ :return: Nothing
+ :raise IOError: if a file cannot be handled"""
+ if self._is_initialized:
+ return
+
+ files_to_read = self._file_or_files
+ if not isinstance(files_to_read, (tuple, list)):
+ files_to_read = [ files_to_read ]
+
+ for file_object in files_to_read:
+ fp = file_object
+ close_fp = False
+ # assume a path if it is not a file-object
+ if not hasattr(file_object, "seek"):
+ try:
+ fp = open(file_object)
+ close_fp = True
+ except IOError,e:
+ continue
+ # END fp handling
+
+ try:
+ self._read(fp, fp.name)
+ finally:
+ if close_fp:
+ fp.close()
+ # END read-handling
+ # END for each file object to read
+ self._is_initialized = True
+
+ def _write(self, fp):
+ """Write an .ini-format representation of the configuration state in
+ git compatible format"""
+ def write_section(name, section_dict):
+ fp.write("[%s]\n" % name)
+ for (key, value) in section_dict.items():
+ if key != "__name__":
+ fp.write("\t%s = %s\n" % (key, str(value).replace('\n', '\n\t')))
+ # END if key is not __name__
+ # END section writing
+
+ if self._defaults:
+ write_section(cp.DEFAULTSECT, self._defaults)
+ map(lambda t: write_section(t[0],t[1]), self._sections.items())
+
+
+ @needs_values
+ def write(self):
+ """Write changes to our file, if there are changes at all
+
+ :raise IOError: if this is a read-only writer instance or if we could not obtain
+ a file lock"""
+ self._assure_writable("write")
+
+ fp = self._file_or_files
+ close_fp = False
+
+ # we have a physical file on disk, so get a lock
+ if isinstance(fp, (basestring, file)):
+ self._lock._obtain_lock()
+ # END get lock for physical files
+
+ if not hasattr(fp, "seek"):
+ fp = open(self._file_or_files, "w")
+ close_fp = True
+ else:
+ fp.seek(0)
+ # END handle stream or file
+
+ # WRITE DATA
+ try:
+ self._write(fp)
+ finally:
+ if close_fp:
+ fp.close()
+ # END data writing
+
+ # we do not release the lock - it will be done automatically once the
+ # instance vanishes
+
+ def _assure_writable(self, method_name):
+ if self.read_only:
+ raise IOError("Cannot execute non-constant method %s.%s" % (self, method_name))
+
+ @needs_values
+ @set_dirty_and_flush_changes
+ def add_section(self, section):
+ """Assures added options will stay in order"""
+ super(GitConfigParser, self).add_section(section)
+ self._sections[section] = OrderedDict()
+
+ @property
+ def read_only(self):
+ """:return: True if this instance may change the configuration file"""
+ return self._read_only
+
+ def get_value(self, section, option, default = None):
+ """
+ :param default:
+ If not None, the given default value will be returned in case
+ the option did not exist
+ :return: a properly typed value, either int, float or string
+
+ :raise TypeError: in case the value could not be understood
+ Otherwise the exceptions known to the ConfigParser will be raised."""
+ try:
+ valuestr = self.get(section, option)
+ except Exception:
+ if default is not None:
+ return default
+ raise
+
+ types = ( long, float )
+ for numtype in types:
+ try:
+ val = numtype( valuestr )
+
+ # truncated value ?
+ if val != float( valuestr ):
+ continue
+
+ return val
+ except (ValueError,TypeError):
+ continue
+ # END for each numeric type
+
+ # try boolean values as git uses them
+ vl = valuestr.lower()
+ if vl == 'false':
+ return False
+ if vl == 'true':
+ return True
+
+ if not isinstance( valuestr, basestring ):
+ raise TypeError( "Invalid value type: only int, long, float and str are allowed", valuestr )
+
+ return valuestr
+
+ @needs_values
+ @set_dirty_and_flush_changes
+ def set_value(self, section, option, value):
+ """Sets the given option in section to the given value.
+ It will create the section if required, and will not raise an exception, as
+ opposed to the default ConfigParser 'set' method.
+
+ :param section: Name of the section in which the option resides or should reside
+ :param option: Name of the option whose value to set
+
+ :param value: Value to set the option to. It must be a string or convertible
+ to a string"""
+ if not self.has_section(section):
+ self.add_section(section)
+ self.set(section, option, str(value))
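A brief sketch of the read-only and writable modes of GitConfigParser described above (file paths and values are hypothetical)::

    from git.config import GitConfigParser

    # read-only parsers may aggregate several files; data is read on demand
    cr = GitConfigParser([".git/config", "/home/user/.gitconfig"], read_only=True)
    email = cr.get_value("user", "email", default="unknown")

    # writable parsers operate on a single file, lock it, and flush on mutation
    cw = GitConfigParser(".git/config", read_only=False)
    cw.set_value("user", "name", "Jane Doe")  # set_value writes immediately
    del cw             # __del__ writes pending changes and releases the lock
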
diff --git a/git/db.py b/git/db.py
new file mode 100644
index 00000000..b1c65377
--- /dev/null
+++ b/git/db.py
@@ -0,0 +1,61 @@
+"""Module with our own gitdb implementation - it uses the git command"""
+from exc import (
+ GitCommandError,
+ BadObject
+ )
+
+from gitdb.base import (
+ OInfo,
+ OStream
+ )
+
+from gitdb.util import (
+ bin_to_hex,
+ hex_to_bin
+ )
+from gitdb.db import GitDB
+from gitdb.db import LooseObjectDB
+
+
+__all__ = ('GitCmdObjectDB', 'GitDB' )
+
+#class GitCmdObjectDB(CompoundDB, ObjectDBW):
+class GitCmdObjectDB(LooseObjectDB):
+ """A database representing the default git object store, which includes loose
+ objects, pack files and an alternates file
+
+ It will create objects only in the loose object database.
+ :note: for now, we use the git command to do all the lookup, just until we
+ have packs and the other implementations
+ """
+ def __init__(self, root_path, git):
+ """Initialize this instance with the root and a git command"""
+ super(GitCmdObjectDB, self).__init__(root_path)
+ self._git = git
+
+ def info(self, sha):
+ hexsha, typename, size = self._git.get_object_header(bin_to_hex(sha))
+ return OInfo(hex_to_bin(hexsha), typename, size)
+
+ def stream(self, sha):
+ """For now, all lookup is done by git itself"""
+ hexsha, typename, size, stream = self._git.stream_object_data(bin_to_hex(sha))
+ return OStream(hex_to_bin(hexsha), typename, size, stream)
+
+
+ # { Interface
+
+ def partial_to_complete_sha_hex(self, partial_hexsha):
+ """:return: Full binary 20 byte sha from the given partial hexsha
+ :raise AmbiguousObjectName:
+ :raise BadObject:
+ :note: currently we only raise BadObject as git does not communicate
+ AmbiguousObjects separately"""
+ try:
+ hexsha, typename, size = self._git.get_object_header(partial_hexsha)
+ return hex_to_bin(hexsha)
+ except (GitCommandError, ValueError):
+ raise BadObject(partial_hexsha)
+ # END handle exceptions
+
+ #} END interface
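To illustrate how GitCmdObjectDB delegates lookups to the git command, a small sketch (assuming a Repo instance exposes this database as its odb attribute; the path is hypothetical)::

    from git import Repo

    repo = Repo("/path/to/repo")          # hypothetical path
    odb = repo.odb                        # a GitCmdObjectDB in this backend

    binsha = repo.head.commit.binsha      # 20-byte binary sha
    info = odb.info(binsha)               # OInfo: binsha, type string, size
    ostream = odb.stream(binsha)          # OStream additionally offers read()
    data = ostream.read()
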
diff --git a/git/diff.py b/git/diff.py
new file mode 100644
index 00000000..48253c42
--- /dev/null
+++ b/git/diff.py
@@ -0,0 +1,346 @@
+# diff.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import re
+from objects.blob import Blob
+from objects.util import mode_str_to_int
+from exc import GitCommandError
+
+from gitdb.util import hex_to_bin
+
+__all__ = ('Diffable', 'DiffIndex', 'Diff')
+
+class Diffable(object):
+ """Common interface for all object that can be diffed against another object of compatible type.
+
+ :note:
+ Subclasses require a repo member as it is the case for Object instances, for practical
+ reasons we do not derive from Object."""
+ __slots__ = tuple()
+
+ # stand-in indicating you want to diff against the index
+ class Index(object):
+ pass
+
+ def _process_diff_args(self, args):
+ """
+ :return:
+ possibly altered version of the given args list.
+ Method is called right before git command execution.
+ Subclasses can use it to alter the behaviour of the superclass"""
+ return args
+
+ def diff(self, other=Index, paths=None, create_patch=False, **kwargs):
+ """Creates diffs between two items being trees, trees and index or an
+ index and the working tree.
+
+ :param other:
+ Is the item to compare us with.
+ If None, we will be compared to the working tree.
+ If Treeish, it will be compared against the respective tree
+ If Index ( type ), it will be compared against the index.
+ It defaults to Index to assure the method will not by-default fail
+ on bare repositories.
+
+ :param paths:
+ is a list of paths or a single path to limit the diff to.
+ Only entries matching at least one of the given paths will be included.
+
+ :param create_patch:
+ If True, the returned Diff contains a detailed patch that, if applied,
+ turns self into other. Patches are somewhat costly as blobs have to be read
+ and diffed.
+
+ :param kwargs:
+ Additional arguments passed to git-diff, such as
+ R=True to swap both sides of the diff.
+
+ :return: git.DiffIndex
+
+ :note:
+ Rename detection will only work if create_patch is True.
+
+ On a bare repository, 'other' needs to be provided as Index or as
+ Tree/Commit, or a git command error will occur"""
+ args = list()
+ args.append( "--abbrev=40" ) # we need full shas
+ args.append( "--full-index" ) # get full index paths, not only filenames
+
+ if create_patch:
+ args.append("-p")
+ args.append("-M") # check for renames
+ else:
+ args.append("--raw")
+
+ if paths is not None and not isinstance(paths, (tuple,list)):
+ paths = [ paths ]
+
+ if other is not None and other is not self.Index:
+ args.insert(0, other)
+ if other is self.Index:
+ args.insert(0, "--cached")
+
+ args.insert(0,self)
+
+ # paths is list here or None
+ if paths:
+ args.append("--")
+ args.extend(paths)
+ # END paths handling
+
+ kwargs['as_process'] = True
+ proc = self.repo.git.diff(*self._process_diff_args(args), **kwargs)
+
+ diff_method = Diff._index_from_raw_format
+ if create_patch:
+ diff_method = Diff._index_from_patch_format
+ index = diff_method(self.repo, proc.stdout)
+
+ status = proc.wait()
+ return index
+
+
+class DiffIndex(list):
+ """Implements an Index for diffs, allowing a list of Diffs to be queried by
+ the diff properties.
+
+ The class improves the diff handling convenience"""
+ # change type invariant identifying possible ways a blob can have changed
+ # A = Added
+ # D = Deleted
+ # R = Renamed
+ # M = modified
+ change_type = ("A", "D", "R", "M")
+
+
+ def iter_change_type(self, change_type):
+ """
+ :return:
+ iterator yielding Diff instances that match the given change_type
+
+ :param change_type:
+ Member of DiffIndex.change_type, namely:
+
+ * 'A' for added paths
+ * 'D' for deleted paths
+ * 'R' for renamed paths
+ * 'M' for paths with modified data"""
+ if change_type not in self.change_type:
+ raise ValueError( "Invalid change type: %s" % change_type )
+
+ for diff in self:
+ if change_type == "A" and diff.new_file:
+ yield diff
+ elif change_type == "D" and diff.deleted_file:
+ yield diff
+ elif change_type == "R" and diff.renamed:
+ yield diff
+ elif change_type == "M" and diff.a_blob and diff.b_blob and diff.a_blob != diff.b_blob:
+ yield diff
+ # END for each diff
+
+
+class Diff(object):
+ """A Diff contains diff information between two Trees.
+
+ It contains two sides a and b of the diff, members are prefixed with
+ "a" and "b" respectively to inidcate that.
+
+ Diffs keep information about the changed blob objects, the file mode, renames,
+ deletions and new files.
+
+ There are a few cases where None has to be expected as member variable value:
+
+ ``New File``::
+
+ a_mode is None
+ a_blob is None
+
+ ``Deleted File``::
+
+ b_mode is None
+ b_blob is None
+
+ ``Working Tree Blobs``
+
+ When comparing to working trees, the working tree blob will have a null hexsha
+ as a corresponding object does not yet exist. The mode will be null as well,
+ but the path will be available.
+ If it is listed in a diff, the working tree version of the file must
+ differ from the version in the index or tree, and hence has been modified."""
+
+ # precompiled regex
+ re_header = re.compile(r"""
+ #^diff[ ]--git
+ [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
+ (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
+ ^rename[ ]from[ ](?P<rename_from>\S+)\n
+ ^rename[ ]to[ ](?P<rename_to>\S+)(?:\n|$))?
+ (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
+ ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
+ (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
+ (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
+ (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
+ \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
+ """, re.VERBOSE | re.MULTILINE)
+ # can be used for comparisons
+ NULL_HEX_SHA = "0"*40
+ NULL_BIN_SHA = "\0"*20
+
+ __slots__ = ("a_blob", "b_blob", "a_mode", "b_mode", "new_file", "deleted_file",
+ "rename_from", "rename_to", "diff")
+
+ def __init__(self, repo, a_path, b_path, a_blob_id, b_blob_id, a_mode,
+ b_mode, new_file, deleted_file, rename_from,
+ rename_to, diff):
+
+ self.a_mode = a_mode
+ self.b_mode = b_mode
+
+ if self.a_mode:
+ self.a_mode = mode_str_to_int(self.a_mode)
+ if self.b_mode:
+ self.b_mode = mode_str_to_int(self.b_mode)
+
+ if a_blob_id is None:
+ self.a_blob = None
+ else:
+ self.a_blob = Blob(repo, hex_to_bin(a_blob_id), mode=self.a_mode, path=a_path)
+ if b_blob_id is None:
+ self.b_blob = None
+ else:
+ self.b_blob = Blob(repo, hex_to_bin(b_blob_id), mode=self.b_mode, path=b_path)
+
+ self.new_file = new_file
+ self.deleted_file = deleted_file
+
+ # be clear and use None instead of empty strings
+ self.rename_from = rename_from or None
+ self.rename_to = rename_to or None
+
+ self.diff = diff
+
+
+ def __eq__(self, other):
+ for name in self.__slots__:
+ if getattr(self, name) != getattr(other, name):
+ return False
+ # END for each name
+ return True
+
+ def __ne__(self, other):
+ return not ( self == other )
+
+ def __hash__(self):
+ return hash(tuple(getattr(self,n) for n in self.__slots__))
+
+ def __str__(self):
+ h = "%s"
+ if self.a_blob:
+ h %= self.a_blob.path
+ elif self.b_blob:
+ h %= self.b_blob.path
+
+ msg = ''
+ l = None # temp line
+ ll = 0 # line length
+ for b,n in zip((self.a_blob, self.b_blob), ('lhs', 'rhs')):
+ if b:
+ l = "\n%s: %o | %s" % (n, b.mode, b.hexsha)
+ else:
+ l = "\n%s: None" % n
+ # END if blob is not None
+ ll = max(len(l), ll)
+ msg += l
+ # END for each blob
+
+ # add headline
+ h += '\n' + '='*ll
+
+ if self.deleted_file:
+ msg += '\nfile deleted in rhs'
+ if self.new_file:
+ msg += '\nfile added in rhs'
+ if self.rename_from:
+ msg += '\nfile renamed from %r' % self.rename_from
+ if self.rename_to:
+ msg += '\nfile renamed to %r' % self.rename_to
+ if self.diff:
+ msg += '\n---'
+ msg += self.diff
+ msg += '\n---'
+ # END diff info
+
+ return h + msg
+
+ @property
+ def renamed(self):
+ """:returns: True if the blob of our diff has been renamed"""
+ return self.rename_from != self.rename_to
+
+ @classmethod
+ def _index_from_patch_format(cls, repo, stream):
+ """Create a new DiffIndex from the given text which must be in patch format
+ :param repo: is the repository we are operating on - it is required
+ :param stream: result of 'git diff' as a stream (supporting file protocol)
+ :return: git.DiffIndex """
+ # for now, we have to bake the stream
+ text = stream.read()
+ index = DiffIndex()
+
+ diff_header = cls.re_header.match
+ for diff in ('\n' + text).split('\ndiff --git')[1:]:
+ header = diff_header(diff)
+
+ a_path, b_path, similarity_index, rename_from, rename_to, \
+ old_mode, new_mode, new_file_mode, deleted_file_mode, \
+ a_blob_id, b_blob_id, b_mode = header.groups()
+ new_file, deleted_file = bool(new_file_mode), bool(deleted_file_mode)
+
+ index.append(Diff(repo, a_path, b_path, a_blob_id, b_blob_id,
+ old_mode or deleted_file_mode, new_mode or new_file_mode or b_mode,
+ new_file, deleted_file, rename_from, rename_to, diff[header.end():]))
+
+ return index
+
+ @classmethod
+ def _index_from_raw_format(cls, repo, stream):
+ """Create a new DiffIndex from the given stream which must be in raw format.
+ :note:
+ This format is inherently incapable of detecting renames, hence we only
+ modify, delete and add files
+ :return: git.DiffIndex"""
+ # handles
+ # :100644 100644 6870991011cc8d9853a7a8a6f02061512c6a8190 37c5e30c879213e9ae83b21e9d11e55fc20c54b7 M .gitignore
+ index = DiffIndex()
+ for line in stream:
+ if not line.startswith(":"):
+ continue
+ # END its not a valid diff line
+ old_mode, new_mode, a_blob_id, b_blob_id, change_type, path = line[1:].split(None, 5)
+ path = path.strip()
+ a_path = path
+ b_path = path
+ deleted_file = False
+ new_file = False
+
+ # NOTE: We cannot infer the change type from the existence of a blob,
+ # as diffs against the working tree do not have blobs yet
+ if change_type == 'D':
+ b_blob_id = None
+ deleted_file = True
+ elif change_type == 'A':
+ a_blob_id = None
+ new_file = True
+ # END add/remove handling
+
+ diff = Diff(repo, a_path, b_path, a_blob_id, b_blob_id, old_mode, new_mode,
+ new_file, deleted_file, None, None, '')
+ index.append(diff)
+ # END for each line
+
+ return index
+
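The two index factories above correspond to the two output modes of git-diff; a usage sketch of the Diffable interface (the repository path is hypothetical)::

    from git import Repo

    repo = Repo("/path/to/repo")                 # hypothetical path
    hc = repo.head.commit

    # raw format: fast, no patch text, no rename detection
    dindex = hc.diff(hc.parents[0])
    for d in dindex.iter_change_type('M'):
        print d.a_blob.path

    # patch format: create_patch=True adds '-p -M', enabling renames
    pindex = hc.diff(None, create_patch=True)    # None: against the working tree
    for d in pindex.iter_change_type('R'):
        print d.rename_from, '->', d.rename_to
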
diff --git a/git/exc.py b/git/exc.py
new file mode 100644
index 00000000..d2cb8d7e
--- /dev/null
+++ b/git/exc.py
@@ -0,0 +1,58 @@
+# exc.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+""" Module containing all exceptions thrown througout the git package, """
+
+from gitdb.exc import *
+
+class InvalidGitRepositoryError(Exception):
+ """ Thrown if the given repository appears to have an invalid format. """
+
+
+class NoSuchPathError(OSError):
+ """ Thrown if a path could not be access by the system. """
+
+
+class GitCommandError(Exception):
+ """ Thrown if execution of the git command fails with non-zero status code. """
+ def __init__(self, command, status, stderr=None):
+ self.stderr = stderr
+ self.status = status
+ self.command = command
+
+ def __str__(self):
+ return ("'%s' returned exit status %i: %s" %
+ (' '.join(str(i) for i in self.command), self.status, self.stderr))
+
+
+class CheckoutError( Exception ):
+ """Thrown if a file could not be checked out from the index as it contained
+ changes.
+
+ The .failed_files attribute contains a list of relative paths that failed
+ to be checked out as they contained changes that did not exist in the index.
+
+ The .failed_reasons attribute contains a list of strings informing about the
+ actual cause of the issue.
+
+ The .valid_files attribute contains a list of relative paths to files that
+ were checked out successfully and hence match the version stored in the
+ index"""
+ def __init__(self, message, failed_files, valid_files, failed_reasons):
+ Exception.__init__(self, message)
+ self.failed_files = failed_files
+ self.failed_reasons = failed_reasons
+ self.valid_files = valid_files
+
+ def __str__(self):
+ return Exception.__str__(self) + ":%s" % self.failed_files
+
+
+class CacheError(Exception):
+ """Base for all errors related to the git index, which is called cache internally"""
+
+class UnmergedEntriesError(CacheError):
+ """Thrown if an operation cannot proceed as there are still unmerged
+ entries in the cache"""
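The fields stored on GitCommandError above can be inspected when a command fails; a minimal sketch (the repository path and ref are hypothetical)::

    from git import Repo
    from git.exc import GitCommandError

    repo = Repo("/path/to/repo")              # hypothetical path
    try:
        repo.git.rev_parse("no-such-ref")
    except GitCommandError, e:                # py2 syntax, as used above
        print e.command, e.status             # the failing argv and exit code
        print e.stderr                        # git's error output
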
diff --git a/git/ext/gitdb b/git/ext/gitdb
new file mode 160000
+Subproject 1bc281d31b8d31fd4dcbcd9b441b5c7b2c1b0bb
diff --git a/git/ez_setup.py b/git/ez_setup.py
new file mode 100644
index 00000000..3031ad0d
--- /dev/null
+++ b/git/ez_setup.py
@@ -0,0 +1,222 @@
+#!python
+"""Bootstrap setuptools installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+ from ez_setup import use_setuptools
+ use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import sys
+DEFAULT_VERSION = "0.6c3"
+DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3]
+
+md5_data = {
+ 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
+ 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
+ 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
+ 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
+ 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
+ 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
+ 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
+ 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
+ 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
+ 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
+ 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
+ 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
+ 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
+ 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
+ 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
+}
+
+import sys, os
+
+def _validate_md5(egg_name, data):
+ if egg_name in md5_data:
+ from md5 import md5
+ digest = md5(data).hexdigest()
+ if digest != md5_data[egg_name]:
+ print >>sys.stderr, (
+ "md5 validation of %s failed! (Possible download problem?)"
+ % egg_name
+ )
+ sys.exit(2)
+ return data
+
+
+def use_setuptools(
+ version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
+ download_delay=15
+):
+ """Automatically find/download setuptools and make it available on sys.path
+
+ `version` should be a valid setuptools version number that is available
+ as an egg for download under the `download_base` URL (which should end with
+ a '/'). `to_dir` is the directory where setuptools will be downloaded, if
+ it is not already available. If `download_delay` is specified, it should
+ be the number of seconds that will be paused before initiating a download,
+ should one be required. If an older version of setuptools is installed,
+ this routine will print a message to ``sys.stderr`` and raise SystemExit in
+ an attempt to abort the calling script.
+ """
+ try:
+ import setuptools
+ if setuptools.__version__ == '0.0.1':
+ print >>sys.stderr, (
+ "You have an obsolete version of setuptools installed. Please\n"
+ "remove it from your system entirely before rerunning this script."
+ )
+ sys.exit(2)
+ except ImportError:
+ egg = download_setuptools(version, download_base, to_dir, download_delay)
+ sys.path.insert(0, egg)
+ import setuptools; setuptools.bootstrap_install_from = egg
+
+ import pkg_resources
+ try:
+ pkg_resources.require("setuptools>="+version)
+
+ except pkg_resources.VersionConflict, e:
+ # XXX could we install in a subprocess here?
+ print >>sys.stderr, (
+ "The required version of setuptools (>=%s) is not available, and\n"
+ "can't be installed while this script is running. Please install\n"
+ " a more recent version first.\n\n(Currently using %r)"
+ ) % (version, e.args[0])
+ sys.exit(2)
+
+def download_setuptools(
+ version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
+ delay = 15
+):
+ """Download setuptools from a specified location and return its filename
+
+ `version` should be a valid setuptools version number that is available
+ as an egg for download under the `download_base` URL (which should end
+ with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `delay` is the number of seconds to pause before an actual download attempt.
+ """
+ import urllib2, shutil
+ egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
+ url = download_base + egg_name
+ saveto = os.path.join(to_dir, egg_name)
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ from distutils import log
+ if delay:
+ log.warn("""
+---------------------------------------------------------------------------
+This script requires setuptools version %s to run (even to display
+help). I will attempt to download it for you (from
+%s), but
+you may need to enable firewall access for this script first.
+I will start the download in %d seconds.
+
+(Note: if this machine does not have network access, please obtain the file
+
+ %s
+
+and place it in this directory before rerunning this script.)
+---------------------------------------------------------------------------""",
+ version, download_base, delay, url
+ ); from time import sleep; sleep(delay)
+ log.warn("Downloading %s", url)
+ src = urllib2.urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = _validate_md5(egg_name, src.read())
+ dst = open(saveto,"wb"); dst.write(data)
+ finally:
+ if src: src.close()
+ if dst: dst.close()
+ return os.path.realpath(saveto)
+
+def main(argv, version=DEFAULT_VERSION):
+ """Install or upgrade setuptools and EasyInstall"""
+
+ try:
+ import setuptools
+ except ImportError:
+ egg = None
+ try:
+ egg = download_setuptools(version, delay=0)
+ sys.path.insert(0,egg)
+ from setuptools.command.easy_install import main
+ return main(list(argv)+[egg]) # we're done here
+ finally:
+ if egg and os.path.exists(egg):
+ os.unlink(egg)
+ else:
+ if setuptools.__version__ == '0.0.1':
+ # tell the user to uninstall obsolete version
+ use_setuptools(version)
+
+ req = "setuptools>="+version
+ import pkg_resources
+ try:
+ pkg_resources.require(req)
+ except pkg_resources.VersionConflict:
+ try:
+ from setuptools.command.easy_install import main
+ except ImportError:
+ from easy_install import main
+ main(list(argv)+[download_setuptools(delay=0)])
+ sys.exit(0) # try to force an exit
+ else:
+ if argv:
+ from setuptools.command.easy_install import main
+ main(argv)
+ else:
+ print "Setuptools version",version,"or greater has been installed."
+ print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
+
+
+
+def update_md5(filenames):
+ """Update our built-in md5 registry"""
+
+ import re
+ from md5 import md5
+
+ for name in filenames:
+ base = os.path.basename(name)
+ f = open(name,'rb')
+ md5_data[base] = md5(f.read()).hexdigest()
+ f.close()
+
+ data = [" %r: %r,\n" % it for it in md5_data.items()]
+ data.sort()
+ repl = "".join(data)
+
+ import inspect
+ srcfile = inspect.getsourcefile(sys.modules[__name__])
+ f = open(srcfile, 'rb'); src = f.read(); f.close()
+
+ match = re.search("\nmd5_data = {\n([^}]+)}", src)
+ if not match:
+ print >>sys.stderr, "Internal error!"
+ sys.exit(2)
+
+ src = src[:match.start(1)] + repl + src[match.end(1):]
+ f = open(srcfile,'w')
+ f.write(src)
+ f.close()
+
+
+if __name__=='__main__':
+ if len(sys.argv)>2 and sys.argv[1]=='--md5update':
+ update_md5(sys.argv[2:])
+ else:
+ main(sys.argv[1:])
+
+
+
+
+
diff --git a/git/index/__init__.py b/git/index/__init__.py
new file mode 100644
index 00000000..fe4a7f59
--- /dev/null
+++ b/git/index/__init__.py
@@ -0,0 +1,4 @@
+"""Initialize the index package"""
+
+from base import *
+from typ import *
\ No newline at end of file
diff --git a/git/index/base.py b/git/index/base.py
new file mode 100644
index 00000000..d813e6c1
--- /dev/null
+++ b/git/index/base.py
@@ -0,0 +1,1153 @@
+# index.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+import tempfile
+import os
+import sys
+import subprocess
+import glob
+from cStringIO import StringIO
+
+from stat import S_ISLNK
+
+from typ import (
+ BaseIndexEntry,
+ IndexEntry,
+ )
+
+from util import (
+ TemporaryFileSwap,
+ post_clear_cache,
+ default_index,
+ git_working_dir
+ )
+
+import git.objects
+import git.diff as diff
+
+from git.exc import (
+ GitCommandError,
+ CheckoutError
+ )
+
+from git.objects import (
+ Blob,
+ Submodule,
+ Tree,
+ Object,
+ Commit,
+ )
+
+from git.objects.util import Serializable
+
+from git.util import (
+ IndexFileSHA1Writer,
+ LazyMixin,
+ LockedFD,
+ join_path_native,
+ file_contents_ro,
+ to_native_path_linux,
+ to_native_path
+ )
+
+from fun import (
+ entry_key,
+ write_cache,
+ read_cache,
+ aggressive_tree_merge,
+ write_tree_from_cache,
+ stat_mode_to_index_mode,
+ S_IFGITLINK
+ )
+
+from gitdb.base import IStream
+from gitdb.db import MemoryDB
+from gitdb.util import to_bin_sha
+from itertools import izip
+
+__all__ = ( 'IndexFile', 'CheckoutError' )
+
+
+class IndexFile(LazyMixin, diff.Diffable, Serializable):
+ """
+ Implements an Index that can be manipulated using a native implementation in
+ order to save git command function calls wherever possible.
+
+    It provides custom merging facilities allowing you to merge without actually changing
+    your index or your working tree. This way you can perform your own test-merges based
+    on the index only, without having to deal with the working copy. This is useful
+    in case of partial working trees.
+
+ ``Entries``
+
+    The index contains an entries dict whose keys are tuples of (path, stage) and
+    whose values are IndexEntry instances, to facilitate access.
+
+    You may read the entries dict or manipulate it using IndexEntry instances, i.e.::
+
+ index.entries[index.entry_key(index_entry_instance)] = index_entry_instance
+
+ Make sure you use index.write() once you are done manipulating the index directly
+ before operating on it using the git command"""
+ __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path")
+ _VERSION = 2 # latest version we support
+ S_IFGITLINK = S_IFGITLINK # a submodule
+
+ def __init__(self, repo, file_path=None):
+ """Initialize this Index instance, optionally from the given ``file_path``.
+ If no file_path is given, we will be created from the current index file.
+
+ If a stream is not given, the stream will be initialized from the current
+ repository's index on demand."""
+ self.repo = repo
+ self.version = self._VERSION
+ self._extension_data = ''
+ self._file_path = file_path or self._index_path()
+
+ def _set_cache_(self, attr):
+ if attr == "entries":
+ # read the current index
+ # try memory map for speed
+ lfd = LockedFD(self._file_path)
+ try:
+ fd = lfd.open(write=False, stream=False)
+ except OSError:
+ lfd.rollback()
+ # in new repositories, there may be no index, which means we are empty
+ self.entries = dict()
+ return
+ # END exception handling
+
+ # Here it comes: on windows in python 2.5, memory maps aren't closed properly
+ # Hence we are in trouble if we try to delete a file that is memory mapped,
+ # which happens during read-tree.
+ # In this case, we will just read the memory in directly.
+            # It's insanely bad ... I am disappointed!
+ allow_mmap = (os.name != 'nt' or sys.version_info[1] > 5)
+ stream = file_contents_ro(fd, stream=True, allow_mmap=allow_mmap)
+
+ try:
+ self._deserialize(stream)
+ finally:
+ lfd.rollback()
+                # The handles will be closed on destruction
+ # END read from default index on demand
+ else:
+ super(IndexFile, self)._set_cache_(attr)
+
+ def _index_path(self):
+ return join_path_native(self.repo.git_dir, "index")
+
+ @property
+ def path(self):
+ """ :return: Path to the index file we are representing """
+ return self._file_path
+
+ def _delete_entries_cache(self):
+ """Safely clear the entries cache so it can be recreated"""
+ try:
+ del(self.entries)
+ except AttributeError:
+ # fails in python 2.6.5 with this exception
+ pass
+ # END exception handling
+
+ #{ Serializable Interface
+
+ def _deserialize(self, stream):
+ """Initialize this instance with index values read from the given stream"""
+        self.version, self.entries, self._extension_data, content_sha = read_cache(stream)
+ return self
+
+ def _entries_sorted(self):
+ """:return: list of entries, in a sorted fashion, first by path, then by stage"""
+ entries_sorted = self.entries.values()
+ entries_sorted.sort(key=lambda e: (e.path, e.stage)) # use path/stage as sort key
+ return entries_sorted
+
+ def _serialize(self, stream, ignore_tree_extension_data=False):
+ entries = self._entries_sorted()
+ write_cache(entries,
+ stream,
+ (ignore_tree_extension_data and None) or self._extension_data)
+ return self
+
+
+ #} END serializable interface
+
+ def write(self, file_path = None, ignore_tree_extension_data=False):
+ """Write the current state to our file path or to the given one
+
+ :param file_path:
+ If None, we will write to our stored file path from which we have
+ been initialized. Otherwise we write to the given file path.
+ Please note that this will change the file_path of this index to
+ the one you gave.
+
+ :param ignore_tree_extension_data:
+ If True, the TREE type extension data read in the index will not
+ be written to disk. Use this if you have altered the index and
+ would like to use git-write-tree afterwards to create a tree
+ representing your written changes.
+ If this data is present in the written index, git-write-tree
+ will instead write the stored/cached tree.
+ Alternatively, use IndexFile.write_tree() to handle this case
+ automatically
+
+ :return: self"""
+ # make sure we have our entries read before getting a write lock
+ # else it would be done when streaming. This can happen
+ # if one doesn't change the index, but writes it right away
+ self.entries
+ lfd = LockedFD(file_path or self._file_path)
+ stream = lfd.open(write=True, stream=True)
+
+ self._serialize(stream, ignore_tree_extension_data)
+
+ lfd.commit()
+
+ # make sure we represent what we have written
+ if file_path is not None:
+ self._file_path = file_path
+
+ @post_clear_cache
+ @default_index
+ def merge_tree(self, rhs, base=None):
+ """Merge the given rhs treeish into the current index, possibly taking
+ a common base treeish into account.
+
+ As opposed to the from_tree_ method, this allows you to use an already
+ existing tree as the left side of the merge
+
+ :param rhs:
+ treeish reference pointing to the 'other' side of the merge.
+
+ :param base:
+ optional treeish reference pointing to the common base of 'rhs' and
+ this index which equals lhs
+
+ :return:
+ self ( containing the merge and possibly unmerged entries in case of
+ conflicts )
+
+ :raise GitCommandError:
+            If there is a merge conflict. The error will be raised at the first
+            conflicting path. If you want to do the merge resolution yourself, you
+            have to commit the changed index ( or make a valid tree from it ) and
+            retry with a three-way index.from_tree call. """
+ # -i : ignore working tree status
+ # --aggressive : handle more merge cases
+ # -m : do an actual merge
+ args = ["--aggressive", "-i", "-m"]
+ if base is not None:
+ args.append(base)
+ args.append(rhs)
+
+ self.repo.git.read_tree(args)
+ return self
+
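+    # A minimal merge_tree sketch (editor's illustration, not part of the
+    # original module; the branch name and base treeish are hypothetical):
+    #
+    #   repo.index.merge_tree('feature', base='HEAD~1')
+    #   repo.index.commit("merged 'feature' in-index")
+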
+ @classmethod
+ def new(cls, repo, *tree_sha):
+ """ Merge the given treeish revisions into a new index which is returned.
+ This method behaves like git-read-tree --aggressive when doing the merge.
+
+ :param repo: The repository treeish are located in.
+
+ :param tree_sha:
+ 20 byte or 40 byte tree sha or tree objects
+
+ :return:
+ New IndexFile instance. Its path will be undefined.
+ If you intend to write such a merged Index, supply an alternate file_path
+ to its 'write' method."""
+ base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha])
+
+ inst = cls(repo)
+ # convert to entries dict
+ entries = dict(izip(((e.path, e.stage) for e in base_entries),
+ (IndexEntry.from_base(e) for e in base_entries)))
+
+ inst.entries = entries
+ return inst
+
+
+ @classmethod
+ def from_tree(cls, repo, *treeish, **kwargs):
+ """Merge the given treeish revisions into a new index which is returned.
+ The original index will remain unaltered
+
+ :param repo:
+ The repository treeish are located in.
+
+ :param treeish:
+ One, two or three Tree Objects, Commits or 40 byte hexshas. The result
+ changes according to the amount of trees.
+ If 1 Tree is given, it will just be read into a new index
+ If 2 Trees are given, they will be merged into a new index using a
+ two way merge algorithm. Tree 1 is the 'current' tree, tree 2 is the 'other'
+ one. It behaves like a fast-forward.
+ If 3 Trees are given, a 3-way merge will be performed with the first tree
+ being the common ancestor of tree 2 and tree 3. Tree 2 is the 'current' tree,
+ tree 3 is the 'other' one
+
+ :param kwargs:
+ Additional arguments passed to git-read-tree
+
+ :return:
+ New IndexFile instance. It will point to a temporary index location which
+ does not exist anymore. If you intend to write such a merged Index, supply
+ an alternate file_path to its 'write' method.
+
+ :note:
+ In the three-way merge case, --aggressive will be specified to automatically
+ resolve more cases in a commonly correct manner. Specify trivial=True as kwarg
+ to override that.
+
+ As the underlying git-read-tree command takes into account the current index,
+            it will be temporarily moved out of the way to assure there are no unexpected
+ interferences."""
+ if len(treeish) == 0 or len(treeish) > 3:
+ raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish))
+
+ arg_list = list()
+ # ignore that working tree and index possibly are out of date
+ if len(treeish)>1:
+ # drop unmerged entries when reading our index and merging
+ arg_list.append("--reset")
+ # handle non-trivial cases the way a real merge does
+ arg_list.append("--aggressive")
+ # END merge handling
+
+ # tmp file created in git home directory to be sure renaming
+ # works - /tmp/ dirs could be on another device
+ tmp_index = tempfile.mktemp('','',repo.git_dir)
+ arg_list.append("--index-output=%s" % tmp_index)
+ arg_list.extend(treeish)
+
+ # move current index out of the way - otherwise the merge may fail
+        # as it considers existing entries. Moving it essentially clears the index.
+        # Unfortunately there is no 'soft' way to do it.
+        # The TemporaryFileSwap assures the original file gets put back.
+ index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index'))
+ try:
+ repo.git.read_tree(*arg_list, **kwargs)
+ index = cls(repo, tmp_index)
+ index.entries # force it to read the file as we will delete the temp-file
+ del(index_handler) # release as soon as possible
+ finally:
+ if os.path.exists(tmp_index):
+ os.remove(tmp_index)
+ # END index merge handling
+
+ return index
+
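+    # Hypothetical three-way merge sketch (editor's illustration; the treeish
+    # names are assumptions): merge 'their_branch' onto HEAD using a common
+    # base, without touching the index file on disk.
+    #
+    #   merged = IndexFile.from_tree(repo, 'merge_base', 'HEAD', 'their_branch')
+    #   merged.write('/tmp/merged_index')   # alternate path, as noted above
+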
+ # UTILITIES
+ def _iter_expand_paths(self, paths):
+ """Expand the directories in list of paths to the corresponding paths accordingly,
+
+ Note: git will add items multiple times even if a glob overlapped
+ with manually specified paths or if paths where specified multiple
+ times - we respect that and do not prune"""
+ def raise_exc(e):
+ raise e
+ r = self.repo.working_tree_dir
+ rs = r + os.sep
+ for path in paths:
+ abs_path = path
+ if not os.path.isabs(abs_path):
+ abs_path = os.path.join(r, path)
+ # END make absolute path
+
+ # resolve globs if possible
+ if '?' in path or '*' in path or '[' in path:
+ for f in self._iter_expand_paths(glob.glob(abs_path)):
+ yield f.replace(rs, '')
+ continue
+ # END glob handling
+ try:
+ for root, dirs, files in os.walk(abs_path, onerror=raise_exc):
+ for rela_file in files:
+ # add relative paths only
+ yield os.path.join(root.replace(rs, ''), rela_file)
+ # END for each file in subdir
+ # END for each subdirectory
+ except OSError:
+ # was a file or something that could not be iterated
+ yield path.replace(rs, '')
+ # END path exception handling
+ # END for each path
+
+ def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
+ read_from_stdout=True):
+ """Write path to proc.stdin and make sure it processes the item, including progress.
+
+ :return: stdout string
+        :param read_from_stdout: if True, proc.stdout will be read after the item
+            was sent to stdin, and the line read will be returned. Otherwise None is returned
+ :note: There is a bug in git-update-index that prevents it from sending
+ reports just in time. This is why we have a version that tries to
+ read stdout and one which doesn't. In fact, the stdout is not
+ important as the piped-in files are processed anyway and just in time
+        :note: Newlines are essential here, git's behaviour is somewhat inconsistent
+ on this depending on the version, hence we try our best to deal with
+ newlines carefully. Usually the last newline will not be sent, instead
+ we will close stdin to break the pipe."""
+
+ fprogress(filepath, False, item)
+ rval = None
+ try:
+ proc.stdin.write("%s\n" % filepath)
+ except IOError:
+            # pipe broke, usually because some error happened
+ raise fmakeexc()
+ # END write exception handling
+ proc.stdin.flush()
+ if read_from_stdout:
+ rval = proc.stdout.readline().strip()
+ fprogress(filepath, True, item)
+ return rval
+
+ def iter_blobs(self, predicate = lambda t: True):
+ """
+ :return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)
+
+ :param predicate:
+ Function(t) returning True if tuple(stage, Blob) should be yielded by the
+ iterator. A default filter, the BlobFilter, allows you to yield blobs
+ only if they match a given list of paths. """
+ for entry in self.entries.itervalues():
+ # TODO: is it necessary to convert the mode ? We did that when adding
+ # it to the index, right ?
+ mode = stat_mode_to_index_mode(entry.mode)
+ blob = entry.to_blob(self.repo)
+ blob.size = entry.size
+ output = (entry.stage, blob)
+ if predicate(output):
+ yield output
+ # END for each entry
+
+ def unmerged_blobs(self):
+ """
+ :return:
+ Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being
+ a dictionary associating a path in the index with a list containing
+ sorted stage/blob pairs
+
+ :note:
+ Blobs that have been removed in one side simply do not exist in the
+ given stage. I.e. a file removed on the 'other' branch whose entries
+ are at stage 3 will not have a stage 3 entry.
+ """
+ is_unmerged_blob = lambda t: t[0] != 0
+ path_map = dict()
+ for stage, blob in self.iter_blobs(is_unmerged_blob):
+ path_map.setdefault(blob.path, list()).append((stage, blob))
+ # END for each unmerged blob
+ for l in path_map.itervalues():
+ l.sort()
+ return path_map
+
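+    # Conflict inspection sketch (editor's illustration, assuming an IndexFile
+    # instance named 'index'):
+    #
+    #   for path, pairs in index.unmerged_blobs().iteritems():
+    #       for stage, blob in pairs:
+    #           print path, stage, blob.hexsha
+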
+ @classmethod
+ def entry_key(cls, *entry):
+ return entry_key(*entry)
+
+ def resolve_blobs(self, iter_blobs):
+ """Resolve the blobs given in blob iterator. This will effectively remove the
+ index entries of the respective path at all non-null stages and add the given
+ blob as new stage null blob.
+
+ For each path there may only be one blob, otherwise a ValueError will be raised
+ claiming the path is already at stage 0.
+
+ :raise ValueError: if one of the blobs already existed at stage 0
+ :return: self
+
+ :note:
+ You will have to write the index manually once you are done, i.e.
+ index.resolve_blobs(blobs).write()
+ """
+ for blob in iter_blobs:
+ stage_null_key = (blob.path, 0)
+ if stage_null_key in self.entries:
+ raise ValueError( "Path %r already exists at stage 0" % blob.path )
+ # END assert blob is not stage 0 already
+
+ # delete all possible stages
+ for stage in (1, 2, 3):
+ try:
+ del( self.entries[(blob.path, stage)])
+ except KeyError:
+ pass
+ # END ignore key errors
+ # END for each possible stage
+
+ self.entries[stage_null_key] = IndexEntry.from_blob(blob)
+ # END for each blob
+
+ return self
+
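+    # Resolution sketch (editor's illustration, assuming an IndexFile instance
+    # named 'index'): keep 'our' side (stage 2) of every conflict and persist.
+    #
+    #   ours = (blob for stage, blob in index.iter_blobs(lambda t: t[0] == 2))
+    #   index.resolve_blobs(ours).write()
+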
+ def update(self):
+ """Reread the contents of our index file, discarding all cached information
+ we might have.
+
+        :note: This is a possibly dangerous operation as it will discard your changes
+ to index.entries
+ :return: self"""
+ self._delete_entries_cache()
+ # allows to lazily reread on demand
+ return self
+
+ def write_tree(self):
+ """Writes this index to a corresponding Tree object into the repository's
+ object database and return it.
+
+ :return: Tree object representing this index
+ :note: The tree will be written even if one or more objects the tree refers to
+            do not yet exist in the object database. This could happen if you added
+ Entries to the index directly.
+ :raise ValueError: if there are no entries in the cache
+ :raise UnmergedEntriesError: """
+ # we obtain no lock as we just flush our contents to disk as tree
+ # If we are a new index, the entries access will load our data accordingly
+ mdb = MemoryDB()
+ entries = self._entries_sorted()
+ binsha, tree_items = write_tree_from_cache(entries, mdb, slice(0, len(entries)))
+
+ # copy changed trees only
+ mdb.stream_copy(mdb.sha_iter(), self.repo.odb)
+
+
+ # note: additional deserialization could be saved if write_tree_from_cache
+ # would return sorted tree entries
+ root_tree = Tree(self.repo, binsha, path='')
+ root_tree._cache = tree_items
+ return root_tree
+
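+    # Sketch (editor's illustration): write the index as a tree and commit it
+    # manually via Commit.create_from_tree.
+    #
+    #   tree = repo.index.write_tree()
+    #   Commit.create_from_tree(repo, tree, "message", parent_commits=None, head=True)
+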
+ def _process_diff_args(self, args):
+ try:
+ args.pop(args.index(self))
+ except IndexError:
+ pass
+ # END remove self
+ return args
+
+ def _to_relative_path(self, path):
+ """:return: Version of path relative to our git directory or raise ValueError
+ if it is not within our git direcotory"""
+ if not os.path.isabs(path):
+ return path
+ relative_path = path.replace(self.repo.working_tree_dir+os.sep, "")
+ if relative_path == path:
+ raise ValueError("Absolute path %r is not in git repository at %r" % (path,self.repo.working_tree_dir))
+ return relative_path
+
+ def _preprocess_add_items(self, items):
+ """ Split the items into two lists of path strings and BaseEntries. """
+ paths = list()
+ entries = list()
+
+ for item in items:
+ if isinstance(item, basestring):
+ paths.append(self._to_relative_path(item))
+ elif isinstance(item, (Blob, Submodule)):
+ entries.append(BaseIndexEntry.from_blob(item))
+ elif isinstance(item, BaseIndexEntry):
+ entries.append(item)
+ else:
+ raise TypeError("Invalid Type: %r" % item)
+ # END for each item
+ return (paths, entries)
+
+ @git_working_dir
+ def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None,
+ write=True):
+ """Add files from the working tree, specific blobs or BaseIndexEntries
+ to the index.
+
+ :param items:
+ Multiple types of items are supported, types can be mixed within one call.
+ Different types imply a different handling. File paths may generally be
+ relative or absolute.
+
+ - path string
+ strings denote a relative or absolute path into the repository pointing to
+ an existing file, i.e. CHANGES, lib/myfile.ext, '/home/gitrepo/lib/myfile.ext'.
+
+ Paths provided like this must exist. When added, they will be written
+ into the object database.
+
+            PathStrings may contain globs, such as 'lib/__init__*'. They can also be
+            directories like 'lib', which will add all the files within the directory
+            and its subdirectories.
+
+ This equals a straight git-add.
+
+ They are added at stage 0
+
+ - Blob or Submodule object
+ Blobs are added as they are assuming a valid mode is set.
+ The file they refer to may or may not exist in the file system, but
+ must be a path relative to our repository.
+
+ If their sha is null ( 40*0 ), their path must exist in the file system
+ relative to the git repository as an object will be created from
+ the data at the path.
+ The handling now very much equals the way string paths are processed, except that
+ the mode you have set will be kept. This allows you to create symlinks
+            by setting the mode accordingly and writing the target of the symlink
+            directly into the file. This equals a default Linux symlink which
+ is not dereferenced automatically, except that it can be created on
+ filesystems not supporting it as well.
+
+ Please note that globs or directories are not allowed in Blob objects.
+
+ They are added at stage 0
+
+ - BaseIndexEntry or type
+ Handling equals the one of Blob objects, but the stage may be
+ explicitly set. Please note that Index Entries require binary sha's.
+
+ :param force:
+ **CURRENTLY INEFFECTIVE**
+ If True, otherwise ignored or excluded files will be
+ added anyway.
+ As opposed to the git-add command, we enable this flag by default
+ as the API user usually wants the item to be added even though
+ they might be excluded.
+
+ :param fprogress:
+ Function with signature f(path, done=False, item=item) called for each
+            path to be added, once when it is about to be added where done==False
+            and once after it was added where done==True.
+ item is set to the actual item we handle, either a Path or a BaseIndexEntry
+ Please note that the processed path is not guaranteed to be present
+ in the index already as the index is currently being processed.
+
+ :param path_rewriter:
+            Function with signature (string) func(BaseIndexEntry), returning a path
+            for each passed entry which is the path to be actually recorded for the
+            object created from entry.path. This allows you to write an index which
+            is not identical to the layout of the actual files on your hard disk.
+ If not None and ``items`` contain plain paths, these paths will be
+ converted to Entries beforehand and passed to the path_rewriter.
+ Please note that entry.path is relative to the git repository.
+
+ :param write:
+ If True, the index will be written once it was altered. Otherwise
+ the changes only exist in memory and are not available to git commands.
+
+ :return:
+ List(BaseIndexEntries) representing the entries just actually added.
+
+ :raise OSError:
+ if a supplied Path did not exist. Please note that BaseIndexEntry
+ Objects that do not have a null sha will be added even if their paths
+ do not exist.
+ """
+ # sort the entries into strings and Entries, Blobs are converted to entries
+ # automatically
+ # paths can be git-added, for everything else we use git-update-index
+ entries_added = list()
+ paths, entries = self._preprocess_add_items(items)
+ if paths and path_rewriter:
+ for path in paths:
+ abspath = os.path.abspath(path)
+ gitrelative_path = abspath[len(self.repo.working_tree_dir)+1:]
+ blob = Blob(self.repo, Blob.NULL_BIN_SHA,
+ stat_mode_to_index_mode(os.stat(abspath).st_mode),
+ to_native_path_linux(gitrelative_path))
+ entries.append(BaseIndexEntry.from_blob(blob))
+ # END for each path
+ del(paths[:])
+ # END rewrite paths
+
+
+ def store_path(filepath):
+ """Store file at filepath in the database and return the base index entry"""
+ st = os.lstat(filepath) # handles non-symlinks as well
+ stream = None
+ if S_ISLNK(st.st_mode):
+ stream = StringIO(os.readlink(filepath))
+ else:
+ stream = open(filepath, 'rb')
+ # END handle stream
+ fprogress(filepath, False, filepath)
+ istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream))
+ fprogress(filepath, True, filepath)
+ return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode),
+ istream.binsha, 0, to_native_path_linux(filepath)))
+ # END utility method
+
+
+ # HANDLE PATHS
+ if paths:
+ assert len(entries_added) == 0
+ added_files = list()
+ for filepath in self._iter_expand_paths(paths):
+ entries_added.append(store_path(filepath))
+ # END for each filepath
+ # END path handling
+
+
+ # HANDLE ENTRIES
+ if entries:
+ null_mode_entries = [ e for e in entries if e.mode == 0 ]
+ if null_mode_entries:
+ raise ValueError("At least one Entry has a null-mode - please use index.remove to remove files for clarity")
+            # END null mode should be removed
+
+            # HANDLE ENTRY OBJECT CREATION
+ # create objects if required, otherwise go with the existing shas
+ null_entries_indices = [ i for i,e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA ]
+ if null_entries_indices:
+ for ei in null_entries_indices:
+ null_entry = entries[ei]
+ new_entry = store_path(null_entry.path)
+
+ # update null entry
+ entries[ei] = BaseIndexEntry((null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path))
+ # END for each entry index
+ # END null_entry handling
+
+ # REWRITE PATHS
+ # If we have to rewrite the entries, do so now, after we have generated
+ # all object sha's
+ if path_rewriter:
+ for i,e in enumerate(entries):
+ entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e)))
+ # END for each entry
+ # END handle path rewriting
+
+ # just go through the remaining entries and provide progress info
+ for i, entry in enumerate(entries):
+ progress_sent = i in null_entries_indices
+ if not progress_sent:
+ fprogress(entry.path, False, entry)
+ fprogress(entry.path, True, entry)
+ # END handle progress
+            # END for each entry
+ entries_added.extend(entries)
+ # END if there are base entries
+
+ # FINALIZE
+ # add the new entries to this instance
+ for entry in entries_added:
+ self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
+
+ if write:
+ self.write()
+ # END handle write
+
+ return entries_added
+
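+    # Add sketches (editor's illustration; all paths are hypothetical):
+    #
+    #   repo.index.add(['README'])                  # a single file
+    #   repo.index.add(['lib'])                     # a directory, recursively
+    #   repo.index.add(['lib/__init__*'])           # a glob, as described above
+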
+ def _items_to_rela_paths(self, items):
+ """Returns a list of repo-relative paths from the given items which
+ may be absolute or relative paths, entries or blobs"""
+ paths = list()
+ for item in items:
+            if isinstance(item, (BaseIndexEntry, Blob, Submodule)):
+ paths.append(self._to_relative_path(item.path))
+ elif isinstance(item, basestring):
+ paths.append(self._to_relative_path(item))
+ else:
+ raise TypeError("Invalid item type: %r" % item)
+ # END for each item
+ return paths
+
+ @post_clear_cache
+ @default_index
+ def remove(self, items, working_tree=False, **kwargs):
+ """Remove the given items from the index and optionally from
+ the working tree as well.
+
+ :param items:
+            Multiple types of items are supported which may be freely mixed.
+
+ - path string
+ Remove the given path at all stages. If it is a directory, you must
+ specify the r=True keyword argument to remove all file entries
+ below it. If absolute paths are given, they will be converted
+ to a path relative to the git repository directory containing
+ the working tree
+
+ The path string may include globs, such as *.c.
+
+ - Blob Object
+ Only the path portion is used in this case.
+
+ - BaseIndexEntry or compatible type
+                The only relevant information here is the path. The stage is ignored.
+
+ :param working_tree:
+ If True, the entry will also be removed from the working tree, physically
+            removing the respective file. This may fail if there are uncommitted changes
+ in it.
+
+ :param kwargs:
+ Additional keyword arguments to be passed to git-rm, such
+            as 'r' to allow recursive removal of directories.
+
+ :return:
+ List(path_string, ...) list of repository relative paths that have
+ been removed effectively.
+ This is interesting to know in case you have provided a directory or
+ globs. Paths are relative to the repository. """
+ args = list()
+ if not working_tree:
+ args.append("--cached")
+ args.append("--")
+
+ # preprocess paths
+ paths = self._items_to_rela_paths(items)
+ removed_paths = self.repo.git.rm(args, paths, **kwargs).splitlines()
+
+ # process output to gain proper paths
+ # rm 'path'
+ return [ p[4:-1] for p in removed_paths ]
+
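+    # Removal sketch (editor's illustration): remove a directory from the
+    # index only, keeping the files on disk.
+    #
+    #   repo.index.remove(['lib'], working_tree=False, r=True)
+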
+ @post_clear_cache
+ @default_index
+ def move(self, items, skip_errors=False, **kwargs):
+ """Rename/move the items, whereas the last item is considered the destination of
+ the move operation. If the destination is a file, the first item ( of two )
+        must be a file as well. If the destination is a directory, it may be preceded
+ by one or more directories or files.
+
+ The working tree will be affected in non-bare repositories.
+
+        :param items:
+ Multiple types of items are supported, please see the 'remove' method
+ for reference.
+ :param skip_errors:
+ If True, errors such as ones resulting from missing source files will
+            be skipped.
+ :param kwargs:
+ Additional arguments you would like to pass to git-mv, such as dry_run
+ or force.
+
+ :return:List(tuple(source_path_string, destination_path_string), ...)
+ A list of pairs, containing the source file moved as well as its
+ actual destination. Relative to the repository root.
+
+        :raise ValueError: If only one item was given
+        :raise GitCommandError: If git could not handle your request"""
+ args = list()
+ if skip_errors:
+ args.append('-k')
+
+ paths = self._items_to_rela_paths(items)
+ if len(paths) < 2:
+ raise ValueError("Please provide at least one source and one destination of the move operation")
+
+ was_dry_run = kwargs.pop('dry_run', kwargs.pop('n', None))
+ kwargs['dry_run'] = True
+
+ # first execute rename in dryrun so the command tells us what it actually does
+ # ( for later output )
+ out = list()
+ mvlines = self.repo.git.mv(args, paths, **kwargs).splitlines()
+
+ # parse result - first 0:n/2 lines are 'checking ', the remaining ones
+ # are the 'renaming' ones which we parse
+ for ln in xrange(len(mvlines)/2, len(mvlines)):
+ tokens = mvlines[ln].split(' to ')
+ assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln]
+
+ # [0] = Renaming x
+ # [1] = y
+ out.append((tokens[0][9:], tokens[1]))
+ # END for each line to parse
+
+ # either prepare for the real run, or output the dry-run result
+ if was_dry_run:
+ return out
+ # END handle dryrun
+
+
+ # now apply the actual operation
+ kwargs.pop('dry_run')
+ self.repo.git.mv(args, paths, **kwargs)
+
+ return out
+
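+    # Move sketches (editor's illustration; paths are hypothetical):
+    #
+    #   repo.index.move(['old_name.py', 'new_name.py'])    # rename a file
+    #   repo.index.move(['a.py', 'b.py', 'some_dir'])      # move into a directory
+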
+ def commit(self, message, parent_commits=None, head=True):
+ """Commit the current default index file, creating a commit object.
+
+ For more information on the arguments, see tree.commit.
+ :note:
+ If you have manually altered the .entries member of this instance,
+ don't forget to write() your changes to disk beforehand.
+
+ :return:
+ Commit object representing the new commit"""
+ tree = self.write_tree()
+ return Commit.create_from_tree(self.repo, tree, message, parent_commits, head)
+
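+    # Commit sketch (editor's illustration; the path is hypothetical):
+    #
+    #   repo.index.add(['README'])
+    #   new_commit = repo.index.commit("add README")
+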
+ @classmethod
+ def _flush_stdin_and_wait(cls, proc, ignore_stdout = False):
+ proc.stdin.flush()
+ proc.stdin.close()
+ stdout = ''
+ if not ignore_stdout:
+ stdout = proc.stdout.read()
+ proc.stdout.close()
+ proc.wait()
+ return stdout
+
+ @default_index
+ def checkout(self, paths=None, force=False, fprogress=lambda *args: None, **kwargs):
+ """Checkout the given paths or all files from the version known to the index into
+ the working tree.
+
+ :note: Be sure you have written pending changes using the ``write`` method
+            in case you have altered the entries dictionary directly
+
+ :param paths:
+ If None, all paths in the index will be checked out. Otherwise an iterable
+ of relative or absolute paths or a single path pointing to files or directories
+ in the index is expected.
+
+ :param force:
+ If True, existing files will be overwritten even if they contain local modifications.
+ If False, these will trigger a CheckoutError.
+
+ :param fprogress:
+ see Index.add_ for signature and explanation.
+ The provided progress information will contain None as path and item if no
+            explicit paths are given. Otherwise progress information will be sent
+            before and after a file has been checked out
+
+ :param kwargs:
+            Additional arguments to be passed to git-checkout-index
+
+ :return:
+ iterable yielding paths to files which have been checked out and are
+ guaranteed to match the version stored in the index
+
+ :raise CheckoutError:
+            If at least one file failed to be checked out. This is a summary,
+            hence it will check out as many files as it can anyway.
+            Also raised if one of the files or directories does not exist in the
+            index ( as opposed to the original git command, which ignores them ).
+            GitCommandError is raised instead if error lines could not be parsed -
+            this truly is an exceptional state
+
+ .. note:: The checkout is limited to checking out the files in the
+ index. Files which are not in the index anymore and exist in
+ the working tree will not be deleted. This behaviour is fundamentally
+ different to *head.checkout*, i.e. if you want git-checkout like behaviour,
+ use head.checkout instead of index.checkout.
+ """
+ args = ["--index"]
+ if force:
+ args.append("--force")
+
+ def handle_stderr(proc, iter_checked_out_files):
+ stderr = proc.stderr.read()
+ if not stderr:
+ return
+ # line contents:
+ # git-checkout-index: this already exists
+ failed_files = list()
+ failed_reasons = list()
+ unknown_lines = list()
+ endings = (' already exists', ' is not in the cache', ' does not exist at stage', ' is unmerged')
+ for line in stderr.splitlines():
+ if not line.startswith("git checkout-index: ") and not line.startswith("git-checkout-index: "):
+ is_a_dir = " is a directory"
+ unlink_issue = "unable to unlink old '"
+ if line.endswith(is_a_dir):
+ failed_files.append(line[:-len(is_a_dir)])
+ failed_reasons.append(is_a_dir)
+ elif line.startswith(unlink_issue):
+ failed_files.append(line[len(unlink_issue):line.rfind("'")])
+ failed_reasons.append(unlink_issue)
+ else:
+ unknown_lines.append(line)
+ continue
+ # END special lines parsing
+
+ for e in endings:
+ if line.endswith(e):
+ failed_files.append(line[20:-len(e)])
+ failed_reasons.append(e)
+ break
+ # END if ending matches
+ # END for each possible ending
+ # END for each line
+ if unknown_lines:
+ raise GitCommandError(("git-checkout-index", ), 128, stderr)
+ if failed_files:
+ valid_files = list(set(iter_checked_out_files) - set(failed_files))
+ raise CheckoutError("Some files could not be checked out from the index due to local modifications", failed_files, valid_files, failed_reasons)
+ # END stderr handler
+
+
+ if paths is None:
+ args.append("--all")
+ kwargs['as_process'] = 1
+ fprogress(None, False, None)
+ proc = self.repo.git.checkout_index(*args, **kwargs)
+ proc.wait()
+ fprogress(None, True, None)
+ rval_iter = ( e.path for e in self.entries.itervalues() )
+ handle_stderr(proc, rval_iter)
+ return rval_iter
+ else:
+ if isinstance(paths, basestring):
+ paths = [paths]
+
+ # make sure we have our entries loaded before we start checkout_index
+ # which will hold a lock on it. We try to get the lock as well during
+ # our entries initialization
+ self.entries
+
+ args.append("--stdin")
+ kwargs['as_process'] = True
+ kwargs['istream'] = subprocess.PIPE
+ proc = self.repo.git.checkout_index(args, **kwargs)
+ make_exc = lambda : GitCommandError(("git-checkout-index",)+tuple(args), 128, proc.stderr.read())
+ checked_out_files = list()
+
+ for path in paths:
+ co_path = to_native_path_linux(self._to_relative_path(path))
+ # if the item is not in the index, it could be a directory
+ path_is_directory = False
+
+ try:
+ self.entries[(co_path, 0)]
+ except KeyError:
+ dir = co_path
+ if not dir.endswith('/'):
+ dir += '/'
+ for entry in self.entries.itervalues():
+ if entry.path.startswith(dir):
+ p = entry.path
+ self._write_path_to_stdin(proc, p, p, make_exc,
+ fprogress, read_from_stdout=False)
+ checked_out_files.append(p)
+ path_is_directory = True
+ # END if entry is in directory
+ # END for each entry
+                # END path exception handling
+
+ if not path_is_directory:
+ self._write_path_to_stdin(proc, co_path, path, make_exc,
+ fprogress, read_from_stdout=False)
+ checked_out_files.append(co_path)
+ # END path is a file
+ # END for each path
+ self._flush_stdin_and_wait(proc, ignore_stdout=True)
+
+ handle_stderr(proc, checked_out_files)
+ return checked_out_files
+ # END paths handling
+ assert "Should not reach this point"
+
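+    # Checkout sketches (editor's illustration; the path is hypothetical):
+    #
+    #   repo.index.checkout()                              # everything in the index
+    #   repo.index.checkout(paths=['README'], force=True)  # overwrite local edits
+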
+ @default_index
+ def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs):
+ """Reset the index to reflect the tree at the given commit. This will not
+ adjust our HEAD reference as opposed to HEAD.reset by default.
+
+ :param commit:
+ Revision, Reference or Commit specifying the commit we should represent.
+ If you want to specify a tree only, use IndexFile.from_tree and overwrite
+ the default index.
+
+ :param working_tree:
+ If True, the files in the working tree will reflect the changed index.
+ If False, the working tree will not be touched
+ Please note that changes to the working copy will be discarded without
+ warning !
+
+ :param head:
+ If True, the head will be set to the given commit. This is False by default,
+ but if True, this method behaves like HEAD.reset.
+
+ :param paths: if given as an iterable of absolute or repository-relative paths,
+            only these will be reset to their state at the given commit-ish.
+ The paths need to exist at the commit, otherwise an exception will be
+ raised.
+
+ :param kwargs:
+ Additional keyword arguments passed to git-reset
+
+        .. note:: IndexFile.reset, as opposed to HEAD.reset, will not delete any files
+ in order to maintain a consistent working tree. Instead, it will just
+ checkout the files according to their state in the index.
+ If you want git-reset like behaviour, use *HEAD.reset* instead.
+
+ :return: self """
+ # what we actually want to do is to merge the tree into our existing
+ # index, which is what git-read-tree does
+ new_inst = type(self).from_tree(self.repo, commit)
+ if not paths:
+ self.entries = new_inst.entries
+ else:
+ nie = new_inst.entries
+ for path in paths:
+ path = self._to_relative_path(path)
+ try:
+ key = entry_key(path, 0)
+ self.entries[key] = nie[key]
+ except KeyError:
+                    # if key is not in theirs, it mustn't be in ours
+ try:
+ del(self.entries[key])
+ except KeyError:
+ pass
+ # END handle deletion keyerror
+ # END handle keyerror
+ # END for each path
+ # END handle paths
+ self.write()
+
+ if working_tree:
+ self.checkout(paths=paths, force=True)
+ # END handle working tree
+
+ if head:
+ self.repo.head.set_commit(self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit)
+ # END handle head change
+
+ return self
+
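+    # Reset sketch (editor's illustration; the path is hypothetical): make the
+    # index and working tree match HEAD~1 for one file, leaving HEAD untouched.
+    #
+    #   repo.index.reset('HEAD~1', paths=['setup.py'], working_tree=True)
+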
+ @default_index
+ def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwargs):
+ """Diff this index against the working copy or a Tree or Commit object
+
+ For a documentation of the parameters and return values, see
+ Diffable.diff
+
+ :note:
+ Will only work with indices that represent the default git index as
+ they have not been initialized with a stream.
+ """
+ # index against index is always empty
+ if other is self.Index:
+ return diff.DiffIndex()
+
+ # index against anything but None is a reverse diff with the respective
+ # item. Handle existing -R flags properly. Transform strings to the object
+ # so that we can call diff on it
+ if isinstance(other, basestring):
+ other = self.repo.rev_parse(other)
+ # END object conversion
+
+ if isinstance(other, Object):
+ # invert the existing R flag
+ cur_val = kwargs.get('R', False)
+ kwargs['R'] = not cur_val
+ return other.diff(self.Index, paths, create_patch, **kwargs)
+        # END diff against other item handling
+
+ # if other is not None here, something is wrong
+ if other is not None:
+ raise ValueError( "other must be None, Diffable.Index, a Tree or Commit, was %r" % other )
+
+ # diff against working copy - can be handled by superclass natively
+ return super(IndexFile, self).diff(other, paths, create_patch, **kwargs)
+
diff --git a/git/index/fun.py b/git/index/fun.py
new file mode 100644
index 00000000..9b35bf04
--- /dev/null
+++ b/git/index/fun.py
@@ -0,0 +1,322 @@
+# Contains standalone functions to accompany the index implementation and make it
+# more versatile
+# NOTE: Autodoc hates it if this is a docstring
+from stat import (
+                    S_IFDIR,
+                    S_IFLNK,
+                    S_ISLNK,
+                    S_ISDIR,
+                    S_IFMT,
+                    S_IFREG,
+                )
+
+S_IFGITLINK = S_IFLNK | S_IFDIR # a submodule
+
+from cStringIO import StringIO
+
+from git.util import IndexFileSHA1Writer
+from git.exc import UnmergedEntriesError
+from git.objects.fun import (
+ tree_to_stream,
+ traverse_tree_recursive,
+ traverse_trees_recursive
+ )
+
+from typ import (
+ BaseIndexEntry,
+ IndexEntry,
+ CE_NAMEMASK,
+ CE_STAGESHIFT
+ )
+CE_NAMEMASK_INV = ~CE_NAMEMASK
+
+from util import (
+ pack,
+ unpack
+ )
+
+from gitdb.base import IStream
+from gitdb.typ import str_tree_type
+
+__all__ = ('write_cache', 'read_cache', 'write_tree_from_cache', 'entry_key',
+ 'stat_mode_to_index_mode', 'S_IFGITLINK')
+
+
+def stat_mode_to_index_mode(mode):
+ """Convert the given mode from a stat call to the corresponding index mode
+ and return it"""
+ if S_ISLNK(mode): # symlinks
+ return S_IFLNK
+ if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK: # submodules
+ return S_IFGITLINK
+ return S_IFREG | 0644 | (mode & 0100) # blobs with or without executable bit
+
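+# Sanity sketch (editor's illustration): a plain regular-file mode without the
+# executable bit maps through unchanged.
+#
+#   from stat import S_IFREG
+#   assert stat_mode_to_index_mode(S_IFREG | 0644) == S_IFREG | 0644
+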
+
+def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer):
+ """Write the cache represented by entries to a stream
+
+ :param entries: **sorted** list of entries
+    :param stream: stream to wrap into the ShaStreamCls - it is used for
+ final output.
+
+ :param ShaStreamCls: Type to use when writing to the stream. It produces a sha
+ while writing to it, before the data is passed on to the wrapped stream
+
+ :param extension_data: any kind of data to write as a trailer, it must begin
+        with a 4 byte identifier, followed by its size ( 4 bytes )"""
+ # wrap the stream into a compatible writer
+ stream = ShaStreamCls(stream)
+
+ tell = stream.tell
+ write = stream.write
+
+ # header
+ version = 2
+ write("DIRC")
+ write(pack(">LL", version, len(entries)))
+
+ # body
+ for entry in entries:
+ beginoffset = tell()
+ write(entry[4]) # ctime
+ write(entry[5]) # mtime
+ path = entry[3]
+ plen = len(path) & CE_NAMEMASK # path length
+ assert plen == len(path), "Path %s too long to fit into index" % entry[3]
+ flags = plen | (entry[2] & CE_NAMEMASK_INV) # clear possible previous values
+ write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
+ entry[8], entry[9], entry[10], entry[1], flags))
+ write(path)
+ real_size = ((tell() - beginoffset + 8) & ~7)
+ write("\0" * ((beginoffset + real_size) - tell()))
+ # END for each entry
+
+ # write previously cached extensions data
+ if extension_data is not None:
+ stream.write(extension_data)
+
+ # write the sha over the content
+ stream.write_sha()
+
+def read_header(stream):
+ """Return tuple(version_long, num_entries) from the given stream"""
+ type_id = stream.read(4)
+ if type_id != "DIRC":
+ raise AssertionError("Invalid index file header: %r" % type_id)
+ version, num_entries = unpack(">LL", stream.read(4 * 2))
+
+ # TODO: handle version 3: extended data, see read-cache.c
+ assert version in (1, 2)
+ return version, num_entries
+
+def entry_key(*entry):
+ """:return: Key suitable to be used for the index.entries dictionary
+ :param entry: One instance of type BaseIndexEntry or the path and the stage"""
+ if len(entry) == 1:
+ return (entry[0].path, entry[0].stage)
+ else:
+ return tuple(entry)
+ # END handle entry
+
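+# Key sketch (editor's illustration, assuming a BaseIndexEntry named 'entry'):
+# both call styles yield the same dictionary key.
+#
+#   entry_key(entry) == entry_key(entry.path, entry.stage)
+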
+def read_cache(stream):
+ """Read a cache file from the given stream
+ :return: tuple(version, entries_dict, extension_data, content_sha)
+ * version is the integer version number
+        * entries dict is a dictionary which maps (path, stage) keys to
+        IndexEntry instances
+ * extension_data is '' or 4 bytes of type + 4 bytes of size + size bytes
+ * content_sha is a 20 byte sha on all cache file contents"""
+ version, num_entries = read_header(stream)
+ count = 0
+ entries = dict()
+
+ read = stream.read
+ tell = stream.tell
+ while count < num_entries:
+ beginoffset = tell()
+ ctime = unpack(">8s", read(8))[0]
+ mtime = unpack(">8s", read(8))[0]
+ (dev, ino, mode, uid, gid, size, sha, flags) = \
+ unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
+ path_size = flags & CE_NAMEMASK
+ path = read(path_size)
+
+ real_size = ((tell() - beginoffset + 8) & ~7)
+ data = read((beginoffset + real_size) - tell())
+ entry = IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
+        # entry_key would be the method to use, but we save the effort
+ entries[(path, entry.stage)] = entry
+ count += 1
+ # END for each entry
+
+ # the footer contains extension data and a sha on the content so far
+    # Keep the extension footer, and verify we have a sha in the end
+ # Extension data format is:
+ # 4 bytes ID
+ # 4 bytes length of chunk
+ # repeated 0 - N times
+ extension_data = stream.read(~0)
+ assert len(extension_data) > 19, "Index Footer was not at least a sha on content as it was only %i bytes in size" % len(extension_data)
+
+ content_sha = extension_data[-20:]
+
+ # truncate the sha in the end as we will dynamically create it anyway
+ extension_data = extension_data[:-20]
+
+ return (version, entries, extension_data, content_sha)
+
+def write_tree_from_cache(entries, odb, sl, si=0):
+ """Create a tree from the given sorted list of entries and put the respective
+ trees into the given object database
+
+ :param entries: **sorted** list of IndexEntries
+ :param odb: object database to store the trees in
+ :param si: start index at which we should start creating subtrees
+ :param sl: slice indicating the range we should process on the entries list
+ :return: tuple(binsha, list(tree_entry, ...)) a tuple of a sha and a list of
+ tree entries being a tuple of hexsha, mode, name"""
+ tree_items = list()
+ tree_items_append = tree_items.append
+ ci = sl.start
+ end = sl.stop
+ while ci < end:
+ entry = entries[ci]
+ if entry.stage != 0:
+ raise UnmergedEntriesError(entry)
+ # END abort on unmerged
+ ci += 1
+ rbound = entry.path.find('/', si)
+ if rbound == -1:
+            # it's not a tree
+ tree_items_append((entry.binsha, entry.mode, entry.path[si:]))
+ else:
+ # find common base range
+ base = entry.path[si:rbound]
+ xi = ci
+ while xi < end:
+ oentry = entries[xi]
+ orbound = oentry.path.find('/', si)
+ if orbound == -1 or oentry.path[si:orbound] != base:
+ break
+ # END abort on base mismatch
+ xi += 1
+ # END find common base
+
+ # enter recursion
+ # ci - 1 as we want to count our current item as well
+ sha, tree_entry_list = write_tree_from_cache(entries, odb, slice(ci-1, xi), rbound+1)
+ tree_items_append((sha, S_IFDIR, base))
+
+ # skip ahead
+ ci = xi
+ # END handle bounds
+ # END for each entry
+
+ # finally create the tree
+ sio = StringIO()
+ tree_to_stream(tree_items, sio.write)
+ sio.seek(0)
+
+ istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
+ return (istream.binsha, tree_items)
+
+def _tree_entry_to_baseindexentry(tree_entry, stage):
+ return BaseIndexEntry((tree_entry[1], tree_entry[0], stage <<CE_STAGESHIFT, tree_entry[2]))
+
+def aggressive_tree_merge(odb, tree_shas):
+ """
+ :return: list of BaseIndexEntries representing the aggressive merge of the given
+        trees. All valid entries are on stage 0, while the conflicting ones are left
+        on stage 1, 2 or 3, where stage 1 corresponds to the common ancestor tree,
+        2 to our tree and 3 to 'their' tree.
+    :param tree_shas: 1, 2 or 3 trees as identified by their binary 20 byte shas
+        If 1 or 2 are given, the entries will effectively correspond to the last given tree
+ If 3 are given, a 3 way merge is performed"""
+ out = list()
+ out_append = out.append
+
+ # one and two way is the same for us, as we don't have to handle an existing
+    # index, instead
+ if len(tree_shas) in (1,2):
+ for entry in traverse_tree_recursive(odb, tree_shas[-1], ''):
+ out_append(_tree_entry_to_baseindexentry(entry, 0))
+ # END for each entry
+ return out
+ # END handle single tree
+
+ if len(tree_shas) > 3:
+ raise ValueError("Cannot handle %i trees at once" % len(tree_shas))
+
+ # three trees
+ for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ''):
+ if base is not None:
+ # base version exists
+ if ours is not None:
+ # ours exists
+ if theirs is not None:
+ # it exists in all branches, if it was changed in both
+ # its a conflict, otherwise we take the changed version
+ # This should be the most common branch, so it comes first
+ if( base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0] ) or \
+ ( base[1] != ours[1] and base[1] != theirs[1] and ours[1] != theirs[1] ):
+ # changed by both
+ out_append(_tree_entry_to_baseindexentry(base, 1))
+ out_append(_tree_entry_to_baseindexentry(ours, 2))
+ out_append(_tree_entry_to_baseindexentry(theirs, 3))
+ elif base[0] != ours[0] or base[1] != ours[1]:
+ # only we changed it
+ out_append(_tree_entry_to_baseindexentry(ours, 0))
+ else:
+ # either nobody changed it, or they did. In either
+ # case, use theirs
+ out_append(_tree_entry_to_baseindexentry(theirs, 0))
+ # END handle modification
+ else:
+
+ if ours[0] != base[0] or ours[1] != base[1]:
+ # they deleted it, we changed it, conflict
+ out_append(_tree_entry_to_baseindexentry(base, 1))
+ out_append(_tree_entry_to_baseindexentry(ours, 2))
+ # else:
+ # we didn't change it, ignore
+ # pass
+ # END handle our change
+ # END handle theirs
+ else:
+ if theirs is None:
+                # deleted in both, it's fine - it's out
+ pass
+ else:
+ if theirs[0] != base[0] or theirs[1] != base[1]:
+ # deleted in ours, changed theirs, conflict
+ out_append(_tree_entry_to_baseindexentry(base, 1))
+ out_append(_tree_entry_to_baseindexentry(theirs, 3))
+ # END theirs changed
+ #else:
+                # theirs didn't change
+ # pass
+ # END handle theirs
+ # END handle ours
+ else:
+ # all three can't be None
+ if ours is None:
+ # added in their branch
+ out_append(_tree_entry_to_baseindexentry(theirs, 0))
+ elif theirs is None:
+ # added in our branch
+ out_append(_tree_entry_to_baseindexentry(ours, 0))
+ else:
+ # both have it, except for the base, see whether it changed
+ if ours[0] != theirs[0] or ours[1] != theirs[1]:
+ out_append(_tree_entry_to_baseindexentry(ours, 2))
+ out_append(_tree_entry_to_baseindexentry(theirs, 3))
+ else:
+ # it was added the same in both
+ out_append(_tree_entry_to_baseindexentry(ours, 0))
+ # END handle two items
+ # END handle heads
+ # END handle base exists
+ # END for each entries tuple
+
+ return out
diff --git a/git/index/typ.py b/git/index/typ.py
new file mode 100644
index 00000000..ad988285
--- /dev/null
+++ b/git/index/typ.py
@@ -0,0 +1,173 @@
+"""Module with additional types used by the index"""
+
+from util import (
+ pack,
+ unpack
+ )
+
+from binascii import (
+ b2a_hex,
+ )
+
+from git.objects import Blob
+__all__ = ('BlobFilter', 'BaseIndexEntry', 'IndexEntry')
+
+#{ Invariants
+CE_NAMEMASK = 0x0fff
+CE_STAGEMASK = 0x3000
+CE_EXTENDED = 0x4000
+CE_VALID = 0x8000
+CE_STAGESHIFT = 12
+
+#} END invariants
+
+class BlobFilter(object):
+ """
+    Predicate to be used by iter_blobs, allowing it to filter and return only blobs which
+ match the given list of directories or files.
+
+ The given paths are given relative to the repository.
+ """
+ __slots__ = 'paths'
+
+ def __init__(self, paths):
+ """:param paths:
+ tuple or list of paths which are either pointing to directories or
+ to files relative to the current repository
+ """
+ self.paths = paths
+
+ def __call__(self, stage_blob):
+ path = stage_blob[1].path
+ for p in self.paths:
+ if path.startswith(p):
+ return True
+ # END for each path in filter paths
+ return False
+
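+# Filter sketch (editor's illustration, assuming an IndexFile named 'index'):
+# iterate only blobs below lib/.
+#
+#   for stage, blob in index.iter_blobs(BlobFilter(['lib/'])):
+#       print blob.path
+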
+
+class BaseIndexEntry(tuple):
+ """Small Brother of an index entry which can be created to describe changes
+    done to the index, in which case plenty of additional information is not required.
+
+ As the first 4 data members match exactly to the IndexEntry type, methods
+ expecting a BaseIndexEntry can also handle full IndexEntries even if they
+ use numeric indices for performance reasons. """
+
+ def __str__(self):
+ return "%o %s %i\t%s" % (self.mode, self.hexsha, self.stage, self.path)
+
+ def __repr__(self):
+ return "(%o, %s, %i, %s)" % (self.mode, self.hexsha, self.stage, self.path)
+
+ @property
+ def mode(self):
+ """ File Mode, compatible to stat module constants """
+ return self[0]
+
+ @property
+ def binsha(self):
+ """binary sha of the blob """
+ return self[1]
+
+ @property
+ def hexsha(self):
+ """hex version of our sha"""
+ return b2a_hex(self[1])
+
+ @property
+ def stage(self):
+ """Stage of the entry, either:
+
+ * 0 = default stage
+ * 1 = stage before a merge or common ancestor entry in case of a 3 way merge
+ * 2 = stage of entries from the 'left' side of the merge
+ * 3 = stage of entries from the right side of the merge
+
+ :note: For more information, see http://www.kernel.org/pub/software/scm/git/docs/git-read-tree.html
+ """
+ return (self[2] & CE_STAGEMASK) >> CE_STAGESHIFT
+
+ @property
+ def path(self):
+ """:return: our path relative to the repository working tree root"""
+ return self[3]
+
+ @property
+ def flags(self):
+ """:return: flags stored with this entry"""
+ return self[2]
+
+ @classmethod
+ def from_blob(cls, blob, stage = 0):
+ """:return: Fully equipped BaseIndexEntry at the given stage"""
+ return cls((blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path))
+
+ def to_blob(self, repo):
+ """:return: Blob using the information of this index entry"""
+ return Blob(repo, self.binsha, self.mode, self.path)
+
+
+class IndexEntry(BaseIndexEntry):
+ """Allows convenient access to IndexEntry data without completely unpacking it.
+
+    Attributes usually accessed often are cached in the tuple whereas others are
+ unpacked on demand.
+
+ See the properties for a mapping between names and tuple indices. """
+ @property
+ def ctime(self):
+ """
+ :return:
+ Tuple(int_time_seconds_since_epoch, int_nano_seconds) of the
+ file's creation time"""
+ return unpack(">LL", self[4])
+
+ @property
+ def mtime(self):
+ """See ctime property, but returns modification time """
+ return unpack(">LL", self[5])
+
+ @property
+ def dev(self):
+ """ Device ID """
+ return self[6]
+
+ @property
+ def inode(self):
+ """ Inode ID """
+ return self[7]
+
+ @property
+ def uid(self):
+ """ User ID """
+ return self[8]
+
+ @property
+ def gid(self):
+ """ Group ID """
+ return self[9]
+
+ @property
+ def size(self):
+ """:return: Uncompressed size of the blob """
+ return self[10]
+
+ @classmethod
+ def from_base(cls, base):
+ """
+ :return:
+ Minimal entry as created from the given BaseIndexEntry instance.
+ Missing values will be set to null-like values
+
+ :param base: Instance of type BaseIndexEntry"""
+ time = pack(">LL", 0, 0)
+ return IndexEntry((base.mode, base.binsha, base.flags, base.path, time, time, 0, 0, 0, 0, 0))
+
+ @classmethod
+ def from_blob(cls, blob, stage = 0):
+ """:return: Minimal entry resembling the given blob object"""
+ time = pack(">LL", 0, 0)
+ return IndexEntry((blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path, time, time, 0, 0, 0, 0, blob.size))
+
+
diff --git a/git/index/util.py b/git/index/util.py
new file mode 100644
index 00000000..bd5fcc03
--- /dev/null
+++ b/git/index/util.py
@@ -0,0 +1,86 @@
+"""Module containing index utilities"""
+import struct
+import tempfile
+import os
+
+__all__ = ( 'TemporaryFileSwap', 'post_clear_cache', 'default_index', 'git_working_dir' )
+
+#{ Aliases
+pack = struct.pack
+unpack = struct.unpack
+
+
+#} END aliases
+
+class TemporaryFileSwap(object):
+ """Utility class moving a file to a temporary location within the same directory
+    and moving it back to its original location on object deletion.
+ __slots__ = ("file_path", "tmp_file_path")
+
+ def __init__(self, file_path):
+ self.file_path = file_path
+ self.tmp_file_path = self.file_path + tempfile.mktemp('','','')
+ # it may be that the source does not exist
+ try:
+ os.rename(self.file_path, self.tmp_file_path)
+ except OSError:
+ pass
+
+ def __del__(self):
+ if os.path.isfile(self.tmp_file_path):
+ if os.name == 'nt' and os.path.exists(self.file_path):
+ os.remove(self.file_path)
+ os.rename(self.tmp_file_path, self.file_path)
+ # END temp file exists
+
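+# Usage sketch (editor's illustration): shadow the index file while running
+# commands that must not see it; deleting the swap object moves it back.
+#
+#   swap = TemporaryFileSwap(os.path.join(repo.git_dir, 'index'))
+#   ...                                  # work without the index
+#   del(swap)                            # original file is restored
+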
+
+#{ Decorators
+
+def post_clear_cache(func):
+ """Decorator for functions that alter the index using the git command. This would
+ invalidate our possibly existing entries dictionary which is why it must be
+ deleted to allow it to be lazily reread later.
+
+ :note:
+ This decorator will not be required once all functions are implemented
+ natively which in fact is possible, but probably not feasible performance wise.
+ """
+ def post_clear_cache_if_not_raised(self, *args, **kwargs):
+ rval = func(self, *args, **kwargs)
+ self._delete_entries_cache()
+ return rval
+
+ # END wrapper method
+ post_clear_cache_if_not_raised.__name__ = func.__name__
+ return post_clear_cache_if_not_raised
+
+def default_index(func):
+ """Decorator assuring the wrapped method may only run if we are the default
+    repository index. This is because we rely on git commands that operate
+ on that index only. """
+ def check_default_index(self, *args, **kwargs):
+ if self._file_path != self._index_path():
+ raise AssertionError( "Cannot call %r on indices that do not represent the default git index" % func.__name__ )
+ return func(self, *args, **kwargs)
+    # END wrapper method
+
+ check_default_index.__name__ = func.__name__
+ return check_default_index
+
+def git_working_dir(func):
+ """Decorator which changes the current working dir to the one of the git
+ repository in order to assure relative paths are handled correctly"""
+ def set_git_working_dir(self, *args, **kwargs):
+ cur_wd = os.getcwd()
+ os.chdir(self.repo.working_tree_dir)
+ try:
+ return func(self, *args, **kwargs)
+ finally:
+ os.chdir(cur_wd)
+ # END handle working dir
+ # END wrapper
+
+ set_git_working_dir.__name__ = func.__name__
+ return set_git_working_dir
+
+#} END decorators
diff --git a/git/objects/__init__.py b/git/objects/__init__.py
new file mode 100644
index 00000000..77f69d29
--- /dev/null
+++ b/git/objects/__init__.py
@@ -0,0 +1,21 @@
+"""
+Import all submodules' main classes into the package space
+"""
+import inspect
+from base import *
+# Fix import dependency - add IndexObject to the util module, so that it can be
+# imported by the submodule.base
+import submodule.util
+submodule.util.IndexObject = IndexObject
+submodule.util.Object = Object
+from submodule.base import *
+from submodule.root import *
+
+# must come after submodule was made available
+from tag import *
+from blob import *
+from commit import *
+from tree import *
+
+__all__ = [ name for name, obj in locals().items()
+ if not (name.startswith('_') or inspect.ismodule(obj)) ] \ No newline at end of file
diff --git a/git/objects/base.py b/git/objects/base.py
new file mode 100644
index 00000000..5f2f7809
--- /dev/null
+++ b/git/objects/base.py
@@ -0,0 +1,172 @@
+# base.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.util import LazyMixin, join_path_native, stream_copy
+from util import get_object_type_by_name
+from gitdb.util import (
+ hex_to_bin,
+ bin_to_hex,
+ basename
+ )
+
+import gitdb.typ as dbtyp
+
+_assertion_msg_format = "Created object %r whose python type %r disagrees with the actual git object type %r"
+
+__all__ = ("Object", "IndexObject")
+
+class Object(LazyMixin):
+	"""Implements an Object which may be a Blob, Tree, Commit or Tag"""
+ NULL_HEX_SHA = '0'*40
+ NULL_BIN_SHA = '\0'*20
+
+ TYPES = (dbtyp.str_blob_type, dbtyp.str_tree_type, dbtyp.str_commit_type, dbtyp.str_tag_type)
+ __slots__ = ("repo", "binsha", "size" )
+ type = None # to be set by subclass
+
+ def __init__(self, repo, binsha):
+ """Initialize an object by identifying it by its binary sha.
+ All keyword arguments will be set on demand if None.
+
+ :param repo: repository this object is located in
+
+ :param binsha: 20 byte SHA1"""
+ super(Object,self).__init__()
+ self.repo = repo
+ self.binsha = binsha
+ assert len(binsha) == 20, "Require 20 byte binary sha, got %r, len = %i" % (binsha, len(binsha))
+
+ @classmethod
+ def new(cls, repo, id):
+ """
+ :return: New Object instance of a type appropriate to the object type behind
+ id. The id of the newly created object will be a binsha even though
+ the input id may have been a Reference or Rev-Spec
+
+ :param id: reference, rev-spec, or hexsha
+
+ :note: This cannot be a __new__ method as it would always call __init__
+ with the input id which is not necessarily a binsha."""
+ return repo.rev_parse(str(id))
+
+ @classmethod
+ def new_from_sha(cls, repo, sha1):
+ """
+ :return: new object instance of a type appropriate to represent the given
+ binary sha1
+ :param sha1: 20 byte binary sha1"""
+ if sha1 == cls.NULL_BIN_SHA:
+ # the NULL binsha is always the root commit
+ return get_object_type_by_name('commit')(repo, sha1)
+ #END handle special case
+ oinfo = repo.odb.info(sha1)
+ inst = get_object_type_by_name(oinfo.type)(repo, oinfo.binsha)
+ inst.size = oinfo.size
+ return inst
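+
+	# Usage sketch (assuming `repo` is a git.Repo and `binsha` is the 20-byte
+	# sha of any existing object): the returned instance is already typed.
+	#
+	#	obj = Object.new_from_sha(repo, binsha)
+	#	assert obj.type in Object.TYPES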
+
+ def _set_cache_(self, attr):
+ """Retrieve object information"""
+ if attr == "size":
+ oinfo = self.repo.odb.info(self.binsha)
+ self.size = oinfo.size
+ # assert oinfo.type == self.type, _assertion_msg_format % (self.binsha, oinfo.type, self.type)
+ else:
+ super(Object,self)._set_cache_(attr)
+
+ def __eq__(self, other):
+ """:return: True if the objects have the same SHA1"""
+ return self.binsha == other.binsha
+
+ def __ne__(self, other):
+ """:return: True if the objects do not have the same SHA1 """
+ return self.binsha != other.binsha
+
+ def __hash__(self):
+ """:return: Hash of our id allowing objects to be used in dicts and sets"""
+ return hash(self.binsha)
+
+ def __str__(self):
+ """:return: string of our SHA1 as understood by all git commands"""
+ return bin_to_hex(self.binsha)
+
+ def __repr__(self):
+ """:return: string with pythonic representation of our object"""
+ return '<git.%s "%s">' % (self.__class__.__name__, self.hexsha)
+
+ @property
+ def hexsha(self):
+ """:return: 40 byte hex version of our 20 byte binary sha"""
+ return bin_to_hex(self.binsha)
+
+ @property
+ def data_stream(self):
+ """ :return: File Object compatible stream to the uncompressed raw data of the object
+ :note: returned streams must be read in order"""
+ return self.repo.odb.stream(self.binsha)
+
+ def stream_data(self, ostream):
+ """Writes our data directly to the given output stream
+ :param ostream: File object compatible stream object.
+ :return: self"""
+ istream = self.repo.odb.stream(self.binsha)
+ stream_copy(istream, ostream)
+ return self
+
+
+class IndexObject(Object):
+	"""Base for all objects that can be part of the index file, namely Tree, Blob and
+	Submodule objects"""
+ __slots__ = ("path", "mode")
+
+	# for compatibility with iterable lists
+ _id_attribute_ = 'path'
+
+ def __init__(self, repo, binsha, mode=None, path=None):
+ """Initialize a newly instanced IndexObject
+ :param repo: is the Repo we are located in
+ :param binsha: 20 byte sha1
+ :param mode: is the stat compatible file mode as int, use the stat module
+			to evaluate the information
+ :param path:
+ is the path to the file in the file system, relative to the git repository root, i.e.
+ file.ext or folder/other.ext
+ :note:
+			Path may not be set if the index object has been created directly, as it cannot
+ be retrieved without knowing the parent tree."""
+ super(IndexObject, self).__init__(repo, binsha)
+ if mode is not None:
+ self.mode = mode
+ if path is not None:
+ self.path = path
+
+ def __hash__(self):
+ """:return:
+			Hash of our path as index items are uniquely identifiable by their path,
+			not by their data!"""
+ return hash(self.path)
+
+ def _set_cache_(self, attr):
+ if attr in IndexObject.__slots__:
+			# they cannot be retrieved later on (not without searching for them)
+ raise AttributeError( "path and mode attributes must have been set during %s object creation" % type(self).__name__ )
+ else:
+ super(IndexObject, self)._set_cache_(attr)
+		# END handle slot attribute
+
+ @property
+ def name(self):
+ """:return: Name portion of the path, effectively being the basename"""
+ return basename(self.path)
+
+ @property
+ def abspath(self):
+ """
+ :return:
+			Absolute path to this index object in the file system (as opposed to the
+			.path field, which is relative to the git repository).
+		
+			The returned path will be native to the system and contains backslashes on windows."""
+ return join_path_native(self.repo.working_tree_dir, self.path)
+
diff --git a/git/objects/blob.py b/git/objects/blob.py
new file mode 100644
index 00000000..32f8c61c
--- /dev/null
+++ b/git/objects/blob.py
@@ -0,0 +1,27 @@
+# blob.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from mimetypes import guess_type
+import base
+
+__all__ = ('Blob', )
+
+class Blob(base.IndexObject):
+ """A Blob encapsulates a git blob object"""
+ DEFAULT_MIME_TYPE = "text/plain"
+ type = "blob"
+
+ __slots__ = tuple()
+
+ @property
+ def mime_type(self):
+ """
+ :return: String describing the mime type of this file (based on the filename)
+ :note: Defaults to 'text/plain' in case the actual file type is unknown. """
+ guesses = None
+ if self.path:
+ guesses = guess_type(self.path)
+ return guesses and guesses[0] or self.DEFAULT_MIME_TYPE
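+
+# Usage sketch: the mime type is guessed purely from the path, never from the
+# blob's data. Assuming `repo` is a git.Repo with a python file in its tree:
+#
+#	blob = repo.head.commit.tree['setup.py']
+#	print blob.mime_type	# typically 'text/x-python'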
diff --git a/git/objects/commit.py b/git/objects/commit.py
new file mode 100644
index 00000000..69a3adc4
--- /dev/null
+++ b/git/objects/commit.py
@@ -0,0 +1,465 @@
+# commit.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.util import (
+ Actor,
+ Iterable,
+ Stats,
+ )
+from git.diff import Diffable
+from tree import Tree
+from gitdb import IStream
+from cStringIO import StringIO
+
+import base
+from gitdb.util import (
+ hex_to_bin
+ )
+from util import (
+ Traversable,
+ Serializable,
+ parse_date,
+ altz_to_utctz_str,
+ parse_actor_and_date
+ )
+from time import (
+ time,
+ altzone
+ )
+import os
+import sys
+
+__all__ = ('Commit', )
+
+class Commit(base.Object, Iterable, Diffable, Traversable, Serializable):
+ """Wraps a git Commit object.
+
+ This class will act lazily on some of its attributes and will query the
+ value on demand only if it involves calling the git binary."""
+
+ # ENVIRONMENT VARIABLES
+ # read when creating new commits
+ env_author_date = "GIT_AUTHOR_DATE"
+ env_committer_date = "GIT_COMMITTER_DATE"
+
+ # CONFIGURATION KEYS
+ conf_encoding = 'i18n.commitencoding'
+
+ # INVARIANTS
+ default_encoding = "UTF-8"
+
+
+ # object configuration
+ type = "commit"
+ __slots__ = ("tree",
+ "author", "authored_date", "author_tz_offset",
+ "committer", "committed_date", "committer_tz_offset",
+ "message", "parents", "encoding")
+ _id_attribute_ = "binsha"
+
+ def __init__(self, repo, binsha, tree=None, author=None, authored_date=None, author_tz_offset=None,
+ committer=None, committed_date=None, committer_tz_offset=None,
+ message=None, parents=None, encoding=None):
+ """Instantiate a new Commit. All keyword arguments taking None as default will
+ be implicitly set on first query.
+
+ :param binsha: 20 byte sha1
+		:param parents: tuple( Commit, ... )
+			is a tuple of commit ids or actual Commits which are our parent(s)
+			in the commit dependency graph
+ :param tree: Tree
+ Tree object
+ :param author: Actor
+ is the author string ( will be implicitly converted into an Actor object )
+ :param authored_date: int_seconds_since_epoch
+ is the authored DateTime - use time.gmtime() to convert it into a
+ different format
+ :param author_tz_offset: int_seconds_west_of_utc
+ is the timezone that the authored_date is in
+ :param committer: Actor
+ is the committer string
+ :param committed_date: int_seconds_since_epoch
+ is the committed DateTime - use time.gmtime() to convert it into a
+ different format
+ :param committer_tz_offset: int_seconds_west_of_utc
+			is the timezone that the committed_date is in
+ :param message: string
+ is the commit message
+ :param encoding: string
+ encoding of the message, defaults to UTF-8
+ :return: git.Commit
+
+ :note: Timezone information is in the same format and in the same sign
+ as what time.altzone returns. The sign is inverted compared to git's
+ UTC timezone."""
+ super(Commit,self).__init__(repo, binsha)
+ if tree is not None:
+ assert isinstance(tree, Tree), "Tree needs to be a Tree instance, was %s" % type(tree)
+ if tree is not None:
+ self.tree = tree
+ if author is not None:
+ self.author = author
+ if authored_date is not None:
+ self.authored_date = authored_date
+ if author_tz_offset is not None:
+ self.author_tz_offset = author_tz_offset
+ if committer is not None:
+ self.committer = committer
+ if committed_date is not None:
+ self.committed_date = committed_date
+ if committer_tz_offset is not None:
+ self.committer_tz_offset = committer_tz_offset
+ if message is not None:
+ self.message = message
+ if parents is not None:
+ self.parents = parents
+ if encoding is not None:
+ self.encoding = encoding
+
+ @classmethod
+ def _get_intermediate_items(cls, commit):
+ return commit.parents
+
+ def _set_cache_(self, attr):
+ if attr in Commit.__slots__:
+			# read the data in a chunk, it's faster - then provide a file wrapper
+ binsha, typename, self.size, stream = self.repo.odb.stream(self.binsha)
+ self._deserialize(StringIO(stream.read()))
+ else:
+ super(Commit, self)._set_cache_(attr)
+ # END handle attrs
+
+ @property
+ def summary(self):
+ """:return: First line of the commit message"""
+ return self.message.split('\n', 1)[0]
+
+ def count(self, paths='', **kwargs):
+ """Count the number of commits reachable from this commit
+
+ :param paths:
+			is an optional path or a list of paths restricting the return value
+ to commits actually containing the paths
+
+ :param kwargs:
+ Additional options to be passed to git-rev-list. They must not alter
+			the output style of the command, or parsing will yield incorrect results
+ :return: int defining the number of reachable commits"""
+ # yes, it makes a difference whether empty paths are given or not in our case
+ # as the empty paths version will ignore merge commits for some reason.
+ if paths:
+ return len(self.repo.git.rev_list(self.hexsha, '--', paths, **kwargs).splitlines())
+ else:
+ return len(self.repo.git.rev_list(self.hexsha, **kwargs).splitlines())
+
+
+ @property
+ def name_rev(self):
+ """
+ :return:
+ String describing the commits hex sha based on the closest Reference.
+ Mostly useful for UI purposes"""
+ return self.repo.git.name_rev(self)
+
+ @classmethod
+ def iter_items(cls, repo, rev, paths='', **kwargs):
+ """Find all commits matching the given criteria.
+
+ :param repo: is the Repo
+ :param rev: revision specifier, see git-rev-parse for viable options
+ :param paths:
+			is an optional path or list of paths, if set only Commits that include the path
+ or paths will be considered
+ :param kwargs:
+ optional keyword arguments to git rev-list where
+ ``max_count`` is the maximum number of commits to fetch
+ ``skip`` is the number of commits to skip
+ ``since`` all commits since i.e. '1970-01-01'
+ :return: iterator yielding Commit items"""
+ if 'pretty' in kwargs:
+ raise ValueError("--pretty cannot be used as parsing expects single sha's only")
+ # END handle pretty
+ args = list()
+ if paths:
+ args.extend(('--', paths))
+ # END if paths
+
+ proc = repo.git.rev_list(rev, args, as_process=True, **kwargs)
+ return cls._iter_from_process_or_stream(repo, proc)
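+
+	# Usage sketch (assuming `repo`): this is the plumbing beneath
+	# Repo.iter_commits(); keyword arguments map to git-rev-list flags.
+	#
+	#	for commit in Commit.iter_items(repo, 'master', max_count=10):
+	#		print commit.hexsha, commit.summary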
+
+ def iter_parents(self, paths='', **kwargs):
+ """Iterate _all_ parents of this commit.
+
+ :param paths:
+ Optional path or list of paths limiting the Commits to those that
+ contain at least one of the paths
+ :param kwargs: All arguments allowed by git-rev-list
+ :return: Iterator yielding Commit objects which are parents of self """
+ # skip ourselves
+ skip = kwargs.get("skip", 1)
+ if skip == 0: # skip ourselves
+ skip = 1
+ kwargs['skip'] = skip
+
+ return self.iter_items(self.repo, self, paths, **kwargs)
+
+ @property
+ def stats(self):
+ """Create a git stat from changes between this commit and its first parent
+ or from all changes done if this is the very first commit.
+
+ :return: git.Stats"""
+ if not self.parents:
+ text = self.repo.git.diff_tree(self.hexsha, '--', numstat=True, root=True)
+ text2 = ""
+ for line in text.splitlines()[1:]:
+ (insertions, deletions, filename) = line.split("\t")
+ text2 += "%s\t%s\t%s\n" % (insertions, deletions, filename)
+ text = text2
+ else:
+ text = self.repo.git.diff(self.parents[0].hexsha, self.hexsha, '--', numstat=True)
+ return Stats._list_from_string(self.repo, text)
+
+ @classmethod
+ def _iter_from_process_or_stream(cls, repo, proc_or_stream):
+ """Parse out commit information into a list of Commit objects
+		We expect one line per commit, and parse the actual commit information directly
+		from our lightning-fast object database
+
+		:param proc_or_stream: git-rev-list process instance or plain stream - one sha per line
+ :return: iterator returning Commit objects"""
+ stream = proc_or_stream
+ if not hasattr(stream,'readline'):
+ stream = proc_or_stream.stdout
+
+ readline = stream.readline
+ while True:
+ line = readline()
+ if not line:
+ break
+ hexsha = line.strip()
+ if len(hexsha) > 40:
+ # split additional information, as returned by bisect for instance
+ hexsha, rest = line.split(None, 1)
+ # END handle extra info
+
+ assert len(hexsha) == 40, "Invalid line: %s" % hexsha
+ yield Commit(repo, hex_to_bin(hexsha))
+ # END for each line in stream
+
+
+ @classmethod
+ def create_from_tree(cls, repo, tree, message, parent_commits=None, head=False):
+ """Commit the given tree, creating a commit object.
+
+ :param repo: Repo object the commit should be part of
+ :param tree: Tree object or hex or bin sha
+ the tree of the new commit
+ :param message: Commit message. It may be an empty string if no message is provided.
+ It will be converted to a string in any case.
+ :param parent_commits:
+ Optional Commit objects to use as parents for the new commit.
+ If empty list, the commit will have no parents at all and become
+ a root commit.
+			If None, the current head commit will be the parent of the
+ new commit object
+ :param head:
+ If True, the HEAD will be advanced to the new commit automatically.
+ Else the HEAD will remain pointing on the previous commit. This could
+ lead to undesired results when diffing files.
+
+ :return: Commit object representing the new commit
+
+ :note:
+ Additional information about the committer and Author are taken from the
+ environment or from the git configuration, see git-commit-tree for
+ more information"""
+ parents = parent_commits
+ if parent_commits is None:
+ try:
+ parent_commits = [ repo.head.commit ]
+ except ValueError:
+ # empty repositories have no head commit
+ parent_commits = list()
+ # END handle parent commits
+ # END if parent commits are unset
+
+ # retrieve all additional information, create a commit object, and
+ # serialize it
+ # Generally:
+ # * Environment variables override configuration values
+ # * Sensible defaults are set according to the git documentation
+
+		# COMMITTER AND AUTHOR INFO
+ cr = repo.config_reader()
+ env = os.environ
+
+ committer = Actor.committer(cr)
+ author = Actor.author(cr)
+
+ # PARSE THE DATES
+ unix_time = int(time())
+ offset = altzone
+
+ author_date_str = env.get(cls.env_author_date, '')
+ if author_date_str:
+ author_time, author_offset = parse_date(author_date_str)
+ else:
+ author_time, author_offset = unix_time, offset
+ # END set author time
+
+ committer_date_str = env.get(cls.env_committer_date, '')
+ if committer_date_str:
+ committer_time, committer_offset = parse_date(committer_date_str)
+ else:
+ committer_time, committer_offset = unix_time, offset
+ # END set committer time
+
+ # assume utf8 encoding
+ enc_section, enc_option = cls.conf_encoding.split('.')
+ conf_encoding = cr.get_value(enc_section, enc_option, cls.default_encoding)
+
+
+		# if the tree is not a Tree object, make sure we create one - otherwise
+ # the created commit object is invalid
+ if isinstance(tree, str):
+ tree = repo.tree(tree)
+ # END tree conversion
+
+ # CREATE NEW COMMIT
+ new_commit = cls(repo, cls.NULL_BIN_SHA, tree,
+ author, author_time, author_offset,
+ committer, committer_time, committer_offset,
+ message, parent_commits, conf_encoding)
+
+ stream = StringIO()
+ new_commit._serialize(stream)
+ streamlen = stream.tell()
+ stream.seek(0)
+
+ istream = repo.odb.store(IStream(cls.type, streamlen, stream))
+ new_commit.binsha = istream.binsha
+
+ if head:
+ # need late import here, importing git at the very beginning throws
+ # as well ...
+ import git.refs
+ try:
+ repo.head.set_commit(new_commit, logmsg="commit: %s" % message)
+ except ValueError:
+ # head is not yet set to the ref our HEAD points to
+ # Happens on first commit
+ import git.refs
+ master = git.refs.Head.create(repo, repo.head.ref, commit=new_commit, logmsg="commit (initial): %s" % message)
+ repo.head.set_reference(master, logmsg='commit: Switching to %s' % master)
+ # END handle empty repositories
+ # END advance head handling
+
+ return new_commit
+
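+	# Hedged sketch of the intended call sequence (assuming a non-bare `repo`):
+	# serialize the current index into a tree, then commit it, advancing HEAD.
+	#
+	#	tree = repo.index.write_tree()
+	#	commit = Commit.create_from_tree(repo, tree, "commit message", head=True)
+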
+ #{ Serializable Implementation
+
+ def _serialize(self, stream):
+ write = stream.write
+ write("tree %s\n" % self.tree)
+ for p in self.parents:
+ write("parent %s\n" % p)
+
+ a = self.author
+ aname = a.name
+ if isinstance(aname, unicode):
+ aname = aname.encode(self.encoding)
+ # END handle unicode in name
+
+ c = self.committer
+ fmt = "%s %s <%s> %s %s\n"
+ write(fmt % ("author", aname, a.email,
+ self.authored_date,
+ altz_to_utctz_str(self.author_tz_offset)))
+
+ # encode committer
+ aname = c.name
+ if isinstance(aname, unicode):
+ aname = aname.encode(self.encoding)
+ # END handle unicode in name
+ write(fmt % ("committer", aname, c.email,
+ self.committed_date,
+ altz_to_utctz_str(self.committer_tz_offset)))
+
+ if self.encoding != self.default_encoding:
+ write("encoding %s\n" % self.encoding)
+
+ write("\n")
+
+ # write plain bytes, be sure its encoded according to our encoding
+ if isinstance(self.message, unicode):
+ write(self.message.encode(self.encoding))
+ else:
+ write(self.message)
+ # END handle encoding
+ return self
+
+ def _deserialize(self, stream):
+		""":param stream: plain data stream containing the serialized commit object
+		:return: self"""
+ readline = stream.readline
+ self.tree = Tree(self.repo, hex_to_bin(readline().split()[1]), Tree.tree_id<<12, '')
+
+ self.parents = list()
+ next_line = None
+ while True:
+ parent_line = readline()
+ if not parent_line.startswith('parent'):
+ next_line = parent_line
+ break
+ # END abort reading parents
+ self.parents.append(type(self)(self.repo, hex_to_bin(parent_line.split()[-1])))
+ # END for each parent line
+ self.parents = tuple(self.parents)
+
+ self.author, self.authored_date, self.author_tz_offset = parse_actor_and_date(next_line)
+ self.committer, self.committed_date, self.committer_tz_offset = parse_actor_and_date(readline())
+
+
+ # now we can have the encoding line, or an empty line followed by the optional
+ # message.
+ self.encoding = self.default_encoding
+ # read encoding or empty line to separate message
+ enc = readline()
+ enc = enc.strip()
+ if enc:
+ self.encoding = enc[enc.find(' ')+1:]
+ # now comes the message separator
+ readline()
+ # END handle encoding
+
+ # decode the authors name
+ try:
+ self.author.name = self.author.name.decode(self.encoding)
+ except UnicodeDecodeError:
+ print >> sys.stderr, "Failed to decode author name '%s' using encoding %s" % (self.author.name, self.encoding)
+ # END handle author's encoding
+
+ # decode committer name
+ try:
+ self.committer.name = self.committer.name.decode(self.encoding)
+ except UnicodeDecodeError:
+ print >> sys.stderr, "Failed to decode committer name '%s' using encoding %s" % (self.committer.name, self.encoding)
+		# END handle committer's encoding
+
+ # a stream from our data simply gives us the plain message
+ # The end of our message stream is marked with a newline that we strip
+ self.message = stream.read()
+ try:
+ self.message = self.message.decode(self.encoding)
+ except UnicodeDecodeError:
+ print >> sys.stderr, "Failed to decode message '%s' using encoding %s" % (self.message, self.encoding)
+ # END exception handling
+ return self
+
+ #} END serializable implementation
diff --git a/git/objects/fun.py b/git/objects/fun.py
new file mode 100644
index 00000000..9b0a377c
--- /dev/null
+++ b/git/objects/fun.py
@@ -0,0 +1,199 @@
+"""Module with functions which are supposed to be as fast as possible"""
+from stat import S_ISDIR
+
+__all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive',
+ 'traverse_tree_recursive')
+
+
+
+
+def tree_to_stream(entries, write):
+	"""Write the given list of entries into a stream using its write method
+ :param entries: **sorted** list of tuples with (binsha, mode, name)
+ :param write: write method which takes a data string"""
+ ord_zero = ord('0')
+ bit_mask = 7 # 3 bits set
+
+ for binsha, mode, name in entries:
+ mode_str = ''
+ for i in xrange(6):
+ mode_str = chr(((mode >> (i*3)) & bit_mask) + ord_zero) + mode_str
+		# END for each octal digit
+
+		# git slices away the first octal if it's zero
+ if mode_str[0] == '0':
+ mode_str = mode_str[1:]
+ # END save a byte
+
+ # here it comes: if the name is actually unicode, the replacement below
+ # will not work as the binsha is not part of the ascii unicode encoding -
+		# hence we must convert to a utf8 string for it to work properly.
+ # According to my tests, this is exactly what git does, that is it just
+ # takes the input literally, which appears to be utf8 on linux.
+ if isinstance(name, unicode):
+ name = name.encode("utf8")
+ write("%s %s\0%s" % (mode_str, name, binsha))
+ # END for each item
+
+
+def tree_entries_from_data(data):
+ """Reads the binary representation of a tree and returns tuples of Tree items
+ :param data: data block with tree data
+ :return: list(tuple(binsha, mode, tree_relative_path), ...)"""
+ ord_zero = ord('0')
+ len_data = len(data)
+ i = 0
+ out = list()
+ while i < len_data:
+ mode = 0
+
+ # read mode
+ # Some git versions truncate the leading 0, some don't
+ # The type will be extracted from the mode later
+ while data[i] != ' ':
+			# shift the existing mode integer up by one octal digit (3 bits)
+			# and add the actual ordinal value of the character
+ mode = (mode << 3) + (ord(data[i]) - ord_zero)
+ i += 1
+ # END while reading mode
+
+ # byte is space now, skip it
+ i += 1
+
+		# parse name, it is NULL terminated
+
+ ns = i
+ while data[i] != '\0':
+ i += 1
+ # END while not reached NULL
+
+ # default encoding for strings in git is utf8
+ # Only use the respective unicode object if the byte stream was encoded
+ name = data[ns:i]
+ name_enc = name.decode("utf-8")
+ if len(name) > len(name_enc):
+ name = name_enc
+ # END handle encoding
+
+ # byte is NULL, get next 20
+ i += 1
+ sha = data[i:i+20]
+ i = i + 20
+ out.append((sha, mode, name))
+ # END for each byte in data stream
+ return out
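+
+# Round-trip sketch under the format described above, assuming `odb` and a
+# binary tree sha `tree_binsha`:
+#
+#	from cStringIO import StringIO
+#	entries = tree_entries_from_data(odb.stream(tree_binsha).read())
+#	sio = StringIO()
+#	tree_to_stream(entries, sio.write)	# entries keep their on-disk order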
+
+
+def _find_by_name(tree_data, name, is_dir, start_at):
+	"""return data entry matching the given name and tree mode
+	or None.
+	Before the item is returned, the respective data item is set to
+	None in the tree_data list to mark it done"""
+ try:
+ item = tree_data[start_at]
+ if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
+ tree_data[start_at] = None
+ return item
+ except IndexError:
+ pass
+ # END exception handling
+ for index, item in enumerate(tree_data):
+ if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
+ tree_data[index] = None
+ return item
+ # END if item matches
+ # END for each item
+ return None
+
+def _to_full_path(item, path_prefix):
+ """Rebuild entry with given path prefix"""
+ if not item:
+ return item
+ return (item[0], item[1], path_prefix+item[2])
+
+def traverse_trees_recursive(odb, tree_shas, path_prefix):
+ """
+ :return: list with entries according to the given binary tree-shas.
+ The result is encoded in a list
+ of n tuple|None per blob/commit, (n == len(tree_shas)), where
+ * [0] == 20 byte sha
+ * [1] == mode as int
+ * [2] == path relative to working tree root
+ The entry tuple is None if the respective blob/commit did not
+ exist in the given tree.
+	:param tree_shas: iterable of shas pointing to trees. All trees must
+		be on the same level. A tree-sha may be None, in which case None entries
+		take its place in the result
+	:param path_prefix: a prefix to be added to the returned paths on this level,
+		set it to '' for the first iteration
+ :note: The ordering of the returned items will be partially lost"""
+ trees_data = list()
+ nt = len(tree_shas)
+ for tree_sha in tree_shas:
+ if tree_sha is None:
+ data = list()
+ else:
+ data = tree_entries_from_data(odb.stream(tree_sha).read())
+ # END handle muted trees
+ trees_data.append(data)
+ # END for each sha to get data for
+
+ out = list()
+ out_append = out.append
+
+ # find all matching entries and recursively process them together if the match
+ # is a tree. If the match is a non-tree item, put it into the result.
+ # Processed items will be set None
+ for ti, tree_data in enumerate(trees_data):
+ for ii, item in enumerate(tree_data):
+ if not item:
+ continue
+ # END skip already done items
+ entries = [ None for n in range(nt) ]
+ entries[ti] = item
+ sha, mode, name = item # its faster to unpack
+ is_dir = S_ISDIR(mode) # type mode bits
+
+ # find this item in all other tree data items
+ # wrap around, but stop one before our current index, hence
+ # ti+nt, not ti+1+nt
+ for tio in range(ti+1, ti+nt):
+ tio = tio % nt
+ entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii)
+ # END for each other item data
+
+ # if we are a directory, enter recursion
+ if is_dir:
+ out.extend(traverse_trees_recursive(odb, [((ei and ei[0]) or None) for ei in entries], path_prefix+name+'/'))
+ else:
+ out_append(tuple(_to_full_path(e, path_prefix) for e in entries))
+ # END handle recursion
+
+ # finally mark it done
+ tree_data[ii] = None
+ # END for each item
+
+ # we are done with one tree, set all its data empty
+ del(tree_data[:])
+ # END for each tree_data chunk
+ return out
+
+def traverse_tree_recursive(odb, tree_sha, path_prefix):
+ """
+ :return: list of entries of the tree pointed to by the binary tree_sha. An entry
+ has the following format:
+ * [0] 20 byte sha
+ * [1] mode as int
+ * [2] path relative to the repository
+ :param path_prefix: prefix to prepend to the front of all returned paths"""
+ entries = list()
+ data = tree_entries_from_data(odb.stream(tree_sha).read())
+
+ # unpacking/packing is faster than accessing individual items
+ for sha, mode, name in data:
+ if S_ISDIR(mode):
+ entries.extend(traverse_tree_recursive(odb, sha, path_prefix+name+'/'))
+ else:
+ entries.append((sha, mode, path_prefix+name))
+ # END for each item
+
+ return entries
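+
+# Usage sketch (assuming `repo`): flatten a whole tree into (sha, mode, path)
+# tuples, starting with an empty path prefix.
+#
+#	items = traverse_tree_recursive(repo.odb, repo.head.commit.tree.binsha, '')
+#	for binsha, mode, path in items:
+#		print "%06o %s" % (mode, path)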
diff --git a/git/objects/submodule/__init__.py b/git/objects/submodule/__init__.py
new file mode 100644
index 00000000..82df59b0
--- /dev/null
+++ b/git/objects/submodule/__init__.py
@@ -0,0 +1,2 @@
+# NOTE: Cannot import anything here as the top-level _init_ has to handle
+# our dependencies
diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py
new file mode 100644
index 00000000..fd6c9396
--- /dev/null
+++ b/git/objects/submodule/base.py
@@ -0,0 +1,924 @@
+import util
+from util import (
+ mkhead,
+ sm_name,
+ sm_section,
+ unbare_repo,
+ SubmoduleConfigParser,
+ find_first_remote_branch
+ )
+from git.objects.util import Traversable
+from StringIO import StringIO # need a dict to set bloody .name field
+from git.util import (
+ Iterable,
+ join_path_native,
+ to_native_path_linux,
+ RemoteProgress
+ )
+
+from git.config import SectionConstraint
+from git.exc import (
+ InvalidGitRepositoryError,
+ NoSuchPathError
+ )
+
+import stat
+import git
+
+import os
+import sys
+import time
+
+import shutil
+
+__all__ = ["Submodule", "UpdateProgress"]
+
+
+class UpdateProgress(RemoteProgress):
+ """Class providing detailed progress information to the caller who should
+	derive from it and implement the ``update(...)`` method"""
+ CLONE, FETCH, UPDWKTREE = [1 << x for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes+3)]
+ _num_op_codes = RemoteProgress._num_op_codes + 3
+
+ __slots__ = tuple()
+
+
+BEGIN = UpdateProgress.BEGIN
+END = UpdateProgress.END
+CLONE = UpdateProgress.CLONE
+FETCH = UpdateProgress.FETCH
+UPDWKTREE = UpdateProgress.UPDWKTREE
+
+
+# IndexObject comes via the util module; it's a 'hacky' fix thanks to python's import
+# mechanism, which causes plenty of trouble when the only reason for packages and
+# modules is refactoring - subpackages shouldn't depend on parent packages
+class Submodule(util.IndexObject, Iterable, Traversable):
+ """Implements access to a git submodule. They are special in that their sha
+ represents a commit in the submodule's repository which is to be checked out
+ at the path of this instance.
+ The submodule type does not have a string type associated with it, as it exists
+ solely as a marker in the tree and index.
+
+ All methods work in bare and non-bare repositories."""
+
+ _id_attribute_ = "name"
+ k_modules_file = '.gitmodules'
+ k_head_option = 'branch'
+ k_head_default = 'master'
+ k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status
+
+	# this is a bogus type for base class compatibility
+ type = 'submodule'
+
+ __slots__ = ('_parent_commit', '_url', '_branch_path', '_name', '__weakref__')
+ _cache_attrs = ('path', '_url', '_branch_path')
+
+ def __init__(self, repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, branch_path=None):
+ """Initialize this instance with its attributes. We only document the ones
+ that differ from ``IndexObject``
+
+ :param repo: Our parent repository
+ :param binsha: binary sha referring to a commit in the remote repository, see url parameter
+ :param parent_commit: see set_parent_commit()
+ :param url: The url to the remote repository which is the submodule
+ :param branch_path: full (relative) path to ref to checkout when cloning the remote repository"""
+ super(Submodule, self).__init__(repo, binsha, mode, path)
+ self.size = 0
+ if parent_commit is not None:
+ self._parent_commit = parent_commit
+ if url is not None:
+ self._url = url
+ if branch_path is not None:
+ assert isinstance(branch_path, basestring)
+ self._branch_path = branch_path
+ if name is not None:
+ self._name = name
+
+ def _set_cache_(self, attr):
+ if attr == '_parent_commit':
+ # set a default value, which is the root tree of the current head
+ self._parent_commit = self.repo.commit()
+ elif attr in ('path', '_url', '_branch_path'):
+ reader = self.config_reader()
+ # default submodule values
+ self.path = reader.get_value('path')
+ self._url = reader.get_value('url')
+ # git-python extension values - optional
+ self._branch_path = reader.get_value(self.k_head_option, git.Head.to_full_path(self.k_head_default))
+ elif attr == '_name':
+ raise AttributeError("Cannot retrieve the name of a submodule if it was not set initially")
+ else:
+ super(Submodule, self)._set_cache_(attr)
+ # END handle attribute name
+
+ def _get_intermediate_items(self, item):
+ """:return: all the submodules of our module repository"""
+ try:
+ return type(self).list_items(item.module())
+ except InvalidGitRepositoryError:
+ return list()
+		# END handle intermediate items
+
+ def __eq__(self, other):
+ """Compare with another submodule"""
+ # we may only compare by name as this should be the ID they are hashed with
+ # Otherwise this type wouldn't be hashable
+ # return self.path == other.path and self.url == other.url and super(Submodule, self).__eq__(other)
+ return self._name == other._name
+
+ def __ne__(self, other):
+ """Compare with another submodule for inequality"""
+ return not (self == other)
+
+ def __hash__(self):
+ """Hash this instance using its logical id, not the sha"""
+ return hash(self._name)
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)" % (type(self).__name__, self._name, self.path, self.url, self.branch_path)
+
+ @classmethod
+ def _config_parser(cls, repo, parent_commit, read_only):
+ """:return: Config Parser constrained to our submodule in read or write mode
+ :raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
+ at the given parent commit. Otherwise the exception would be delayed until the first
+ access of the config parser"""
+ parent_matches_head = repo.head.commit == parent_commit
+ if not repo.bare and parent_matches_head:
+ fp_module = cls.k_modules_file
+ fp_module_path = os.path.join(repo.working_tree_dir, fp_module)
+ if not os.path.isfile(fp_module_path):
+ raise IOError("%s file was not accessible" % fp_module_path)
+ # END handle existance
+ fp_module = fp_module_path
+ else:
+ try:
+ fp_module = cls._sio_modules(parent_commit)
+ except KeyError:
+ raise IOError("Could not find %s file in the tree of parent commit %s" % (cls.k_modules_file, parent_commit))
+ # END handle exceptions
+ # END handle non-bare working tree
+
+ if not read_only and (repo.bare or not parent_matches_head):
+ raise ValueError("Cannot write blobs of 'historical' submodule configurations")
+ # END handle writes of historical submodules
+
+ return SubmoduleConfigParser(fp_module, read_only = read_only)
+
+ def _clear_cache(self):
+ # clear the possibly changed values
+ for name in self._cache_attrs:
+ try:
+ delattr(self, name)
+ except AttributeError:
+ pass
+ # END try attr deletion
+ # END for each name to delete
+
+ @classmethod
+ def _sio_modules(cls, parent_commit):
+ """:return: Configuration file as StringIO - we only access it through the respective blob's data"""
+ sio = StringIO(parent_commit.tree[cls.k_modules_file].data_stream.read())
+ sio.name = cls.k_modules_file
+ return sio
+
+ def _config_parser_constrained(self, read_only):
+ """:return: Config Parser constrained to our submodule in read or write mode"""
+ parser = self._config_parser(self.repo, self._parent_commit, read_only)
+ parser.set_submodule(self)
+ return SectionConstraint(parser, sm_section(self.name))
+
+ #{ Edit Interface
+
+ @classmethod
+ def add(cls, repo, name, path, url=None, branch=None, no_checkout=False):
+ """Add a new submodule to the given repository. This will alter the index
+ as well as the .gitmodules file, but will not create a new commit.
+ If the submodule already exists, no matter if the configuration differs
+ from the one provided, the existing submodule will be returned.
+
+ :param repo: Repository instance which should receive the submodule
+ :param name: The name/identifier for the submodule
+ :param path: repository-relative or absolute path at which the submodule
+ should be located
+ It will be created as required during the repository initialization.
+ :param url: git-clone compatible URL, see git-clone reference for more information
+ If None, the repository is assumed to exist, and the url of the first
+ remote is taken instead. This is useful if you want to make an existing
+			repository a submodule of another one.
+ :param branch: branch at which the submodule should (later) be checked out.
+ The given branch must exist in the remote repository, and will be checked
+ out locally as a tracking branch.
+			It will only be written into the configuration if it is not None; otherwise
+			the checked-out branch will be the one the remote HEAD pointed to.
+			The result you get in that situation is somewhat fuzzy, and it is recommended
+			to specify at least 'master' here
+ :param no_checkout: if True, and if the repository has to be cloned manually,
+ no checkout will be performed
+ :return: The newly created submodule instance
+ :note: works atomically, such that no change will be done if the repository
+ update fails for instance"""
+ if repo.bare:
+ raise InvalidGitRepositoryError("Cannot add submodules to bare repositories")
+ # END handle bare repos
+
+ path = to_native_path_linux(path)
+ if path.endswith('/'):
+ path = path[:-1]
+ # END handle trailing slash
+
+ # assure we never put backslashes into the url, as some operating systems
+ # like it ...
+ if url != None:
+ url = to_native_path_linux(url)
+ #END assure url correctness
+
+ # INSTANTIATE INTERMEDIATE SM
+ sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name)
+ if sm.exists():
+ # reretrieve submodule from tree
+ try:
+ return repo.head.commit.tree[path]
+ except KeyError:
+ # could only be in index
+ index = repo.index
+ entry = index.entries[index.entry_key(path, 0)]
+ sm.binsha = entry.binsha
+ return sm
+ # END handle exceptions
+ # END handle existing
+
+ br = git.Head.to_full_path(str(branch) or cls.k_head_default)
+ has_module = sm.module_exists()
+ branch_is_default = branch is None
+ if has_module and url is not None:
+ if url not in [r.url for r in sm.module().remotes]:
+ raise ValueError("Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath))
+ # END check url
+ # END verify urls match
+
+ mrepo = None
+ if url is None:
+ if not has_module:
+				raise ValueError("A URL was not given and no existing repository was found at %s" % path)
+ # END check url
+ mrepo = sm.module()
+ urls = [r.url for r in mrepo.remotes]
+ if not urls:
+ raise ValueError("Didn't find any remote url in repository at %s" % sm.abspath)
+ # END verify we have url
+ url = urls[0]
+ else:
+ # clone new repo
+ kwargs = {'n' : no_checkout}
+ if not branch_is_default:
+ kwargs['b'] = br
+ # END setup checkout-branch
+ mrepo = git.Repo.clone_from(url, path, **kwargs)
+ # END verify url
+
+ # update configuration and index
+ index = sm.repo.index
+ writer = sm.config_writer(index=index, write=False)
+ writer.set_value('url', url)
+ writer.set_value('path', path)
+
+ sm._url = url
+ if not branch_is_default:
+ # store full path
+ writer.set_value(cls.k_head_option, br)
+ sm._branch_path = br
+ # END handle path
+ del(writer)
+
+		# we deliberately assume that our head matches our index!
+ pcommit = repo.head.commit
+ sm._parent_commit = pcommit
+ sm.binsha = mrepo.head.commit.binsha
+ index.add([sm], write=True)
+
+ return sm
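+
+	# Hedged usage sketch: register a repository cloned to 'lib' as a submodule;
+	# the url is a hypothetical placeholder.
+	#
+	#	sm = Submodule.add(repo, 'lib', 'lib', url='git://example.com/lib.git')
+	#	repo.index.commit("added submodule 'lib'")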
+
+ def update(self, recursive=False, init=True, to_latest_revision=False, progress=None,
+ dry_run=False):
+ """Update the repository of this submodule to point to the checkout
+ we point at with the binsha of this instance.
+
+ :param recursive: if True, we will operate recursively and update child-
+ modules as well.
+ :param init: if True, the module repository will be cloned into place if necessary
+ :param to_latest_revision: if True, the submodule's sha will be ignored during checkout.
+ Instead, the remote will be fetched, and the local tracking branch updated.
+ This only works if we have a local tracking branch, which is the case
+ if the remote repository had a master branch, or of the 'branch' option
+			if the remote repository had a master branch, or if the 'branch' option
+		:param progress: UpdateProgress instance or None if no progress should be shown
+ :param dry_run: if True, the operation will only be simulated, but not performed.
+ All performed operations are read-only
+ :note: does nothing in bare repositories
+		:note: method is definitely not atomic if recursive is True
+ :return: self"""
+ if self.repo.bare:
+ return self
+ #END pass in bare mode
+
+ if progress is None:
+ progress = UpdateProgress()
+ #END handle progress
+ prefix = ''
+ if dry_run:
+ prefix = "DRY-RUN: "
+ #END handle prefix
+
+ # to keep things plausible in dry-run mode
+ if dry_run:
+ mrepo = None
+ #END init mrepo
+
+ # ASSURE REPO IS PRESENT AND UPTODATE
+ #####################################
+ try:
+ mrepo = self.module()
+ rmts = mrepo.remotes
+ len_rmts = len(rmts)
+ for i, remote in enumerate(rmts):
+ op = FETCH
+ if i == 0:
+ op |= BEGIN
+ #END handle start
+
+ progress.update(op, i, len_rmts, prefix+"Fetching remote %s of submodule %r" % (remote, self.name))
+ #===============================
+ if not dry_run:
+ remote.fetch(progress=progress)
+ #END handle dry-run
+ #===============================
+ if i == len_rmts-1:
+ op |= END
+ #END handle end
+ progress.update(op, i, len_rmts, prefix+"Done fetching remote of submodule %r" % self.name)
+ #END fetch new data
+ except InvalidGitRepositoryError:
+ if not init:
+ return self
+ # END early abort if init is not allowed
+ import git
+
+ # there is no git-repository yet - but delete empty paths
+ module_path = join_path_native(self.repo.working_tree_dir, self.path)
+ if not dry_run and os.path.isdir(module_path):
+ try:
+ os.rmdir(module_path)
+ except OSError:
+					raise OSError("Module directory at %r already exists and is non-empty" % module_path)
+ # END handle OSError
+ # END handle directory removal
+
+ # don't check it out at first - nonetheless it will create a local
+ # branch according to the remote-HEAD if possible
+ progress.update(BEGIN|CLONE, 0, 1, prefix+"Cloning %s to %s in submodule %r" % (self.url, module_path, self.name))
+ if not dry_run:
+ mrepo = git.Repo.clone_from(self.url, module_path, n=True)
+ #END handle dry-run
+ progress.update(END|CLONE, 0, 1, prefix+"Done cloning to %s" % module_path)
+
+
+ if not dry_run:
+ # see whether we have a valid branch to checkout
+ try:
+ # find a remote which has our branch - we try to be flexible
+ remote_branch = find_first_remote_branch(mrepo.remotes, self.branch_name)
+ local_branch = mkhead(mrepo, self.branch_path)
+
+ # have a valid branch, but no checkout - make sure we can figure
+ # that out by marking the commit with a null_sha
+ local_branch.set_object(util.Object(mrepo, self.NULL_BIN_SHA))
+ # END initial checkout + branch creation
+
+ # make sure HEAD is not detached
+ mrepo.head.set_reference(local_branch, logmsg="submodule: attaching head to %s" % local_branch)
+ mrepo.head.ref.set_tracking_branch(remote_branch)
+ except IndexError:
+ print >> sys.stderr, "Warning: Failed to checkout tracking branch %s" % self.branch_path
+ #END handle tracking branch
+
+ # NOTE: Have to write the repo config file as well, otherwise
+ # the default implementation will be offended and not update the repository
+ # Maybe this is a good way to assure it doesn't get into our way, but
+			# we want to stay backwards compatible too. It's so redundant!
+ self.repo.config_writer().set_value(sm_section(self.name), 'url', self.url)
+ #END handle dry_run
+			#END handle initialization
+
+
+ # DETERMINE SHAS TO CHECKOUT
+ ############################
+ binsha = self.binsha
+ hexsha = self.hexsha
+ if mrepo is not None:
+ # mrepo is only set if we are not in dry-run mode or if the module existed
+ is_detached = mrepo.head.is_detached
+ #END handle dry_run
+
+ if mrepo is not None and to_latest_revision:
+ msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir
+ if not is_detached:
+ rref = mrepo.head.ref.tracking_branch()
+ if rref is not None:
+ rcommit = rref.commit
+ binsha = rcommit.binsha
+ hexsha = rcommit.hexsha
+ else:
+ print >> sys.stderr, "%s a tracking branch was not set for local branch '%s'" % (msg_base, mrepo.head.ref)
+ # END handle remote ref
+ else:
+ print >> sys.stderr, "%s there was no local tracking branch" % msg_base
+ # END handle detached head
+ # END handle to_latest_revision option
+
+ # update the working tree
+ # handles dry_run
+ if mrepo is not None and mrepo.head.commit.binsha != binsha:
+ progress.update(BEGIN|UPDWKTREE, 0, 1, prefix+"Updating working tree at %s for submodule %r to revision %s" % (self.path, self.name, hexsha))
+ if not dry_run:
+ if is_detached:
+					# NOTE: for now we force, the user is not supposed to change detached
+ # submodules anyway. Maybe at some point this becomes an option, to
+ # properly handle user modifications - see below for future options
+ # regarding rebase and merge.
+ mrepo.git.checkout(hexsha, force=True)
+ else:
+ # TODO: allow to specify a rebase, merge, or reset
+ # TODO: Warn if the hexsha forces the tracking branch off the remote
+ # branch - this should be prevented when setting the branch option
+ mrepo.head.reset(hexsha, index=True, working_tree=True)
+ # END handle checkout
+ #END handle dry_run
+ progress.update(END|UPDWKTREE, 0, 1, prefix+"Done updating working tree for submodule %r" % self.name)
+ # END update to new commit only if needed
+
+ # HANDLE RECURSION
+ ##################
+ if recursive:
+ # in dry_run mode, the module might not exist
+ if mrepo is not None:
+ for submodule in self.iter_items(self.module()):
+ submodule.update(recursive, init, to_latest_revision, progress=progress, dry_run=dry_run)
+ # END handle recursive update
+ #END handle dry run
+ # END for each submodule
+
+ return self
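+
+	# Usage sketch: simulate first, then update for real; `sm` is assumed to be
+	# a Submodule instance, e.g. from Submodule.iter_items(repo).
+	#
+	#	sm.update(recursive=True, init=True, dry_run=True)
+	#	sm.update(recursive=True, init=True)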
+
+ @unbare_repo
+ def move(self, module_path, configuration=True, module=True):
+		"""Move the submodule to another module path. This involves physically moving
+ the repository at our current path, changing the configuration, as well as
+ adjusting our index entry accordingly.
+
+ :param module_path: the path to which to move our module, given as
+ repository-relative path. Intermediate directories will be created
+ accordingly. If the path already exists, it must be empty.
+			Trailing (back)slashes are removed automatically
+ :param configuration: if True, the configuration will be adjusted to let
+ the submodule point to the given path.
+ :param module: if True, the repository managed by this submodule
+ will be moved, not the configuration. This will effectively
+ leave your repository in an inconsistent state unless the configuration
+ and index already point to the target location.
+ :return: self
+ :raise ValueError: if the module path existed and was not empty, or was a file
+ :note: Currently the method is not atomic, and it could leave the repository
+ in an inconsistent state if a sub-step fails for some reason
+ """
+ if module + configuration < 1:
+ raise ValueError("You must specify to move at least the module or the configuration of the submodule")
+ #END handle input
+
+ module_path = to_native_path_linux(module_path)
+ if module_path.endswith('/'):
+ module_path = module_path[:-1]
+ # END handle trailing slash
+
+ # VERIFY DESTINATION
+ if module_path == self.path:
+ return self
+ #END handle no change
+
+ dest_path = join_path_native(self.repo.working_tree_dir, module_path)
+ if os.path.isfile(dest_path):
+ raise ValueError("Cannot move repository onto a file: %s" % dest_path)
+ # END handle target files
+
+ index = self.repo.index
+ tekey = index.entry_key(module_path, 0)
+ # if the target item already exists, fail
+ if configuration and tekey in index.entries:
+			raise ValueError("Index entry for target path already existed")
+ #END handle index key already there
+
+ # remove existing destination
+ if module:
+ if os.path.exists(dest_path):
+ if len(os.listdir(dest_path)):
+ raise ValueError("Destination module directory was not empty")
+				#END handle non-emptiness
+
+ if os.path.islink(dest_path):
+ os.remove(dest_path)
+ else:
+ os.rmdir(dest_path)
+ #END handle link
+ else:
+ # recreate parent directories
+ # NOTE: renames() does that now
+ pass
+			#END handle existence
+ # END handle module
+
+ # move the module into place if possible
+ cur_path = self.abspath
+ renamed_module = False
+ if module and os.path.exists(cur_path):
+ os.renames(cur_path, dest_path)
+ renamed_module = True
+ #END move physical module
+
+
+ # rename the index entry - have to manipulate the index directly as
+ # git-mv cannot be used on submodules ... yeah
+ try:
+ if configuration:
+ try:
+ ekey = index.entry_key(self.path, 0)
+ entry = index.entries[ekey]
+ del(index.entries[ekey])
+ nentry = git.IndexEntry(entry[:3]+(module_path,)+entry[4:])
+ index.entries[tekey] = nentry
+ except KeyError:
+ raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path))
+ #END handle submodule doesn't exist
+
+ # update configuration
+ writer = self.config_writer(index=index) # auto-write
+ writer.set_value('path', module_path)
+ self.path = module_path
+ del(writer)
+ # END handle configuration flag
+ except Exception:
+ if renamed_module:
+ os.renames(dest_path, cur_path)
+ # END undo module renaming
+ raise
+ #END handle undo rename
+
+ return self
+
+ @unbare_repo
+ def remove(self, module=True, force=False, configuration=True, dry_run=False):
+ """Remove this submodule from the repository. This will remove our entry
+ from the .gitmodules file and the entry in the .git/config file.
+
+		:param module: If True, the module we point to will be deleted
+			as well. The removal will fail if the module is currently on a commit
+			which is not part of any branch in the remote, if the currently checked
+			out branch is ahead of its tracking branch, or if you have modifications
+			in the working tree or untracked files.
+			In case the removal of the repository fails for these reasons, the
+			submodule status will not have been altered.
+ If this submodule has child-modules on its own, these will be deleted
+ prior to touching the own module.
+ :param force: Enforces the deletion of the module even though it contains
+ modifications. This basically enforces a brute-force file system based
+ deletion.
+ :param configuration: if True, the submodule is deleted from the configuration,
+			otherwise it isn't. Although this should be enabled most of the time,
+ this flag enables you to safely delete the repository of your submodule.
+ :param dry_run: if True, we will not actually do anything, but throw the errors
+ we would usually throw
+ :return: self
+ :note: doesn't work in bare repositories
+ :raise InvalidGitRepositoryError: thrown if the repository cannot be deleted
+ :raise OSError: if directories or files could not be removed"""
+ if not (module + configuration):
+ raise ValueError("Need to specify to delete at least the module, or the configuration")
+ # END handle params
+
+ # DELETE MODULE REPOSITORY
+ ##########################
+ if module and self.module_exists():
+ if force:
+ # take the fast lane and just delete everything in our module path
+ # TODO: If we run into permission problems, we have a highly inconsistent
+ # state. Delete the .git folders last, start with the submodules first
+ mp = self.abspath
+ method = None
+ if os.path.islink(mp):
+ method = os.remove
+ elif os.path.isdir(mp):
+ method = shutil.rmtree
+ elif os.path.exists(mp):
+ raise AssertionError("Cannot forcibly delete repository as it was neither a link, nor a directory")
+ #END handle brutal deletion
+ if not dry_run:
+ assert method
+ method(mp)
+ #END apply deletion method
+ else:
+ # verify we may delete our module
+ mod = self.module()
+ if mod.is_dirty(untracked_files=True):
+ raise InvalidGitRepositoryError("Cannot delete module at %s with any modifications, unless force is specified" % mod.working_tree_dir)
+ # END check for dirt
+
+ # figure out whether we have new commits compared to the remotes
+ # NOTE: If the user pulled all the time, the remote heads might
+ # not have been updated, so commits coming from the remote look
+ # as if they come from us. But we stay strictly read-only and
+				# don't fetch beforehand.
+ for remote in mod.remotes:
+ num_branches_with_new_commits = 0
+ rrefs = remote.refs
+ for rref in rrefs:
+						num_branches_with_new_commits += len(mod.git.cherry(rref)) != 0
+ # END for each remote ref
+ # not a single remote branch contained all our commits
+ if num_branches_with_new_commits == len(rrefs):
+ raise InvalidGitRepositoryError("Cannot delete module at %s as there are new commits" % mod.working_tree_dir)
+ # END handle new commits
+					# have to manually delete references as python has no block scoping -
+					# they could keep handles open (on windows this is a problem)
+ if len(rrefs):
+ del(rref)
+ #END handle remotes
+ del(rrefs)
+ del(remote)
+ # END for each remote
+
+ # gently remove all submodule repositories
+ for sm in self.children():
+ sm.remove(module=True, force=False, configuration=False, dry_run=dry_run)
+ del(sm)
+ # END for each child-submodule
+
+ # finally delete our own submodule
+ if not dry_run:
+ wtd = mod.working_tree_dir
+ del(mod) # release file-handles (windows)
+ shutil.rmtree(wtd)
+ # END delete tree if possible
+ # END handle force
+ # END handle module deletion
+
+ # DELETE CONFIGURATION
+ ######################
+ if configuration and not dry_run:
+ # first the index-entry
+ index = self.repo.index
+ try:
+ del(index.entries[index.entry_key(self.path, 0)])
+ except KeyError:
+ pass
+ #END delete entry
+ index.write()
+
+ # now git config - need the config intact, otherwise we can't query
+			# information anymore
+ self.repo.config_writer().remove_section(sm_section(self.name))
+ self.config_writer().remove_section()
+ # END delete configuration
+
+		# void our data so that stale values cannot be accessed later
+ self._clear_cache()
+
+ return self
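+
+	# Usage sketch (assuming `sm` is a Submodule): throw the errors a real run
+	# would throw, then delete module and configuration for real.
+	#
+	#	sm.remove(module=True, configuration=True, dry_run=True)
+	#	sm.remove(module=True, configuration=True)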
+
+ def set_parent_commit(self, commit, check=True):
+ """Set this instance to use the given commit whose tree is supposed to
+ contain the .gitmodules blob.
+
+ :param commit: Commit'ish reference pointing at the root_tree
+ :param check: if True, relatively expensive checks will be performed to verify
+ validity of the submodule.
+ :raise ValueError: if the commit's tree didn't contain the .gitmodules blob.
+ :raise ValueError: if the parent commit didn't store this submodule under the
+ current path
+ :return: self"""
+ pcommit = self.repo.commit(commit)
+ pctree = pcommit.tree
+ if self.k_modules_file not in pctree:
+ raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file))
+ # END handle exceptions
+
+ prev_pc = self._parent_commit
+ self._parent_commit = pcommit
+
+ if check:
+ parser = self._config_parser(self.repo, self._parent_commit, read_only=True)
+ if not parser.has_section(sm_section(self.name)):
+ self._parent_commit = prev_pc
+ raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit))
+ # END handle submodule did not exist
+ # END handle checking mode
+
+ # update our sha, it could have changed
+ self.binsha = pctree[self.path].binsha
+
+ self._clear_cache()
+
+ return self
+
+ @unbare_repo
+ def config_writer(self, index=None, write=True):
+ """:return: a config writer instance allowing you to read and write the data
+ belonging to this submodule into the .gitmodules file.
+
+ :param index: if not None, an IndexFile instance which should be written.
+ defaults to the index of the Submodule's parent repository.
+ :param write: if True, the index will be written each time a configuration
+ value changes.
+ :note: the parameters allow for a more efficient writing of the index,
+ as you can pass in a modified index on your own, prevent automatic writing,
+ and write yourself once the whole operation is complete
+ :raise ValueError: if trying to get a writer on a parent_commit which does not
+ match the current head commit
+ :raise IOError: If the .gitmodules file/blob could not be read"""
+ writer = self._config_parser_constrained(read_only=False)
+ if index is not None:
+ writer.config._index = index
+ writer.config._auto_write = write
+ return writer
+
+ #} END edit interface
+
+ #{ Query Interface
+
+ @unbare_repo
+ def module(self):
+ """:return: Repo instance initialized from the repository at our submodule path
+ :raise InvalidGitRepositoryError: if a repository was not available. This could
+ also mean that it was not yet initialized"""
+ # late import to workaround circular dependencies
+ module_path = self.abspath
+ try:
+ repo = git.Repo(module_path)
+ if repo != self.repo:
+ return repo
+ # END handle repo uninitialized
+ except (InvalidGitRepositoryError, NoSuchPathError):
+ raise InvalidGitRepositoryError("No valid repository at %s" % self.path)
+ else:
+ raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_path)
+ # END handle exceptions
+
+ def module_exists(self):
+ """:return: True if our module exists and is a valid git repository. See module() method"""
+ try:
+ self.module()
+ return True
+ except Exception:
+ return False
+ # END handle exception
+
+ def exists(self):
+ """
+ :return: True if the submodule exists, False otherwise. Please note that
+ a submodule may exist (in the .gitmodules file) even though its module
+ doesn't exist"""
+ # keep attributes for later, and restore them if we have no valid data
+ # this way we do not actually alter the state of the object
+ loc = dict() # plain dict as stash - mutating locals() is not reliable
+ for attr in self._cache_attrs:
+ if hasattr(self, attr):
+ loc[attr] = getattr(self, attr)
+ # END if we have the attribute cache
+ #END for each attr
+ self._clear_cache()
+
+ try:
+ try:
+ self.path
+ return True
+ except Exception:
+ return False
+ # END handle exceptions
+ finally:
+ for attr in self._cache_attrs:
+ if attr in loc:
+ setattr(self, attr, loc[attr])
+ # END if we have a cache
+ # END reapply each attribute
+ # END handle object state consistency
+
+ @property
+ def branch(self):
+ """:return: The branch instance that we are to checkout
+ :raise InvalidGitRepositoryError: if our module is not yet checked out"""
+ return mkhead(self.module(), self._branch_path)
+
+ @property
+ def branch_path(self):
+ """
+ :return: full (relative) path as string to the branch we would checkout
+ from the remote and track"""
+ return self._branch_path
+
+ @property
+ def branch_name(self):
+ """:return: the name of the branch, which is the shortest possible branch name"""
+ # reuse the Head implementation: we create a temporary Head instance
+ # bound to whichever repository is available ( it makes no difference )
+ return git.Head(self.repo, self._branch_path).name
+
+ @property
+ def url(self):
+ """:return: The url to the repository which our module-repository refers to"""
+ return self._url
+
+ @property
+ def parent_commit(self):
+ """:return: Commit instance with the tree containing the .gitmodules file
+ :note: will always point to the current head's commit if it was not set explicitly"""
+ return self._parent_commit
+
+ @property
+ def name(self):
+ """:return: The name of this submodule. It is used to identify it within the
+ .gitmodules file.
+ :note: by default, the name is the path at which to find the submodule, but
+ in git-python it should be a unique identifier similar to the identifiers
+ used for remotes, which allows changing the path of the submodule
+ easily
+ """
+ return self._name
+
+ def config_reader(self):
+ """
+ :return: ConfigReader instance which allows you to query the configuration values
+ of this submodule, as provided by the .gitmodules file
+ :note: The config reader will actually read the data directly from the repository
+ and thus does not need nor care about your working tree.
+ :note: Should be cached by the caller and only kept as long as needed
+ :raise IOError: If the .gitmodules file/blob could not be read"""
+ return self._config_parser_constrained(read_only=True)
+
+ def children(self):
+ """
+ :return: IterableList(Submodule, ...) an iterable list of submodule instances
+ which are children of this submodule, or an empty list if the submodule is not checked out"""
+ return self._get_intermediate_items(self)
+
+ #} END query interface
+
+ #{ Iterable Interface
+
+ @classmethod
+ def iter_items(cls, repo, parent_commit='HEAD'):
+ """:return: iterator yielding Submodule instances available in the given repository"""
+ pc = repo.commit(parent_commit) # parent commit instance
+ try:
+ parser = cls._config_parser(repo, pc, read_only=True)
+ except IOError:
+ raise StopIteration
+ # END handle empty iterator
+
+ rt = pc.tree # root tree
+
+ for sms in parser.sections():
+ n = sm_name(sms)
+ p = parser.get_value(sms, 'path')
+ u = parser.get_value(sms, 'url')
+ b = cls.k_head_default
+ if parser.has_option(sms, cls.k_head_option):
+ b = parser.get_value(sms, cls.k_head_option)
+ # END handle optional information
+
+ # get the binsha
+ index = repo.index
+ try:
+ sm = rt[p]
+ except KeyError:
+ # try the index, maybe it was just added
+ try:
+ entry = index.entries[index.entry_key(p, 0)]
+ sm = cls(repo, entry.binsha, entry.mode, entry.path)
+ except KeyError:
+ raise InvalidGitRepositoryError("Gitmodule path %r did not exist in revision of parent commit %s" % (p, parent_commit))
+ # END handle keyerror
+ # END handle critical error
+
+ # fill in remaining info - saves time as it doesn't have to be parsed again
+ sm._name = n
+ sm._parent_commit = pc
+ sm._branch_path = git.Head.to_full_path(b)
+ sm._url = u
+
+ yield sm
+ # END for each section
+
+ #} END iterable interface
+
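+# Usage sketch (illustrative; paths and names are placeholders): how the
+# interface above is typically driven, assuming a non-bare repository that
+# already records a submodule in its .gitmodules file.
+#
+#   >>> import git
+#   >>> repo = git.Repo('/path/to/parent')
+#   >>> sm = git.Submodule.iter_items(repo).next()   # first configured submodule
+#   >>> sm.module_exists()      # False until the module was actually cloned
+#   >>> sm.url, sm.branch_name  # data parsed from the .gitmodules blob
+#   >>> sm.children()           # nested submodules, empty list if none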
diff --git a/git/objects/submodule/root.py b/git/objects/submodule/root.py
new file mode 100644
index 00000000..36cd7209
--- /dev/null
+++ b/git/objects/submodule/root.py
@@ -0,0 +1,315 @@
+from base import Submodule, UpdateProgress
+from util import (
+ find_first_remote_branch
+ )
+from git.exc import InvalidGitRepositoryError
+import git
+
+import sys
+
+__all__ = ["RootModule", "RootUpdateProgress"]
+
+
+class RootUpdateProgress(UpdateProgress):
+ """Utility class which adds more opcodes to the UpdateProgress"""
+ REMOVE, PATHCHANGE, BRANCHCHANGE, URLCHANGE = [1 << x for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes+4)]
+ _num_op_codes = UpdateProgress._num_op_codes+4
+
+ __slots__ = tuple()
+
+BEGIN = RootUpdateProgress.BEGIN
+END = RootUpdateProgress.END
+REMOVE = RootUpdateProgress.REMOVE
+BRANCHCHANGE = RootUpdateProgress.BRANCHCHANGE
+URLCHANGE = RootUpdateProgress.URLCHANGE
+PATHCHANGE = RootUpdateProgress.PATHCHANGE
+
+class RootModule(Submodule):
+ """A (virtual) Root of all submodules in the given repository. It can be used
+ to more easily traverse all submodules of the master repository"""
+
+ __slots__ = tuple()
+
+ k_root_name = '__ROOT__'
+
+ def __init__(self, repo):
+ # repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
+ super(RootModule, self).__init__(
+ repo,
+ binsha = self.NULL_BIN_SHA,
+ mode = self.k_default_mode,
+ path = '',
+ name = self.k_root_name,
+ parent_commit = repo.head.commit,
+ url = '',
+ branch_path = git.Head.to_full_path(self.k_head_default)
+ )
+
+
+ def _clear_cache(self):
+ """May not do anything"""
+ pass
+
+ #{ Interface
+
+ def update(self, previous_commit=None, recursive=True, force_remove=False, init=True,
+ to_latest_revision=False, progress=None, dry_run=False):
+ """Update the submodules of this repository to the current HEAD commit.
+ This method behaves smartly by determining changes of the path of a submodule's
+ repository, next to changes to the to-be-checked-out commit or the branch to be
+ checked out. This works as long as the submodule's ID does not change.
+ Additionally it will detect addition and removal of submodules, which will be handled
+ gracefully.
+
+ :param previous_commit: If set to a commit'ish, the commit we should use
+ as the previous commit the HEAD pointed to before it was set to the commit it points to now.
+ If None, it defaults to HEAD@{1}
+ :param recursive: if True, the children of submodules will be updated as well
+ using the same technique
+ :param force_remove: If submodules have been deleted, they will be forcibly removed.
+ Otherwise the update may fail if a submodule's repository cannot be deleted as
+ changes have been made to it (see Submodule.update() for more information)
+ :param init: If we encounter a new module which would need to be initialized, then do it.
+ :param to_latest_revision: If True, instead of checking out the revision pointed to
+ by this submodule's sha, the checked out tracking branch will be merged with the
+ newest remote branch fetched from the repository's origin
+ :param progress: RootUpdateProgress instance or None if no progress should be sent
+ :param dry_run: if True, operations will not actually be performed. Progress messages
+ will change accordingly to indicate the WOULD DO state of the operation."""
+ if self.repo.bare:
+ raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
+ # END handle bare
+
+ if progress is None:
+ progress = RootUpdateProgress()
+ #END assure progress is set
+
+ prefix = ''
+ if dry_run:
+ prefix = 'DRY-RUN: '
+
+ repo = self.repo
+
+ # SETUP BASE COMMIT
+ ###################
+ cur_commit = repo.head.commit
+ if previous_commit is None:
+ try:
+ previous_commit = repo.commit(repo.head.log_entry(-1).oldhexsha)
+ if previous_commit.binsha == previous_commit.NULL_BIN_SHA:
+ raise IndexError
+ #END handle initial commit
+ except IndexError:
+ # in new repositories, there is no previous commit
+ previous_commit = cur_commit
+ #END exception handling
+ else:
+ previous_commit = repo.commit(previous_commit) # obtain commit object
+ # END handle previous commit
+
+
+ psms = self.list_items(repo, parent_commit=previous_commit)
+ sms = self.list_items(self.module())
+ spsms = set(psms)
+ ssms = set(sms)
+
+ # HANDLE REMOVALS
+ ###################
+ rrsm = (spsms - ssms)
+ len_rrsm = len(rrsm)
+ for i, rsm in enumerate(rrsm):
+ op = REMOVE
+ if i == 0:
+ op |= BEGIN
+ #END handle begin
+
+ # fake it into thinking it's at the current commit to allow deletion
+ # of the previous module. Trigger the cache to be updated before that
+ progress.update(op, i, len_rrsm, prefix+"Removing submodule %r at %s" % (rsm.name, rsm.abspath))
+ rsm._parent_commit = repo.head.commit
+ if not dry_run:
+ rsm.remove(configuration=False, module=True, force=force_remove)
+ #END handle dry-run
+
+ if i == len_rrsm-1:
+ op |= END
+ #END handle end
+ progress.update(op, i, len_rrsm, prefix+"Done removing submodule %r" % rsm.name)
+ # END for each removed submodule
+
+ # HANDLE PATH RENAMES
+ #####################
+ # url changes + branch changes
+ csms = (spsms & ssms)
+ len_csms = len(csms)
+ for i, csm in enumerate(csms):
+ psm = psms[csm.name]
+ sm = sms[csm.name]
+
+ #PATH CHANGES
+ ##############
+ if sm.path != psm.path and psm.module_exists():
+ progress.update(BEGIN|PATHCHANGE, i, len_csms, prefix+"Moving repository of submodule %r from %s to %s" % (sm.name, psm.abspath, sm.abspath))
+ # move the module to the new path
+ if not dry_run:
+ psm.move(sm.path, module=True, configuration=False)
+ #END handle dry_run
+ progress.update(END|PATHCHANGE, i, len_csms, prefix+"Done moving repository of submodule %r" % sm.name)
+ # END handle path changes
+
+ if sm.module_exists():
+ # HANDLE URL CHANGE
+ ###################
+ if sm.url != psm.url:
+ # Add the new remote, remove the old one
+ # This way, if the url just changes, the commits will not
+ # have to be re-retrieved
+ nn = '__new_origin__'
+ smm = sm.module()
+ rmts = smm.remotes
+
+ # don't do anything if we already have the url we search in place
+ if len([r for r in rmts if r.url == sm.url]) == 0:
+ progress.update(BEGIN|URLCHANGE, i, len_csms, prefix+"Changing url of submodule %r from %s to %s" % (sm.name, psm.url, sm.url))
+
+ if not dry_run:
+ assert nn not in [r.name for r in rmts]
+ smr = smm.create_remote(nn, sm.url)
+ smr.fetch(progress=progress)
+
+ # If we have a tracking branch, it should be available
+ # in the new remote as well.
+ if len([r for r in smr.refs if r.remote_head == sm.branch_name]) == 0:
+ raise ValueError("Submodule branch named %r was not available in new submodule remote at %r" % (sm.branch_name, sm.url))
+ # END head is not detached
+
+ # now delete the changed one
+ rmt_for_deletion = None
+ for remote in rmts:
+ if remote.url == psm.url:
+ rmt_for_deletion = remote
+ break
+ # END if urls match
+ # END for each remote
+
+ # if we didn't find a matching remote, but have exactly one,
+ # we can safely use this one
+ if rmt_for_deletion is None:
+ if len(rmts) == 1:
+ rmt_for_deletion = rmts[0]
+ else:
+ # if we have not found any remote with the original url
+ # we may not have a name. This is a special case,
+ # and it's okay to fail here
+ # Alternatively we could just generate a unique name and leave all
+ # existing ones in place
+ raise InvalidGitRepositoryError("Couldn't find original remote-repo at url %r" % psm.url)
+ #END handle one single remote
+ # END handle check we found a remote
+
+ orig_name = rmt_for_deletion.name
+ smm.delete_remote(rmt_for_deletion)
+ # NOTE: Currently we leave tags from the deleted remotes
+ # as well as separate tracking branches in the possibly totally
+ # changed repository ( someone could have changed the url to
+ # another project ). At some point, one might want to clean
+ # it up, but the danger is high to remove stuff the user
+ # has added explicitly
+
+ # rename the new remote back to what it was
+ smr.rename(orig_name)
+
+ # early on, we verified that our current tracking branch
+ # exists in the remote. Now we have to assure that the
+ # sha we point to is still contained in the new remote
+ # tracking branch.
+ smsha = sm.binsha
+ found = False
+ rref = smr.refs[sm.branch_name]
+ for c in rref.commit.traverse():
+ if c.binsha == smsha:
+ found = True
+ break
+ # END traverse all commits in search for sha
+ # END for each commit
+
+ if not found:
+ # adjust our internal binsha to use the one of the remote
+ # this way, it will be checked out in the next step
+ # This will change the submodule relative to us, so
+ # the user will be able to commit the change easily
+ print >> sys.stderr, "WARNING: Current sha %s was not contained in the tracking branch at the new remote, setting it the the remote's tracking branch" % sm.hexsha
+ sm.binsha = rref.commit.binsha
+ #END reset binsha
+
+ #NOTE: All checkout is performed by the base implementation of update
+ #END handle dry_run
+ progress.update(END|URLCHANGE, i, len_csms, prefix+"Done adjusting url of submodule %r" % (sm.name))
+ # END skip remote handling if new url already exists in module
+ # END handle url
+
+ # HANDLE BRANCH CHANGES
+ #######################
+ if sm.branch_path != psm.branch_path:
+ # finally, create a new tracking branch which tracks the
+ # new remote branch
+ progress.update(BEGIN|BRANCHCHANGE, i, len_csms, prefix+"Changing branch of submodule %r from %s to %s" % (sm.name, psm.branch_path, sm.branch_path))
+ if not dry_run:
+ smm = sm.module()
+ smmr = smm.remotes
+ try:
+ tbr = git.Head.create(smm, sm.branch_name, logmsg='branch: Created from HEAD')
+ except OSError:
+ # ... or reuse the existing one
+ tbr = git.Head(smm, sm.branch_path)
+ #END assure tracking branch exists
+
+ tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch_name))
+ # figure out whether the previous tracking branch contains
+ # new commits compared to the other one, if not we can
+ # delete it.
+ try:
+ tbr = find_first_remote_branch(smmr, psm.branch_name)
+ if len(smm.git.cherry(tbr, psm.branch)) == 0:
+ psm.branch.delete(smm, psm.branch)
+ #END delete original tracking branch if there are no changes
+ except InvalidGitRepositoryError:
+ # ignore it if the previous branch couldn't be found in the
+ # current remotes, this just means we can't handle it
+ pass
+ # END exception handling
+
+ #NOTE: All checkout is done in the base implementation of update
+ #END handle dry_run
+
+ progress.update(END|BRANCHCHANGE, i, len_csms, prefix+"Done changing branch of submodule %r" % sm.name)
+ #END handle branch
+ #END handle
+ # END for each common submodule
+
+ # FINALLY UPDATE ALL ACTUAL SUBMODULES
+ ######################################
+ for sm in sms:
+ # update the submodule using the default method
+ sm.update(recursive=False, init=init, to_latest_revision=to_latest_revision,
+ progress=progress, dry_run=dry_run)
+
+ # update recursively depth first - question is which inconsistent
+ # state will be better in case it fails somewhere. Defective branch
+ # or defective depth. The RootSubmodule type will never process itself,
+ # as that was already done by the plain update call above
+ if recursive:
+ # the module would exist by now if we are not in dry_run mode
+ if sm.module_exists():
+ type(self)(sm.module()).update( recursive=True, force_remove=force_remove,
+ init=init, to_latest_revision=to_latest_revision,
+ progress=progress, dry_run=dry_run)
+ #END handle dry_run
+ #END handle recursive
+ # END for each submodule to update
+
+ def module(self):
+ """:return: the actual repository containing the submodules"""
+ return self.repo
+ #} END interface
+#} END classes
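+
+# Usage sketch (illustrative; the path is a placeholder): bringing all
+# submodules in line with the parent repository's new HEAD, e.g. after a pull.
+#
+#   >>> import git
+#   >>> from git.objects.submodule.root import RootModule
+#   >>> rm = RootModule(git.Repo('/path/to/parent'))
+#   >>> rm.update(recursive=True, dry_run=True)   # report what would be done
+#   >>> rm.update(recursive=True)                 # actually perform the update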
diff --git a/git/objects/submodule/util.py b/git/objects/submodule/util.py
new file mode 100644
index 00000000..9b32807a
--- /dev/null
+++ b/git/objects/submodule/util.py
@@ -0,0 +1,101 @@
+import git
+from git.exc import InvalidGitRepositoryError
+from git.config import GitConfigParser
+from StringIO import StringIO
+import weakref
+
+__all__ = ( 'sm_section', 'sm_name', 'mkhead', 'unbare_repo', 'find_first_remote_branch',
+ 'SubmoduleConfigParser')
+
+#{ Utilities
+
+def sm_section(name):
+ """:return: section title used in .gitmodules configuration file"""
+ return 'submodule "%s"' % name
+
+def sm_name(section):
+ """:return: name of the submodule as parsed from the section name"""
+ section = section.strip()
+ return section[11:-1]
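+
+# Round-trip sketch (illustrative):
+#
+#   >>> sm_section('gitdb')
+#   'submodule "gitdb"'
+#   >>> sm_name(sm_section('gitdb'))
+#   'gitdb'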
+
+def mkhead(repo, path):
+ """:return: New branch/head instance"""
+ return git.Head(repo, git.Head.to_full_path(path))
+
+def unbare_repo(func):
+ """Methods with this decorator raise InvalidGitRepositoryError if they
+ encounter a bare repository"""
+ def wrapper(self, *args, **kwargs):
+ if self.repo.bare:
+ raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)
+ #END bare method
+ return func(self, *args, **kwargs)
+ # END wrapper
+ wrapper.__name__ = func.__name__
+ return wrapper
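+
+# Usage sketch (illustrative): the decorator works on any method whose
+# instance exposes a 'repo' attribute, as Submodule instances do.
+#
+#   class Example(object):
+#       def __init__(self, repo):
+#           self.repo = repo
+#       @unbare_repo
+#       def checkout_dir(self):
+#           return self.repo.working_tree_dir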
+
+def find_first_remote_branch(remotes, branch_name):
+ """Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
+ for remote in remotes:
+ try:
+ return remote.refs[branch_name]
+ except IndexError:
+ continue
+ # END exception handling
+ #END for remote
+ raise InvalidGitRepositoryError("Didn't find remote branch %r in any of the given remotes", branch_name)
+
+#} END utilities
+
+
+#{ Classes
+
+class SubmoduleConfigParser(GitConfigParser):
+ """
+ Catches calls to write, and updates the .gitmodules blob in the index
+ with the new data, if we have written into a stream. Otherwise it will
+ add the local file to the index to make it correspond with the working tree.
+ Additionally, the cache must be cleared
+
+ Please note that no mutating method will work in bare mode
+ """
+
+ def __init__(self, *args, **kwargs):
+ self._smref = None
+ self._index = None
+ self._auto_write = True
+ super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
+
+ #{ Interface
+ def set_submodule(self, submodule):
+ """Set this instance's submodule. It must be called before
+ the first write operation begins"""
+ self._smref = weakref.ref(submodule)
+
+ def flush_to_index(self):
+ """Flush changes in our configuration file to the index"""
+ assert self._smref is not None
+ # should always have a file here
+ assert not isinstance(self._file_or_files, StringIO)
+
+ sm = self._smref()
+ if sm is not None:
+ index = self._index
+ if index is None:
+ index = sm.repo.index
+ # END handle index
+ index.add([sm.k_modules_file], write=self._auto_write)
+ sm._clear_cache()
+ # END handle weakref
+
+ #} END interface
+
+ #{ Overridden Methods
+ def write(self):
+ rval = super(SubmoduleConfigParser, self).write()
+ self.flush_to_index()
+ return rval
+ # END overridden methods
+
+
+#} END classes
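+
+# Usage sketch (illustrative): the parser is normally obtained through
+# Submodule.config_writer() rather than instantiated directly; each write
+# then transparently stages the changed .gitmodules file in the index.
+#
+#   >>> writer = sm.config_writer()    # sm: an existing Submodule instance
+#   >>> writer.set_value('url', 'git://example.com/new/url.git')
+#   >>> del(writer)                    # release handles once done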
diff --git a/git/objects/tag.py b/git/objects/tag.py
new file mode 100644
index 00000000..c7d02abe
--- /dev/null
+++ b/git/objects/tag.py
@@ -0,0 +1,76 @@
+# objects.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+""" Module containing all object based types. """
+import base
+from gitdb.util import hex_to_bin
+from util import (
+ get_object_type_by_name,
+ parse_actor_and_date
+ )
+
+__all__ = ("TagObject", )
+
+class TagObject(base.Object):
+ """Non-Lightweight tag carrying additional information about an object we are pointing to."""
+ type = "tag"
+ __slots__ = ( "object", "tag", "tagger", "tagged_date", "tagger_tz_offset", "message" )
+
+ def __init__(self, repo, binsha, object=None, tag=None,
+ tagger=None, tagged_date=None, tagger_tz_offset=None, message=None):
+ """Initialize a tag object with additional data
+
+ :param repo: repository this object is located in
+ :param binsha: 20 byte SHA1
+ :param object: Object instance of object we are pointing to
+ :param tag: name of this tag
+ :param tagger: Actor identifying the tagger
+ :param tagged_date: int_seconds_since_epoch
+ is the DateTime of the tag creation - use time.gmtime to convert
+ it into a different format
+ :param tagger_tz_offset: int_seconds_west_of_utc is the timezone that the
+ tagged_date is in, in a format similar to time.altzone"""
+ super(TagObject, self).__init__(repo, binsha )
+ if object is not None:
+ self.object = object
+ if tag is not None:
+ self.tag = tag
+ if tagger is not None:
+ self.tagger = tagger
+ if tagged_date is not None:
+ self.tagged_date = tagged_date
+ if tagger_tz_offset is not None:
+ self.tagger_tz_offset = tagger_tz_offset
+ if message is not None:
+ self.message = message
+
+ def _set_cache_(self, attr):
+ """Cache all our attributes at once"""
+ if attr in TagObject.__slots__:
+ ostream = self.repo.odb.stream(self.binsha)
+ lines = ostream.read().splitlines()
+
+ obj, hexsha = lines[0].split(" ") # object <hexsha>
+ type_token, type_name = lines[1].split(" ") # type <type_name>
+ self.object = get_object_type_by_name(type_name)(self.repo, hex_to_bin(hexsha))
+
+ self.tag = lines[2][4:] # tag <tag name>
+
+ tagger_info = lines[3][7:]# tagger <actor> <date>
+ self.tagger, self.tagged_date, self.tagger_tz_offset = parse_actor_and_date(tagger_info)
+
+ # line 4 empty - it could mark the beginning of the next header
+ # in case there really is no message, it would not exist. Otherwise
+ # a newline separates header from message
+ if len(lines) > 5:
+ self.message = "\n".join(lines[5:])
+ else:
+ self.message = ''
+ # END check our attributes
+ else:
+ super(TagObject, self)._set_cache_(attr)
+
+
+
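+# Usage sketch (illustrative; assumes an annotated tag named 'v0.1' exists):
+#
+#   >>> import git
+#   >>> repo = git.Repo('/path/to/repo')
+#   >>> tagobj = repo.tags['v0.1'].tag    # the TagObject behind the reference
+#   >>> tagobj.tag, tagobj.tagger, tagobj.message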
diff --git a/git/objects/tree.py b/git/objects/tree.py
new file mode 100644
index 00000000..67431686
--- /dev/null
+++ b/git/objects/tree.py
@@ -0,0 +1,280 @@
+# tree.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+import util
+from base import IndexObject
+from git.util import join_path
+from blob import Blob
+from submodule.base import Submodule
+import git.diff as diff
+
+from fun import (
+ tree_entries_from_data,
+ tree_to_stream
+ )
+
+from gitdb.util import (
+ to_bin_sha,
+ )
+
+__all__ = ("TreeModifier", "Tree")
+
+class TreeModifier(object):
+ """A utility class providing methods to alter the underlying cache in a list-like fashion.
+
+ Once all adjustments are complete, the _cache, which really is a reference to
+ the cache of a tree, will be sorted, assuring it will be in a serializable state"""
+ __slots__ = '_cache'
+
+ def __init__(self, cache):
+ self._cache = cache
+
+ def _index_by_name(self, name):
+ """:return: index of an item with name, or -1 if not found"""
+ for i, t in enumerate(self._cache):
+ if t[2] == name:
+ return i
+ # END found item
+ # END for each item in cache
+ return -1
+
+ #{ Interface
+ def set_done(self):
+ """Call this method once you are done modifying the tree information.
+ It may be called several times, but be aware that each call will cause
+ a sort operation
+ :return self:"""
+ self._cache.sort(key=lambda t: t[2]) # sort by name
+ return self
+ #} END interface
+
+ #{ Mutators
+ def add(self, sha, mode, name, force=False):
+ """Add the given item to the tree. If an item with the given name already
+ exists, nothing will be done, but a ValueError will be raised if the
+ sha and mode of the existing item do not match the one you add, unless
+ force is True
+
+ :param sha: The 20 or 40 byte sha of the item to add
+ :param mode: int representing the stat compatible mode of the item
+ :param force: If True, an item with your name and information will overwrite
+ any existing item with the same name, no matter which information it has
+ :return: self"""
+ if '/' in name:
+ raise ValueError("Name must not contain '/' characters")
+ if (mode >> 12) not in Tree._map_id_to_type:
+ raise ValueError("Invalid object type according to mode %o" % mode)
+
+ sha = to_bin_sha(sha)
+ index = self._index_by_name(name)
+ item = (sha, mode, name)
+ if index == -1:
+ self._cache.append(item)
+ else:
+ if force:
+ self._cache[index] = item
+ else:
+ ex_item = self._cache[index]
+ if ex_item[0] != sha or ex_item[1] != mode:
+ raise ValueError("Item %r existed with different properties" % name)
+ # END handle mismatch
+ # END handle force
+ # END handle name exists
+ return self
+
+ def add_unchecked(self, binsha, mode, name):
+ """Add the given item to the tree, its correctness is assumed, which
+ puts the caller into responsibility to assure the input is correct.
+ For more information on the parameters, see ``add``
+ :param binsha: 20 byte binary sha"""
+ self._cache.append((binsha, mode, name))
+
+ def __delitem__(self, name):
+ """Deletes an item with the given name if it exists"""
+ index = self._index_by_name(name)
+ if index > -1:
+ del(self._cache[index])
+
+ #} END mutators
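+
+# Usage sketch (illustrative; sha and names are placeholders): altering a
+# tree's entry cache in place before serializing it.
+#
+#   >>> mod = tree.cache                  # tree: an existing Tree instance
+#   >>> mod.add(hexsha_of_blob, 0100644, 'new_file')
+#   >>> del(mod['old_file'])
+#   >>> mod.set_done()                    # sort the cache - ready to serialize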
+
+
+class Tree(IndexObject, diff.Diffable, util.Traversable, util.Serializable):
+ """Tree objects represent an ordered list of Blobs and other Trees.
+
+ ``Tree as a list``::
+
+ Access a specific blob using the
+ tree['filename'] notation.
+
+ You may as well access by index
+ blob = tree[0]
+ """
+
+ type = "tree"
+ __slots__ = "_cache"
+
+ # actual integer ids for comparison
+ commit_id = 016 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link
+ blob_id = 010
+ symlink_id = 012
+ tree_id = 004
+
+ _map_id_to_type = {
+ commit_id : Submodule,
+ blob_id : Blob,
+ symlink_id : Blob
+ # tree id added once Tree is defined
+ }
+
+
+ def __init__(self, repo, binsha, mode=tree_id<<12, path=None):
+ super(Tree, self).__init__(repo, binsha, mode, path)
+
+ @classmethod
+ def _get_intermediate_items(cls, index_object):
+ if index_object.type == "tree":
+ return tuple(index_object._iter_convert_to_object(index_object._cache))
+ return tuple()
+
+ def _set_cache_(self, attr):
+ if attr == "_cache":
+ # Set the data when we need it
+ ostream = self.repo.odb.stream(self.binsha)
+ self._cache = tree_entries_from_data(ostream.read())
+ else:
+ super(Tree, self)._set_cache_(attr)
+ # END handle attribute
+
+ def _iter_convert_to_object(self, iterable):
+ """Iterable yields tuples of (binsha, mode, name), which will be converted
+ to the respective object representation"""
+ for binsha, mode, name in iterable:
+ path = join_path(self.path, name)
+ try:
+ yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
+ except KeyError:
+ raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path))
+ # END for each item
+
+ def __div__(self, file):
+ """Find the named object in this tree's contents
+ :return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``
+
+ :raise KeyError: if given file or tree does not exist in tree"""
+ msg = "Blob or Tree named %r not found"
+ if '/' in file:
+ tree = self
+ item = self
+ tokens = file.split('/')
+ for i,token in enumerate(tokens):
+ item = tree[token]
+ if item.type == 'tree':
+ tree = item
+ else:
+ # safety assertion - blobs are at the end of the path
+ if i != len(tokens)-1:
+ raise KeyError(msg % file)
+ return item
+ # END handle item type
+ # END for each token of split path
+ if item == self:
+ raise KeyError(msg % file)
+ return item
+ else:
+ for info in self._cache:
+ if info[2] == file: # [2] == name
+ return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
+ # END for each obj
+ raise KeyError( msg % file )
+ # END handle long paths
+
+
+ @property
+ def trees(self):
+ """:return: list(Tree, ...) list of trees directly below this tree"""
+ return [ i for i in self if i.type == "tree" ]
+
+ @property
+ def blobs(self):
+ """:return: list(Blob, ...) list of blobs directly below this tree"""
+ return [ i for i in self if i.type == "blob" ]
+
+ @property
+ def cache(self):
+ """
+ :return: An object allowing to modify the internal cache. This can be used
+ to change the tree's contents. When done, make sure you call ``set_done``
+ on the tree modifier, or serialization behaviour will be incorrect.
+ See the ``TreeModifier`` for more information on how to alter the cache"""
+ return TreeModifier(self._cache)
+
+ def traverse( self, predicate = lambda i,d: True,
+ prune = lambda i,d: False, depth = -1, branch_first=True,
+ visit_once = False, ignore_self=1 ):
+ """For documentation, see util.Traversable.traverse
+ Trees are set to visit_once = False to gain more performance in the traversal"""
+ return super(Tree, self).traverse(predicate, prune, depth, branch_first, visit_once, ignore_self)
+
+ # List protocol
+ def __getslice__(self, i, j):
+ return list(self._iter_convert_to_object(self._cache[i:j]))
+
+ def __iter__(self):
+ return self._iter_convert_to_object(self._cache)
+
+ def __len__(self):
+ return len(self._cache)
+
+ def __getitem__(self, item):
+ if isinstance(item, int):
+ info = self._cache[item]
+ return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
+
+ if isinstance(item, basestring):
+ # compatibility
+ return self.__div__(item)
+ # END index is basestring
+
+ raise TypeError( "Invalid index type: %r" % item )
+
+
+ def __contains__(self, item):
+ if isinstance(item, IndexObject):
+ for info in self._cache:
+ if item.binsha == info[0]:
+ return True
+ # END compare sha
+ # END for each entry
+ # END handle item is index object
+ # compatibility
+
+ # treat item as repo-relative path
+ path = self.path
+ for info in self._cache:
+ if item == join_path(path, info[2]):
+ return True
+ # END for each item
+ return False
+
+ def __reversed__(self):
+ return reversed(self._iter_convert_to_object(self._cache))
+
+ def _serialize(self, stream):
+ """Serialize this tree into the stream. Please note that we will assume
+ our tree data to be in a sorted state. If this is not the case, serialization
+ will not generate a correct tree representation as these are assumed to be sorted
+ by algorithms"""
+ tree_to_stream(self._cache, stream.write)
+ return self
+
+ def _deserialize(self, stream):
+ self._cache = tree_entries_from_data(stream.read())
+ return self
+
+
+# END tree
+
+# finalize map definition
+Tree._map_id_to_type[Tree.tree_id] = Tree
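+
+# Usage sketch (illustrative; 'lib' is a placeholder path): the list- and
+# path-style access defined above.
+#
+#   >>> import git
+#   >>> tree = git.Repo('/path/to/repo').head.commit.tree
+#   >>> tree['lib']                    # same as tree / 'lib' via __div__
+#   >>> tree[0]                        # first entry: Blob, Tree or Submodule
+#   >>> [b.path for b in tree.blobs]   # paths of all blobs directly below
+#   >>> 'lib' in tree                  # membership by repo-relative path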
diff --git a/git/objects/util.py b/git/objects/util.py
new file mode 100644
index 00000000..4c9323b8
--- /dev/null
+++ b/git/objects/util.py
@@ -0,0 +1,315 @@
+# util.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Module for general utility functions"""
+from git.util import (
+ IterableList,
+ Actor
+ )
+
+import re
+from collections import deque as Deque
+
+from string import digits
+import time
+import os
+
+__all__ = ('get_object_type_by_name', 'parse_date', 'parse_actor_and_date',
+ 'ProcessStreamAdapter', 'Traversable', 'altz_to_utctz_str', 'utctz_to_altz',
+ 'verify_utctz', 'Actor')
+
+#{ Functions
+
+def mode_str_to_int(modestr):
+ """
+ :param modestr: string like 755 or 644 or 100644 - only the last 6 chars will be used
+ :return:
+ int identifying a mode compatible to the mode ids of the
+ stat module regarding the rwx permissions for user, group and other,
+ special flags and file system flags, i.e. whether it is a symlink
+ for example."""
+ mode = 0
+ for iteration, char in enumerate(reversed(modestr[-6:])):
+ mode += int(char) << iteration*3
+ # END for each char
+ return mode
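+
+# Quick check (illustrative):
+#
+#   >>> oct(mode_str_to_int('100644'))
+#   '0100644'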
+
+def get_object_type_by_name(object_type_name):
+ """
+ :return: type suitable to handle the given object type name.
+ Use the type to create new instances.
+
+ :param object_type_name: Member of TYPES
+
+ :raise ValueError: In case object_type_name is unknown"""
+ if object_type_name == "commit":
+ import commit
+ return commit.Commit
+ elif object_type_name == "tag":
+ import tag
+ return tag.TagObject
+ elif object_type_name == "blob":
+ import blob
+ return blob.Blob
+ elif object_type_name == "tree":
+ import tree
+ return tree.Tree
+ else:
+ raise ValueError("Cannot handle unknown object type: %s" % object_type_name)
+
+def utctz_to_altz(utctz):
+ """we convert utctz to the timezone in seconds, it is the format time.altzone
+ returns. Git stores it as UTC timezone which has the opposite sign as well,
+ which explains the -1 * ( that was made explicit here )
+ :param utctz: git utc timezone string, i.e. +0200"""
+ return -1 * int(float(utctz)/100*3600)
+
+def altz_to_utctz_str(altz):
+ """As above, but inverses the operation, returning a string that can be used
+ in commit objects"""
+ utci = -1 * int((altz / 3600)*100)
+ utcs = str(abs(utci))
+ utcs = "0"*(4-len(utcs)) + utcs
+ prefix = (utci < 0 and '-') or '+'
+ return prefix + utcs
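+
+# Round-trip sketch (illustrative): +0200 is two hours east of UTC, which
+# time.altzone expresses as -7200 seconds west of UTC.
+#
+#   >>> utctz_to_altz('+0200')
+#   -7200
+#   >>> altz_to_utctz_str(-7200)
+#   '+0200'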
+
+
+def verify_utctz(offset):
+ """:raise ValueError: if offset is incorrect
+ :return: offset"""
+ fmt_exc = ValueError("Invalid timezone offset format: %s" % offset)
+ if len(offset) != 5:
+ raise fmt_exc
+ if offset[0] not in "+-":
+ raise fmt_exc
+ if offset[1] not in digits or \
+ offset[2] not in digits or \
+ offset[3] not in digits or \
+ offset[4] not in digits:
+ raise fmt_exc
+ # END for each char
+ return offset
+
+def parse_date(string_date):
+ """
+ Parse the given date as one of the following
+
+ * Git internal format: timestamp offset
+ * RFC 2822: Thu, 07 Apr 2005 22:13:13 +0200.
+ * ISO 8601 2005-04-07T22:13:13
+ The T can be a space as well
+
+ :return: Tuple(int(timestamp), int(offset)), both in seconds since epoch
+ :raise ValueError: If the format could not be understood
+ :note: Date can also be YYYY.MM.DD, MM/DD/YYYY and DD.MM.YYYY"""
+ # git time
+ try:
+ if string_date.count(' ') == 1 and string_date.rfind(':') == -1:
+ timestamp, offset = string_date.split()
+ timestamp = int(timestamp)
+ return timestamp, utctz_to_altz(verify_utctz(offset))
+ else:
+ offset = "+0000" # local time by default
+ if string_date[-5] in '-+':
+ offset = verify_utctz(string_date[-5:])
+ string_date = string_date[:-6] # skip space as well
+ # END split timezone info
+
+ # now figure out the date and time portion - split time
+ date_formats = list()
+ splitter = -1
+ if ',' in string_date:
+ date_formats.append("%a, %d %b %Y")
+ splitter = string_date.rfind(' ')
+ else:
+ # iso plus additional
+ date_formats.append("%Y-%m-%d")
+ date_formats.append("%Y.%m.%d")
+ date_formats.append("%m/%d/%Y")
+ date_formats.append("%d.%m.%Y")
+
+ splitter = string_date.rfind('T')
+ if splitter == -1:
+ splitter = string_date.rfind(' ')
+ # END handle 'T' and ' '
+ # END handle rfc or iso
+
+ assert splitter > -1
+
+ # split date and time
+ time_part = string_date[splitter+1:] # skip space
+ date_part = string_date[:splitter]
+
+ # parse time
+ tstruct = time.strptime(time_part, "%H:%M:%S")
+
+ for fmt in date_formats:
+ try:
+ dtstruct = time.strptime(date_part, fmt)
+ fstruct = time.struct_time((dtstruct.tm_year, dtstruct.tm_mon, dtstruct.tm_mday,
+ tstruct.tm_hour, tstruct.tm_min, tstruct.tm_sec,
+ dtstruct.tm_wday, dtstruct.tm_yday, tstruct.tm_isdst))
+ return int(time.mktime(fstruct)), utctz_to_altz(offset)
+ except ValueError:
+ continue
+ # END exception handling
+ # END for each fmt
+
+ # still here ? fail
+ raise ValueError("no format matched")
+ # END handle format
+ except Exception:
+ raise ValueError("Unsupported date format: %s" % string_date)
+ # END handle exceptions
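+
+# Examples of accepted inputs (illustrative; the returned offset is in
+# time.altzone format, i.e. seconds west of UTC):
+#
+#   >>> parse_date('1191999972 -0700')            # git internal format
+#   (1191999972, 25200)
+#   >>> parse_date('2005-04-07T22:13:13 +0200')   # ISO 8601; timestamp depends on local time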
+
+
+# precompiled regex
+_re_actor_epoch = re.compile(r'^.+? (.*) (\d+) ([+-]\d+).*$')
+
+def parse_actor_and_date(line):
+ """Parse out the actor (author or committer) info from a line like::
+
+ author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700
+
+ :return: [Actor, int_seconds_since_epoch, int_timezone_offset]"""
+ m = _re_actor_epoch.search(line)
+ actor, epoch, offset = m.groups()
+ return (Actor._from_string(actor), int(epoch), utctz_to_altz(offset))
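+
+# Example (illustrative):
+#
+#   >>> parse_actor_and_date('author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700')
+#   (<git.Actor "Tom Preston-Werner <tom@mojombo.com>">, 1191999972, 25200)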
+
+
+#} END functions
+
+
+#{ Classes
+
+class ProcessStreamAdapter(object):
+ """Class wireing all calls to the contained Process instance.
+
+ Use this type to hide the underlying process to provide access only to a specified
+ stream. The process is usually wrapped into an AutoInterrupt class to kill
+ it if the instance goes out of scope."""
+ __slots__ = ("_proc", "_stream")
+ def __init__(self, process, stream_name):
+ self._proc = process
+ self._stream = getattr(process, stream_name)
+
+ def __getattr__(self, attr):
+ return getattr(self._stream, attr)
+
+
+class Traversable(object):
+ """Simple interface to perforam depth-first or breadth-first traversals
+ into one direction.
+ Subclasses only need to implement one function.
+ Instances of the Subclass must be hashable"""
+ __slots__ = tuple()
+
+ @classmethod
+ def _get_intermediate_items(cls, item):
+ """
+ :return:
+ list of items connected to the given item.
+ Must be implemented in subclass
+ """
+ raise NotImplementedError("To be implemented in subclass")
+
+ def list_traverse(self, *args, **kwargs):
+ """
+ :return: IterableList with the results of the traversal as produced by
+ traverse()"""
+ out = IterableList(self._id_attribute_)
+ out.extend(self.traverse(*args, **kwargs))
+ return out
+
+ def traverse( self, predicate = lambda i,d: True,
+ prune = lambda i,d: False, depth = -1, branch_first=True,
+ visit_once = True, ignore_self=1, as_edge = False ):
+ """:return: iterator yieling of items found when traversing self
+
+ :param predicate: f(i,d) returns False if item i at depth d should not be included in the result
+
+ :param prune:
+ f(i,d) return True if the search should stop at item i at depth d.
+ Item i will not be returned.
+
+ :param depth:
+ define at which level the iteration should not go deeper
+ if -1, there is no limit
+ if 0, you would effectively only get self, the root of the iteration
+ i.e. if 1, you would only get the first level of predecessors/successors
+
+ :param branch_first:
+ if True, items will be returned branch first, otherwise depth first
+
+ :param visit_once:
+ if True, items will only be returned once, although they might be encountered
+ several times. Loops are prevented that way.
+
+ :param ignore_self:
+ if True, self will be ignored and automatically pruned from
+ the result. Otherwise it will be the first item to be returned.
+ If as_edge is True, the source of the first edge is None
+
+ :param as_edge:
+ if True, return a pair of items, first being the source, second the
+ destination, i.e. tuple(src, dest) with the edge spanning from
+ source to destination"""
+ visited = set()
+ stack = Deque()
+ stack.append( ( 0 ,self, None ) ) # self is always depth level 0
+
+ def addToStack( stack, item, branch_first, depth ):
+ lst = self._get_intermediate_items( item )
+ if not lst:
+ return
+ if branch_first:
+ stack.extendleft( ( depth , i, item ) for i in lst )
+ else:
+ reviter = ( ( depth , lst[i], item ) for i in range( len( lst )-1,-1,-1) )
+ stack.extend( reviter )
+ # END addToStack local method
+
+ while stack:
+ d, item, src = stack.pop() # depth of item, item, item_source
+
+ if visit_once and item in visited:
+ continue
+
+ if visit_once:
+ visited.add(item)
+
+ rval = ( as_edge and (src, item) ) or item
+ if prune( rval, d ):
+ continue
+
+ skipStartItem = ignore_self and ( item is self )
+ if not skipStartItem and predicate( rval, d ):
+ yield rval
+
+ # only continue to next level if this is appropriate !
+ nd = d + 1
+ if depth > -1 and nd > depth:
+ continue
+
+ addToStack( stack, item, branch_first, nd )
+ # END for each item on work stack
+
+
+class Serializable(object):
+ """Defines methods to serialize and deserialize objects from and into a data stream"""
+ __slots__ = tuple()
+
+ def _serialize(self, stream):
+ """Serialize the data of this object into the given data stream
+ :note: a serialized object would ``_deserialize`` into the same object
+ :param stream: a file-like object
+ :return: self"""
+ raise NotImplementedError("To be implemented in subclass")
+
+ def _deserialize(self, stream):
+ """Deserialize all information regarding this object from the stream
+ :param stream: a file-like object
+ :return: self"""
+ raise NotImplementedError("To be implemented in subclass")
diff --git a/git/odict.py b/git/odict.py
new file mode 100644
index 00000000..2c8391d7
--- /dev/null
+++ b/git/odict.py
@@ -0,0 +1,1399 @@
+# odict.py
+# An Ordered Dictionary object
+# Copyright (C) 2005 Nicola Larosa, Michael Foord
+# E-mail: nico AT tekNico DOT net, fuzzyman AT voidspace DOT org DOT uk
+
+# This software is licensed under the terms of the BSD license.
+# http://www.voidspace.org.uk/python/license.shtml
+# Basically you're free to copy, modify, distribute and relicense it,
+# So long as you keep a copy of the license with it.
+
+# Documentation at http://www.voidspace.org.uk/python/odict.html
+# For information about bugfixes, updates and support, please join the
+# Pythonutils mailing list:
+# http://groups.google.com/group/pythonutils/
+# Comments, suggestions and bug reports welcome.
+
+"""A dict that keeps keys in insertion order"""
+from __future__ import generators
+
+__author__ = ('Nicola Larosa <nico-NoSp@m-tekNico.net>,'
+ 'Michael Foord <fuzzyman AT voidspace DOT org DOT uk>')
+
+__docformat__ = "restructuredtext en"
+
+__revision__ = '$Id: odict.py 129 2005-09-12 18:15:28Z teknico $'
+
+__version__ = '0.2.2'
+
+__all__ = ['OrderedDict', 'SequenceOrderedDict']
+
+import sys
+INTP_VER = sys.version_info[:2]
+if INTP_VER < (2, 2):
+ raise RuntimeError("Python v.2.2 or later required")
+
+import types, warnings
+
+class OrderedDict(dict):
+ """
+ A class of dictionary that keeps the insertion order of keys.
+
+ All appropriate methods return keys, items, or values in an ordered way.
+
+ All normal dictionary methods are available. Update and comparison is
+ restricted to other OrderedDict objects.
+
+ Various sequence methods are available, including the ability to explicitly
+ mutate the key ordering.
+
+ __contains__ tests:
+
+ >>> d = OrderedDict(((1, 3),))
+ >>> 1 in d
+ 1
+ >>> 4 in d
+ 0
+
+ __getitem__ tests:
+
+ >>> OrderedDict(((1, 3), (3, 2), (2, 1)))[2]
+ 1
+ >>> OrderedDict(((1, 3), (3, 2), (2, 1)))[4]
+ Traceback (most recent call last):
+ KeyError: 4
+
+ __len__ tests:
+
+ >>> len(OrderedDict())
+ 0
+ >>> len(OrderedDict(((1, 3), (3, 2), (2, 1))))
+ 3
+
+ get tests:
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.get(1)
+ 3
+ >>> d.get(4) is None
+ 1
+ >>> d.get(4, 5)
+ 5
+ >>> d
+ OrderedDict([(1, 3), (3, 2), (2, 1)])
+
+ has_key tests:
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.has_key(1)
+ 1
+ >>> d.has_key(4)
+ 0
+ """
+
+ def __init__(self, init_val=(), strict=False):
+ """
+ Create a new ordered dictionary. Cannot init from a normal dict,
+ nor from kwargs, since item order is undefined in those cases.
+
+ If the ``strict`` keyword argument is ``True`` (``False`` is the
+ default) then when doing slice assignment - the ``OrderedDict`` you are
+ assigning from *must not* contain any keys in the remaining dict.
+
+ >>> OrderedDict()
+ OrderedDict([])
+ >>> OrderedDict({1: 1})
+ Traceback (most recent call last):
+ TypeError: undefined order, cannot get items from dict
+ >>> OrderedDict({1: 1}.items())
+ OrderedDict([(1, 1)])
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d
+ OrderedDict([(1, 3), (3, 2), (2, 1)])
+ >>> OrderedDict(d)
+ OrderedDict([(1, 3), (3, 2), (2, 1)])
+ """
+ self.strict = strict
+ dict.__init__(self)
+ if isinstance(init_val, OrderedDict):
+ self._sequence = init_val.keys()
+ dict.update(self, init_val)
+ elif isinstance(init_val, dict):
+ # we lose compatibility with other ordered dict types this way
+ raise TypeError('undefined order, cannot get items from dict')
+ else:
+ self._sequence = []
+ self.update(init_val)
+
+### Special methods ###
+
+ def __delitem__(self, key):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> del d[3]
+ >>> d
+ OrderedDict([(1, 3), (2, 1)])
+ >>> del d[3]
+ Traceback (most recent call last):
+ KeyError: 3
+ >>> d[3] = 2
+ >>> d
+ OrderedDict([(1, 3), (2, 1), (3, 2)])
+ >>> del d[0:1]
+ >>> d
+ OrderedDict([(2, 1), (3, 2)])
+ """
+ if isinstance(key, types.SliceType):
+ # FIXME: efficiency?
+ keys = self._sequence[key]
+ for entry in keys:
+ dict.__delitem__(self, entry)
+ del self._sequence[key]
+ else:
+ # do the dict.__delitem__ *first* as it raises
+ # the more appropriate error
+ dict.__delitem__(self, key)
+ self._sequence.remove(key)
+
+ def __eq__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d == OrderedDict(d)
+ True
+ >>> d == OrderedDict(((1, 3), (2, 1), (3, 2)))
+ False
+ >>> d == OrderedDict(((1, 0), (3, 2), (2, 1)))
+ False
+ >>> d == OrderedDict(((0, 3), (3, 2), (2, 1)))
+ False
+ >>> d == dict(d)
+ False
+ >>> d == False
+ False
+ """
+ if isinstance(other, OrderedDict):
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return (self.items() == other.items())
+ else:
+ return False
+
+ def __lt__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> c = OrderedDict(((0, 3), (3, 2), (2, 1)))
+ >>> c < d
+ True
+ >>> d < c
+ False
+ >>> d < dict(c)
+ Traceback (most recent call last):
+ TypeError: Can only compare with other OrderedDicts
+ """
+ if not isinstance(other, OrderedDict):
+ raise TypeError('Can only compare with other OrderedDicts')
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return (self.items() < other.items())
+
+ def __le__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> c = OrderedDict(((0, 3), (3, 2), (2, 1)))
+ >>> e = OrderedDict(d)
+ >>> c <= d
+ True
+ >>> d <= c
+ False
+ >>> d <= dict(c)
+ Traceback (most recent call last):
+ TypeError: Can only compare with other OrderedDicts
+ >>> d <= e
+ True
+ """
+ if not isinstance(other, OrderedDict):
+ raise TypeError('Can only compare with other OrderedDicts')
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return (self.items() <= other.items())
+
+ def __ne__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d != OrderedDict(d)
+ False
+ >>> d != OrderedDict(((1, 3), (2, 1), (3, 2)))
+ True
+ >>> d != OrderedDict(((1, 0), (3, 2), (2, 1)))
+ True
+ >>> d == OrderedDict(((0, 3), (3, 2), (2, 1)))
+ False
+ >>> d != dict(d)
+ True
+ >>> d != False
+ True
+ """
+ if isinstance(other, OrderedDict):
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return not (self.items() == other.items())
+ else:
+ return True
+
+ def __gt__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> c = OrderedDict(((0, 3), (3, 2), (2, 1)))
+ >>> d > c
+ True
+ >>> c > d
+ False
+ >>> d > dict(c)
+ Traceback (most recent call last):
+ TypeError: Can only compare with other OrderedDicts
+ """
+ if not isinstance(other, OrderedDict):
+ raise TypeError('Can only compare with other OrderedDicts')
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return (self.items() > other.items())
+
+ def __ge__(self, other):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> c = OrderedDict(((0, 3), (3, 2), (2, 1)))
+ >>> e = OrderedDict(d)
+ >>> c >= d
+ False
+ >>> d >= c
+ True
+ >>> d >= dict(c)
+ Traceback (most recent call last):
+ TypeError: Can only compare with other OrderedDicts
+ >>> e >= d
+ True
+ """
+ if not isinstance(other, OrderedDict):
+ raise TypeError('Can only compare with other OrderedDicts')
+ # FIXME: efficiency?
+ # Generate both item lists for each compare
+ return (self.items() >= other.items())
+
+ def __repr__(self):
+ """
+ Used for __repr__ and __str__
+
+ >>> r1 = repr(OrderedDict((('a', 'b'), ('c', 'd'), ('e', 'f'))))
+ >>> r1
+ "OrderedDict([('a', 'b'), ('c', 'd'), ('e', 'f')])"
+ >>> r2 = repr(OrderedDict((('a', 'b'), ('e', 'f'), ('c', 'd'))))
+ >>> r2
+ "OrderedDict([('a', 'b'), ('e', 'f'), ('c', 'd')])"
+ >>> r1 == str(OrderedDict((('a', 'b'), ('c', 'd'), ('e', 'f'))))
+ True
+ >>> r2 == str(OrderedDict((('a', 'b'), ('e', 'f'), ('c', 'd'))))
+ True
+ """
+ return '%s([%s])' % (self.__class__.__name__, ', '.join(
+ ['(%r, %r)' % (key, self[key]) for key in self._sequence]))
+
+ def __setitem__(self, key, val):
+ """
+ Allows slice assignment, so long as the slice is an OrderedDict
+ >>> d = OrderedDict()
+ >>> d['a'] = 'b'
+ >>> d['b'] = 'a'
+ >>> d[3] = 12
+ >>> d
+ OrderedDict([('a', 'b'), ('b', 'a'), (3, 12)])
+ >>> d[:] = OrderedDict(((1, 2), (2, 3), (3, 4)))
+ >>> d
+ OrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> d[::2] = OrderedDict(((7, 8), (9, 10)))
+ >>> d
+ OrderedDict([(7, 8), (2, 3), (9, 10)])
+ >>> d = OrderedDict(((0, 1), (1, 2), (2, 3), (3, 4)))
+ >>> d[1:3] = OrderedDict(((1, 2), (5, 6), (7, 8)))
+ >>> d
+ OrderedDict([(0, 1), (1, 2), (5, 6), (7, 8), (3, 4)])
+ >>> d = OrderedDict(((0, 1), (1, 2), (2, 3), (3, 4)), strict=True)
+ >>> d[1:3] = OrderedDict(((1, 2), (5, 6), (7, 8)))
+ >>> d
+ OrderedDict([(0, 1), (1, 2), (5, 6), (7, 8), (3, 4)])
+
+ >>> a = OrderedDict(((0, 1), (1, 2), (2, 3)), strict=True)
+ >>> a[3] = 4
+ >>> a
+ OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a[::1] = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a
+ OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a[:2] = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)])
+ Traceback (most recent call last):
+ ValueError: slice assignment must be from unique keys
+ >>> a = OrderedDict(((0, 1), (1, 2), (2, 3)))
+ >>> a[3] = 4
+ >>> a
+ OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a[::1] = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a
+ OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a[:2] = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a
+ OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a[::-1] = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> a
+ OrderedDict([(3, 4), (2, 3), (1, 2), (0, 1)])
+
+ >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> d[:1] = 3
+ Traceback (most recent call last):
+ TypeError: slice assignment requires an OrderedDict
+
+ >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
+ >>> d[:1] = OrderedDict([(9, 8)])
+ >>> d
+ OrderedDict([(9, 8), (1, 2), (2, 3), (3, 4)])
+ """
+ if isinstance(key, types.SliceType):
+ if not isinstance(val, OrderedDict):
+ # FIXME: allow a list of tuples?
+ raise TypeError('slice assignment requires an OrderedDict')
+ keys = self._sequence[key]
+ # NOTE: Could use ``range(*key.indices(len(self._sequence)))``
+ indexes = range(len(self._sequence))[key]
+ if key.step is None:
+ # NOTE: new slice may not be the same size as the one being
+ # overwritten !
+ # NOTE: What is the algorithm for an impossible slice?
+ # e.g. d[5:3]
+ pos = key.start or 0
+ del self[key]
+ newkeys = val.keys()
+ for k in newkeys:
+ if k in self:
+ if self.strict:
+ raise ValueError('slice assignment must be from '
+ 'unique keys')
+ else:
+ # NOTE: This removes duplicate keys *first*
+ # so start position might have changed?
+ del self[k]
+ self._sequence = (self._sequence[:pos] + newkeys +
+ self._sequence[pos:])
+ dict.update(self, val)
+ else:
+ # extended slice - length of new slice must be the same
+ # as the one being replaced
+ if len(keys) != len(val):
+ raise ValueError('attempt to assign sequence of size %s '
+ 'to extended slice of size %s' % (len(val), len(keys)))
+ # FIXME: efficiency?
+ del self[key]
+ item_list = zip(indexes, val.items())
+ # smallest indexes first - higher indexes not guaranteed to
+ # exist
+ item_list.sort()
+ for pos, (newkey, newval) in item_list:
+ if self.strict and newkey in self:
+ raise ValueError('slice assignment must be from unique'
+ ' keys')
+ self.insert(pos, newkey, newval)
+ else:
+ if key not in self:
+ self._sequence.append(key)
+ dict.__setitem__(self, key, val)
+
+ def __getitem__(self, key):
+ """
+ Allows slicing. Returns an OrderedDict if you slice.
+ >>> b = OrderedDict([(7, 0), (6, 1), (5, 2), (4, 3), (3, 4), (2, 5), (1, 6)])
+ >>> b[::-1]
+ OrderedDict([(1, 6), (2, 5), (3, 4), (4, 3), (5, 2), (6, 1), (7, 0)])
+ >>> b[2:5]
+ OrderedDict([(5, 2), (4, 3), (3, 4)])
+ >>> type(b[2:4])
+ <class '__main__.OrderedDict'>
+ """
+ if isinstance(key, types.SliceType):
+ # FIXME: does this raise the error we want?
+ keys = self._sequence[key]
+ # FIXME: efficiency?
+ return OrderedDict([(entry, self[entry]) for entry in keys])
+ else:
+ return dict.__getitem__(self, key)
+
+ __str__ = __repr__
+
+ def __setattr__(self, name, value):
+ """
+ Implemented so that accesses to ``sequence`` raise a warning and are
+ diverted to the new ``setkeys`` method.
+ """
+ if name == 'sequence':
+ warnings.warn('Use of the sequence attribute is deprecated.'
+ ' Use the keys method instead.', DeprecationWarning)
+ # NOTE: doesn't return anything
+ self.setkeys(value)
+ else:
+ # FIXME: do we want to allow arbitrary setting of attributes?
+ # Or do we want to manage it?
+ object.__setattr__(self, name, value)
+
+ def __getattr__(self, name):
+ """
+ Implemented so that access to ``sequence`` raises a warning.
+
+ >>> d = OrderedDict()
+ >>> d.sequence
+ []
+ """
+ if name == 'sequence':
+ warnings.warn('Use of the sequence attribute is deprecated.'
+ ' Use the keys method instead.', DeprecationWarning)
+ # NOTE: Still (currently) returns a direct reference. Need to
+ # because code that uses sequence will expect to be able to
+ # mutate it in place.
+ return self._sequence
+ else:
+ # raise the appropriate error
+ raise AttributeError("OrderedDict has no '%s' attribute" % name)
+
+ def __deepcopy__(self, memo):
+ """
+ To allow deepcopy to work with OrderedDict.
+
+ >>> from copy import deepcopy
+ >>> a = OrderedDict([(1, 1), (2, 2), (3, 3)])
+ >>> a['test'] = {}
+ >>> b = deepcopy(a)
+ >>> b == a
+ True
+ >>> b is a
+ False
+ >>> a['test'] is b['test']
+ False
+ """
+ from copy import deepcopy
+ return self.__class__(deepcopy(self.items(), memo), self.strict)
+
+
+### Read-only methods ###
+
+ def copy(self):
+ """
+ >>> OrderedDict(((1, 3), (3, 2), (2, 1))).copy()
+ OrderedDict([(1, 3), (3, 2), (2, 1)])
+ """
+ return OrderedDict(self)
+
+ def items(self):
+ """
+ ``items`` returns a list of tuples representing all the
+ ``(key, value)`` pairs in the dictionary.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.items()
+ [(1, 3), (3, 2), (2, 1)]
+ >>> d.clear()
+ >>> d.items()
+ []
+ """
+ return zip(self._sequence, self.values())
+
+ def keys(self):
+ """
+ Return a list of keys in the ``OrderedDict``.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.keys()
+ [1, 3, 2]
+ """
+ return self._sequence[:]
+
+ def values(self, values=None):
+ """
+ Return a list of all the values in the OrderedDict.
+
+ Optionally you can pass in a list of values, which will replace the
+ current list. The value list must be the same len as the OrderedDict.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.values()
+ [3, 2, 1]
+ """
+ return [self[key] for key in self._sequence]
+
+ def iteritems(self):
+ """
+ >>> ii = OrderedDict(((1, 3), (3, 2), (2, 1))).iteritems()
+ >>> ii.next()
+ (1, 3)
+ >>> ii.next()
+ (3, 2)
+ >>> ii.next()
+ (2, 1)
+ >>> ii.next()
+ Traceback (most recent call last):
+ StopIteration
+ """
+ def make_iter(self=self):
+ keys = self.iterkeys()
+ while True:
+ key = keys.next()
+ yield (key, self[key])
+ return make_iter()
+
+ def iterkeys(self):
+ """
+ >>> ii = OrderedDict(((1, 3), (3, 2), (2, 1))).iterkeys()
+ >>> ii.next()
+ 1
+ >>> ii.next()
+ 3
+ >>> ii.next()
+ 2
+ >>> ii.next()
+ Traceback (most recent call last):
+ StopIteration
+ """
+ return iter(self._sequence)
+
+ __iter__ = iterkeys
+
+ def itervalues(self):
+ """
+ >>> iv = OrderedDict(((1, 3), (3, 2), (2, 1))).itervalues()
+ >>> iv.next()
+ 3
+ >>> iv.next()
+ 2
+ >>> iv.next()
+ 1
+ >>> iv.next()
+ Traceback (most recent call last):
+ StopIteration
+ """
+ def make_iter(self=self):
+ keys = self.iterkeys()
+ while True:
+ yield self[keys.next()]
+ return make_iter()
+
+### Read-write methods ###
+
+ def clear(self):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.clear()
+ >>> d
+ OrderedDict([])
+ """
+ dict.clear(self)
+ self._sequence = []
+
+ def pop(self, key, *args):
+ """
+ ``dict.pop`` does not exist in Python 2.2, so we reimplement it here.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.pop(3)
+ 2
+ >>> d
+ OrderedDict([(1, 3), (2, 1)])
+ >>> d.pop(4)
+ Traceback (most recent call last):
+ KeyError: 4
+ >>> d.pop(4, 0)
+ 0
+ >>> d.pop(4, 0, 1)
+ Traceback (most recent call last):
+ TypeError: pop expected at most 2 arguments, got 3
+ """
+ if len(args) > 1:
+ raise TypeError('pop expected at most 2 arguments, got %s' %
+ (len(args) + 1))
+ if key in self:
+ val = self[key]
+ del self[key]
+ else:
+ try:
+ val = args[0]
+ except IndexError:
+ raise KeyError(key)
+ return val
+
+ def popitem(self, i=-1):
+ """
+ Delete and return an item specified by index, not a random one as in
+ dict. The index is -1 by default (the last item).
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.popitem()
+ (2, 1)
+ >>> d
+ OrderedDict([(1, 3), (3, 2)])
+ >>> d.popitem(0)
+ (1, 3)
+ >>> OrderedDict().popitem()
+ Traceback (most recent call last):
+ KeyError: 'popitem(): dictionary is empty'
+ >>> d.popitem(2)
+ Traceback (most recent call last):
+ IndexError: popitem(): index 2 not valid
+ """
+ if not self._sequence:
+ raise KeyError('popitem(): dictionary is empty')
+ try:
+ key = self._sequence[i]
+ except IndexError:
+ raise IndexError('popitem(): index %s not valid' % i)
+ return (key, self.pop(key))
+
+ def setdefault(self, key, defval = None):
+ """
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.setdefault(1)
+ 3
+ >>> d.setdefault(4) is None
+ True
+ >>> d
+ OrderedDict([(1, 3), (3, 2), (2, 1), (4, None)])
+ >>> d.setdefault(5, 0)
+ 0
+ >>> d
+ OrderedDict([(1, 3), (3, 2), (2, 1), (4, None), (5, 0)])
+ """
+ if key in self:
+ return self[key]
+ else:
+ self[key] = defval
+ return defval
+
+ def update(self, from_od):
+ """
+ Update from another OrderedDict or sequence of (key, value) pairs
+
+ >>> d = OrderedDict(((1, 0), (0, 1)))
+ >>> d.update(OrderedDict(((1, 3), (3, 2), (2, 1))))
+ >>> d
+ OrderedDict([(1, 3), (0, 1), (3, 2), (2, 1)])
+ >>> d.update({4: 4})
+ Traceback (most recent call last):
+ TypeError: undefined order, cannot get items from dict
+ >>> d.update((4, 4))
+ Traceback (most recent call last):
+ TypeError: cannot convert dictionary update sequence element "4" to a 2-item sequence
+ """
+ if isinstance(from_od, OrderedDict):
+ for key, val in from_od.items():
+ self[key] = val
+ elif isinstance(from_od, dict):
+ # we lose compatibility with other ordered dict types this way
+ raise TypeError('undefined order, cannot get items from dict')
+ else:
+ # FIXME: efficiency?
+ # sequence of 2-item sequences, or error
+ for item in from_od:
+ try:
+ key, val = item
+ except TypeError:
+ raise TypeError('cannot convert dictionary update'
+ ' sequence element "%s" to a 2-item sequence' % item)
+ self[key] = val
+
+ def rename(self, old_key, new_key):
+ """
+ Rename the key for a given value, without modifying sequence order.
+
+ For the case where new_key already exists, this raises an exception,
+ since if new_key exists it is ambiguous what should happen to the
+ associated values, and to the position of new_key in the sequence.
+
+ >>> od = OrderedDict()
+ >>> od['a'] = 1
+ >>> od['b'] = 2
+ >>> od.items()
+ [('a', 1), ('b', 2)]
+ >>> od.rename('b', 'c')
+ >>> od.items()
+ [('a', 1), ('c', 2)]
+ >>> od.rename('c', 'a')
+ Traceback (most recent call last):
+ ValueError: New key already exists: 'a'
+ >>> od.rename('d', 'b')
+ Traceback (most recent call last):
+ KeyError: 'd'
+ """
+ if new_key == old_key:
+ # no-op
+ return
+ if new_key in self:
+ raise ValueError("New key already exists: %r" % new_key)
+ # rename sequence entry
+ value = self[old_key]
+ old_idx = self._sequence.index(old_key)
+ self._sequence[old_idx] = new_key
+ # rename internal dict entry
+ dict.__delitem__(self, old_key)
+ dict.__setitem__(self, new_key, value)
+
+ def setitems(self, items):
+ """
+ This method allows you to set the items in the dict.
+
+ It takes a list of tuples - of the same sort returned by the ``items``
+ method.
+
+ >>> d = OrderedDict()
+ >>> d.setitems(((3, 1), (2, 3), (1, 2)))
+ >>> d
+ OrderedDict([(3, 1), (2, 3), (1, 2)])
+ """
+ self.clear()
+ # FIXME: this allows you to pass in an OrderedDict as well :-)
+ self.update(items)
+
+ def setkeys(self, keys):
+ """
+ ``setkeys`` allows you to pass in a new list of keys which will
+ replace the current set. This must contain the same set of keys, but
+ need not be in the same order.
+
+ If you pass in new keys that don't match, a ``KeyError`` will be
+ raised.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.keys()
+ [1, 3, 2]
+ >>> d.setkeys((1, 2, 3))
+ >>> d
+ OrderedDict([(1, 3), (2, 1), (3, 2)])
+ >>> d.setkeys(['a', 'b', 'c'])
+ Traceback (most recent call last):
+ KeyError: 'Keylist is not the same as current keylist.'
+ """
+ # FIXME: Efficiency? (use set for Python 2.4 :-)
+ # NOTE: list(keys) rather than keys[:] because keys[:] returns
+ # a tuple, if keys is a tuple.
+ kcopy = list(keys)
+ kcopy.sort()
+ # sort a copy of the current sequence so a failed check does not
+ # scramble our existing key order
+ scopy = self._sequence[:]
+ scopy.sort()
+ if kcopy != scopy:
+ raise KeyError('Keylist is not the same as current keylist.')
+ # NOTE: This makes the _sequence attribute a new object, instead
+ # of changing it in place.
+ # FIXME: efficiency?
+ self._sequence = list(keys)
+
+ def setvalues(self, values):
+ """
+ You can pass in a list of values, which will replace the
+ current list. The value list must be the same len as the OrderedDict.
+
+ (Or a ``ValueError`` is raised.)
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.setvalues((1, 2, 3))
+ >>> d
+ OrderedDict([(1, 1), (3, 2), (2, 3)])
+ >>> d.setvalues([6])
+ Traceback (most recent call last):
+ ValueError: Value list is not the same length as the OrderedDict.
+ """
+ if len(values) != len(self):
+ # FIXME: correct error to raise?
+ raise ValueError('Value list is not the same length as the '
+ 'OrderedDict.')
+ self.update(zip(self, values))
+
+### Sequence Methods ###
+
+ def index(self, key):
+ """
+ Return the position of the specified key in the OrderedDict.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.index(3)
+ 1
+ >>> d.index(4)
+ Traceback (most recent call last):
+ ValueError: list.index(x): x not in list
+ """
+ return self._sequence.index(key)
+
+ def insert(self, index, key, value):
+ """
+ Takes ``index``, ``key``, and ``value`` as arguments.
+
+ Sets ``key`` to ``value``, so that ``key`` is at position ``index`` in
+ the OrderedDict.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.insert(0, 4, 0)
+ >>> d
+ OrderedDict([(4, 0), (1, 3), (3, 2), (2, 1)])
+ >>> d.insert(0, 2, 1)
+ >>> d
+ OrderedDict([(2, 1), (4, 0), (1, 3), (3, 2)])
+ >>> d.insert(8, 8, 1)
+ >>> d
+ OrderedDict([(2, 1), (4, 0), (1, 3), (3, 2), (8, 1)])
+ """
+ if key in self:
+ # FIXME: efficiency?
+ del self[key]
+ self._sequence.insert(index, key)
+ dict.__setitem__(self, key, value)
+
+ def reverse(self):
+ """
+ Reverse the order of the OrderedDict.
+
+ >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
+ >>> d.reverse()
+ >>> d
+ OrderedDict([(2, 1), (3, 2), (1, 3)])
+ """
+ self._sequence.reverse()
+
+ def sort(self, *args, **kwargs):
+ """
+ Sort the key order in the OrderedDict.
+
+ This method takes the same arguments as the ``list.sort`` method on
+ your version of Python.
+
+ >>> d = OrderedDict(((4, 1), (2, 2), (3, 3), (1, 4)))
+ >>> d.sort()
+ >>> d
+ OrderedDict([(1, 4), (2, 2), (3, 3), (4, 1)])
+ """
+ self._sequence.sort(*args, **kwargs)
+
+class Keys(object):
+ # FIXME: should this object be a subclass of list?
+ """
+ Custom object for accessing the keys of an OrderedDict.
+
+ Can be called like the normal ``OrderedDict.keys`` method, but also
+ supports indexing and sequence methods.
+ """
+
+ def __init__(self, main):
+ self._main = main
+
+ def __call__(self):
+ """Pretend to be the keys method."""
+ return self._main._keys()
+
+ def __getitem__(self, index):
+ """Fetch the key at position i."""
+ # NOTE: this automatically supports slicing :-)
+ return self._main._sequence[index]
+
+ def __setitem__(self, index, name):
+ """
+ You cannot assign to keys, but you can do slice assignment to re-order
+ them.
+
+ You can only do slice assignment if the new set of keys is a reordering
+ of the original set.
+ """
+ if isinstance(index, types.SliceType):
+ # FIXME: efficiency?
+ # check length is the same
+ indexes = range(len(self._main._sequence))[index]
+ if len(indexes) != len(name):
+ raise ValueError('attempt to assign sequence of size %s '
+ 'to slice of size %s' % (len(name), len(indexes)))
+ # check they are the same keys
+ # FIXME: Use set
+ old_keys = self._main._sequence[index]
+ new_keys = list(name)
+ old_keys.sort()
+ new_keys.sort()
+ if old_keys != new_keys:
+ raise KeyError('Keylist is not the same as current keylist.')
+ orig_vals = [self._main[k] for k in name]
+ del self._main[index]
+ vals = zip(indexes, name, orig_vals)
+ vals.sort()
+ for i, k, v in vals:
+ if self._main.strict and k in self._main:
+ raise ValueError('slice assignment must be from '
+ 'unique keys')
+ self._main.insert(i, k, v)
+ else:
+ raise ValueError('Cannot assign to keys')
+
+ ### following methods pinched from UserList and adapted ###
+ def __repr__(self): return repr(self._main._sequence)
+
+ # FIXME: do we need to check if we are comparing with another ``Keys``
+ # object? (like the __cast method of UserList)
+ def __lt__(self, other): return self._main._sequence < other
+ def __le__(self, other): return self._main._sequence <= other
+ def __eq__(self, other): return self._main._sequence == other
+ def __ne__(self, other): return self._main._sequence != other
+ def __gt__(self, other): return self._main._sequence > other
+ def __ge__(self, other): return self._main._sequence >= other
+ # FIXME: do we need __cmp__ as well as rich comparisons?
+ def __cmp__(self, other): return cmp(self._main._sequence, other)
+
+ def __contains__(self, item): return item in self._main._sequence
+ def __len__(self): return len(self._main._sequence)
+ def __iter__(self): return self._main.iterkeys()
+ def count(self, item): return self._main._sequence.count(item)
+ def index(self, item, *args): return self._main._sequence.index(item, *args)
+ def reverse(self): self._main._sequence.reverse()
+ def sort(self, *args, **kwds): self._main._sequence.sort(*args, **kwds)
+ def __mul__(self, n): return self._main._sequence*n
+ __rmul__ = __mul__
+ def __add__(self, other): return self._main._sequence + other
+ def __radd__(self, other): return other + self._main._sequence
+
+ ## following methods not implemented for keys ##
+ def __delitem__(self, i): raise TypeError('Can\'t delete items from keys')
+ def __iadd__(self, other): raise TypeError('Can\'t add in place to keys')
+ def __imul__(self, n): raise TypeError('Can\'t multiply keys in place')
+ def append(self, item): raise TypeError('Can\'t append items to keys')
+ def insert(self, i, item): raise TypeError('Can\'t insert items into keys')
+ def pop(self, i=-1): raise TypeError('Can\'t pop items from keys')
+ def remove(self, item): raise TypeError('Can\'t remove items from keys')
+ def extend(self, other): raise TypeError('Can\'t extend keys')
+
+class Items(object):
+ """
+ Custom object for accessing the items of an OrderedDict.
+
+ Can be called like the normal ``OrderedDict.items`` method, but also
+ supports indexing and sequence methods.
+ """
+
+ def __init__(self, main):
+ self._main = main
+
+ def __call__(self):
+ """Pretend to be the items method."""
+ return self._main._items()
+
+ def __getitem__(self, index):
+ """Fetch the item at position i."""
+ if isinstance(index, types.SliceType):
+ # fetching a slice returns an OrderedDict
+ return self._main[index].items()
+ key = self._main._sequence[index]
+ return (key, self._main[key])
+
+ def __setitem__(self, index, item):
+ """Set item at position i to item."""
+ if isinstance(index, types.SliceType):
+ # NOTE: item must be an iterable (list of tuples)
+ self._main[index] = OrderedDict(item)
+ else:
+ # FIXME: Does this raise a sensible error?
+ orig = self._main.keys[index]
+ key, value = item
+ if self._main.strict and key in self._main and (key != orig):
+ raise ValueError('slice assignment must be from '
+ 'unique keys')
+ # delete the current one
+ del self._main[self._main._sequence[index]]
+ self._main.insert(index, key, value)
+
+ def __delitem__(self, i):
+ """Delete the item at position i."""
+ key = self._main._sequence[i]
+ if isinstance(i, types.SliceType):
+ for k in key:
+ # FIXME: efficiency?
+ del self._main[k]
+ else:
+ del self._main[key]
+
+ ### following methods pinched from UserList and adapted ###
+ def __repr__(self): return repr(self._main.items())
+
+ # FIXME: do we need to check if we are comparing with another ``Items``
+ # object? (like the __cast method of UserList)
+ def __lt__(self, other): return self._main.items() < other
+ def __le__(self, other): return self._main.items() <= other
+ def __eq__(self, other): return self._main.items() == other
+ def __ne__(self, other): return self._main.items() != other
+ def __gt__(self, other): return self._main.items() > other
+ def __ge__(self, other): return self._main.items() >= other
+ def __cmp__(self, other): return cmp(self._main.items(), other)
+
+ def __contains__(self, item): return item in self._main.items()
+ def __len__(self): return len(self._main._sequence) # easier :-)
+ def __iter__(self): return self._main.iteritems()
+ def count(self, item): return self._main.items().count(item)
+ def index(self, item, *args): return self._main.items().index(item, *args)
+ def reverse(self): self._main.reverse()
+ def sort(self, *args, **kwds): self._main.sort(*args, **kwds)
+ def __mul__(self, n): return self._main.items()*n
+ __rmul__ = __mul__
+ def __add__(self, other): return self._main.items() + other
+ def __radd__(self, other): return other + self._main.items()
+
+ def append(self, item):
+ """Add an item to the end."""
+ # FIXME: this is only append if the key isn't already present
+ key, value = item
+ self._main[key] = value
+
+ def insert(self, i, item):
+ key, value = item
+ self._main.insert(i, key, value)
+
+ def pop(self, i=-1):
+ key = self._main._sequence[i]
+ return (key, self._main.pop(key))
+
+ def remove(self, item):
+ key, value = item
+ try:
+ assert value == self._main[key]
+ except (KeyError, AssertionError):
+ raise ValueError('list.remove(x): x not in list')
+ else:
+ del self._main[key]
+
+ def extend(self, other):
+ # FIXME: is only a true extend if none of the keys are already present
+ for item in other:
+ key, value = item
+ self._main[key] = value
+
+ def __iadd__(self, other):
+ self.extend(other)
+ return self
+
+ ## following methods not implemented for items ##
+
+ def __imul__(self, n): raise TypeError('Can\'t multiply items in place')
+
+class Values(object):
+ """
+ Custom object for accessing the values of an OrderedDict.
+
+ Can be called like the normal ``OrderedDict.values`` method, but also
+ supports indexing and sequence methods.
+ """
+
+ def __init__(self, main):
+ self._main = main
+
+ def __call__(self):
+ """Pretend to be the values method."""
+ return self._main._values()
+
+ def __getitem__(self, index):
+ """Fetch the value at position i."""
+ if isinstance(index, types.SliceType):
+ return [self._main[key] for key in self._main._sequence[index]]
+ else:
+ return self._main[self._main._sequence[index]]
+
+ def __setitem__(self, index, value):
+ """
+ Set the value at position i to value.
+
+ You can only do slice assignment to values if you supply a sequence of
+ equal length to the slice you are replacing.
+ """
+ if isinstance(index, types.SliceType):
+ keys = self._main._sequence[index]
+ if len(keys) != len(value):
+ raise ValueError('attempt to assign sequence of size %s '
+ 'to slice of size %s' % (len(value), len(keys)))
+ # FIXME: efficiency? Would be better to calculate the indexes
+ # directly from the slice object
+ # NOTE: the new keys can collide with existing keys (or even
+ # contain duplicates) - these will overwrite
+ for key, val in zip(keys, value):
+ self._main[key] = val
+ else:
+ self._main[self._main._sequence[index]] = value
+
+ ### following methods pinched from UserList and adapted ###
+ def __repr__(self): return repr(self._main.values())
+
+ # FIXME: do we need to check if we are comparing with another ``Values``
+ # object? (like the __cast method of UserList)
+ def __lt__(self, other): return self._main.values() < other
+ def __le__(self, other): return self._main.values() <= other
+ def __eq__(self, other): return self._main.values() == other
+ def __ne__(self, other): return self._main.values() != other
+ def __gt__(self, other): return self._main.values() > other
+ def __ge__(self, other): return self._main.values() >= other
+ def __cmp__(self, other): return cmp(self._main.values(), other)
+
+ def __contains__(self, item): return item in self._main.values()
+ def __len__(self): return len(self._main._sequence) # easier :-)
+ def __iter__(self): return self._main.itervalues()
+ def count(self, item): return self._main.values().count(item)
+ def index(self, item, *args): return self._main.values().index(item, *args)
+
+ def reverse(self):
+ """Reverse the values"""
+ vals = self._main.values()
+ vals.reverse()
+ # FIXME: efficiency
+ self[:] = vals
+
+ def sort(self, *args, **kwds):
+ """Sort the values."""
+ vals = self._main.values()
+ vals.sort(*args, **kwds)
+ self[:] = vals
+
+ def __mul__(self, n): return self._main.values()*n
+ __rmul__ = __mul__
+ def __add__(self, other): return self._main.values() + other
+ def __radd__(self, other): return other + self._main.values()
+
+ ## following methods not implemented for values ##
+ def __delitem__(self, i): raise TypeError('Can\'t delete items from values')
+ def __iadd__(self, other): raise TypeError('Can\'t add in place to values')
+ def __imul__(self, n): raise TypeError('Can\'t multiply values in place')
+ def append(self, item): raise TypeError('Can\'t append items to values')
+ def insert(self, i, item): raise TypeError('Can\'t insert items into values')
+ def pop(self, i=-1): raise TypeError('Can\'t pop items from values')
+ def remove(self, item): raise TypeError('Can\'t remove items from values')
+ def extend(self, other): raise TypeError('Can\'t extend values')
+
+class SequenceOrderedDict(OrderedDict):
+ """
+ Experimental version of OrderedDict that has a custom object for ``keys``,
+ ``values``, and ``items``.
+
+ These are callable sequence objects that work as methods, or can be
+ manipulated directly as sequences.
+
+ Test for ``keys``, ``items`` and ``values``.
+
+ >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)))
+ >>> d
+ SequenceOrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> d.keys
+ [1, 2, 3]
+ >>> d.keys()
+ [1, 2, 3]
+ >>> d.setkeys((3, 2, 1))
+ >>> d
+ SequenceOrderedDict([(3, 4), (2, 3), (1, 2)])
+ >>> d.setkeys((1, 2, 3))
+ >>> d.keys[0]
+ 1
+ >>> d.keys[:]
+ [1, 2, 3]
+ >>> d.keys[-1]
+ 3
+ >>> d.keys[-2]
+ 2
+ >>> d.keys[0:2] = [2, 1]
+ >>> d
+ SequenceOrderedDict([(2, 3), (1, 2), (3, 4)])
+ >>> d.keys.reverse()
+ >>> d.keys
+ [3, 1, 2]
+ >>> d.keys = [1, 2, 3]
+ >>> d
+ SequenceOrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> d.keys = [3, 1, 2]
+ >>> d
+ SequenceOrderedDict([(3, 4), (1, 2), (2, 3)])
+ >>> a = SequenceOrderedDict()
+ >>> b = SequenceOrderedDict()
+ >>> a.keys == b.keys
+ 1
+ >>> a['a'] = 3
+ >>> a.keys == b.keys
+ 0
+ >>> b['a'] = 3
+ >>> a.keys == b.keys
+ 1
+ >>> b['b'] = 3
+ >>> a.keys == b.keys
+ 0
+ >>> a.keys > b.keys
+ 0
+ >>> a.keys < b.keys
+ 1
+ >>> 'a' in a.keys
+ 1
+ >>> len(b.keys)
+ 2
+ >>> 'c' in d.keys
+ 0
+ >>> 1 in d.keys
+ 1
+ >>> [v for v in d.keys]
+ [3, 1, 2]
+ >>> d.keys.sort()
+ >>> d.keys
+ [1, 2, 3]
+ >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)), strict=True)
+ >>> d.keys[::-1] = [1, 2, 3]
+ >>> d
+ SequenceOrderedDict([(3, 4), (2, 3), (1, 2)])
+ >>> d.keys[:2]
+ [3, 2]
+ >>> d.keys[:2] = [1, 3]
+ Traceback (most recent call last):
+ KeyError: 'Keylist is not the same as current keylist.'
+
+ >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)))
+ >>> d
+ SequenceOrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> d.values
+ [2, 3, 4]
+ >>> d.values()
+ [2, 3, 4]
+ >>> d.setvalues((4, 3, 2))
+ >>> d
+ SequenceOrderedDict([(1, 4), (2, 3), (3, 2)])
+ >>> d.values[::-1]
+ [2, 3, 4]
+ >>> d.values[0]
+ 4
+ >>> d.values[-2]
+ 3
+ >>> del d.values[0]
+ Traceback (most recent call last):
+ TypeError: Can't delete items from values
+ >>> d.values[::2] = [2, 4]
+ >>> d
+ SequenceOrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> 7 in d.values
+ 0
+ >>> len(d.values)
+ 3
+ >>> [val for val in d.values]
+ [2, 3, 4]
+ >>> d.values[-1] = 2
+ >>> d.values.count(2)
+ 2
+ >>> d.values.index(2)
+ 0
+ >>> d.values[-1] = 7
+ >>> d.values
+ [2, 3, 7]
+ >>> d.values.reverse()
+ >>> d.values
+ [7, 3, 2]
+ >>> d.values.sort()
+ >>> d.values
+ [2, 3, 7]
+ >>> d.values.append('anything')
+ Traceback (most recent call last):
+ TypeError: Can't append items to values
+ >>> d.values = (1, 2, 3)
+ >>> d
+ SequenceOrderedDict([(1, 1), (2, 2), (3, 3)])
+
+ >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)))
+ >>> d
+ SequenceOrderedDict([(1, 2), (2, 3), (3, 4)])
+ >>> d.items()
+ [(1, 2), (2, 3), (3, 4)]
+ >>> d.setitems([(3, 4), (2, 3), (1, 2)])
+ >>> d
+ SequenceOrderedDict([(3, 4), (2, 3), (1, 2)])
+ >>> d.items[0]
+ (3, 4)
+ >>> d.items[:-1]
+ [(3, 4), (2, 3)]
+ >>> d.items[1] = (6, 3)
+ >>> d.items
+ [(3, 4), (6, 3), (1, 2)]
+ >>> d.items[1:2] = [(9, 9)]
+ >>> d
+ SequenceOrderedDict([(3, 4), (9, 9), (1, 2)])
+ >>> del d.items[1:2]
+ >>> d
+ SequenceOrderedDict([(3, 4), (1, 2)])
+ >>> (3, 4) in d.items
+ 1
+ >>> (4, 3) in d.items
+ 0
+ >>> len(d.items)
+ 2
+ >>> [v for v in d.items]
+ [(3, 4), (1, 2)]
+ >>> d.items.count((3, 4))
+ 1
+ >>> d.items.index((1, 2))
+ 1
+ >>> d.items.index((2, 1))
+ Traceback (most recent call last):
+ ValueError: list.index(x): x not in list
+ >>> d.items.reverse()
+ >>> d.items
+ [(1, 2), (3, 4)]
+ >>> d.items.reverse()
+ >>> d.items.sort()
+ >>> d.items
+ [(1, 2), (3, 4)]
+ >>> d.items.append((5, 6))
+ >>> d.items
+ [(1, 2), (3, 4), (5, 6)]
+ >>> d.items.insert(0, (0, 0))
+ >>> d.items
+ [(0, 0), (1, 2), (3, 4), (5, 6)]
+ >>> d.items.insert(-1, (7, 8))
+ >>> d.items
+ [(0, 0), (1, 2), (3, 4), (7, 8), (5, 6)]
+ >>> d.items.pop()
+ (5, 6)
+ >>> d.items
+ [(0, 0), (1, 2), (3, 4), (7, 8)]
+ >>> d.items.remove((1, 2))
+ >>> d.items
+ [(0, 0), (3, 4), (7, 8)]
+ >>> d.items.extend([(1, 2), (5, 6)])
+ >>> d.items
+ [(0, 0), (3, 4), (7, 8), (1, 2), (5, 6)]
+ """
+
+ def __init__(self, init_val=(), strict=True):
+ OrderedDict.__init__(self, init_val, strict=strict)
+ self._keys = self.keys
+ self._values = self.values
+ self._items = self.items
+ self.keys = Keys(self)
+ self.values = Values(self)
+ self.items = Items(self)
+ self._att_dict = {
+ 'keys': self.setkeys,
+ 'items': self.setitems,
+ 'values': self.setvalues,
+ }
+
+ def __setattr__(self, name, value):
+ """Protect keys, items, and values."""
+ if '_att_dict' not in self.__dict__:
+ object.__setattr__(self, name, value)
+ else:
+ try:
+ fun = self._att_dict[name]
+ except KeyError:
+ OrderedDict.__setattr__(self, name, value)
+ else:
+ fun(value)
+
+if __name__ == '__main__':
+ if INTP_VER < (2, 3):
+ raise RuntimeError("Tests require Python v.2.3 or later")
+ # turn off warnings for tests
+ warnings.filterwarnings('ignore')
+ # run the code tests in doctest format
+ import doctest
+ m = sys.modules.get('__main__')
+ globs = m.__dict__.copy()
+ globs.update({
+ 'INTP_VER': INTP_VER,
+ })
+ doctest.testmod(m, globs=globs)
+
diff --git a/git/refs/__init__.py b/git/refs/__init__.py
new file mode 100644
index 00000000..fc8ce644
--- /dev/null
+++ b/git/refs/__init__.py
@@ -0,0 +1,21 @@
+
+# import all modules in order, fix the names they require
+from symbolic import *
+from reference import *
+from head import *
+from tag import *
+from remote import *
+
+# name fixes
+import head
+head.RemoteReference = RemoteReference
+del(head)
+
+
+import symbolic
+for item in (HEAD, Head, RemoteReference, TagReference, Reference, SymbolicReference):
+ setattr(symbolic, item.__name__, item)
+del(symbolic)
+
+
+from log import *
diff --git a/git/refs/head.py b/git/refs/head.py
new file mode 100644
index 00000000..d8729434
--- /dev/null
+++ b/git/refs/head.py
@@ -0,0 +1,246 @@
+from symbolic import SymbolicReference
+from reference import Reference
+
+from git.config import SectionConstraint
+
+from git.util import join_path
+
+from git.exc import GitCommandError
+
+__all__ = ["HEAD", "Head"]
+
+
+
+class HEAD(SymbolicReference):
+ """Special case of a Symbolic Reference as it represents the repository's
+ HEAD reference."""
+ _HEAD_NAME = 'HEAD'
+ _ORIG_HEAD_NAME = 'ORIG_HEAD'
+ __slots__ = tuple()
+
+ def __init__(self, repo, path=_HEAD_NAME):
+ if path != self._HEAD_NAME:
+ raise ValueError("HEAD instance must point to %r, got %r" % (self._HEAD_NAME, path))
+ super(HEAD, self).__init__(repo, path)
+
+ def orig_head(self):
+ """
+ :return: SymbolicReference pointing at the ORIG_HEAD, which is maintained
+ to contain the previous value of HEAD"""
+ return SymbolicReference(self.repo, self._ORIG_HEAD_NAME)
+
+ def reset(self, commit='HEAD', index=True, working_tree = False,
+ paths=None, **kwargs):
+ """Reset our HEAD to the given commit optionally synchronizing
+ the index and working tree. The reference we refer to will be set to
+ commit as well.
+
+ :param commit:
+ Commit object, Reference Object or string identifying a revision we
+ should reset HEAD to.
+
+ :param index:
+ If True, the index will be set to match the given commit. Otherwise
+ it will not be touched.
+
+ :param working_tree:
+ If True, the working tree will be forcefully adjusted to match the given
+ commit, possibly overwriting uncommitted changes without warning.
+ If working_tree is True, index must be True as well
+
+ :param paths:
+ Single path or list of paths relative to the git root directory
+ that are to be reset. This allows for partially resetting individual files.
+
+ :param kwargs:
+ Additional arguments passed to git-reset.
+
+ :return: self"""
+ mode = "--soft"
+ add_arg = None
+ if index:
+ mode = "--mixed"
+
+ # it appears some git versions declare mixed and paths deprecated
+ # see http://github.com/Byron/GitPython/issues#issue/2
+ if paths:
+ mode = None
+ # END special case
+ # END handle index
+
+ if working_tree:
+ mode = "--hard"
+ if not index:
+ raise ValueError( "Cannot reset the working tree if the index is not reset as well")
+
+ # END working tree handling
+
+ if paths:
+ add_arg = "--"
+ # END nicely separate paths from rest
+
+ try:
+ self.repo.git.reset(mode, commit, add_arg, paths, **kwargs)
+ except GitCommandError, e:
+ # git nowadays may use 1 as status to indicate there are still unstaged
+ # modifications after the reset
+ if e.status != 1:
+ raise
+ # END handle exception
+
+ return self
+
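+ # A usage sketch (illustrative only): assuming an existing `Repo` instance
+ # named `repo`, the index/working_tree flags map onto the git-reset modes
+ # chosen above:
+ #
+ #   repo.head.reset('HEAD~1', index=False)                    # --soft
+ #   repo.head.reset('HEAD~1')                                 # --mixed (default)
+ #   repo.head.reset('HEAD~1', index=True, working_tree=True)  # --hard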
+
+class Head(Reference):
+ """A Head is a named reference to a Commit. Every Head instance contains a name
+ and a Commit object.
+
+ Examples::
+
+ >>> repo = Repo("/path/to/repo")
+ >>> head = repo.heads[0]
+
+ >>> head.name
+ 'master'
+
+ >>> head.commit
+ <git.Commit "1c09f116cbc2cb4100fb6935bb162daa4723f455">
+
+ >>> head.commit.hexsha
+ '1c09f116cbc2cb4100fb6935bb162daa4723f455'"""
+ _common_path_default = "refs/heads"
+ k_config_remote = "remote"
+ k_config_remote_ref = "merge" # branch to merge from remote
+
+ @classmethod
+ def delete(cls, repo, *heads, **kwargs):
+ """Delete the given heads
+ :param force:
+ If True, the heads will be deleted even if they are not yet merged into
+ the main development stream.
+ Default False"""
+ force = kwargs.get("force", False)
+ flag = "-d"
+ if force:
+ flag = "-D"
+ repo.git.branch(flag, *heads)
+
+ def set_tracking_branch(self, remote_reference):
+ """
+ Configure this branch to track the given remote reference. This will alter
+ this branch's configuration accordingly.
+
+ :param remote_reference: The remote reference to track or None to untrack
+ any references
+ :return: self"""
+ if remote_reference is not None and not isinstance(remote_reference, RemoteReference):
+ raise ValueError("Incorrect parameter type: %r" % remote_reference)
+ # END handle type
+
+ writer = self.config_writer()
+ if remote_reference is None:
+ writer.remove_option(self.k_config_remote)
+ writer.remove_option(self.k_config_remote_ref)
+ if len(writer.options()) == 0:
+ writer.remove_section()
+ # END handle remove section
+ else:
+ writer.set_value(self.k_config_remote, remote_reference.remote_name)
+ writer.set_value(self.k_config_remote_ref, Head.to_full_path(remote_reference.remote_head))
+ # END handle ref value
+
+ return self
+
+
+ def tracking_branch(self):
+ """
+ :return: The remote_reference we are tracking, or None if we are
+ not a tracking branch"""
+ reader = self.config_reader()
+ if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
+ ref = Head(self.repo, Head.to_full_path(reader.get_value(self.k_config_remote_ref)))
+ remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
+ return RemoteReference(self.repo, remote_refpath)
+ # END handle have tracking branch
+
+ # we are not a tracking branch
+ return None
+
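+ # A brief sketch (illustrative; assumes a repository with a remote named
+ # 'origin' carrying a 'master' branch):
+ #
+ #   head = repo.heads.master
+ #   head.set_tracking_branch(repo.remotes.origin.refs.master)
+ #   assert head.tracking_branch().remote_name == 'origin'
+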
+ def rename(self, new_path, force=False):
+ """Rename self to a new path
+
+ :param new_path:
+ Either a simple name or a path, i.e. new_name or features/new_name.
+ The prefix refs/heads is implied
+
+ :param force:
+ If True, the rename will succeed even if a head with the target name
+ already exists.
+
+ :return: self
+ :note: respects the ref log as git commands are used"""
+ flag = "-m"
+ if force:
+ flag = "-M"
+
+ self.repo.git.branch(flag, self, new_path)
+ self.path = "%s/%s" % (self._common_path_default, new_path)
+ return self
+
+ def checkout(self, force=False, **kwargs):
+ """Checkout this head by setting the HEAD to this reference, by updating the index
+ to reflect the tree we point to and by updating the working tree to reflect
+ the latest index.
+
+ The command will fail if changed working tree files would be overwritten.
+
+ :param force:
+ If True, changes to the index and the working tree will be discarded.
+ If False, GitCommandError will be raised in that situation.
+
+ :param kwargs:
+ Additional keyword arguments to be passed to git checkout, i.e.
+ b='new_branch' to create a new branch at the given spot.
+
+ :return:
+ The active branch after the checkout operation, usually self unless
+ a new branch has been created.
+
+ :note:
+ By default it is only allowed to checkout heads - everything else
+ will leave the HEAD detached which is allowed and possible, but remains
+ a special state that some tools might not be able to handle."""
+ kwargs['f'] = force
+ if not kwargs['f']:
+ kwargs.pop('f')
+
+ self.repo.git.checkout(self, **kwargs)
+ return self.repo.active_branch
+
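+ # Sketch (illustrative): check out an existing head, or create a new branch
+ # at the same spot via the `b` keyword documented above ('feature' is a
+ # hypothetical branch name):
+ #
+ #   repo.heads.master.checkout()
+ #   new_head = repo.heads.master.checkout(b='feature')
+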
+ #{ Configuration
+
+ def _config_parser(self, read_only):
+ if read_only:
+ parser = self.repo.config_reader()
+ else:
+ parser = self.repo.config_writer()
+ # END handle parser instance
+
+ return SectionConstraint(parser, 'branch "%s"' % self.name)
+
+ def config_reader(self):
+ """
+ :return: A configuration parser instance constrained to only read
+ this instance's values"""
+ return self._config_parser(read_only=True)
+
+ def config_writer(self):
+ """
+ :return: A configuration writer instance with read- and write-access
+ to options of this head"""
+ return self._config_parser(read_only=False)
+
+ #} END configuration
+
+
diff --git a/git/refs/log.py b/git/refs/log.py
new file mode 100644
index 00000000..f49c07fd
--- /dev/null
+++ b/git/refs/log.py
@@ -0,0 +1,282 @@
+from git.util import (
+ join_path,
+ Actor,
+ LockedFD,
+ LockFile,
+ assure_directory_exists,
+ to_native_path,
+ )
+
+from gitdb.util import (
+ bin_to_hex,
+ join,
+ file_contents_ro_filepath,
+ )
+
+from git.objects.util import (
+ parse_date,
+ Serializable,
+ utctz_to_altz,
+ altz_to_utctz_str,
+ )
+
+import time
+import os
+import re
+
+__all__ = ["RefLog", "RefLogEntry"]
+
+
+class RefLogEntry(tuple):
+ """Named tuple allowing easy access to the revlog data fields"""
+ _fmt = "%s %s %s <%s> %i %s\t%s\n"
+ _re_hexsha_only = re.compile('^[0-9A-Fa-f]{40}$')
+ __slots__ = tuple()
+
+ def __repr__(self):
+ """Representation of ourselves in git reflog format"""
+ act = self.actor
+ time = self.time
+ return self._fmt % (self.oldhexsha, self.newhexsha, act.name, act.email,
+ time[0], altz_to_utctz_str(time[1]), self.message)
+
+ @property
+ def oldhexsha(self):
+ """The hexsha to the commit the ref pointed to before the change"""
+ return self[0]
+
+ @property
+ def newhexsha(self):
+ """The hexsha to the commit the ref now points to, after the change"""
+ return self[1]
+
+ @property
+ def actor(self):
+ """Actor instance, providing access"""
+ return self[2]
+
+ @property
+ def time(self):
+ """time as tuple:
+
+ * [0] = int(time)
+ * [1] = int(timezone_offset) in time.altzone format """
+ return self[3]
+
+ @property
+ def message(self):
+ """Message describing the operation that acted on the reference"""
+ return self[4]
+
+ @classmethod
+ def new(self, oldhexsha, newhexsha, actor, time, tz_offset, message):
+ """:return: New instance of a RefLogEntry"""
+ if not isinstance(actor, Actor):
+ raise ValueError("Need actor instance, got %s" % actor)
+ # END check types
+ return RefLogEntry((oldhexsha, newhexsha, actor, (time, tz_offset), message))
+
+ @classmethod
+ def from_line(cls, line):
+ """:return: New RefLogEntry instance from the given revlog line.
+ :param line: line without trailing newline
+ :raise ValueError: If line could not be parsed"""
+ try:
+ info, msg = line.split('\t', 1)
+ except ValueError:
+ raise ValueError("line is missing tab separator")
+ #END handle first split
+ oldhexsha = info[:40]
+ newhexsha = info[41:81]
+ for hexsha in (oldhexsha, newhexsha):
+ if not cls._re_hexsha_only.match(hexsha):
+ raise ValueError("Invalid hexsha: %s" % hexsha)
+ # END if hexsha re doesn't match
+ #END for each hexsha
+
+ email_end = info.find('>', 82)
+ if email_end == -1:
+ raise ValueError("Missing token: >")
+ #END handle missing end brace
+
+ actor = Actor._from_string(info[82:email_end+1])
+ time, tz_offset = parse_date(info[email_end+2:])
+
+ return RefLogEntry((oldhexsha, newhexsha, actor, (time, tz_offset), msg))
+
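+ # For illustration, from_line parses the fixed-width format written by _fmt
+ # above, i.e. lines of the shape (shas shortened here for readability):
+ #
+ #   <40-hex oldsha> <40-hex newsha> A Committer <email> 1295669988 +0100\tcommit: msg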
+
+class RefLog(list, Serializable):
+ """A reflog contains reflog entries, each of which defines a certain state
+ of the head in question. Custom query methods allow to retrieve log entries
+ by date or by other criteria.
+
+ Reflog entries are ordered: the first added entry is first in the list, and the
+ last entry, i.e. the most recent change of the head or reference, is last in the list."""
+
+ __slots__ = ('_path', )
+
+ def __new__(cls, filepath=None):
+ inst = super(RefLog, cls).__new__(cls)
+ return inst
+
+ def __init__(self, filepath=None):
+ """Initialize this instance with an optional filepath, from which we will
+ initialize our data. The path is also used to write changes back using
+ the write() method"""
+ self._path = filepath
+ if filepath is not None:
+ self._read_from_file()
+ # END handle filepath
+
+ def _read_from_file(self):
+ fmap = file_contents_ro_filepath(self._path, stream=False, allow_mmap=True)
+ try:
+ self._deserialize(fmap)
+ finally:
+ fmap.close()
+ #END handle closing of handle
+
+ #{ Interface
+
+ @classmethod
+ def from_file(cls, filepath):
+ """
+ :return: a new RefLog instance containing all entries from the reflog
+ at the given filepath
+ :param filepath: path to reflog
+ :raise ValueError: If the file could not be read or was corrupted in some way"""
+ return cls(filepath)
+
+ @classmethod
+ def path(cls, ref):
+ """
+ :return: string with the absolute path at which the reflog of the given ref
+ instance would be found. The path is not guaranteed to point to a valid
+ file though.
+ :param ref: SymbolicReference instance"""
+ return join(ref.repo.git_dir, "logs", to_native_path(ref.path))
+
+ @classmethod
+ def iter_entries(cls, stream):
+ """
+ :return: Iterator yielding RefLogEntry instances, one for each line read
+ from the given stream.
+ :param stream: file-like object containing the reflog in its native format
+ or basestring instance pointing to a file to read"""
+ new_entry = RefLogEntry.from_line
+ if isinstance(stream, basestring):
+ stream = file_contents_ro_filepath(stream)
+ #END handle stream type
+ while True:
+ line = stream.readline()
+ if not line:
+ return
+ yield new_entry(line.strip())
+ #END endless loop
+
+ @classmethod
+ def entry_at(cls, filepath, index):
+ """:return: RefLogEntry at the given index
+ :param filepath: full path to the index file from which to read the entry
+ :param index: python list compatible index, i.e. it may be negative to
+ specify an entry counted from the end of the list
+
+ :raise IndexError: If the entry didn't exist
+
+ .. note:: This method is faster as it only parses the entry at index, skipping
+ all other lines. Nonetheless, the whole file has to be read if
+ the index is negative
+ """
+ fp = open(filepath, 'rb')
+ if index < 0:
+ return RefLogEntry.from_line(fp.readlines()[index].strip())
+ else:
+ # read until index is reached
+ for i in xrange(index+1):
+ line = fp.readline()
+ if not line:
+ break
+ #END abort on eof
+ #END handle runup
+
+ if i != index or not line:
+ raise IndexError
+ #END handle exception
+
+ return RefLogEntry.from_line(line.strip())
+ #END handle index
+
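+ # e.g. RefLog.entry_at(RefLog.path(ref), -1) parses only the most recent
+ # entry, though a negative index still requires reading the whole file.
+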
+ def to_file(self, filepath):
+ """Write the contents of the reflog instance to a file at the given filepath.
+ :param filepath: path to file, parent directories are assumed to exist"""
+ lfd = LockedFD(filepath)
+ assure_directory_exists(filepath, is_file=True)
+
+ fp = lfd.open(write=True, stream=True)
+ try:
+ self._serialize(fp)
+ lfd.commit()
+ except:
+ # on failure it rolls back automatically, but we make it clear
+ lfd.rollback()
+ raise
+ #END handle change
+
+ @classmethod
+ def append_entry(cls, config_reader, filepath, oldbinsha, newbinsha, message):
+ """Append a new log entry to the revlog at filepath.
+
+ :param config_reader: configuration reader of the repository - used to obtain
+ user information. May be None
+ :param filepath: full path to the log file
+ :param oldbinsha: binary sha of the previous commit
+ :param newbinsha: binary sha of the current commit
+ :param message: message describing the change to the reference
+ :return: RefLogEntry object which was appended to the log
+ :note: As we are append-only, concurrent access is not a problem as we
+ do not interfere with readers."""
+ if len(oldbinsha) != 20 or len(newbinsha) != 20:
+ raise ValueError("Shas need to be given in binary format")
+ #END handle sha type
+ assure_directory_exists(filepath, is_file=True)
+ entry = RefLogEntry((bin_to_hex(oldbinsha), bin_to_hex(newbinsha), Actor.committer(config_reader), (int(time.time()), time.altzone), message))
+
+ lf = LockFile(filepath)
+ lf._obtain_lock_or_raise()
+
+ fd = open(filepath, 'a')
+ try:
+ fd.write(repr(entry))
+ finally:
+ fd.close()
+ lf._release_lock()
+ #END handle write operation
+
+ return entry
+
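+ # A usage sketch (names illustrative): given 20-byte binary shas `old` and
+ # `new` for some reference `ref` of repository `repo`:
+ #
+ #   RefLog.append_entry(repo.config_reader(), RefLog.path(ref),
+ #                       old, new, "commit: example message")
+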
+ def write(self):
+ """Write this instance's data to the file we are originating from
+ :return: self"""
+ if self._path is None:
+ raise ValueError("Instance was not initialized with a path, use to_file(...) instead")
+ #END assert path
+ self.to_file(self._path)
+ return self
+
+ #} END interface
+
+ #{ Serializable Interface
+ def _serialize(self, stream):
+ write = stream.write
+
+ # write all entries
+ for e in self:
+ write(repr(e))
+ #END for each entry
+
+ def _deserialize(self, stream):
+ self.extend(self.iter_entries(stream))
+ #} END serializable interface
diff --git a/git/refs/reference.py b/git/refs/reference.py
new file mode 100644
index 00000000..1a745ee9
--- /dev/null
+++ b/git/refs/reference.py
@@ -0,0 +1,84 @@
+from symbolic import SymbolicReference
+import os
+from git.objects import Object
+from git.util import (
+ LazyMixin,
+ Iterable,
+ )
+
+from gitdb.util import (
+ isfile,
+ hex_to_bin
+ )
+
+__all__ = ["Reference"]
+
+
+class Reference(SymbolicReference, LazyMixin, Iterable):
+ """Represents a named reference to any object. Subclasses may apply restrictions though,
+ i.e. Heads can only point to commits."""
+ __slots__ = tuple()
+ _points_to_commits_only = False
+ _resolve_ref_on_create = True
+ _common_path_default = "refs"
+
+ def __init__(self, repo, path):
+ """Initialize this instance
+ :param repo: Our parent repository
+
+ :param path:
+ Path relative to the .git/ directory pointing to the ref in question, i.e.
+ refs/heads/master"""
+ if not path.startswith(self._common_path_default+'/'):
+ raise ValueError("Cannot instantiate %r from path %s" % ( self.__class__.__name__, path ))
+ super(Reference, self).__init__(repo, path)
+
+
+ def __str__(self):
+ return self.name
+
+ def set_object(self, object, logmsg = None):
+ """Special version which checks if the head-log needs an update as well"""
+ oldbinsha = None
+ if logmsg is not None:
+ head = self.repo.head
+ if not head.is_detached and head.ref == self:
+ oldbinsha = self.commit.binsha
+ #END handle commit retrieval
+ #END handle message is set
+
+ super(Reference, self).set_object(object, logmsg)
+
+ if oldbinsha is not None:
+ # /* from refs.c in git-source
+ # * Special hack: If a branch is updated directly and HEAD
+ # * points to it (may happen on the remote side of a push
+ # * for example) then logically the HEAD reflog should be
+ # * updated too.
+ # * A generic solution implies reverse symref information,
+ # * but finding all symrefs pointing to the given branch
+ # * would be rather costly for this rare event (the direct
+ # * update of a branch) to be worth it. So let's cheat and
+ # * check with HEAD only which should cover 99% of all usage
+ # * scenarios (even 100% of the default ones).
+ # */
+ self.repo.head.log_append(oldbinsha, logmsg)
+ #END check if the head
+
+ # NOTE: No need to overwrite the properties, as they only work without a log message anyway
+
+ @property
+ def name(self):
+ """:return: (shortest) Name of this reference - it may contain path components"""
+ # first two path tokens can be removed as they are
+ # refs/heads or refs/tags or refs/remotes
+ tokens = self.path.split('/')
+ if len(tokens) < 3:
+ return self.path # could be refs/HEAD
+ return '/'.join(tokens[2:])
+
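+ # e.g. a path of "refs/heads/feature/x" yields the name "feature/x", while
+ # a short path such as "refs/HEAD" is returned unchanged.
+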
+ @classmethod
+ def iter_items(cls, repo, common_path = None):
+ """Equivalent to SymbolicReference.iter_items, but will return non-detached
+ references as well."""
+ return cls._iter_items(repo, common_path)
diff --git a/git/refs/remote.py b/git/refs/remote.py
new file mode 100644
index 00000000..b7b07d4b
--- /dev/null
+++ b/git/refs/remote.py
@@ -0,0 +1,63 @@
+from head import Head
+from git.util import join_path
+from gitdb.util import join
+
+import os
+
+
+__all__ = ["RemoteReference"]
+
+
+class RemoteReference(Head):
+ """Represents a reference pointing to a remote head."""
+ _common_path_default = "refs/remotes"
+
+
+ @classmethod
+ def iter_items(cls, repo, common_path = None, remote=None):
+ """Iterate remote references, and if given, constrain them to the given remote"""
+ common_path = common_path or cls._common_path_default
+ if remote is not None:
+ common_path = join_path(common_path, str(remote))
+ # END handle remote constraint
+ return super(RemoteReference, cls).iter_items(repo, common_path)
+
+ @property
+ def remote_name(self):
+ """
+ :return:
+ Name of the remote we are a reference of, such as 'origin' for a reference
+ named 'origin/master'"""
+ tokens = self.path.split('/')
+ # /refs/remotes/<remote name>/<branch_name>
+ return tokens[2]
+
+ @property
+ def remote_head(self):
+ """:return: Name of the remote head itself, i.e. master.
+ :note: The returned name is usually not qualified enough to uniquely identify
+ a branch"""
+ tokens = self.path.split('/')
+ return '/'.join(tokens[3:])
+
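+ # For example, a RemoteReference at path "refs/remotes/origin/feature/x"
+ # has remote_name 'origin' and remote_head 'feature/x'.
+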
+ @classmethod
+ def delete(cls, repo, *refs, **kwargs):
+ """Delete the given remote references.
+ :note:
+ kwargs are given for compatibility with the base class method as we
+ should not narrow the signature."""
+ repo.git.branch("-d", "-r", *refs)
+ # the official deletion method will ignore remote symbolic refs - these
+ # are generally ignored in the refs/ folder. We don't though,
+ # and delete any remainders manually
+ for ref in refs:
+ try:
+ os.remove(join(repo.git_dir, ref.path))
+ except OSError:
+ pass
+ # END for each ref
+
+ @classmethod
+ def create(cls, *args, **kwargs):
+ """Used to disable this method"""
+ raise TypeError("Cannot explicitly create remote references")
diff --git a/git/refs/symbolic.py b/git/refs/symbolic.py
new file mode 100644
index 00000000..9937cf0c
--- /dev/null
+++ b/git/refs/symbolic.py
@@ -0,0 +1,618 @@
+import os
+from git.objects import Object, Commit
+from git.util import (
+ join_path,
+ join_path_native,
+ to_native_path_linux,
+ assure_directory_exists
+ )
+
+from gitdb.exc import BadObject
+from gitdb.util import (
+ join,
+ dirname,
+ isdir,
+ exists,
+ isfile,
+ rename,
+ hex_to_bin,
+ LockedFD
+ )
+
+from log import RefLog
+
+__all__ = ["SymbolicReference"]
+
+class SymbolicReference(object):
+ """Represents a special case of a reference such that this reference is symbolic.
+ It does not point to a specific commit, but to another Head, which itself
+ specifies a commit.
+
+ A typical example for a symbolic reference is HEAD."""
+ __slots__ = ("repo", "path")
+ _resolve_ref_on_create = False
+ _points_to_commits_only = True
+ _common_path_default = ""
+ _id_attribute_ = "name"
+
+ def __init__(self, repo, path):
+ self.repo = repo
+ self.path = path
+
+ def __str__(self):
+ return self.path
+
+ def __repr__(self):
+ return '<git.%s "%s">' % (self.__class__.__name__, self.path)
+
+ def __eq__(self, other):
+ return self.path == other.path
+
+ def __ne__(self, other):
+ return not ( self == other )
+
+ def __hash__(self):
+ return hash(self.path)
+
+ @property
+ def name(self):
+ """
+ :return:
+ In case of symbolic references, the shortest assumable name
+ is the path itself."""
+ return self.path
+
+ @property
+ def abspath(self):
+ return join_path_native(self.repo.git_dir, self.path)
+
+ @classmethod
+ def _get_packed_refs_path(cls, repo):
+ return join(repo.git_dir, 'packed-refs')
+
+ @classmethod
+ def _iter_packed_refs(cls, repo):
+ """Returns an iterator yielding pairs of sha1/path pairs for the corresponding refs.
+ :note: The packed refs file will be kept open as long as we iterate"""
+ try:
+ fp = open(cls._get_packed_refs_path(repo), 'r')
+ for line in fp:
+ line = line.strip()
+ if not line:
+ continue
+ if line.startswith('#'):
+ if line.startswith('# pack-refs with:') and not line.endswith('peeled'):
+ raise TypeError("PackingType of packed-Refs not understood: %r" % line)
+ # END abort if we do not understand the packing scheme
+ continue
+ # END parse comment
+
+ # skip dereferenced tag object entries - previous line was actual
+ # tag reference for it
+ if line[0] == '^':
+ continue
+
+ yield tuple(line.split(' ', 1))
+ # END for each line
+ except (OSError,IOError):
+ raise StopIteration
+ # END no packed-refs file handling
+ # NOTE: Had a try-finally block around here to close the fp,
+ # but some python versions wouldn't allow yields within that.
+ # I believe files close themselves on destruction, so it is
+ # alright.
+
+ @classmethod
+ def dereference_recursive(cls, repo, ref_path):
+ """
+ :return: hexsha stored in the reference at the given ref_path, recursively dereferencing all
+ intermediate references as required
+ :param repo: the repository containing the reference at ref_path"""
+ while True:
+ hexsha, ref_path = cls._get_ref_info(repo, ref_path)
+ if hexsha is not None:
+ return hexsha
+ # END recursive dereferencing
+
+ @classmethod
+ def _get_ref_info(cls, repo, ref_path):
+ """Return: (sha, target_ref_path) if available, the sha the file at
+ rela_path points to, or None. target_ref_path is the reference we
+ point to, or None"""
+ tokens = None
+ try:
+ fp = open(join(repo.git_dir, ref_path), 'r')
+ value = fp.read().rstrip()
+ fp.close()
+ tokens = value.split(" ")
+ except (OSError,IOError):
+ # Probably we are just packed, find our entry in the packed refs file
+ # NOTE: We are not a symbolic ref if we are in a packed file, as these
+ # are excluded explicitly
+ for sha, path in cls._iter_packed_refs(repo):
+ if path != ref_path: continue
+ tokens = (sha, path)
+ break
+ # END for each packed ref
+ # END handle packed refs
+ if tokens is None:
+ raise ValueError("Reference at %r does not exist" % ref_path)
+
+ # is it a reference?
+ if tokens[0] == 'ref:':
+ return (None, tokens[1])
+
+ # it's a commit
+ if repo.re_hexsha_only.match(tokens[0]):
+ return (tokens[0], None)
+
+ raise ValueError("Failed to parse reference information from %r" % ref_path)
+
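+ # For illustration: a symbolic ref file contains a single line such as
+ # "ref: refs/heads/master", yielding (None, 'refs/heads/master'), while a
+ # detached ref file contains a plain 40-character hexsha, yielding (sha, None).
+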
+ def _get_object(self):
+ """
+ :return:
+ The object our ref currently refers to. Refs can be cached, they will
+ always point to the actual object as it gets re-created on each query"""
+ # have to be dynamic here as we may be a tag which can point to anything
+ # Our path will be resolved to the hexsha which will be used accordingly
+ return Object.new_from_sha(self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path)))
+
+ def _get_commit(self):
+ """
+ :return:
+ Commit object we point to, works for detached and non-detached
+ SymbolicReferences. The symbolic reference will be dereferenced recursively."""
+ obj = self._get_object()
+ if obj.type == 'tag':
+ obj = obj.object
+ #END dereference tag
+
+ if obj.type != Commit.type:
+ raise TypeError("Symbolic Reference pointed to object %r, commit was required" % obj)
+ #END handle type
+ return obj
+
+ def set_commit(self, commit, logmsg = None):
+ """As set_object, but restricts the type of object to be a Commit
+
+ :raise ValueError: If commit is not a Commit object or doesn't point to
+ a commit
+ :return: self"""
+ # check the type - assume the best if it is a base-string
+ invalid_type = False
+ if isinstance(commit, Object):
+ invalid_type = commit.type != Commit.type
+ elif isinstance(commit, SymbolicReference):
+ invalid_type = commit.object.type != Commit.type
+ else:
+ try:
+ invalid_type = self.repo.rev_parse(commit).type != Commit.type
+ except BadObject:
+ raise ValueError("Invalid object: %s" % commit)
+ #END handle exception
+ # END verify type
+
+ if invalid_type:
+ raise ValueError("Need commit, got %r" % commit)
+ #END handle raise
+
+ # we leave strings to the rev-parse method below
+ self.set_object(commit, logmsg)
+
+ return self
+
+
+ def set_object(self, object, logmsg = None):
+ """Set the object we point to, possibly dereference our symbolic reference first.
+ If the reference does not exist, it will be created
+
+ :param object: a refspec, a SymbolicReference or an Object instance. SymbolicReferences
+ will be dereferenced beforehand to obtain the object they point to
+ :param logmsg: If not None, the message will be used in the reflog entry to be
+ written. Otherwise the reflog is not altered
+ :note: plain SymbolicReferences may not actually point to objects by convention
+ :return: self"""
+ if isinstance(object, SymbolicReference):
+ object = object.object
+ #END resolve references
+
+ is_detached = True
+ try:
+ is_detached = self.is_detached
+ except ValueError:
+ pass
+ # END handle non-existing ones
+
+ if is_detached:
+ return self.set_reference(object, logmsg)
+
+ # set the commit on our reference
+ return self._get_reference().set_object(object, logmsg)
+
+ commit = property(_get_commit, set_commit, doc="Query or set commits directly")
+ object = property(_get_object, set_object, doc="Return the object our ref currently refers to")
+
+ def _get_reference(self):
+ """:return: Reference Object we point to
+ :raise TypeError: If this symbolic reference is detached, hence it doesn't point
+ to a reference, but to a commit"""
+ sha, target_ref_path = self._get_ref_info(self.repo, self.path)
+ if target_ref_path is None:
+ raise TypeError("%s is a detached symbolic reference as it points to %r" % (self, sha))
+ return self.from_path(self.repo, target_ref_path)
+
+ def set_reference(self, ref, logmsg = None):
+ """Set ourselves to the given ref. It will stay a symbol if the ref is a Reference.
+ Otherwise an Object, given as Object instance or refspec, is assumed and if valid,
+ will be set which effectively detaches the refererence if it was a purely
+ symbolic one.
+
+ :param ref: SymbolicReference instance, Object instance or refspec string
+ Only if the ref is a SymbolicReference instance will we point to it. Everything
+ else is dereferenced to obtain the actual object.
+ :param logmsg: If set to a string, the message will be used in the reflog.
+ Otherwise, a reflog entry is not written for the changed reference.
+ The previous commit of the entry will be the commit we point to now.
+
+ See also: log_append()
+
+ :return: self
+ :note: This symbolic reference will not be dereferenced. For that, see
+ ``set_object(...)``"""
+ write_value = None
+ obj = None
+ if isinstance(ref, SymbolicReference):
+ write_value = "ref: %s" % ref.path
+ elif isinstance(ref, Object):
+ obj = ref
+ write_value = ref.hexsha
+ elif isinstance(ref, basestring):
+ try:
+ obj = self.repo.rev_parse(ref+"^{}") # optionally deref tags
+ write_value = obj.hexsha
+ except BadObject:
+ raise ValueError("Could not extract object from %s" % ref)
+ # END end try string
+ else:
+ raise ValueError("Unrecognized Value: %r" % ref)
+ # END try commit attribute
+
+ # typecheck
+ if obj is not None and self._points_to_commits_only and obj.type != Commit.type:
+ raise TypeError("Require commit, got %r" % obj)
+ #END verify type
+
+ oldbinsha = None
+ if logmsg is not None:
+ try:
+ oldbinsha = self.commit.binsha
+ except ValueError:
+ oldbinsha = Commit.NULL_BIN_SHA
+ #END handle non-existing
+ #END retrieve old hexsha
+
+ fpath = self.abspath
+ assure_directory_exists(fpath, is_file=True)
+
+ lfd = LockedFD(fpath)
+ fd = lfd.open(write=True, stream=True)
+ fd.write(write_value)
+ lfd.commit()
+
+ # Adjust the reflog
+ if logmsg is not None:
+ self.log_append(oldbinsha, logmsg)
+ #END handle reflog
+
+ return self
+
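+ # A sketch of the two modes (illustrative): point HEAD symbolically at a
+ # branch, or detach it at a commit:
+ #
+ #   repo.head.set_reference(repo.heads.master)  # writes "ref: refs/heads/master"
+ #   repo.head.set_reference(repo.head.commit)   # writes a hexsha - detached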
+
+ # aliased reference
+ reference = property(_get_reference, set_reference, doc="Returns the Reference we point to")
+ ref = reference
+
+ def is_valid(self):
+ """
+ :return:
+ True if the reference is valid, hence it can be read and points to
+ a valid object or reference."""
+ try:
+ self.object
+ except (OSError, ValueError):
+ return False
+ else:
+ return True
+
+ @property
+ def is_detached(self):
+ """
+ :return:
+ True if we are a detached reference, hence we point to a specific commit
+ instead of to another reference"""
+ try:
+ self.ref
+ return False
+ except TypeError:
+ return True
+
+ def log(self):
+ """
+ :return: RefLog for this reference. Its last entry reflects the latest change
+ applied to this reference
+
+ .. note:: As the log is parsed every time, it's recommended to cache it for use
+ instead of calling this method repeatedly. It should be considered read-only."""
+ return RefLog.from_file(RefLog.path(self))
+
+ def log_append(self, oldbinsha, message, newbinsha=None):
+ """Append a logentry to the logfile of this ref
+
+ :param oldbinsha: binary sha this ref used to point to
+ :param message: A message describing the change
+ :param newbinsha: The sha the ref points to now. If None, our current commit sha
+ will be used
+ :return: added RefLogEntry instance"""
+ return RefLog.append_entry(self.repo.config_reader(), RefLog.path(self), oldbinsha,
+ (newbinsha is None and self.commit.binsha) or newbinsha,
+ message)
+
+ def log_entry(self, index):
+ """:return: RefLogEntry at the given index
+ :param index: python list compatible positive or negative index
+
+ .. note:: This method must read part of the reflog during execution, hence
+ it should be used sparingly, or only if you need just one index.
+ In that case, it will be faster than the ``log()`` method"""
+ return RefLog.entry_at(RefLog.path(self), index)
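+ # Reflog usage sketch (illustrative; assumes `head` is a SymbolicReference
+ # whose reflog file exists):
+ #
+ # entries = head.log() # full RefLog; parse once and cache if reused
+ # latest = head.log_entry(-1) # cheaper when only a single entry is needed
+ # assert entries[-1].message == latest.message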
+
+ @classmethod
+ def to_full_path(cls, path):
+ """
+ :return: string with a full repository-relative path which can be used to initialize
+ a Reference instance, for instance by using ``Reference.from_path``"""
+ if isinstance(path, SymbolicReference):
+ path = path.path
+ full_ref_path = path
+ if not cls._common_path_default:
+ return full_ref_path
+ if not path.startswith(cls._common_path_default+"/"):
+ full_ref_path = '%s/%s' % (cls._common_path_default, path)
+ return full_ref_path
+
+ @classmethod
+ def delete(cls, repo, path):
+ """Delete the reference at the given path
+
+ :param repo:
+ Repository to delete the reference from
+
+ :param path:
+ Short or full path pointing to the reference, i.e. refs/myreference
+ or just "myreference", hence 'refs/' is implied.
+ Alternatively the symbolic reference to be deleted"""
+ full_ref_path = cls.to_full_path(path)
+ abs_path = join(repo.git_dir, full_ref_path)
+ if exists(abs_path):
+ os.remove(abs_path)
+ else:
+ # check packed refs
+ pack_file_path = cls._get_packed_refs_path(repo)
+ try:
+ reader = open(pack_file_path)
+ except (OSError,IOError):
+ pass # it didn't exist at all
+ else:
+ new_lines = list()
+ made_change = False
+ dropped_last_line = False
+ for line in reader:
+ # keep line if it is a comment or if the ref to delete is not
+ # in the line
+ # If we deleted the last line and this one is a tag-reference object,
+ # we drop it as well
+ if ( line.startswith('#') or full_ref_path not in line ) and \
+ ( not dropped_last_line or not line.startswith('^') ):
+ new_lines.append(line)
+ dropped_last_line = False
+ continue
+ # END skip comments and lines without our path
+
+ # drop this line
+ made_change = True
+ dropped_last_line = True
+ # END for each line in packed refs
+ reader.close()
+
+ # write the new lines
+ if made_change:
+ open(pack_file_path, 'w').writelines(new_lines)
+ # END open exception handling
+ # END handle deletion
+
+ # delete the reflog
+ reflog_path = RefLog.path(cls(repo, full_ref_path))
+ if os.path.isfile(reflog_path):
+ os.remove(reflog_path)
+ #END remove reflog
+
+
+ @classmethod
+ def _create(cls, repo, path, resolve, reference, force, logmsg=None):
+ """internal method used to create a new symbolic reference.
+ If resolve is False, the reference will be taken as is, creating
+ a proper symbolic reference. Otherwise it will be resolved to the
+ corresponding object and a detached symbolic reference will be created
+ instead"""
+ full_ref_path = cls.to_full_path(path)
+ abs_ref_path = join(repo.git_dir, full_ref_path)
+
+ # figure out target data
+ target = reference
+ if resolve:
+ target = repo.rev_parse(str(reference))
+
+ if not force and isfile(abs_ref_path):
+ target_data = str(target)
+ if isinstance(target, SymbolicReference):
+ target_data = target.path
+ if not resolve:
+ target_data = "ref: " + target_data
+ existing_data = open(abs_ref_path, 'rb').read().strip()
+ if existing_data != target_data:
+ raise OSError("Reference at %r does already exist, pointing to %r, requested was %r" % (full_ref_path, existing_data, target_data))
+ # END no force handling
+
+ ref = cls(repo, full_ref_path)
+ ref.set_reference(target, logmsg)
+ return ref
+
+ @classmethod
+ def create(cls, repo, path, reference='HEAD', force=False, logmsg=None):
+ """Create a new symbolic reference, hence a reference pointing to another reference.
+
+ :param repo:
+ Repository to create the reference in
+
+ :param path:
+ full path at which the new symbolic reference is supposed to be
+ created at, i.e. "NEW_HEAD" or "symrefs/my_new_symref"
+
+ :param reference:
+ The reference to which the new symbolic reference should point to
+
+ :param force:
+ if True, force creation even if a symbolic reference with that name already exists.
+ Raise OSError otherwise
+
+ :param logmsg:
+ If not None, the message to append to the reflog. Otherwise no reflog
+ entry is written.
+
+ :return: Newly created symbolic Reference
+
+ :raise OSError:
+ If a (Symbolic)Reference with the same name but different contents
+ already exists.
+
+ :note: This does not alter the current HEAD, index or Working Tree"""
+ return cls._create(repo, path, cls._resolve_ref_on_create, reference, force, logmsg)
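+ # Creation sketch (illustrative; assumes an existing `repo`): passing a
+ # Reference instance keeps the new reference symbolic.
+ #
+ # sym = SymbolicReference.create(repo, "NEW_HEAD", reference=repo.head.reference)
+ # assert not sym.is_detached and sym.reference == repo.head.reference
+ # SymbolicReference.delete(repo, "NEW_HEAD")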
+
+ def rename(self, new_path, force=False):
+ """Rename self to a new path
+
+ :param new_path:
+ Either a simple name or a full path, i.e. new_name or features/new_name.
+ The prefix refs/ is implied for references and will be set as needed.
+ In case this is a symbolic ref, there is no implied prefix
+
+ :param force:
+ If True, the rename will succeed even if a head with the target name
+ already exists. It will be overwritten in that case
+
+ :return: self
+ :raise OSError: In case a file at the given path already exists with different contents"""
+ new_path = self.to_full_path(new_path)
+ if self.path == new_path:
+ return self
+
+ new_abs_path = join(self.repo.git_dir, new_path)
+ cur_abs_path = join(self.repo.git_dir, self.path)
+ if isfile(new_abs_path):
+ if not force:
+ # if they point to the same file, it's not an error
+ if open(new_abs_path,'rb').read().strip() != open(cur_abs_path,'rb').read().strip():
+ raise OSError("File at path %r already exists" % new_abs_path)
+ # else: we could remove ourselves and use the other one, but
+ # for clarity we just continue as usual
+ # END not force handling
+ os.remove(new_abs_path)
+ # END handle existing target file
+
+ dname = dirname(new_abs_path)
+ if not isdir(dname):
+ os.makedirs(dname)
+ # END create directory
+
+ rename(cur_abs_path, new_abs_path)
+ self.path = new_path
+
+ return self
+
+ @classmethod
+ def _iter_items(cls, repo, common_path = None):
+ if common_path is None:
+ common_path = cls._common_path_default
+ rela_paths = set()
+
+ # walk loose refs
+ # Currently we do not follow links
+ for root, dirs, files in os.walk(join_path_native(repo.git_dir, common_path)):
+ if 'refs/' not in root: # skip non-refs subfolders
+ refs_id = [ i for i,d in enumerate(dirs) if d == 'refs' ]
+ if refs_id:
+ dirs[0:] = ['refs']
+ # END prune non-refs folders
+
+ for f in files:
+ abs_path = to_native_path_linux(join_path(root, f))
+ rela_paths.add(abs_path.replace(to_native_path_linux(repo.git_dir) + '/', ""))
+ # END for each file in root directory
+ # END for each directory to walk
+
+ # read packed refs
+ for sha, rela_path in cls._iter_packed_refs(repo):
+ if rela_path.startswith(common_path):
+ rela_paths.add(rela_path)
+ # END relative path matches common path
+ # END packed refs reading
+
+ # return paths in sorted order
+ for path in sorted(rela_paths):
+ try:
+ yield cls.from_path(repo, path)
+ except ValueError:
+ continue
+ # END for each sorted relative refpath
+
+ @classmethod
+ def iter_items(cls, repo, common_path = None):
+ """Find all refs in the repository
+
+ :param repo: is the Repo
+
+ :param common_path:
+ Optional keyword argument to the path which is to be shared by all
+ returned Ref objects.
+ Defaults to the class-specific portion if None, assuring that only
+ refs suitable for the actual class are returned.
+
+ :return:
+ git.SymbolicReference[], each of them is guaranteed to be a symbolic
+ ref which is not detached.
+
+ List is lexicographically sorted
+ The returned objects represent actual subclasses, such as Head or TagReference"""
+ return ( r for r in cls._iter_items(repo, common_path) if r.__class__ == SymbolicReference or not r.is_detached )
+
+ @classmethod
+ def from_path(cls, repo, path):
+ """
+ :param path: full .git-directory-relative path name to the Reference to instantiate
+ :note: use to_full_path() if you only have a partial path of a known Reference Type
+ :return:
+ Instance of type Reference, Head, or Tag
+ depending on the given path"""
+ if not path:
+ raise ValueError("Cannot create Reference from %r" % path)
+
+ for ref_type in (HEAD, Head, RemoteReference, TagReference, Reference, SymbolicReference):
+ try:
+ instance = ref_type(repo, path)
+ if instance.__class__ == SymbolicReference and instance.is_detached:
+ raise ValueError("SymbolRef was detached, we drop it")
+ return instance
+ except ValueError:
+ pass
+ # END exception handling
+ # END for each type to try
+ raise ValueError("Could not find reference type suitable to handle path %r" % path)
diff --git a/git/refs/tag.py b/git/refs/tag.py
new file mode 100644
index 00000000..c09d814d
--- /dev/null
+++ b/git/refs/tag.py
@@ -0,0 +1,91 @@
+from reference import Reference
+
+__all__ = ["TagReference", "Tag"]
+
+
+
+class TagReference(Reference):
+ """Class representing a lightweight tag reference which either points to a commit
+ ,a tag object or any other object. In the latter case additional information,
+ like the signature or the tag-creator, is available.
+
+ This tag object will always point to a commit object, but may carry additional
+ information in a tag object::
+
+ tagref = TagReference.list_items(repo)[0]
+ print tagref.commit.message
+ if tagref.tag is not None:
+ print tagref.tag.message"""
+
+ __slots__ = tuple()
+ _common_path_default = "refs/tags"
+
+ @property
+ def commit(self):
+ """:return: Commit object the tag ref points to"""
+ obj = self.object
+ if obj.type == "commit":
+ return obj
+ elif obj.type == "tag":
+ # it is a tag object which carries the commit as an object - we can point to anything
+ return obj.object
+ else:
+ raise ValueError( "Tag %s points to a Blob or Tree - have never seen that before" % self )
+
+ @property
+ def tag(self):
+ """
+ :return: Tag object this tag ref points to or None in case
+ we are a lightweight tag"""
+ obj = self.object
+ if obj.type == "tag":
+ return obj
+ return None
+
+ # make object read-only
+ # It should be reasonably hard to adjust an existing tag
+ object = property(Reference._get_object)
+
+ @classmethod
+ def create(cls, repo, path, ref='HEAD', message=None, force=False, **kwargs):
+ """Create a new tag reference.
+
+ :param path:
+ The name of the tag, i.e. 1.0 or releases/1.0.
+ The prefix refs/tags is implied
+
+ :param ref:
+ A reference to the object you want to tag. It can be a commit, tree or
+ blob.
+
+ :param message:
+ If not None, the message will be used in your tag object. This will also
+ create an additional tag object that allows one to obtain that information, i.e.::
+
+ tagref.tag.message
+
+ :param force:
+ If True, force creation of the tag even if a tag with that name already exists.
+
+ :param kwargs:
+ Additional keyword arguments to be passed to git-tag
+
+ :return: A new TagReference"""
+ args = ( path, ref )
+ if message:
+ kwargs['m'] = message
+ if force:
+ kwargs['f'] = True
+
+ repo.git.tag(*args, **kwargs)
+ return TagReference(repo, "%s/%s" % (cls._common_path_default, path))
+
+ @classmethod
+ def delete(cls, repo, *tags):
+ """Delete the given existing tag or tags"""
+ repo.git.tag("-d", *tags)
+
+
+
+# provide an alias
+Tag = TagReference
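+# Tagging sketch (illustrative; assumes an existing `repo`): a plain call creates
+# a lightweight tag, while passing a message creates an annotated tag object too.
+#
+# light = TagReference.create(repo, "1.0")
+# annotated = TagReference.create(repo, "1.1", message="release 1.1")
+# assert light.tag is None and annotated.tag.message == "release 1.1"
+# TagReference.delete(repo, "1.0", "1.1")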
diff --git a/git/remote.py b/git/remote.py
new file mode 100644
index 00000000..69a8126b
--- /dev/null
+++ b/git/remote.py
@@ -0,0 +1,603 @@
+# remote.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+# Module implementing a remote object allowing easy access to git remotes
+
+from exc import GitCommandError
+from ConfigParser import NoOptionError
+from config import SectionConstraint
+
+from git.util import (
+ LazyMixin,
+ Iterable,
+ IterableList,
+ RemoteProgress
+ )
+
+from refs import (
+ Reference,
+ RemoteReference,
+ SymbolicReference,
+ TagReference
+ )
+
+from git.util import join_path
+from gitdb.util import join
+
+import re
+import os
+import sys
+
+__all__ = ('RemoteProgress', 'PushInfo', 'FetchInfo', 'Remote')
+
+
+class PushInfo(object):
+ """
+ Carries information about the result of a push operation of a single head::
+
+ info = remote.push()[0]
+ info.flags # bitflags providing more information about the result
+ info.local_ref # Reference pointing to the local reference that was pushed
+ # It is None if the ref was deleted.
+ info.remote_ref_string # path to the remote reference located on the remote side
+ info.remote_ref # Remote Reference on the local side corresponding to
+ # the remote_ref_string. It can be a TagReference as well.
+ info.old_commit # commit at which the remote_ref was standing before we pushed
+ # it to local_ref.commit. Will be None if an error was indicated
+ info.summary # summary line providing human-readable English text about the push
+ """
+ __slots__ = ('local_ref', 'remote_ref_string', 'flags', 'old_commit', '_remote', 'summary')
+
+ NEW_TAG, NEW_HEAD, NO_MATCH, REJECTED, REMOTE_REJECTED, REMOTE_FAILURE, DELETED, \
+ FORCED_UPDATE, FAST_FORWARD, UP_TO_DATE, ERROR = [ 1 << x for x in range(11) ]
+
+ _flag_map = { 'X' : NO_MATCH, '-' : DELETED, '*' : 0,
+ '+' : FORCED_UPDATE, ' ' : FAST_FORWARD,
+ '=' : UP_TO_DATE, '!' : ERROR }
+
+ def __init__(self, flags, local_ref, remote_ref_string, remote, old_commit=None,
+ summary=''):
+ """ Initialize a new instance """
+ self.flags = flags
+ self.local_ref = local_ref
+ self.remote_ref_string = remote_ref_string
+ self._remote = remote
+ self.old_commit = old_commit
+ self.summary = summary
+
+ @property
+ def remote_ref(self):
+ """
+ :return:
+ Remote Reference or TagReference in the local repository corresponding
+ to the remote_ref_string kept in this instance."""
+ # translate heads to a local remote, tags stay as they are
+ if self.remote_ref_string.startswith("refs/tags"):
+ return TagReference(self._remote.repo, self.remote_ref_string)
+ elif self.remote_ref_string.startswith("refs/heads"):
+ remote_ref = Reference(self._remote.repo, self.remote_ref_string)
+ return RemoteReference(self._remote.repo, "refs/remotes/%s/%s" % (str(self._remote), remote_ref.name))
+ else:
+ raise ValueError("Could not handle remote ref: %r" % self.remote_ref_string)
+ # END
+
+ @classmethod
+ def _from_line(cls, remote, line):
+ """Create a new PushInfo instance as parsed from line which is expected to be like
+ refs/heads/master:refs/heads/master 05d2687..1d0568e"""
+ control_character, from_to, summary = line.split('\t', 3)
+ flags = 0
+
+ # control character handling
+ try:
+ flags |= cls._flag_map[ control_character ]
+ except KeyError:
+ raise ValueError("Control Character %r unknown as parsed from line %r" % (control_character, line))
+ # END handle control character
+
+ # from_to handling
+ from_ref_string, to_ref_string = from_to.split(':')
+ if flags & cls.DELETED:
+ from_ref = None
+ else:
+ from_ref = Reference.from_path(remote.repo, from_ref_string)
+
+ # commit handling, could be message or commit info
+ old_commit = None
+ if summary.startswith('['):
+ if "[rejected]" in summary:
+ flags |= cls.REJECTED
+ elif "[remote rejected]" in summary:
+ flags |= cls.REMOTE_REJECTED
+ elif "[remote failure]" in summary:
+ flags |= cls.REMOTE_FAILURE
+ elif "[no match]" in summary:
+ flags |= cls.ERROR
+ elif "[new tag]" in summary:
+ flags |= cls.NEW_TAG
+ elif "[new branch]" in summary:
+ flags |= cls.NEW_HEAD
+ # uptodate encoded in control character
+ else:
+ # fast-forward or forced update - was encoded in control character,
+ # but we parse the old and new commit
+ split_token = "..."
+ if control_character == " ":
+ split_token = ".."
+ old_sha, new_sha = summary.split(' ')[0].split(split_token)
+ # have to use constructor here as the sha usually is abbreviated
+ old_commit = remote.repo.commit(old_sha)
+ # END message handling
+
+ return PushInfo(flags, from_ref, to_ref_string, remote, old_commit, summary)
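+ # Flag usage sketch (illustrative; assumes a prior `infos = remote.push()`):
+ #
+ # for info in infos:
+ #     if info.flags & PushInfo.ERROR:
+ #         print >> sys.stderr, "push failed: %s" % info.summary
+ #     elif info.flags & (PushInfo.FAST_FORWARD | PushInfo.FORCED_UPDATE):
+ #         print "%s moved from %s" % (info.remote_ref_string, info.old_commit)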
+
+
+class FetchInfo(object):
+ """
+ Carries information about the results of a fetch operation of a single head::
+
+ info = remote.fetch()[0]
+ info.ref # Symbolic Reference or RemoteReference to the changed
+ # remote head or FETCH_HEAD
+ info.flags # additional flags to be & with enumeration members,
+ # i.e. info.flags & info.REJECTED
+ # is 0 if ref is SymbolicReference
+ info.note # additional notes given by git-fetch intended for the user
+ info.old_commit # if info.flags & info.FORCED_UPDATE|info.FAST_FORWARD,
+ # field is set to the previous location of ref, otherwise None
+ """
+ __slots__ = ('ref','old_commit', 'flags', 'note')
+
+ NEW_TAG, NEW_HEAD, HEAD_UPTODATE, TAG_UPDATE, REJECTED, FORCED_UPDATE, \
+ FAST_FORWARD, ERROR = [ 1 << x for x in range(8) ]
+
+ # %c %-*s %-*s -> %s (%s)
+ re_fetch_result = re.compile("^\s*(.) (\[?[\w\s\.]+\]?)\s+(.+) -> ([/\w_\+\.-]+)( \(.*\)?$)?")
+
+ _flag_map = { '!' : ERROR, '+' : FORCED_UPDATE, '-' : TAG_UPDATE, '*' : 0,
+ '=' : HEAD_UPTODATE, ' ' : FAST_FORWARD }
+
+ def __init__(self, ref, flags, note = '', old_commit = None):
+ """
+ Initialize a new instance
+ """
+ self.ref = ref
+ self.flags = flags
+ self.note = note
+ self.old_commit = old_commit
+
+ def __str__(self):
+ return self.name
+
+ @property
+ def name(self):
+ """:return: Name of our remote ref"""
+ return self.ref.name
+
+ @property
+ def commit(self):
+ """:return: Commit of our remote ref"""
+ return self.ref.commit
+
+ @classmethod
+ def _from_line(cls, repo, line, fetch_line):
+ """Parse information from the given line as returned by git-fetch -v
+ and return a new FetchInfo object representing this information.
+
+ We can handle a line as follows
+ "%c %-*s %-*s -> %s%s"
+
+ Where c is either ' ', !, +, -, *, or =
+ ! means error
+ + means success forcing update
+ - means a tag was updated
+ * means birth of new branch or tag
+ = means the head was up to date ( and not moved )
+ ' ' means a fast-forward
+
+ fetch line is the corresponding line from FETCH_HEAD, like
+ acb0fa8b94ef421ad60c8507b634759a472cd56c not-for-merge branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
+ match = cls.re_fetch_result.match(line)
+ if match is None:
+ raise ValueError("Failed to parse line: %r" % line)
+
+ # parse lines
+ control_character, operation, local_remote_ref, remote_local_ref, note = match.groups()
+ try:
+ new_hex_sha, fetch_operation, fetch_note = fetch_line.split("\t")
+ ref_type_name, fetch_note = fetch_note.split(' ', 1)
+ except ValueError: # unpack error
+ raise ValueError("Failed to parse FETCH__HEAD line: %r" % fetch_line)
+
+ # handle FETCH_HEAD and figure out ref type
+ # If we do not specify a target branch like master:refs/remotes/origin/master,
+ # the fetch result is stored in FETCH_HEAD which destroys the rule we usually
+ # have. In that case we use a symbolic reference which is detached
+ ref_type = None
+ if remote_local_ref == "FETCH_HEAD":
+ ref_type = SymbolicReference
+ elif ref_type_name == "branch":
+ ref_type = RemoteReference
+ elif ref_type_name == "tag":
+ ref_type = TagReference
+ else:
+ raise TypeError("Cannot handle reference type: %r" % ref_type_name)
+
+ # create ref instance
+ if ref_type is SymbolicReference:
+ remote_local_ref = ref_type(repo, "FETCH_HEAD")
+ else:
+ remote_local_ref = Reference.from_path(repo, join_path(ref_type._common_path_default, remote_local_ref.strip()))
+ # END create ref instance
+
+ note = ( note and note.strip() ) or ''
+
+ # parse flags from control_character
+ flags = 0
+ try:
+ flags |= cls._flag_map[control_character]
+ except KeyError:
+ raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line))
+ # END control char exception handling
+
+ # parse operation string for more info - makes no sense for symbolic refs
+ old_commit = None
+ if isinstance(remote_local_ref, Reference):
+ if 'rejected' in operation:
+ flags |= cls.REJECTED
+ if 'new tag' in operation:
+ flags |= cls.NEW_TAG
+ if 'new branch' in operation:
+ flags |= cls.NEW_HEAD
+ if '...' in operation or '..' in operation:
+ split_token = '...'
+ if control_character == ' ':
+ split_token = split_token[:-1]
+ old_commit = repo.rev_parse(operation.split(split_token)[0])
+ # END handle refspec
+ # END reference flag handling
+
+ return cls(remote_local_ref, flags, note, old_commit)
+
+
+class Remote(LazyMixin, Iterable):
+ """Provides easy read and write access to a git remote.
+
+ Everything not part of this interface is considered an option for the current
+ remote, allowing constructs like remote.pushurl to query the pushurl.
+
+ NOTE: When querying configuration, the configuration accessor will be cached
+ to speed up subsequent accesses."""
+
+ __slots__ = ( "repo", "name", "_config_reader" )
+ _id_attribute_ = "name"
+
+ def __init__(self, repo, name):
+ """Initialize a remote instance
+
+ :param repo: The repository we are a remote of
+ :param name: the name of the remote, i.e. 'origin'"""
+ self.repo = repo
+ self.name = name
+
+ if os.name == 'nt':
+ # some oddity: on windows, python 2.5, it for some reason does not realize
+ # that it has the config_writer property, but instead calls __getattr__
+ # which will not yield the expected results. 'pinging' the members
+ # with a dir call creates the config_writer property that we require
+ # ... bugs like these make me wonder whether python really wants to be used
+ # for production. It doesn't happen on linux though.
+ dir(self)
+ # END windows special handling
+
+ def __getattr__(self, attr):
+ """Allows to call this instance like
+ remote.special( *args, **kwargs) to call git-remote special self.name"""
+ if attr == "_config_reader":
+ return super(Remote, self).__getattr__(attr)
+
+ # sometimes, probably due to a bug in python itself, we are being called
+ # even though a slot of the same name exists
+ try:
+ return self._config_reader.get(attr)
+ except NoOptionError:
+ return super(Remote, self).__getattr__(attr)
+ # END handle exception
+
+ def _config_section_name(self):
+ return 'remote "%s"' % self.name
+
+ def _set_cache_(self, attr):
+ if attr == "_config_reader":
+ self._config_reader = SectionConstraint(self.repo.config_reader(), self._config_section_name())
+ else:
+ super(Remote, self)._set_cache_(attr)
+
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return '<git.%s "%s">' % (self.__class__.__name__, self.name)
+
+ def __eq__(self, other):
+ return self.name == other.name
+
+ def __ne__(self, other):
+ return not ( self == other )
+
+ def __hash__(self):
+ return hash(self.name)
+
+ @classmethod
+ def iter_items(cls, repo):
+ """:return: Iterator yielding Remote objects of the given repository"""
+ for section in repo.config_reader("repository").sections():
+ if not section.startswith('remote'):
+ continue
+ lbound = section.find('"')
+ rbound = section.rfind('"')
+ if lbound == -1 or rbound == -1:
+ raise ValueError("Remote-Section has invalid format: %r" % section)
+ yield Remote(repo, section[lbound+1:rbound])
+ # END for each configuration section
+
+ @property
+ def refs(self):
+ """
+ :return:
+ IterableList of RemoteReference objects. It is prefixed, allowing
+ you to omit the remote path portion, i.e.::
+ remote.refs.master # yields RemoteReference('/refs/remotes/origin/master')"""
+ out_refs = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
+ out_refs.extend(RemoteReference.list_items(self.repo, remote=self.name))
+ assert out_refs, "Remote %s did not have any references" % self.name
+ return out_refs
+
+ @property
+ def stale_refs(self):
+ """
+ :return:
+ IterableList of RemoteReference objects that do not have a corresponding
+ head on the remote anymore, as they have been deleted on the
+ remote side, but are still available locally.
+
+ The IterableList is prefixed, hence the 'origin' must be omitted. See
+ 'refs' property for an example."""
+ out_refs = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
+ for line in self.repo.git.remote("prune", "--dry-run", self).splitlines()[2:]:
+ # expecting
+ # * [would prune] origin/new_branch
+ token = " * [would prune] "
+ if not line.startswith(token):
+ raise ValueError("Could not parse git-remote prune result: %r" % line)
+ fqhn = "%s/%s" % (RemoteReference._common_path_default,line.replace(token, ""))
+ out_refs.append(RemoteReference(self.repo, fqhn))
+ # END for each line
+ return out_refs
+
+ @classmethod
+ def create(cls, repo, name, url, **kwargs):
+ """Create a new remote to the given repository
+ :param repo: Repository instance that is to receive the new remote
+ :param name: Desired name of the remote
+ :param url: URL which corresponds to the remote's name
+ :param kwargs:
+ Additional arguments to be passed to the git-remote add command
+
+ :return: New Remote instance
+
+ :raise GitCommandError: in case an origin with that name already exists"""
+ repo.git.remote( "add", name, url, **kwargs )
+ return cls(repo, name)
+
+ # add is an alias
+ add = create
+
+ @classmethod
+ def remove(cls, repo, name ):
+ """Remove the remote with the given name"""
+ repo.git.remote("rm", name)
+
+ # alias
+ rm = remove
+
+ def rename(self, new_name):
+ """Rename self to the given new_name
+ :return: self """
+ if self.name == new_name:
+ return self
+
+ self.repo.git.remote("rename", self.name, new_name)
+ self.name = new_name
+ del(self._config_reader) # it contains cached values, section names are different now
+ return self
+
+ def update(self, **kwargs):
+ """Fetch all changes for this remote, including new branches which will
+ be forced in ( in case your local remote branch is not part of the new remote branch's
+ ancestry anymore ).
+
+ :param kwargs:
+ Additional arguments passed to git-remote update
+
+ :return: self """
+ self.repo.git.remote("update", self.name)
+ return self
+
+ def _digest_process_messages(self, fh, progress):
+ """Read progress messages from file-like object fh, supplying the respective
+ progress messages to the progress instance.
+
+ :return: list(line, ...) list of lines without linebreaks that did
+ not contain progress information"""
+ line_so_far = ''
+ dropped_lines = list()
+ while True:
+ char = fh.read(1)
+ if not char:
+ break
+
+ if char in ('\r', '\n'):
+ dropped_lines.extend(progress._parse_progress_line(line_so_far))
+ line_so_far = ''
+ else:
+ line_so_far += char
+ # END process parsed line
+ # END while file is not done reading
+ return dropped_lines
+
+
+ def _finalize_proc(self, proc):
+ """Wait for the process (fetch, pull or push) and handle its errors accordingly"""
+ try:
+ proc.wait()
+ except GitCommandError,e:
+ # if a push has rejected items, the command has non-zero return status
+ # a return status of 128 indicates a connection error - reraise the previous one
+ if proc.poll() == 128:
+ raise
+ pass
+ # END exception handling
+
+
+ def _get_fetch_info_from_stderr(self, proc, progress):
+ # skip first line as it is some remote info we are not interested in
+ output = IterableList('name')
+
+
+ # lines which are no progress are fetch info lines
+ # this also waits for the command to finish
+ # Skip some progress lines that don't provide relevant information
+ fetch_info_lines = list()
+ for line in self._digest_process_messages(proc.stderr, progress):
+ if line.startswith('From') or line.startswith('remote: Total'):
+ continue
+ elif line.startswith('warning:'):
+ print >> sys.stderr, line
+ continue
+ elif line.startswith('fatal:'):
+ raise GitCommandError("Error when fetching: %s" % line)
+ # END handle special messages
+ fetch_info_lines.append(line)
+ # END for each line
+
+ # read head information
+ fp = open(join(self.repo.git_dir, 'FETCH_HEAD'),'r')
+ fetch_head_info = fp.readlines()
+ fp.close()
+
+ assert len(fetch_info_lines) == len(fetch_head_info)
+
+ output.extend(FetchInfo._from_line(self.repo, err_line, fetch_line)
+ for err_line,fetch_line in zip(fetch_info_lines, fetch_head_info))
+
+ self._finalize_proc(proc)
+ return output
+
+ def _get_push_info(self, proc, progress):
+ # read progress information from stderr
+ # we hope stdout can hold all the data, it should ...
+ # read the lines manually as it will use carriage returns between the messages
+ # to override the previous one. This is why we read the bytes manually
+ self._digest_process_messages(proc.stderr, progress)
+
+ output = IterableList('name')
+ for line in proc.stdout.readlines():
+ try:
+ output.append(PushInfo._from_line(self, line))
+ except ValueError:
+ # if an error happens, additional info is given which we cannot parse
+ pass
+ # END exception handling
+ # END for each line
+
+ self._finalize_proc(proc)
+ return output
+
+
+ def fetch(self, refspec=None, progress=None, **kwargs):
+ """Fetch the latest changes for this remote
+
+ :param refspec:
+ A "refspec" is used by fetch and push to describe the mapping
+ between remote ref and local ref. They are combined with a colon in
+ the format <src>:<dst>, preceded by an optional plus sign, +.
+ For example: git fetch $URL refs/heads/master:refs/heads/origin means
+ "grab the master branch head from the $URL and store it as my origin
+ branch head". And git push $URL refs/heads/master:refs/heads/to-upstream
+ means "publish my master branch head as to-upstream branch at $URL".
+ See also git-push(1).
+
+ Taken from the git manual
+ :param progress: See 'push' method
+ :param kwargs: Additional arguments to be passed to git-fetch
+ :return:
+ IterableList(FetchInfo, ...) list of FetchInfo instances providing detailed
+ information about the fetch results
+
+ :note:
+ As fetch does not provide progress information to non-ttys, we unfortunately
+ cannot make it available here as we do in the 'push' method.
+ proc = self.repo.git.fetch(self, refspec, with_extended_output=True, as_process=True, v=True, **kwargs)
+ return self._get_fetch_info_from_stderr(proc, progress or RemoteProgress())
+
+ def pull(self, refspec=None, progress=None, **kwargs):
+ """Pull changes from the given branch, being the same as a fetch followed
+ by a merge of branch with your local branch.
+
+ :param refspec: see 'fetch' method
+ :param progress: see 'push' method
+ :param kwargs: Additional arguments to be passed to git-pull
+ :return: Please see 'fetch' method """
+ proc = self.repo.git.pull(self, refspec, with_extended_output=True, as_process=True, v=True, **kwargs)
+ return self._get_fetch_info_from_stderr(proc, progress or RemoteProgress())
+
+ def push(self, refspec=None, progress=None, **kwargs):
+ """Push changes from source branch in refspec to target branch in refspec.
+
+ :param refspec: see 'fetch' method
+ :param progress:
+ Instance of type RemoteProgress allowing the caller to receive
+ progress information until the method returns.
+ If None, progress information will be discarded
+
+ :param kwargs: Additional arguments to be passed to git-push
+ :return:
+ IterableList(PushInfo, ...) iterable list of PushInfo instances, each
+ one informing about an individual head which had been updated on the remote
+ side.
+ If the push contains rejected heads, these will have the PushInfo.ERROR bit set
+ in their flags.
+ If the operation fails completely, the length of the returned IterableList will
+ be null."""
+ proc = self.repo.git.push(self, refspec, porcelain=True, as_process=True, **kwargs)
+ return self._get_push_info(proc, progress or RemoteProgress())
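+ # Transport sketch (illustrative; assumes an existing `repo` with an
+ # 'origin' remote). FetchInfo flags describe what happened to each ref:
+ #
+ # origin = repo.remote('origin')
+ # for info in origin.fetch():
+ #     if info.flags & FetchInfo.NEW_HEAD:
+ #         print "new branch %s at %s" % (info.name, info.commit)
+ # origin.push("master:master") # refspec handling as described in fetch()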
+
+ @property
+ def config_reader(self):
+ """
+ :return:
+ GitConfigParser compatible object able to read options for only our remote.
+ Hence you may simply type config.get("pushurl") to obtain the information"""
+ return self._config_reader
+
+ @property
+ def config_writer(self):
+ """
+ :return: GitConfigParser compatible object able to write options for this remote.
+ :note:
+ You can only own one writer at a time - delete it to release the
+ configuration file and make it usable by others.
+
+ To assure consistent results, you should only query options through the
+ writer. Once you are done writing, you are free to use the config reader
+ once again."""
+ writer = self.repo.config_writer()
+
+ # clear our cache to assure we re-read the possibly changed configuration
+ del(self._config_reader)
+ return SectionConstraint(writer, self._config_section_name())
diff --git a/git/repo/__init__.py b/git/repo/__init__.py
new file mode 100644
index 00000000..8902a254
--- /dev/null
+++ b/git/repo/__init__.py
@@ -0,0 +1,3 @@
+"""Initialize the Repo package"""
+
+from base import *
\ No newline at end of file
diff --git a/git/repo/base.py b/git/repo/base.py
new file mode 100644
index 00000000..e26da101
--- /dev/null
+++ b/git/repo/base.py
@@ -0,0 +1,753 @@
+# repo.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.exc import InvalidGitRepositoryError, NoSuchPathError
+from git.cmd import Git
+from git.util import Actor
+from git.refs import *
+from git.index import IndexFile
+from git.objects import *
+from git.config import GitConfigParser
+from git.remote import Remote
+from git.db import (
+ GitCmdObjectDB,
+ GitDB
+ )
+
+
+from gitdb.util import (
+ join,
+ isfile,
+ hex_to_bin
+ )
+
+from fun import (
+ rev_parse,
+ is_git_dir,
+ touch
+ )
+
+import os
+import sys
+import re
+
+DefaultDBType = GitDB
+if sys.version_info[1] < 5: # python 2.4 compatibility
+ DefaultDBType = GitCmdObjectDB
+# END handle python 2.4
+
+
+__all__ = ('Repo', )
+
+
+class Repo(object):
+ """Represents a git repository and allows you to query references,
+ gather commit information, generate diffs, create and clone repositories, and
+ query the log.
+
+ The following attributes are worth using:
+
+ 'working_dir' is the working directory of the git command, which is the working tree
+ directory if available or the .git directory in case of bare repositories
+
+ 'working_tree_dir' is the working tree directory, but will raise AssertionError
+ if we are a bare repository.
+
+ 'git_dir' is the .git repository directory, which is always set."""
+ DAEMON_EXPORT_FILE = 'git-daemon-export-ok'
+ __slots__ = ( "working_dir", "_working_tree_dir", "git_dir", "_bare", "git", "odb" )
+
+ # precompiled regex
+ re_whitespace = re.compile(r'\s+')
+ re_hexsha_only = re.compile('^[0-9A-Fa-f]{40}$')
+ re_hexsha_shortened = re.compile('^[0-9A-Fa-f]{4,40}$')
+ re_author_committer_start = re.compile(r'^(author|committer)')
+ re_tab_full_line = re.compile(r'^\t(.*)$')
+
+ # invariants
+ # represents the configuration level of a configuration file
+ config_level = ("system", "global", "repository")
+
+ def __init__(self, path=None, odbt = DefaultDBType):
+ """Create a new Repo instance
+
+ :param path: is the path to either the root git directory or the bare git repo::
+
+ repo = Repo("/Users/mtrier/Development/git-python")
+ repo = Repo("/Users/mtrier/Development/git-python.git")
+ repo = Repo("~/Development/git-python.git")
+ repo = Repo("$REPOSITORIES/Development/git-python.git")
+
+ :param odbt: Object DataBase type - a type which is constructed by providing
+ the directory containing the database objects, i.e. .git/objects. It will
+ be used to access all object data
+ :raise InvalidGitRepositoryError:
+ :raise NoSuchPathError:
+ :return: git.Repo """
+ epath = os.path.abspath(os.path.expandvars(os.path.expanduser(path or os.getcwd())))
+
+ if not os.path.exists(epath):
+ raise NoSuchPathError(epath)
+
+ self.working_dir = None
+ self._working_tree_dir = None
+ self.git_dir = None
+ curpath = epath
+
+ # walk up the path to find the .git dir
+ while curpath:
+ if is_git_dir(curpath):
+ self.git_dir = curpath
+ self._working_tree_dir = os.path.dirname(curpath)
+ break
+ gitpath = join(curpath, '.git')
+ if is_git_dir(gitpath):
+ self.git_dir = gitpath
+ self._working_tree_dir = curpath
+ break
+ curpath, dummy = os.path.split(curpath)
+ if not dummy:
+ break
+ # END while curpath
+
+ if self.git_dir is None:
+ raise InvalidGitRepositoryError(epath)
+
+ self._bare = False
+ try:
+ self._bare = self.config_reader("repository").getboolean('core','bare')
+ except Exception:
+ # let's not assume the option exists, although it should
+ pass
+
+ # adjust the wd in case we are actually bare - we didn't know that
+ # in the first place
+ if self._bare:
+ self._working_tree_dir = None
+ # END working dir handling
+
+ self.working_dir = self._working_tree_dir or self.git_dir
+ self.git = Git(self.working_dir)
+
+ # special handling, in special times
+ args = [join(self.git_dir, 'objects')]
+ if issubclass(odbt, GitCmdObjectDB):
+ args.append(self.git)
+ self.odb = odbt(*args)
+
+ def __eq__(self, rhs):
+ if isinstance(rhs, Repo):
+ return self.git_dir == rhs.git_dir
+ return False
+
+ def __ne__(self, rhs):
+ return not self.__eq__(rhs)
+
+ def __hash__(self):
+ return hash(self.git_dir)
+
+ def __repr__(self):
+ return "%s(%r)" % (type(self).__name__, self.git_dir)
+
+ # Description property
+ def _get_description(self):
+ filename = join(self.git_dir, 'description')
+ return file(filename).read().rstrip()
+
+ def _set_description(self, descr):
+ filename = join(self.git_dir, 'description')
+ file(filename, 'w').write(descr+'\n')
+
+ description = property(_get_description, _set_description,
+ doc="the project's description")
+ del _get_description
+ del _set_description
+
+
+
+ @property
+ def working_tree_dir(self):
+ """:return: The working tree directory of our git repository
+ :raise AssertionError: If we are a bare repository"""
+ if self._working_tree_dir is None:
+ raise AssertionError( "Repository at %r is bare and does not have a working tree directory" % self.git_dir )
+ return self._working_tree_dir
+
+ @property
+ def bare(self):
+ """:return: True if the repository is bare"""
+ return self._bare
+
+ @property
+ def heads(self):
+ """A list of ``Head`` objects representing the branch heads in
+ this repo
+
+ :return: ``git.IterableList(Head, ...)``"""
+ return Head.list_items(self)
+
+ @property
+ def references(self):
+ """A list of Reference objects representing tags, heads and remote references.
+
+ :return: IterableList(Reference, ...)"""
+ return Reference.list_items(self)
+
+ # alias for references
+ refs = references
+
+ # alias for heads
+ branches = heads
+
+ @property
+ def index(self):
+ """:return: IndexFile representing this repository's index."""
+ return IndexFile(self)
+
+ @property
+ def head(self):
+ """:return: HEAD Object pointing to the current head reference"""
+ return HEAD(self,'HEAD')
+
+ @property
+ def remotes(self):
+ """A list of Remote objects allowing to access and manipulate remotes
+ :return: ``git.IterableList(Remote, ...)``"""
+ return Remote.list_items(self)
+
+ def remote(self, name='origin'):
+ """:return: Remote with the specified name
+ :raise ValueError: if no remote with such a name exists"""
+ return Remote(self, name)
+
+ #{ Submodules
+
+ @property
+ def submodules(self):
+ """
+ :return: git.IterableList(Submodule, ...) of direct submodules
+ available from the current head"""
+ return Submodule.list_items(self)
+
+ def submodule(self, name):
+ """ :return: Submodule with the given name
+ :raise ValueError: If no such submodule exists"""
+ try:
+ return self.submodules[name]
+ except IndexError:
+ raise ValueError("Didn't find submodule named %r" % name)
+ # END exception handling
+
+ def create_submodule(self, *args, **kwargs):
+ """Create a new submodule
+
+ :note: See the documentation of Submodule.add for a description of the
+ applicable parameters
+ :return: created submodules"""
+ return Submodule.add(self, *args, **kwargs)
+
+ def iter_submodules(self, *args, **kwargs):
+ """An iterator yielding Submodule instances, see Traversable interface
+ for a description of args and kwargs
+ :return: Iterator"""
+ return RootModule(self).traverse(*args, **kwargs)
+
+ def submodule_update(self, *args, **kwargs):
+ """Update the submodules, keeping the repository consistent as it will
+ take the previous state into consideration. For more information, please
+ see the documentation of RootModule.update"""
+ return RootModule(self).update(*args, **kwargs)
+
+ #}END submodules
+
+ @property
+ def tags(self):
+ """A list of ``Tag`` objects that are available in this repo
+ :return: ``git.IterableList(TagReference, ...)`` """
+ return TagReference.list_items(self)
+
+ def tag(self,path):
+ """:return: TagReference Object, reference pointing to a Commit or Tag
+ :param path: path to the tag reference, i.e. 0.1.5 or tags/0.1.5 """
+ return TagReference(self, path)
+
+ def create_head(self, path, commit='HEAD', force=False, logmsg=None ):
+ """Create a new head within the repository.
+ For more documentation, please see the Head.create method.
+
+ :return: newly created Head Reference"""
+ return Head.create(self, path, commit, force, logmsg)
+
+ def delete_head(self, *heads, **kwargs):
+ """Delete the given heads
+
+ :param kwargs: Additional keyword arguments to be passed to git-branch"""
+ return Head.delete(self, *heads, **kwargs)
+
+ def create_tag(self, path, ref='HEAD', message=None, force=False, **kwargs):
+ """Create a new tag reference.
+ For more documentation, please see the TagReference.create method.
+
+ :return: TagReference object """
+ return TagReference.create(self, path, ref, message, force, **kwargs)
+
+ def delete_tag(self, *tags):
+ """Delete the given tag references"""
+ return TagReference.delete(self, *tags)
+
+ def create_remote(self, name, url, **kwargs):
+ """Create a new remote.
+
+ For more information, please see the documentation of the Remote.create
+ methods
+
+ :return: Remote reference"""
+ return Remote.create(self, name, url, **kwargs)
+
+ def delete_remote(self, remote):
+ """Delete the given remote."""
+ return Remote.remove(self, remote)
+
+ def _get_config_path(self, config_level ):
+ # we do not support an absolute path of the gitconfig on windows,
+ # use the global config instead
+ if sys.platform == "win32" and config_level == "system":
+ config_level = "global"
+
+ if config_level == "system":
+ return "/etc/gitconfig"
+ elif config_level == "global":
+ return os.path.normpath(os.path.expanduser("~/.gitconfig"))
+ elif config_level == "repository":
+ return join(self.git_dir, "config")
+
+ raise ValueError( "Invalid configuration level: %r" % config_level )
+
+ def config_reader(self, config_level=None):
+ """
+ :return:
+ GitConfigParser allowing to read the full git configuration, but not to write it
+
+ The configuration will include values from the system, user and repository
+ configuration files.
+
+ :param config_level:
+ For possible values, see config_writer method
+ If None, all applicable levels will be used. Specify a level in case
+ you know exactly which file you wish to read, to avoid reading multiple
+ files unnecessarily.
+ :note: On windows, system configuration cannot currently be read as the path is
+ unknown, instead the global path will be used."""
+ files = None
+ if config_level is None:
+ files = [ self._get_config_path(f) for f in self.config_level ]
+ else:
+ files = [ self._get_config_path(config_level) ]
+ return GitConfigParser(files, read_only=True)
+
+ def config_writer(self, config_level="repository"):
+ """
+ :return:
+ GitConfigParser allowing to write values of the specified configuration file level.
+ Config writers should be retrieved, used to change the configuration, and written
+ right away, as they will lock the configuration file in question and prevent others
+ from writing it.
+
+ :param config_level:
+ One of the following values
+ system = system wide configuration file
+ global = user level configuration file
+ repository = configuration file for this repository only"""
+ return GitConfigParser(self._get_config_path(config_level), read_only = False)
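+ # Configuration sketch (illustrative; option names are examples only):
+ #
+ # reader = repo.config_reader() # system + global + repository
+ # name = reader.get_value("user", "name") # read across all levels
+ # writer = repo.config_writer("repository")
+ # writer.set_value("core", "filemode", False) # locks .git/config while held
+ # del(writer) # release the lock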
+
+ def commit(self, rev=None):
+ """The Commit object for the specified revision
+ :param rev: revision specifier, see git-rev-parse for viable options.
+ :return: ``git.Commit``"""
+ if rev is None:
+ return self.active_branch.commit
+ else:
+ return self.rev_parse(str(rev)+"^0")
+
+ def iter_trees(self, *args, **kwargs):
+ """:return: Iterator yielding Tree objects
+ :note: Takes all arguments known to iter_commits method"""
+ return ( c.tree for c in self.iter_commits(*args, **kwargs) )
+
+ def tree(self, rev=None):
+ """The Tree object for the given treeish revision
+ Examples::
+
+ repo.tree(repo.heads[0])
+
+ :param rev: is a revision pointing to a Treeish ( being a commit or tree )
+ :return: ``git.Tree``
+
+ :note:
+ If you need a non-root level tree, find it by iterating the root tree. Otherwise
+ it cannot know about its path relative to the repository root and subsequent
+ operations might have unexpected results."""
+ if rev is None:
+ return self.active_branch.commit.tree
+ else:
+ return self.rev_parse(str(rev)+"^{tree}")
+
+ def iter_commits(self, rev=None, paths='', **kwargs):
+ """A list of Commit objects representing the history of a given ref/commit
+
+ :param rev:
+ revision specifier, see git-rev-parse for viable options.
+ If None, the active branch will be used.
+
+ :param paths:
+ is an optional path or a list of paths; if set, only commits containing
+ this path or these paths will be returned.
+
+ :param kwargs:
+ Arguments to be passed to git-rev-list - common ones are
+ max_count and skip
+
+ :note: to receive only commits between two named revisions, use the
+ "revA..revB" revision specifier
+
+ :return: ``git.Commit[]``"""
+ if rev is None:
+ rev = self.active_branch
+
+ return Commit.iter_items(self, rev, paths, **kwargs)
+
+ def _get_daemon_export(self):
+ filename = join(self.git_dir, self.DAEMON_EXPORT_FILE)
+ return os.path.exists(filename)
+
+ def _set_daemon_export(self, value):
+ filename = join(self.git_dir, self.DAEMON_EXPORT_FILE)
+ fileexists = os.path.exists(filename)
+ if value and not fileexists:
+ touch(filename)
+ elif not value and fileexists:
+ os.unlink(filename)
+
+ daemon_export = property(_get_daemon_export, _set_daemon_export,
+ doc="If True, git-daemon may export this repository")
+ del _get_daemon_export
+ del _set_daemon_export
+
+ def _get_alternates(self):
+ """The list of alternates for this repo from which objects can be retrieved
+
+ :return: list of strings being pathnames of alternates"""
+ alternates_path = join(self.git_dir, 'objects', 'info', 'alternates')
+
+ if os.path.exists(alternates_path):
+ f = open(alternates_path)
+ try:
+ alts = f.read()
+ finally:
+ f.close()
+ return alts.strip().splitlines()
+ else:
+ return list()
+
+ def _set_alternates(self, alts):
+ """Sets the alternates
+
+ :param alts:
+ is the array of string paths representing the alternates at which
+ git should look for objects, i.e. /home/user/repo/.git/objects
+
+ :raise NoSuchPathError:
+ :note:
+ The method does not check for the existence of the paths in alts
+ as the caller is responsible."""
+ alternates_path = join(self.git_dir, 'objects', 'info', 'alternates')
+ if not alts:
+ if isfile(alternates_path):
+ os.remove(alternates_path)
+ else:
+ f = open(alternates_path, 'w')
+ try:
+ f.write("\n".join(alts))
+ finally:
+ f.close()
+ # END file handling
+ # END alts handling
+
+ alternates = property(_get_alternates, _set_alternates, doc="Retrieve a list of alternates paths or set a list of paths to be used as alternates")
+
+ def is_dirty(self, index=True, working_tree=True, untracked_files=False):
+ """
+ :return:
+ ``True`` if the repository is considered dirty. By default it will react
+ like a git-status without untracked files, hence it is dirty if the
+ index or the working copy have changes."""
+ if self._bare:
+ # Bare repositories with no associated working directory are
+ # always considered to be clean.
+ return False
+
+ # start from the one which is fastest to evaluate
+ default_args = ('--abbrev=40', '--full-index', '--raw')
+ if index:
+ # diff index against HEAD
+ if isfile(self.index.path) and self.head.is_valid() and \
+ len(self.git.diff('HEAD', '--cached', *default_args)):
+ return True
+ # END index handling
+ if working_tree:
+ # diff index against working tree
+ if len(self.git.diff(*default_args)):
+ return True
+ # END working tree handling
+ if untracked_files:
+ if len(self.untracked_files):
+ return True
+ # END untracked files
+ return False
+
+ @property
+ def untracked_files(self):
+ """
+ :return:
+ list(str,...)
+
+ Files currently untracked as they have not been staged yet. Paths
+ are relative to the current working directory of the git command.
+
+ :note:
+ ignored files will not appear here, i.e. files mentioned in .gitignore"""
+ # make sure we get all files, not only untracked directories
+ proc = self.git.status(untracked_files=True, as_process=True)
+ stream = iter(proc.stdout)
+ untracked_files = list()
+ for line in stream:
+ if not line.startswith("# Untracked files:"):
+ continue
+ # skip two lines
+ stream.next()
+ stream.next()
+
+ for untracked_info in stream:
+ if not untracked_info.startswith("#\t"):
+ break
+ untracked_files.append(untracked_info.replace("#\t", "").rstrip())
+ # END for each untracked info line
+ # END for each line
+ return untracked_files
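+ # Status sketch (illustrative): index, working tree and untracked files can
+ # be queried independently.
+ #
+ # if repo.is_dirty(index=False, working_tree=True): # unstaged changes only
+ #     print "working tree has modifications"
+ # for path in repo.untracked_files:
+ #     print "untracked: %s" % path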
+
+ @property
+ def active_branch(self):
+ """The name of the currently active branch.
+
+ :return: Head to the active branch"""
+ return self.head.reference
+
+ def blame(self, rev, file):
+ """The blame information for the given file at the given revision.
+
+ :param rev: revision specifier, see git-rev-parse for viable options.
+ :return:
+ list: [git.Commit, list: [<line>]]
+ A list of tuples associating a Commit object with a list of lines that
+ changed within the given commit. The Commit objects will be given in order
+ of appearance."""
+ data = self.git.blame(rev, '--', file, p=True)
+ commits = dict()
+ blames = list()
+ info = None
+
+ for line in data.splitlines(False):
+ parts = self.re_whitespace.split(line, 1)
+ firstpart = parts[0]
+ if self.re_hexsha_only.search(firstpart):
+ # handles
+ # 634396b2f541a9f2d58b00be1a07f0c358b999b3 1 1 7 - indicates blame-data start
+ # 634396b2f541a9f2d58b00be1a07f0c358b999b3 2 2
+ digits = parts[-1].split(" ")
+ if len(digits) == 3:
+ info = {'id': firstpart}
+ blames.append([None, []])
+ # END blame data initialization
+ else:
+ m = self.re_author_committer_start.search(firstpart)
+ if m:
+ # handles:
+ # author Tom Preston-Werner
+ # author-mail <tom@mojombo.com>
+ # author-time 1192271832
+ # author-tz -0700
+ # committer Tom Preston-Werner
+ # committer-mail <tom@mojombo.com>
+ # committer-time 1192271832
+ # committer-tz -0700 - IGNORED BY US
+ role = m.group(0)
+ if firstpart.endswith('-mail'):
+ info["%s_email" % role] = parts[-1]
+ elif firstpart.endswith('-time'):
+ info["%s_date" % role] = int(parts[-1])
+ elif role == firstpart:
+ info[role] = parts[-1]
+ # END distinguish mail,time,name
+ else:
+ # handle
+ # filename lib/grit.rb
+ # summary add Blob
+ # <and rest>
+ if firstpart.startswith('filename'):
+ info['filename'] = parts[-1]
+ elif firstpart.startswith('summary'):
+ info['summary'] = parts[-1]
+ elif firstpart == '':
+ if info:
+ sha = info['id']
+ c = commits.get(sha)
+ if c is None:
+ c = Commit( self, hex_to_bin(sha),
+ author=Actor._from_string(info['author'] + ' ' + info['author_email']),
+ authored_date=info['author_date'],
+ committer=Actor._from_string(info['committer'] + ' ' + info['committer_email']),
+ committed_date=info['committer_date'],
+ message=info['summary'])
+ commits[sha] = c
+ # END if commit objects needs initial creation
+ m = self.re_tab_full_line.search(line)
+ text, = m.groups()
+ blames[-1][0] = c
+ blames[-1][1].append( text )
+ info = None
+ # END if we collected commit info
+ # END distinguish filename,summary,rest
+ # END distinguish author|committer vs filename,summary,rest
+ # END distinguish hexsha vs other information
+ return blames
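+ # Blame sketch (illustrative): commits come in order of appearance, each
+ # paired with the list of lines attributed to it.
+ #
+ # for commit, lines in repo.blame('HEAD', 'README'):
+ #     print commit.hexsha[:8], "%i lines" % len(lines)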
+
+ @classmethod
+ def init(cls, path=None, mkdir=True, **kwargs):
+ """Initialize a git repository at the given path if specified
+
+ :param path:
+ is the full path to the repo (traditionally ends with /<name>.git)
+ or None in which case the repository will be created in the current
+ working directory
+
+ :param mkdir:
+ if specified will create the repository directory if it doesn't
+ already exist. Creates the directory with a mode=0755.
+ Only effective if a path is explicitly given
+
+ :param kwargs:
+ keyword arguments serving as additional options to the git-init command
+
+ :return: ``git.Repo`` (the newly created repo)"""
+
+ if mkdir and path and not os.path.exists(path):
+ os.makedirs(path, 0755)
+
+ # git command automatically chdir into the directory
+ git = Git(path)
+ output = git.init(**kwargs)
+ return Repo(path)
+
+ @classmethod
+ def _clone(cls, git, url, path, odb_default_type, **kwargs):
+ # special handling for windows for path at which the clone should be
+ # created.
+ # tilde '~' will be expanded to the HOME no matter where the ~ occurs. Hence
+ # we at least give a proper error instead of letting git fail
+ prev_cwd = None
+ prev_path = None
+ odbt = kwargs.pop('odbt', odb_default_type)
+ if os.name == 'nt':
+ if '~' in path:
+ raise OSError("Git cannot handle the ~ character in path %r correctly" % path)
+
+ # on windows, git will think paths like c: are relative and prepend the
+ # current working dir ( before it fails ). We temporarily adjust the working
+ # dir to make this actually work
+ match = re.match("(\w:[/\\\])(.*)", path)
+ if match:
+ prev_cwd = os.getcwd()
+ prev_path = path
+ drive, rest_of_path = match.groups()
+ os.chdir(drive)
+ path = rest_of_path
+ kwargs['with_keep_cwd'] = True
+ # END cwd preparation
+ # END windows handling
+
+ try:
+ git.clone(url, path, **kwargs)
+ finally:
+ if prev_cwd is not None:
+ os.chdir(prev_cwd)
+ path = prev_path
+ # END reset previous working dir
+ # END bad windows handling
+
+ # our git command could have a different working dir than our actual
+ # environment, hence we prepend its working dir if required
+ if not os.path.isabs(path) and git.working_dir:
+ path = join(git._working_dir, path)
+
+ # adjust remotes - there may be operating systems which use backslashes,
+ # These might be given as initial paths, but when handling the config file
+ # that contains the remote from which we were cloned, git stops liking it
+ # as it will escape the backslashes. Hence we undo the escaping just to be
+ # sure
+ repo = cls(os.path.abspath(path), odbt = odbt)
+ if repo.remotes:
+ repo.remotes[0].config_writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/"))
+ # END handle remote repo
+ return repo
+
+ def clone(self, path, **kwargs):
+ """Create a clone from this repository.
+ :param path:
+ is the full path of the new repo (traditionally ends with ./<name>.git).
+
+ :param kwargs:
+ odbt = ObjectDatabase Type, allowing to determine the object database
+ implementation used by the returned Repo instance
+
+ All remaining keyword arguments are given to the git-clone command
+
+ :return: ``git.Repo`` (the newly cloned repo)"""
+ return self._clone(self.git, self.git_dir, path, type(self.odb), **kwargs)
+
+ @classmethod
+ def clone_from(cls, url, to_path, **kwargs):
+ """Create a clone from the given URL
+ :param url: valid git url, see http://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
+ :param to_path: Path to which the repository should be cloned to
+ :param kwargs: see the ``clone`` method
+ :return: Repo instance pointing to the cloned directory"""
+ return cls._clone(Git(os.getcwd()), url, to_path, GitCmdObjectDB, **kwargs)
+
+ def archive(self, ostream, treeish=None, prefix=None, **kwargs):
+ """Archive the tree at the given revision.
+ :param ostream: file compatible stream object to which the archive will be written
+ :param treeish: is the treeish name/id, defaults to active branch
+ :param prefix: is the optional prefix to prepend to each filename in the archive
+ :param kwargs:
+ Additional arguments passed to git-archive
+ NOTE: Use the 'format' argument to define the kind of format. Use
+ specialized ostreams to write any format supported by python
+
+ :raise GitCommandError: in case something went wrong
+ :return: self"""
+ if treeish is None:
+ treeish = self.active_branch
+ if prefix and 'prefix' not in kwargs:
+ kwargs['prefix'] = prefix
+ kwargs['output_stream'] = ostream
+
+ self.git.archive(treeish, **kwargs)
+ return self
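+ # Creation sketch (illustrative; paths are examples only):
+ #
+ # bare = Repo.init('/tmp/example.git', bare=True) # kwargs go to git-init
+ # clone = Repo.clone_from('/tmp/example.git', '/tmp/example')
+ # clone.archive(open('/tmp/example.tar', 'wb'), format='tar')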
+
+ rev_parse = rev_parse
+
+ def __repr__(self):
+ return '<git.Repo "%s">' % self.git_dir
diff --git a/git/repo/fun.py b/git/repo/fun.py
new file mode 100644
index 00000000..7a5984d3
--- /dev/null
+++ b/git/repo/fun.py
@@ -0,0 +1,280 @@
+"""Package with general repository related functions"""
+import os
+from gitdb.exc import BadObject
+from git.refs import SymbolicReference
+from git.objects import Object
+from gitdb.util import (
+ join,
+ isdir,
+ isfile,
+ hex_to_bin,
+ bin_to_hex
+ )
+from string import digits
+
+__all__ = ('rev_parse', 'is_git_dir', 'touch')
+
+def touch(filename):
+ fp = open(filename, "a")
+ fp.close()
+
+def is_git_dir(d):
+ """ This is taken from the git setup.c:is_git_directory
+ function."""
+ if isdir(d) and \
+ isdir(join(d, 'objects')) and \
+ isdir(join(d, 'refs')):
+ headref = join(d, 'HEAD')
+ return isfile(headref) or \
+ (os.path.islink(headref) and
+ os.readlink(headref).startswith('refs'))
+ return False
+
+
+def short_to_long(odb, hexsha):
+ """:return: long hexadecimal sha1 from the given less-than-40 byte hexsha
+ or None if no candidate could be found.
+ :param hexsha: hexsha with less than 40 byte"""
+ try:
+ return bin_to_hex(odb.partial_to_complete_sha_hex(hexsha))
+ except BadObject:
+ return None
+ # END exception handling
+
+
+def name_to_object(repo, name, return_ref=False):
+ """
+ :return: object specified by the given name; hexshas (short and long)
+ as well as references are supported
+ :param return_ref: if name specifies a reference, we will return the reference
+ instead of the object. Otherwise BadObject will be raised
+ """
+ hexsha = None
+
+ # is it a hexsha? Try the most common lengths, which are 7 to 40
+ if repo.re_hexsha_shortened.match(name):
+ if len(name) != 40:
+ # find long sha for short sha
+ hexsha = short_to_long(repo.odb, name)
+ else:
+ hexsha = name
+ # END handle short shas
+ else:
+ for base in ('%s', 'refs/%s', 'refs/tags/%s', 'refs/heads/%s', 'refs/remotes/%s', 'refs/remotes/%s/HEAD'):
+ try:
+ hexsha = SymbolicReference.dereference_recursive(repo, base % name)
+ if return_ref:
+ return SymbolicReference(repo, base % name)
+ #END handle symbolic ref
+ break
+ except ValueError:
+ pass
+ # END for each base
+ # END handle hexsha
+
+ # didn't find any ref, this is an error
+ if return_ref:
+ raise BadObject("Couldn't find reference named %r" % name)
+ #END handle return ref
+
+ # tried everything? fail
+ if hexsha is None:
+ raise BadObject(name)
+ # END assert hexsha was found
+
+ return Object.new_from_sha(repo, hex_to_bin(hexsha))
+
+def deref_tag(tag):
+ """Recursively dereerence a tag and return the resulting object"""
+ while True:
+ try:
+ tag = tag.object
+ except AttributeError:
+ break
+ # END dereference tag
+ return tag
+
+def to_commit(obj):
+ """Convert the given object to a commit if possible and return it"""
+ if obj.type == 'tag':
+ obj = deref_tag(obj)
+
+ if obj.type != "commit":
+ raise ValueError("Cannot convert object %r to type commit" % obj)
+ # END verify type
+ return obj
+
+def rev_parse(repo, rev):
+ """
+ :return: Object at the given revision, either Commit, Tag, Tree or Blob
+ :param rev: git-rev-parse compatible revision specification, please see
+ http://www.kernel.org/pub/software/scm/git/docs/git-rev-parse.html
+ for details
+ :note: Currently there is no access to the rev-log, rev-specs may only contain
+ topological tokens such as ~ and ^.
+ :raise BadObject: if the given revision could not be found
+ :raise ValueError: If rev couldn't be parsed
+ :raise IndexError: If invalid reflog index is specified"""
+
+ # colon search mode?
+ if rev.startswith(':/'):
+ # colon search mode
+ raise NotImplementedError("commit by message search ( regex )")
+ # END handle search
+
+ obj = None
+ ref = None
+ output_type = "commit"
+ start = 0
+ parsed_to = 0
+ lr = len(rev)
+ while start < lr:
+ if rev[start] not in "^~:@":
+ start += 1
+ continue
+ # END handle start
+
+ token = rev[start]
+
+ if obj is None:
+ # token is a rev name
+ if start == 0:
+ ref = repo.head.ref
+ else:
+ if token == '@':
+ ref = name_to_object(repo, rev[:start], return_ref=True)
+ else:
+ obj = name_to_object(repo, rev[:start])
+ #END handle token
+ #END handle refname
+
+ if ref is not None:
+ obj = ref.commit
+ #END handle ref
+ # END initialize obj on first token
+
+
+ start += 1
+
+ # try to parse {type}
+ if start < lr and rev[start] == '{':
+ end = rev.find('}', start)
+ if end == -1:
+ raise ValueError("Missing closing brace to define type in %s" % rev)
+ output_type = rev[start+1:end] # exclude brace
+
+ # handle type
+ if output_type == 'commit':
+ pass # default
+ elif output_type == 'tree':
+ try:
+ obj = to_commit(obj).tree
+ except (AttributeError, ValueError):
+ pass # error raised later
+ # END exception handling
+ elif output_type in ('', 'blob'):
+ if obj.type == 'tag':
+ obj = deref_tag(obj)
+ else:
+ # cannot do anything for non-tags
+ pass
+ # END handle tag
+ elif token == '@':
+ # try single int
+ assert ref is not None, "Require a Reference to access the reflog"
+ revlog_index = None
+ try:
+ # transform reversed index into the format of our revlog
+ revlog_index = -(int(output_type)+1)
+ except ValueError:
+ # TODO: Try to parse the other date options, using parse_date
+ # maybe
+ raise NotImplementedError("Support for additional @{...} modes not implemented")
+ #END handle revlog index
+
+ try:
+ entry = ref.log_entry(revlog_index)
+ except IndexError:
+ raise IndexError("Invalid revlog index: %i" % revlog_index)
+ #END handle index out of bound
+
+ obj = Object.new_from_sha(repo, hex_to_bin(entry.newhexsha))
+
+ # make it pass the following checks
+ output_type = None
+ else:
+ raise ValueError("Invalid output type: %s ( in %s )" % (output_type, rev))
+ # END handle output type
+
+ # empty output types don't require any specific type, it's just about dereferencing tags
+ if output_type and obj.type != output_type:
+ raise ValueError("Could not accommodate requested object type %r, got %s" % (output_type, obj.type))
+ # END verify output type
+
+ start = end+1 # skip brace
+ parsed_to = start
+ continue
+ # END parse type
+
+ # try to parse a number
+ num = 0
+ if token != ":":
+ found_digit = False
+ while start < lr:
+ if rev[start] in digits:
+ num = num * 10 + int(rev[start])
+ start += 1
+ found_digit = True
+ else:
+ break
+ # END handle number
+ # END number parse loop
+
+ # no explicit number given, 1 is the default
+ # It could be 0 though
+ if not found_digit:
+ num = 1
+ # END set default num
+ # END number parsing only if non-blob mode
+
+
+ parsed_to = start
+ # handle hierarchy walk
+ try:
+ if token == "~":
+ obj = to_commit(obj)
+ for item in xrange(num):
+ obj = obj.parents[0]
+ # END for each history item to walk
+ elif token == "^":
+ obj = to_commit(obj)
+ # must be n'th parent
+ if num:
+ obj = obj.parents[num-1]
+ elif token == ":":
+ if obj.type != "tree":
+ obj = obj.tree
+ # END get tree type
+ obj = obj[rev[start:]]
+ parsed_to = lr
+ else:
+ raise ValueError("Invalid token: %r" % token)
+ # END end handle tag
+ except (IndexError, AttributeError):
+ raise BadObject("Invalid Revision in %s" % rev)
+ # END exception handling
+ # END parse loop
+
+ # still no obj? It's probably a simple name
+ if obj is None:
+ obj = name_to_object(repo, rev)
+ parsed_to = lr
+ # END handle simple name
+
+ if obj is None:
+ raise ValueError("Revision specifier could not be parsed: %s" % rev)
+
+ if parsed_to != lr:
+ raise ValueError("Didn't consume complete rev spec %s, consumed part: %s" % (rev, rev[:parsed_to]))
+
+ return obj
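A short sketch of the rev specs this parser accepts; Repo binds the
function as a method (repository path and revisions are hypothetical):

    from git import Repo

    repo = Repo("/path/to/repo")

    repo.rev_parse("HEAD~2")        # two first-parent steps back ('~')
    repo.rev_parse("master^2")      # second parent of a merge commit ('^')
    repo.rev_parse("v0.1^{tree}")   # dereference to a tree ('{type}')
    repo.rev_parse("HEAD@{0}")      # most recent reflog entry ('@{n}')
    repo.rev_parse("HEAD:AUTHORS")  # blob at a path inside HEAD's tree (':')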
diff --git a/git/setup.py b/git/setup.py
new file mode 100755
index 00000000..54d78c50
--- /dev/null
+++ b/git/setup.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+try:
+ from setuptools import setup, find_packages
+except ImportError:
+ from ez_setup import use_setuptools
+ use_setuptools()
+ from setuptools import setup, find_packages
+
+from distutils.command.build_py import build_py as _build_py
+from setuptools.command.sdist import sdist as _sdist
+import os
+import sys # used by the warning in _stamp_version below
+from os import path
+
+v = open(path.join(path.dirname(__file__), 'VERSION'))
+VERSION = v.readline().strip()
+v.close()
+
+
+class build_py(_build_py):
+ def run(self):
+ init = path.join(self.build_lib, 'git', '__init__.py')
+ if path.exists(init):
+ os.unlink(init)
+ _build_py.run(self)
+ _stamp_version(init)
+ self.byte_compile([init])
+
+
+class sdist(_sdist):
+ def make_release_tree(self, base_dir, files):
+ _sdist.make_release_tree(self, base_dir, files)
+ orig = '__init__.py'
+ assert path.exists(orig)
+ dest = path.join(base_dir, orig)
+ if hasattr(os, 'link') and path.exists(dest):
+ os.unlink(dest)
+ self.copy_file(orig, dest)
+ _stamp_version(dest)
+
+
+def _stamp_version(filename):
+ found, out = False, []
+ f = open(filename, 'r')
+ for line in f:
+ if '__version__ =' in line:
+ line = line.replace("'git'", "'%s'" % VERSION)
+ found = True
+ out.append(line)
+ f.close()
+
+ if found:
+ f = open(filename, 'w')
+ f.writelines(out)
+ f.close()
+ else:
+ print >> sys.stderr, "WARNING: Couldn't find version line in file %s" % filename
+
+
+setup(name = "GitPython",
+ cmdclass={'build_py': build_py, 'sdist': sdist},
+ version = VERSION,
+ description = "Python Git Library",
+ author = "Sebastian Thiel, Michael Trier",
+ author_email = "byronimo@gmail.com, mtrier@gmail.com",
+ url = "http://gitorious.org/projects/git-python/",
+ packages = ['git.'+p for p in find_packages('.')],
+ py_modules = ['git.'+f[:-3] for f in os.listdir('.') if f.endswith('.py')],
+ package_data = {'git.test' : ['fixtures/*']},
+ package_dir = {'git':''},
+ license = "BSD License",
+ requires=('gitdb (>=0.5.1)',),
+ install_requires='gitdb >= 0.5.1',
+ zip_safe=False,
+ long_description = """\
+GitPython is a python library used to interact with Git repositories""",
+ classifiers = [
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2.5",
+ "Programming Language :: Python :: 2.6",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ ]
+ )
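To illustrate the version stamping above: the packaged __init__.py carries
a placeholder of the form __version__ = 'git' (implied by the replace call
in _stamp_version), which build_py and sdist rewrite from the VERSION file.
The substitution in isolation, with '0.3.0' as an example version:

    line = "__version__ = 'git'\n"
    if '__version__ =' in line:
        line = line.replace("'git'", "'%s'" % '0.3.0')
    # line now reads: __version__ = '0.3.0'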
diff --git a/git/test/__init__.py b/git/test/__init__.py
new file mode 100644
index 00000000..757cbad1
--- /dev/null
+++ b/git/test/__init__.py
@@ -0,0 +1,5 @@
+# __init__.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
diff --git a/git/test/fixtures/blame b/git/test/fixtures/blame
new file mode 100644
index 00000000..10c141dd
--- /dev/null
+++ b/git/test/fixtures/blame
@@ -0,0 +1,131 @@
+634396b2f541a9f2d58b00be1a07f0c358b999b3 1 1 7
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1191997100
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1191997100
+committer-tz -0700
+filename lib/grit.rb
+summary initial grit setup
+boundary
+ $:.unshift File.dirname(__FILE__) # For use/testing when no gem is installed
+634396b2f541a9f2d58b00be1a07f0c358b999b3 2 2
+
+634396b2f541a9f2d58b00be1a07f0c358b999b3 3 3
+ # core
+634396b2f541a9f2d58b00be1a07f0c358b999b3 4 4
+
+634396b2f541a9f2d58b00be1a07f0c358b999b3 5 5
+ # stdlib
+634396b2f541a9f2d58b00be1a07f0c358b999b3 6 6
+
+634396b2f541a9f2d58b00be1a07f0c358b999b3 7 7
+ # internal requires
+3b1930208a82457747d76729ae088e90edca4673 8 8 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192267241
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192267241
+committer-tz -0700
+filename lib/grit.rb
+summary big refactor to do lazy loading
+ require 'grit/lazy'
+4c8124ffcf4039d292442eeccabdeca5af5c5017 8 9 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1191999972
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1191999972
+committer-tz -0700
+filename lib/grit.rb
+summary implement Grit#heads
+ require 'grit/errors'
+d01a4cfad6ea50285c4710243e3cbe019d381eba 9 10 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192032303
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192032303
+committer-tz -0700
+filename lib/grit.rb
+summary convert to Grit module, refactor to be more OO
+ require 'grit/git'
+4c8124ffcf4039d292442eeccabdeca5af5c5017 9 11 1
+ require 'grit/head'
+a47fd41f3aa4610ea527dcc1669dfdb9c15c5425 10 12 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192002639
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192002639
+committer-tz -0700
+filename lib/grit.rb
+summary add more comments throughout
+ require 'grit/commit'
+b17b974691f0a26f26908495d24d9c4c718920f8 13 13 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192271832
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192271832
+committer-tz -0700
+filename lib/grit.rb
+summary started implementing Tree
+ require 'grit/tree'
+74fd66519e983a0f29e16a342a6059dbffe36020 14 14 1
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192317005
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192317005
+committer-tz -0700
+filename lib/grit.rb
+summary add Blob
+ require 'grit/blob'
+d01a4cfad6ea50285c4710243e3cbe019d381eba 12 15 1
+ require 'grit/repo'
+634396b2f541a9f2d58b00be1a07f0c358b999b3 9 16 1
+
+d01a4cfad6ea50285c4710243e3cbe019d381eba 14 17 1
+ module Grit
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 18 18 6
+author Tom Preston-Werner
+author-mail <tom@mojombo.com>
+author-time 1192860483
+author-tz -0700
+committer Tom Preston-Werner
+committer-mail <tom@mojombo.com>
+committer-time 1192860483
+committer-tz -0700
+filename lib/grit.rb
+summary implement Repo.init_bare
+ class << self
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 19 19
+ attr_accessor :debug
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 20 20
+ end
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 21 21
+
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 22 22
+ self.debug = false
+b6e1b765e0c15586a2c5b9832854f95defd71e1f 23 23
+
+634396b2f541a9f2d58b00be1a07f0c358b999b3 11 24 2
+ VERSION = '1.0.0'
+634396b2f541a9f2d58b00be1a07f0c358b999b3 12 25
+ end \ No newline at end of file
diff --git a/git/test/fixtures/cat_file_blob b/git/test/fixtures/cat_file_blob
new file mode 100644
index 00000000..70c379b6
--- /dev/null
+++ b/git/test/fixtures/cat_file_blob
@@ -0,0 +1 @@
+Hello world \ No newline at end of file
diff --git a/git/test/fixtures/cat_file_blob_nl b/git/test/fixtures/cat_file_blob_nl
new file mode 100644
index 00000000..802992c4
--- /dev/null
+++ b/git/test/fixtures/cat_file_blob_nl
@@ -0,0 +1 @@
+Hello world
diff --git a/git/test/fixtures/cat_file_blob_size b/git/test/fixtures/cat_file_blob_size
new file mode 100644
index 00000000..b4de3947
--- /dev/null
+++ b/git/test/fixtures/cat_file_blob_size
@@ -0,0 +1 @@
+11
diff --git a/git/test/fixtures/diff_2 b/git/test/fixtures/diff_2
new file mode 100644
index 00000000..218b6bae
--- /dev/null
+++ b/git/test/fixtures/diff_2
@@ -0,0 +1,54 @@
+diff --git a/lib/grit/commit.rb b/lib/grit/commit.rb
+index a093bb1db8e884cccf396b297259181d1caebed4..80fd3d527f269ecbd570b65b8e21fd85baedb6e9 100644
+--- a/lib/grit/com mit.rb
++++ b/lib/grit/com mit.rb
+@@ -156,12 +156,8 @@ module Grit
+
+ def diffs
+ if parents.empty?
+- diff = @repo.git.show({:full_index => true, :pretty => 'raw'}, @id)
+- if diff =~ /diff --git a/
+- diff = diff.sub(/.+?(diff --git a)/m, '\1')
+- else
+- diff = ''
+- end
++ diff = @repo.git.show({:full_index => true, :pretty => 'raw'}, @id)
++ diff = diff.sub(/.+?(diff --git a)/m, '\1')
+ Diff.list_from_string(@repo, diff)
+ else
+ self.class.diff(@repo, parents.first.id, @id)
+diff --git a/test/fixtures/show_empty_commit b/test/fixtures/show_empty_commit
+deleted file mode 100644
+index ea25e32a409fdf74c1b9268820108d1c16dcc553..0000000000000000000000000000000000000000
+--- a/test/fixtures/show_empty_commit
++++ /dev/null
+@@ -1,6 +0,0 @@
+-commit 1e3824339762bd48316fe87bfafc853732d43264
+-tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904
+-author Tom Preston-Werner <tom@mojombo.com> 1157392833 +0000
+-committer Tom Preston-Werner <tom@mojombo.com> 1157392833 +0000
+-
+- initial directory structure
+diff --git a/test/test_commit.rb b/test/test_commit.rb
+index fdeb9000089b052f0b31a845e0173e9b089e06a0..bdbc450e08084d7d611e985cfa12fb424cab29b2 100644
+--- a/test/test_commit.rb
++++ b/test/test_commit.rb
+@@ -98,18 +98,6 @@ class TestCommit < Test::Unit::TestCase
+ assert_equal true, diffs[5].new_file
+ end
+
+- def test_diffs_on_initial_import_with_empty_commit
+- Git.any_instance.expects(:show).with(
+- {:full_index => true, :pretty => 'raw'},
+- '634396b2f541a9f2d58b00be1a07f0c358b999b3'
+- ).returns(fixture('show_empty_commit'))
+-
+- @c = Commit.create(@r, :id => '634396b2f541a9f2d58b00be1a07f0c358b999b3')
+- diffs = @c.diffs
+-
+- assert_equal [], diffs
+- end
+-
+ # to_s
+
+ def test_to_s
diff --git a/git/test/fixtures/diff_2f b/git/test/fixtures/diff_2f
new file mode 100644
index 00000000..5246cd6b
--- /dev/null
+++ b/git/test/fixtures/diff_2f
@@ -0,0 +1,19 @@
+diff --git a/lib/grit/commit.rb b/lib/grit/commit.rb
+index a093bb1db8e884cccf396b297259181d1caebed4..80fd3d527f269ecbd570b65b8e21fd85baedb6e9 100644
+--- a/lib/grit/commit.rb
++++ b/lib/grit/commit.rb
+@@ -156,12 +156,8 @@ module Grit
+
+ def diffs
+ if parents.empty?
+- diff = @repo.git.show({:full_index => true, :pretty => 'raw'}, @id)
+- if diff =~ /diff --git a/
+- diff = diff.sub(/.+?(diff --git a)/m, '\1')
+- else
+- diff = ''
+- end
++ diff = @repo.git.show({:full_index => true, :pretty => 'raw'}, @id)
++ diff = diff.sub(/.+?(diff --git a)/m, '\1')
+ Diff.list_from_string(@repo, diff)
+ else
+ self.class.diff(@repo, parents.first.id, @id)
diff --git a/git/test/fixtures/diff_f b/git/test/fixtures/diff_f
new file mode 100644
index 00000000..48a49256
--- /dev/null
+++ b/git/test/fixtures/diff_f
@@ -0,0 +1,15 @@
+diff --git a/lib/grit/diff.rb b/lib/grit/diff.rb
+index 537955bb86a8ceaa19aea89e75ccbea5ce6f2698..00b0b4a67eca9242db5f8991e99625acd55f040c 100644
+--- a/lib/grit/diff.rb
++++ b/lib/grit/diff.rb
+@@ -27,6 +27,10 @@ module Grit
+ while !lines.empty?
+ m, a_path, b_path = *lines.shift.match(%r{^diff --git a/(\S+) b/(\S+)$})
+
++ if lines.first =~ /^old mode/
++ 2.times { lines.shift }
++ end
++
+ new_file = false
+ deleted_file = false
+
diff --git a/git/test/fixtures/diff_i b/git/test/fixtures/diff_i
new file mode 100644
index 00000000..cec64e1d
--- /dev/null
+++ b/git/test/fixtures/diff_i
@@ -0,0 +1,201 @@
+commit 634396b2f541a9f2d58b00be1a07f0c358b999b3
+Author: Tom Preston-Werner <tom@mojombo.com>
+Date: Tue Oct 9 23:18:20 2007 -0700
+
+ initial grit setup
+
+diff --git a/History.txt b/History.txt
+new file mode 100644
+index 0000000000000000000000000000000000000000..81d2c27608b352814cbe979a6acd678d30219678
+--- /dev/null
++++ b/History.txt
+@@ -0,0 +1,5 @@
++== 1.0.0 / 2007-10-09
++
++* 1 major enhancement
++ * Birthday!
++
+diff --git a/Manifest.txt b/Manifest.txt
+new file mode 100644
+index 0000000000000000000000000000000000000000..641972d82c6d1b51122274ae8f6a0ecdfb56ee22
+--- /dev/null
++++ b/Manifest.txt
+@@ -0,0 +1,7 @@
++History.txt
++Manifest.txt
++README.txt
++Rakefile
++bin/grit
++lib/grit.rb
++test/test_grit.rb
+\ No newline at end of file
+diff --git a/README.txt b/README.txt
+new file mode 100644
+index 0000000000000000000000000000000000000000..8b1e02c0fb554eed2ce2ef737a68bb369d7527df
+--- /dev/null
++++ b/README.txt
+@@ -0,0 +1,48 @@
++grit
++ by FIX (your name)
++ FIX (url)
++
++== DESCRIPTION:
++
++FIX (describe your package)
++
++== FEATURES/PROBLEMS:
++
++* FIX (list of features or problems)
++
++== SYNOPSIS:
++
++ FIX (code sample of usage)
++
++== REQUIREMENTS:
++
++* FIX (list of requirements)
++
++== INSTALL:
++
++* FIX (sudo gem install, anything else)
++
++== LICENSE:
++
++(The MIT License)
++
++Copyright (c) 2007 FIX
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++'Software'), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
++IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
++TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
++SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+diff --git a/Rakefile b/Rakefile
+new file mode 100644
+index 0000000000000000000000000000000000000000..ff69c3684a18592c741332b290492aa39d980e02
+--- /dev/null
++++ b/Rakefile
+@@ -0,0 +1,17 @@
++# -*- ruby -*-
++
++require 'rubygems'
++require 'hoe'
++require './lib/grit.rb'
++
++Hoe.new('grit', GitPython.VERSION) do |p|
++ p.rubyforge_name = 'grit'
++ # p.author = 'FIX'
++ # p.email = 'FIX'
++ # p.summary = 'FIX'
++ # p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
++ # p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[1..-1]
++ p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
++end
++
++# vim: syntax=Ruby
+diff --git a/bin/grit b/bin/grit
+new file mode 100644
+index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
+diff --git a/lib/grit.rb b/lib/grit.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..32cec87d1e78946a827ddf6a8776be4d81dcf1d1
+--- /dev/null
++++ b/lib/grit.rb
+@@ -0,0 +1,12 @@
++$:.unshift File.dirname(__FILE__) # For use/testing when no gem is installed
++
++# core
++
++# stdlib
++
++# internal requires
++require 'grit/grit'
++
++class Grit
++ VERSION = '1.0.0'
++end
+\ No newline at end of file
+diff --git a/lib/grit/errors.rb b/lib/grit/errors.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..b3be31553741937607a89be8b6a2ab1df208852e
+--- /dev/null
++++ b/lib/grit/errors.rb
+@@ -0,0 +1,4 @@
++class Grit
++ class InvalidGitRepositoryError < StandardError
++ end
++end
+\ No newline at end of file
+diff --git a/lib/grit/grit.rb b/lib/grit/grit.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..48fd36e16081ec09903f7a0e2253b3d16f9efb01
+--- /dev/null
++++ b/lib/grit/grit.rb
+@@ -0,0 +1,24 @@
++class Grit
++ attr_accessor :path
++
++ # Create a new Grit instance
++ # +path+ is the path to either the root git directory or the bare git repo
++ #
++ # Examples
++ # g = Grit.new("/Users/tom/dev/grit")
++ # g = Grit.new("/Users/tom/public/grit.git")
++ def initialize(path)
++ if File.exist?(File.join(path, '.git'))
++ self.path = File.join(path, '.git')
++ elsif File.exist?(path) && path =~ /\.git$/
++ self.path = path
++ else
++ raise InvalidGitRepositoryError.new(path) unless File.exist?(path)
++ end
++ end
++
++ # Return the project's description. Taken verbatim from REPO/description
++ def description
++ File.open(File.join(self.path, 'description')).read.chomp
++ end
++end
+\ No newline at end of file
+diff --git a/test/helper.rb b/test/helper.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..56e21da6b4ce3021d2754775dfa589947a4e37e5
+--- /dev/null
++++ b/test/helper.rb
+@@ -0,0 +1,5 @@
++require File.join(File.dirname(__FILE__), *%w[.. lib grit])
++
++require 'test/unit'
++
++GRIT_REPO = File.join(File.dirname(__FILE__), *%w[..])
+diff --git a/test/test_grit.rb b/test/test_grit.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..93aa481b37629797df739380306ae689e13f2855
+--- /dev/null
++++ b/test/test_grit.rb
+@@ -0,0 +1,11 @@
++require File.dirname(__FILE__) + '/helper'
++
++class TestGrit < Test::Unit::TestCase
++ def setup
++ @g = Grit.new(GRIT_REPO)
++ end
++
++ def test_description
++ assert_equal "Grit is a ruby library for interfacing with git repositories.", @g.description
++ end
++end
+\ No newline at end of file
diff --git a/git/test/fixtures/diff_mode_only b/git/test/fixtures/diff_mode_only
new file mode 100755
index 00000000..6fc18f69
--- /dev/null
+++ b/git/test/fixtures/diff_mode_only
@@ -0,0 +1,1152 @@
+diff --git a/bin/merb b/bin/merb
+old mode 100644
+new mode 100755
+diff --git a/lib/merb.rb b/lib/merb.rb
+index 76cb3e269e46fdf9b63cda7cb563c6cf40fdcb15..a2ab4ed47f9cb2ab942da5c46a2b561758a0d704 100644
+--- a/lib/merb.rb
++++ b/lib/merb.rb
+@@ -15,7 +15,7 @@ require 'merb_core/core_ext'
+ require 'merb_core/gem_ext/erubis'
+ require 'merb_core/logger'
+ require 'merb_core/version'
+-
++require 'merb_core/controller/mime'
+
+ module Merb
+ class << self
+@@ -23,6 +23,7 @@ module Merb
+ def start(argv=ARGV)
+ Merb::Config.parse_args(argv)
+ BootLoader.run
++
+ case Merb::Config[:adapter]
+ when "mongrel"
+ adapter = Merb::Rack::Mongrel
+diff --git a/lib/merb_core/boot/bootloader.rb b/lib/merb_core/boot/bootloader.rb
+index d873924860bf4da06ac93db5c6a188f63dd1c3cc..57da75f05e28e8a256922bf345ccd3902e0a0b02 100644
+--- a/lib/merb_core/boot/bootloader.rb
++++ b/lib/merb_core/boot/bootloader.rb
+@@ -20,7 +20,7 @@ module Merb
+ end
+
+ def run
+- subclasses.each {|klass| Object.full_const_get(klass).new.run }
++ subclasses.each {|klass| Object.full_const_get(klass).run }
+ end
+
+ def after(klass)
+@@ -37,95 +37,128 @@ module Merb
+
+ end
+
+-class Merb::BootLoader::BuildFramework < Merb::BootLoader
+- def run
+- build_framework
++class Merb::BootLoader::LoadInit < Merb::BootLoader
++ def self.run
++ if Merb::Config[:init_file]
++ require Merb.root / Merb::Config[:init_file]
++ elsif File.exists?(Merb.root / "config" / "merb_init.rb")
++ require Merb.root / "config" / "merb_init"
++ elsif File.exists?(Merb.root / "merb_init.rb")
++ require Merb.root / "merb_init"
++ elsif File.exists?(Merb.root / "application.rb")
++ require Merb.root / "application"
++ end
++ end
++end
++
++class Merb::BootLoader::Environment < Merb::BootLoader
++ def self.run
++ Merb.environment = Merb::Config[:environment]
++ end
++end
++
++class Merb::BootLoader::Logger < Merb::BootLoader
++ def self.run
++ Merb.logger = Merb::Logger.new(Merb.dir_for(:log) / "test_log")
++ Merb.logger.level = Merb::Logger.const_get(Merb::Config[:log_level].upcase) rescue Merb::Logger::INFO
+ end
++end
++
++class Merb::BootLoader::BuildFramework < Merb::BootLoader
++ class << self
++ def run
++ build_framework
++ end
+
+- # This method should be overridden in merb_init.rb before Merb.start to set up a different
+- # framework structure
+- def build_framework
+- %[view model controller helper mailer part].each do |component|
+- Merb.push_path(component.to_sym, Merb.root_path("app/#{component}s"))
++ # This method should be overridden in merb_init.rb before Merb.start to set up a different
++ # framework structure
++ def build_framework
++ %w[view model controller helper mailer part].each do |component|
++ Merb.push_path(component.to_sym, Merb.root_path("app/#{component}s"))
++ end
++ Merb.push_path(:application, Merb.root_path("app/controllers/application.rb"))
++ Merb.push_path(:config, Merb.root_path("config/router.rb"))
++ Merb.push_path(:lib, Merb.root_path("lib"))
+ end
+- Merb.push_path(:application, Merb.root_path("app/controllers/application.rb"))
+- Merb.push_path(:config, Merb.root_path("config/router.rb"))
+- Merb.push_path(:lib, Merb.root_path("lib"))
+ end
+ end
+
+ class Merb::BootLoader::LoadPaths < Merb::BootLoader
+ LOADED_CLASSES = {}
+
+- def run
+- # Add models, controllers, and lib to the load path
+- $LOAD_PATH.unshift Merb.load_paths[:model].first if Merb.load_paths[:model]
+- $LOAD_PATH.unshift Merb.load_paths[:controller].first if Merb.load_paths[:controller]
+- $LOAD_PATH.unshift Merb.load_paths[:lib].first if Merb.load_paths[:lib]
++ class << self
++ def run
++ # Add models, controllers, and lib to the load path
++ $LOAD_PATH.unshift Merb.load_paths[:model].first if Merb.load_paths[:model]
++ $LOAD_PATH.unshift Merb.load_paths[:controller].first if Merb.load_paths[:controller]
++ $LOAD_PATH.unshift Merb.load_paths[:lib].first if Merb.load_paths[:lib]
+
+- # Require all the files in the registered load paths
+- puts Merb.load_paths.inspect
+- Merb.load_paths.each do |name, path|
+- Dir[path.first / path.last].each do |file|
+- klasses = ObjectSpace.classes.dup
+- require f
+- LOADED_CLASSES[file] = ObjectSpace.classes - klasses
++ # Require all the files in the registered load paths
++ puts Merb.load_paths.inspect
++ Merb.load_paths.each do |name, path|
++ Dir[path.first / path.last].each do |file|
++ klasses = ObjectSpace.classes.dup
++ require file
++ LOADED_CLASSES[file] = ObjectSpace.classes - klasses
++ end
+ end
+ end
+- end
+
+- def reload(file)
+- if klasses = LOADED_CLASSES[file]
+- klasses.each do |klass|
+- remove_constant(klass)
++ def reload(file)
++ if klasses = LOADED_CLASSES[file]
++ klasses.each do |klass|
++ remove_constant(klass)
++ end
+ end
++ load file
+ end
+- load file
+- end
+
+- def remove_constant(const)
+- # This is to support superclasses (like AbstractController) that track
+- # their subclasses in a class variable. Classes that wish to use this
+- # functionality are required to alias it to _subclasses_list. Plugins
+- # for ORMs and other libraries should keep this in mind.
+- if klass.superclass.respond_to?(:_subclasses_list)
+- klass.superclass.send(:_subclasses_list).delete(klass)
+- klass.superclass.send(:_subclasses_list).delete(klass.to_s)
+- end
++ def remove_constant(const)
++ # This is to support superclasses (like AbstractController) that track
++ # their subclasses in a class variable. Classes that wish to use this
++ # functionality are required to alias it to _subclasses_list. Plugins
++ # for ORMs and other libraries should keep this in mind.
++ if klass.superclass.respond_to?(:_subclasses_list)
++ klass.superclass.send(:_subclasses_list).delete(klass)
++ klass.superclass.send(:_subclasses_list).delete(klass.to_s)
++ end
+
+- parts = const.to_s.split("::")
+- base = parts.size == 1 ? Object : Object.full_const_get(parts[0..-2].join("::"))
+- object = parts[-1].intern
+- Merb.logger.debugger("Removing constant #{object} from #{base}")
+- base.send(:remove_const, object) if object
++ parts = const.to_s.split("::")
++ base = parts.size == 1 ? Object : Object.full_const_get(parts[0..-2].join("::"))
++ object = parts[-1].intern
++ Merb.logger.debugger("Removing constant #{object} from #{base}")
++ base.send(:remove_const, object) if object
++ end
+ end
+
+ end
+
+ class Merb::BootLoader::Templates < Merb::BootLoader
+- def run
+- template_paths.each do |path|
+- Merb::Template.inline_template(path)
++ class << self
++ def run
++ template_paths.each do |path|
++ Merb::Template.inline_template(path)
++ end
+ end
+- end
+
+- def template_paths
+- extension_glob = "{#{Merb::Template::EXTENSIONS.keys.join(',')}}"
++ def template_paths
++ extension_glob = "{#{Merb::Template::EXTENSIONS.keys.join(',')}}"
+
+- # This gets all templates set in the controllers template roots
+- # We separate the two maps because most of controllers will have
+- # the same _template_root, so it's silly to be globbing the same
+- # path over and over.
+- template_paths = Merb::AbstractController._abstract_subclasses.map do |klass|
+- Object.full_const_get(klass)._template_root
+- end.uniq.map {|path| Dir["#{path}/**/*.#{extension_glob}"] }
++ # This gets all templates set in the controllers template roots
++ # We separate the two maps because most of controllers will have
++ # the same _template_root, so it's silly to be globbing the same
++ # path over and over.
++ template_paths = Merb::AbstractController._abstract_subclasses.map do |klass|
++ Object.full_const_get(klass)._template_root
++ end.uniq.compact.map {|path| Dir["#{path}/**/*.#{extension_glob}"] }
+
+- # This gets the templates that might be created outside controllers
+- # template roots. eg app/views/shared/*
+- template_paths << Dir["#{Merb.dir_for(:view)}/**/*.#{extension_glob}"] if Merb.dir_for(:view)
++ # This gets the templates that might be created outside controllers
++ # template roots. eg app/views/shared/*
++ template_paths << Dir["#{Merb.dir_for(:view)}/**/*.#{extension_glob}"] if Merb.dir_for(:view)
+
+- template_paths.flatten.compact.uniq
+- end
++ template_paths.flatten.compact.uniq
++ end
++ end
+ end
+
+ class Merb::BootLoader::Libraries < Merb::BootLoader
+@@ -145,18 +178,41 @@ class Merb::BootLoader::Libraries < Merb::BootLoader
+ def self.add_libraries(hsh)
+ @@libraries.merge!(hsh)
+ end
+-
+- def run
++
++ def self.run
+ @@libraries.each do |exclude, choices|
+ require_first_working(*choices) unless Merb::Config[exclude]
+ end
+ end
+-
+- def require_first_working(first, *rest)
++
++ def self.require_first_working(first, *rest)
+ p first, rest
+ require first
+ rescue LoadError
+ raise LoadError if rest.empty?
+ require_first_working rest.unshift, *rest
+ end
++end
++
++class Merb::BootLoader::MimeTypes < Merb::BootLoader
++ def self.run
++ # Sets the default mime-types
++ #
++ # By default, the mime-types include:
++ # :all:: no transform, */*
++ # :yaml:: to_yaml, application/x-yaml or text/yaml
++ # :text:: to_text, text/plain
++ # :html:: to_html, text/html or application/xhtml+xml or application/html
++ # :xml:: to_xml, application/xml or text/xml or application/x-xml, adds "Encoding: UTF-8" response header
++ # :js:: to_json, text/javascript ot application/javascript or application/x-javascript
++ # :json:: to_json, application/json or text/x-json
++ Merb.available_mime_types.clear
++ Merb.add_mime_type(:all, nil, %w[*/*])
++ Merb.add_mime_type(:yaml, :to_yaml, %w[application/x-yaml text/yaml])
++ Merb.add_mime_type(:text, :to_text, %w[text/plain])
++ Merb.add_mime_type(:html, :to_html, %w[text/html application/xhtml+xml application/html])
++ Merb.add_mime_type(:xml, :to_xml, %w[application/xml text/xml application/x-xml], :Encoding => "UTF-8")
++ Merb.add_mime_type(:js, :to_json, %w[text/javascript application/javascript application/x-javascript])
++ Merb.add_mime_type(:json, :to_json, %w[application/json text/x-json])
++ end
+ end
+\ No newline at end of file
+diff --git a/lib/merb_core/config.rb b/lib/merb_core/config.rb
+index c92f2e6f071c234551ecb16a4716d47fa92f6c7b..ab0864e0174b54833c758f9f22a840d3b53c7653 100644
+--- a/lib/merb_core/config.rb
++++ b/lib/merb_core/config.rb
+@@ -92,6 +92,10 @@ module Merb
+ options[:cluster] = nodes
+ end
+
++ opts.on("-I", "--init-file FILE", "Name of the file to load first") do |init_file|
++ options[:init_file] = init_file
++ end
++
+ opts.on("-p", "--port PORTNUM", "Port to run merb on, defaults to 4000.") do |port|
+ options[:port] = port
+ end
+@@ -261,29 +265,29 @@ module Merb
+
+ @configuration = Merb::Config.apply_configuration_from_file options, environment_merb_yml
+
+- case Merb::Config[:environment].to_s
+- when 'production'
+- Merb::Config[:reloader] = Merb::Config.fetch(:reloader, false)
+- Merb::Config[:exception_details] = Merb::Config.fetch(:exception_details, false)
+- Merb::Config[:cache_templates] = true
+- else
+- Merb::Config[:reloader] = Merb::Config.fetch(:reloader, true)
+- Merb::Config[:exception_details] = Merb::Config.fetch(:exception_details, true)
+- end
+-
+- Merb::Config[:reloader_time] ||= 0.5 if Merb::Config[:reloader] == true
+-
+-
+- if Merb::Config[:reloader]
+- Thread.abort_on_exception = true
+- Thread.new do
+- loop do
+- sleep( Merb::Config[:reloader_time] )
+- ::Merb::BootLoader.reload if ::Merb::BootLoader.app_loaded?
+- end
+- Thread.exit
+- end
+- end
++ # case Merb::Config[:environment].to_s
++ # when 'production'
++ # Merb::Config[:reloader] = Merb::Config.fetch(:reloader, false)
++ # Merb::Config[:exception_details] = Merb::Config.fetch(:exception_details, false)
++ # Merb::Config[:cache_templates] = true
++ # else
++ # Merb::Config[:reloader] = Merb::Config.fetch(:reloader, true)
++ # Merb::Config[:exception_details] = Merb::Config.fetch(:exception_details, true)
++ # end
++ #
++ # Merb::Config[:reloader_time] ||= 0.5 if Merb::Config[:reloader] == true
++ #
++ #
++ # if Merb::Config[:reloader]
++ # Thread.abort_on_exception = true
++ # Thread.new do
++ # loop do
++ # sleep( Merb::Config[:reloader_time] )
++ # ::Merb::BootLoader.reload if ::Merb::BootLoader.app_loaded?
++ # end
++ # Thread.exit
++ # end
++ # end
+ @configuration
+ end
+
+diff --git a/lib/merb_core/controller/abstract_controller.rb b/lib/merb_core/controller/abstract_controller.rb
+index fbf83372793da6da4b803b799994f0e341fddf88..f5e9a59057d67a6d56377a516a726cf51aa03d6f 100644
+--- a/lib/merb_core/controller/abstract_controller.rb
++++ b/lib/merb_core/controller/abstract_controller.rb
+@@ -96,7 +96,7 @@ class Merb::AbstractController
+ # the superclass.
+ #---
+ # @public
+- def _template_location(action, controller = controller_name, type = nil)
++ def _template_location(action, type = nil, controller = controller_name)
+ "#{controller}/#{action}"
+ end
+
+@@ -106,6 +106,8 @@ class Merb::AbstractController
+ # own subclasses. We're using a Set so we don't have to worry about
+ # uniqueness.
+ self._abstract_subclasses = Set.new
++ self._template_root = Merb.dir_for(:view)
++
+ def self.subclasses_list() _abstract_subclasses end
+
+ class << self
+@@ -114,7 +116,6 @@ class Merb::AbstractController
+ # The controller that is being inherited from Merb::AbstractController
+ def inherited(klass)
+ _abstract_subclasses << klass.to_s
+- klass._template_root ||= Merb.dir_for(:view)
+ super
+ end
+
+diff --git a/lib/merb_core/controller/merb_controller.rb b/lib/merb_core/controller/merb_controller.rb
+index 7283f006bb0501b29f825da129600cf045264b62..98af6ef3330a6b3f46d7bb1f8643261e28155ae5 100644
+--- a/lib/merb_core/controller/merb_controller.rb
++++ b/lib/merb_core/controller/merb_controller.rb
+@@ -71,6 +71,10 @@ class Merb::Controller < Merb::AbstractController
+ end
+ end
+
++ def _template_location(action, type = nil, controller = controller_name)
++ "#{controller}/#{action}.#{type}"
++ end
++
+ # Sets the variables that came in through the dispatch as available to
+ # the controller. This is called by .build, so see it for more
+ # information.
+@@ -107,9 +111,7 @@ class Merb::Controller < Merb::AbstractController
+ request.cookies[_session_id_key] = request.params[_session_id_key]
+ end
+ end
+- @_request, @_response, @_status, @_headers =
+- request, response, status, headers
+-
++ @request, @response, @status, @headers = request, response, status, headers
+ nil
+ end
+
+@@ -135,7 +137,8 @@ class Merb::Controller < Merb::AbstractController
+ @_benchmarks[:action_time] = Time.now - start
+ end
+
+- _attr_reader :request, :response, :status, :headers
++ attr_reader :request, :response, :headers
++ attr_accessor :status
+ def params() request.params end
+ def cookies() request.cookies end
+ def session() request.session end
+diff --git a/lib/merb_core/controller/mime.rb b/lib/merb_core/controller/mime.rb
+index d17570786ca318cff7201c4b1e947ae229b01de8..ff9abe4d1c452aeabfcf5f7dc7a2c7cdd3f67035 100644
+--- a/lib/merb_core/controller/mime.rb
++++ b/lib/merb_core/controller/mime.rb
+@@ -8,7 +8,7 @@ module Merb
+
+ # Any specific outgoing headers should be included here. These are not
+ # the content-type header but anything in addition to it.
+- # +tranform_method+ should be set to a symbol of the method used to
++ # +transform_method+ should be set to a symbol of the method used to
+ # transform a resource into this mime type.
+ # For example for the :xml mime type an object might be transformed by
+ # calling :to_xml, or for the :js mime type, :to_json.
+@@ -71,27 +71,6 @@ module Merb
+ def mime_by_request_header(header)
+ available_mime_types.find {|key,info| info[request_headers].include?(header)}.first
+ end
+-
+- # Resets the default mime-types
+- #
+- # By default, the mime-types include:
+- # :all:: no transform, */*
+- # :yaml:: to_yaml, application/x-yaml or text/yaml
+- # :text:: to_text, text/plain
+- # :html:: to_html, text/html or application/xhtml+xml or application/html
+- # :xml:: to_xml, application/xml or text/xml or application/x-xml, adds "Encoding: UTF-8" response header
+- # :js:: to_json, text/javascript ot application/javascript or application/x-javascript
+- # :json:: to_json, application/json or text/x-json
+- def reset_default_mime_types!
+- available_mime_types.clear
+- Merb.add_mime_type(:all, nil, %w[*/*])
+- Merb.add_mime_type(:yaml, :to_yaml, %w[application/x-yaml text/yaml])
+- Merb.add_mime_type(:text, :to_text, %w[text/plain])
+- Merb.add_mime_type(:html, :to_html, %w[text/html application/xhtml+xml application/html])
+- Merb.add_mime_type(:xml, :to_xml, %w[application/xml text/xml application/x-xml], :Encoding => "UTF-8")
+- Merb.add_mime_type(:js, :to_json, %w[text/javascript application/javascript application/x-javascript])
+- Merb.add_mime_type(:json, :to_json, %w[application/json text/x-json])
+- end
+
+ end
+ end
+\ No newline at end of file
+diff --git a/lib/merb_core/controller/mixins/render.rb b/lib/merb_core/controller/mixins/render.rb
+index 8e096546d4647bb597ab2e00a4b15d09db35e9c9..a298263af7d655d9ce43007554f3827046831287 100644
+--- a/lib/merb_core/controller/mixins/render.rb
++++ b/lib/merb_core/controller/mixins/render.rb
+@@ -51,21 +51,22 @@ module Merb::RenderMixin
+
+ # If you don't specify a thing to render, assume they want to render the current action
+ thing ||= action_name.to_sym
+-
++
+ # Content negotiation
+ opts[:format] ? (self.content_type = opts[:format]) : content_type
+
+ # Do we have a template to try to render?
+ if thing.is_a?(Symbol) || opts[:template]
+-
++
+ # Find a template path to look up (_template_location adds flexibility here)
+- template_location = _template_root / (opts[:template] || _template_location(thing))
++ template_location = _template_root / (opts[:template] || _template_location(thing, content_type))
++
+ # Get the method name from the previously inlined list
+ template_method = Merb::Template.template_for(template_location)
+
+ # Raise an error if there's no template
+ raise TemplateNotFound, "No template found at #{template_location}" unless
+- self.respond_to?(template_method)
++ template_method && self.respond_to?(template_method)
+
+ # Call the method in question and throw the content for later consumption by the layout
+ throw_content(:for_layout, self.send(template_method))
+diff --git a/lib/merb_core/controller/mixins/responder.rb b/lib/merb_core/controller/mixins/responder.rb
+index e910b2b32c844ab51cf2a10d0ad26c314dbb3631..5ac67fb907aaf9f95effc7eb3cbb07b8963ce022 100644
+--- a/lib/merb_core/controller/mixins/responder.rb
++++ b/lib/merb_core/controller/mixins/responder.rb
+@@ -97,6 +97,8 @@ module Merb
+ # and none of the provides methods can be used.
+ module ResponderMixin
+
++ TYPES = {}
++
+ class ContentTypeAlreadySet < StandardError; end
+
+ # ==== Parameters
+@@ -105,6 +107,7 @@ module Merb
+ base.extend(ClassMethods)
+ base.class_eval do
+ class_inheritable_accessor :class_provided_formats
++ self.class_provided_formats = []
+ end
+ base.reset_provides
+ end
+@@ -178,171 +181,253 @@ module Merb
+ def reset_provides
+ only_provides(:html)
+ end
+-
+- # ==== Returns
+- # The current list of formats provided for this instance of the controller.
+- # It starts with what has been set in the controller (or :html by default)
+- # but can be modifed on a per-action basis.
+- def _provided_formats
+- @_provided_formats ||= class_provided_formats.dup
++ end
++
++ # ==== Returns
++ # The current list of formats provided for this instance of the controller.
++ # It starts with what has been set in the controller (or :html by default)
++ # but can be modifed on a per-action basis.
++ def _provided_formats
++ @_provided_formats ||= class_provided_formats.dup
++ end
++
++ # Sets the provided formats for this action. Usually, you would
++ # use a combination of +provides+, +only_provides+ and +does_not_provide+
++ # to manage this, but you can set it directly.
++ #
++ # ==== Parameters
++ # *formats<Symbol>:: A list of formats to be passed to provides
++ #
++ # ==== Raises
++ # Merb::ResponderMixin::ContentTypeAlreadySet::
++ # Content negotiation already occured, and the content_type is set.
++ #
++ # ==== Returns
++ # Array:: List of formats passed in
++ def _set_provided_formats(*formats)
++ if @_content_type
++ raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set"
+ end
+-
+- # Sets the provided formats for this action. Usually, you would
+- # use a combination of +provides+, +only_provides+ and +does_not_provide+
+- # to manage this, but you can set it directly.
+- #
+- # ==== Parameters
+- # *formats<Symbol>:: A list of formats to be passed to provides
+- #
+- # ==== Raises
+- # Merb::ResponderMixin::ContentTypeAlreadySet::
+- # Content negotiation already occured, and the content_type is set.
+- #
+- # ==== Returns
+- # Array:: List of formats passed in
+- def _set_provided_formats(*formats)
+- if @_content_type
+- raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set"
+- end
+- @_provided_formats = []
+- provides(*formats)
++ @_provided_formats = []
++ provides(*formats)
++ end
++ alias :_provided_formats= :_set_provided_formats
++
++ # Adds formats to the list of provided formats for this particular
++ # request. Usually used to add formats to a single action. See also
++ # the controller-level provides that affects all actions in a controller.
++ #
++ # ==== Parameters
++ # *formats<Symbol>:: A list of formats to add to the per-action list
++ # of provided formats
++ #
++ # ==== Raises
++ # Merb::ResponderMixin::ContentTypeAlreadySet::
++ # Content negotiation already occured, and the content_type is set.
++ #
++ # ==== Returns
++ # Array:: List of formats passed in
++ #
++ #---
++ # @public
++ def provides(*formats)
++ if @_content_type
++ raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set"
+ end
+- alias :_provided_formats= :_set_provided_formats
+-
+- # Adds formats to the list of provided formats for this particular
+- # request. Usually used to add formats to a single action. See also
+- # the controller-level provides that affects all actions in a controller.
+- #
+- # ==== Parameters
+- # *formats<Symbol>:: A list of formats to add to the per-action list
+- # of provided formats
+- #
+- # ==== Raises
+- # Merb::ResponderMixin::ContentTypeAlreadySet::
+- # Content negotiation already occured, and the content_type is set.
+- #
+- # ==== Returns
+- # Array:: List of formats passed in
+- #
+- #---
+- # @public
+- def provides(*formats)
+- if @_content_type
+- raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set"
+- end
+- formats.each do |fmt|
+- _provided_formats << fmt unless _provided_formats.include?(fmt)
+- end
++ formats.each do |fmt|
++ _provided_formats << fmt unless _provided_formats.include?(fmt)
+ end
++ end
+
+- # Sets list of provided formats for this particular
+- # request. Usually used to limit formats to a single action. See also
+- # the controller-level only_provides that affects all actions
+- # in a controller.
+- #
+- # ==== Parameters
+- # *formats<Symbol>:: A list of formats to use as the per-action list
+- # of provided formats
+- #
+- # ==== Returns
+- # Array:: List of formats passed in
+- #
+- #---
+- # @public
+- def only_provides(*formats)
+- self._provided_formats = *formats
+- end
+-
+- # Removes formats from the list of provided formats for this particular
+- # request. Usually used to remove formats from a single action. See
+- # also the controller-level does_not_provide that affects all actions in a
+- # controller.
+- #
+- # ==== Parameters
+- # *formats<Symbol>:: Registered mime-type
+- #
+- # ==== Returns
+- # Array:: List of formats that remain after removing the ones not to provide
+- #
+- #---
+- # @public
+- def does_not_provide(*formats)
+- formats.flatten!
+- self._provided_formats -= formats
+- end
+-
+- # Do the content negotiation:
+- # 1. if params[:format] is there, and provided, use it
+- # 2. Parse the Accept header
+- # 3. If it's */*, use the first provided format
+- # 4. Look for one that is provided, in order of request
+- # 5. Raise 406 if none found
+- def _perform_content_negotiation # :nodoc:
+- raise Merb::ControllerExceptions::NotAcceptable if provided_formats.empty?
+- if fmt = params[:format]
+- return fmt.to_sym if provided_formats.include?(fmt.to_sym)
+- else
+- accepts = Responder.parse(request.accept).map {|t| t.to_sym}
+- return provided_formats.first if accepts.include?(:all)
+- return accepts.each { |type| break type if provided_formats.include?(type) }
+- end
+- raise Merb::ControllerExceptions::NotAcceptable
++ # Sets list of provided formats for this particular
++ # request. Usually used to limit formats to a single action. See also
++ # the controller-level only_provides that affects all actions
++ # in a controller.
++ #
++ # ==== Parameters
++ # *formats<Symbol>:: A list of formats to use as the per-action list
++ # of provided formats
++ #
++ # ==== Returns
++ # Array:: List of formats passed in
++ #
++ #---
++ # @public
++ def only_provides(*formats)
++ self._provided_formats = *formats
++ end
++
++ # Removes formats from the list of provided formats for this particular
++ # request. Usually used to remove formats from a single action. See
++ # also the controller-level does_not_provide that affects all actions in a
++ # controller.
++ #
++ # ==== Parameters
++ # *formats<Symbol>:: Registered mime-type
++ #
++ # ==== Returns
++ # Array:: List of formats that remain after removing the ones not to provide
++ #
++ #---
++ # @public
++ def does_not_provide(*formats)
++ formats.flatten!
++ self._provided_formats -= formats
++ end
++
++ # Do the content negotiation:
++ # 1. if params[:format] is there, and provided, use it
++ # 2. Parse the Accept header
++ # 3. If it's */*, use the first provided format
++ # 4. Look for one that is provided, in order of request
++ # 5. Raise 406 if none found
++ def _perform_content_negotiation # :nodoc:
++ raise Merb::ControllerExceptions::NotAcceptable if _provided_formats.empty?
++ if fmt = params[:format] && _provided_formats.include?(fmt.to_sym)
++ return fmt.to_sym
+ end
++ accepts = Responder.parse(request.accept).map {|t| t.to_sym}
++ return _provided_formats.first if accepts.include?(:all)
++ (accepts & _provided_formats).first || (raise Merb::ControllerExceptions::NotAcceptable)
++ end
+
+- # Returns the output format for this request, based on the
+- # provided formats, <tt>params[:format]</tt> and the client's HTTP
+- # Accept header.
+- #
+- # The first time this is called, it triggers content negotiation
+- # and caches the value. Once you call +content_type+ you can
+- # not set or change the list of provided formats.
+- #
+- # Called automatically by +render+, so you should only call it if
+- # you need the value, not to trigger content negotiation.
+- #
+- # ==== Parameters
+- # fmt<String?>::
+- # An optional format to use instead of performing content negotiation.
+- # This can be used to pass in the values of opts[:format] from the
+- # render function to short-circuit content-negotiation when it's not
+- # necessary. This optional parameter should not be considered part
+- # of the public API.
+- #
+- # ==== Returns
+- # Symbol:: The content-type that will be used for this controller.
+- #
+- #---
+- # @public
+- def content_type(fmt = nil)
+- self.content_type = (fmt || _perform_content_negotiation) unless @_content_type
+- @_content_type
++ # Returns the output format for this request, based on the
++ # provided formats, <tt>params[:format]</tt> and the client's HTTP
++ # Accept header.
++ #
++ # The first time this is called, it triggers content negotiation
++ # and caches the value. Once you call +content_type+ you can
++ # not set or change the list of provided formats.
++ #
++ # Called automatically by +render+, so you should only call it if
++ # you need the value, not to trigger content negotiation.
++ #
++ # ==== Parameters
++ # fmt<String?>::
++ # An optional format to use instead of performing content negotiation.
++ # This can be used to pass in the values of opts[:format] from the
++ # render function to short-circuit content-negotiation when it's not
++ # necessary. This optional parameter should not be considered part
++ # of the public API.
++ #
++ # ==== Returns
++ # Symbol:: The content-type that will be used for this controller.
++ #
++ #---
++ # @public
++ def content_type(fmt = nil)
++ @_content_type = (fmt || _perform_content_negotiation) unless @_content_type
++ @_content_type
++ end
++
++ # Sets the content type of the current response to a value based on
++ # a passed in key. The Content-Type header will be set to the first
++ # registered header for the mime-type.
++ #
++ # ==== Parameters
++ # type<Symbol>:: A type that is in the list of registered mime-types.
++ #
++ # ==== Raises
++ # ArgumentError:: "type" is not in the list of registered mime-types.
++ #
++ # ==== Returns
++ # Symbol:: The content-type that was passed in.
++ #
++ #---
++ # @semipublic
++ def content_type=(type)
++ unless Merb.available_mime_types.has_key?(type)
++ raise Merb::ControllerExceptions::NotAcceptable.new("Unknown content_type for response: #{type}")
++ end
++ headers['Content-Type'] = Merb.available_mime_types[type].first
++ @_content_type = type
++ end
++
++ end
++
++ class Responder
++
++ protected
++ def self.parse(accept_header)
++ # parse the raw accept header into a unique, sorted array of AcceptType objects
++ list = accept_header.to_s.split(/,/).enum_for(:each_with_index).map do |entry,index|
++ AcceptType.new(entry,index += 1)
++ end.sort.uniq
++ # firefox (and possibly other browsers) send broken default accept headers.
++ # fix them up by sorting alternate xml forms (namely application/xhtml+xml)
++ # ahead of pure xml types (application/xml,text/xml).
++ if app_xml = list.detect{|e| e.super_range == 'application/xml'}
++ list.select{|e| e.to_s =~ /\+xml/}.each { |acc_type|
++ list[list.index(acc_type)],list[list.index(app_xml)] =
++ list[list.index(app_xml)],list[list.index(acc_type)] }
+ end
+-
+- # Sets the content type of the current response to a value based on
+- # a passed in key. The Content-Type header will be set to the first
+- # registered header for the mime-type.
+- #
+- # ==== Parameters
+- # type<Symbol>:: A type that is in the list of registered mime-types.
+- #
+- # ==== Raises
+- # ArgumentError:: "type" is not in the list of registered mime-types.
+- #
+- # ==== Returns
+- # Symbol:: The content-type that was passed in.
+- #
+- #---
+- # @semipublic
+- def content_type=(type)
+- unless Merb.available_mime_types.has_key?(type)
+- raise Merb::ControllerExceptions::NotAcceptable.new("Unknown content_type for response: #{type}")
+- end
+- headers['Content-Type'] = Merb.available_mime_types[type].first
+- @_content_type = type
++ list
++ end
++
++ public
++ def self.params_to_query_string(value, prefix = nil)
++ case value
++ when Array
++ value.map { |v|
++ params_to_query_string(v, "#{prefix}[]")
++ } * "&"
++ when Hash
++ value.map { |k, v|
++ params_to_query_string(v, prefix ? "#{prefix}[#{Merb::Request.escape(k)}]" : Merb::Request.escape(k))
++ } * "&"
++ else
++ "#{prefix}=#{Merb::Request.escape(value)}"
+ end
++ end
+
+- end
++ end
++
++ class AcceptType
++
++ attr_reader :media_range, :quality, :index, :type, :sub_type
+
++ def initialize(entry,index)
++ @index = index
++ @media_range, quality = entry.split(/;\s*q=/).map{|a| a.strip }
++ @type, @sub_type = @media_range.split(/\//)
++ quality ||= 0.0 if @media_range == '*/*'
++ @quality = ((quality || 1.0).to_f * 100).to_i
++ end
++
++ def <=>(entry)
++ c = entry.quality <=> quality
++ c = index <=> entry.index if c == 0
++ c
++ end
++
++ def eql?(entry)
++ synonyms.include?(entry.media_range)
++ end
++
++ def ==(entry); eql?(entry); end
++
++ def hash; super_range.hash; end
++
++ def synonyms
++ @syns ||= Merb.available_mime_types.values.map do |e|
++ e[:request_headers] if e[:request_headers].include?(@media_range)
++ end.compact.flatten
++ end
++
++ def super_range
++ synonyms.first || @media_range
++ end
++
++ def to_sym
++ Merb.available_mime_types.select{|k,v|
++ v[:request_headers] == synonyms || v[:request_headers][0] == synonyms[0]}.flatten.first
++ end
++
++ def to_s
++ @media_range
++ end
++
+ end
++
+
+ end
+\ No newline at end of file
+diff --git a/lib/merb_core/dispatch/dispatcher.rb b/lib/merb_core/dispatch/dispatcher.rb
+index c458c9f9ad454d3b0c3055d6b2a8e88b17712b44..f7fed0f539a20f9cce08b72c551725ad0563bf37 100644
+--- a/lib/merb_core/dispatch/dispatcher.rb
++++ b/lib/merb_core/dispatch/dispatcher.rb
+@@ -33,10 +33,10 @@ class Merb::Dispatcher
+
+ # this is the custom dispatch_exception; it allows failures to still be dispatched
+ # to the error controller
+- rescue => exception
+- Merb.logger.error(Merb.exception(exception))
+- exception = controller_exception(exception)
+- dispatch_exception(request, response, exception)
++ # rescue => exception
++ # Merb.logger.error(Merb.exception(exception))
++ # exception = controller_exception(exception)
++ # dispatch_exception(request, response, exception)
+ end
+
+ private
+@@ -49,10 +49,10 @@ class Merb::Dispatcher
+ def dispatch_action(klass, action, request, response, status=200)
+ # build controller
+ controller = klass.build(request, response, status)
+- if @@use_mutex
+- @@mutex.synchronize { controller.dispatch(action) }
++ if use_mutex
++ @@mutex.synchronize { controller._dispatch(action) }
+ else
+- controller.dispatch(action)
++ controller._dispatch(action)
+ end
+ [controller, action]
+ end
+diff --git a/lib/merb_core/rack/adapter.rb b/lib/merb_core/rack/adapter.rb
+index ffc7117e9733e83b0567bbe4a43fac7663800b7d..217399a5382d0b3878aaea3d3e302173c5b5f119 100644
+--- a/lib/merb_core/rack/adapter.rb
++++ b/lib/merb_core/rack/adapter.rb
+@@ -40,7 +40,7 @@ module Merb
+ begin
+ controller, action = ::Merb::Dispatcher.handle(request, response)
+ rescue Object => e
+- return [500, {"Content-Type"=>"text/html"}, "Internal Server Error"]
++ return [500, {"Content-Type"=>"text/html"}, e.message + "<br/>" + e.backtrace.join("<br/>")]
+ end
+ [controller.status, controller.headers, controller.body]
+ end
+diff --git a/lib/merb_core/test/request_helper.rb b/lib/merb_core/test/request_helper.rb
+index 10a9fb3ace56eaf1db0fa300df3fb2ab88a7118a..f302a3b71539182ba142cd208fe6d6aae171b1a1 100644
+--- a/lib/merb_core/test/request_helper.rb
++++ b/lib/merb_core/test/request_helper.rb
+@@ -26,8 +26,10 @@ module Merb::Test::RequestHelper
+ Merb::Test::FakeRequest.new(env, StringIO.new(req))
+ end
+
+- def dispatch_to(controller_klass, action, env = {}, opt = {}, &blk)
+- request = fake_request(env, opt)
++ def dispatch_to(controller_klass, action, params = {}, env = {}, &blk)
++ request = fake_request(env,
++ :query_string => Merb::Responder.params_to_query_string(params))
++
+ controller = controller_klass.build(request)
+ controller.instance_eval(&blk) if block_given?
+ controller._dispatch(action)
+diff --git a/spec/public/abstract_controller/spec_helper.rb b/spec/public/abstract_controller/spec_helper.rb
+index df759008d14e7572b5c44de24f77f828f83f1682..694cee2592a210a5c1fa40ca7846beeaa09725fe 100644
+--- a/spec/public/abstract_controller/spec_helper.rb
++++ b/spec/public/abstract_controller/spec_helper.rb
+@@ -1,12 +1,10 @@
+ __DIR__ = File.dirname(__FILE__)
+ require File.join(__DIR__, "..", "..", "spec_helper")
+
+-# The framework structure *must* be set up before loading in framework
+-# files.
+ require File.join(__DIR__, "controllers", "filters")
+ require File.join(__DIR__, "controllers", "render")
+
+-Merb::BootLoader::Templates.new.run
++Merb::BootLoader::Templates.run
+
+ module Merb::Test::Behaviors
+ def dispatch_should_make_body(klass, body, action = :index)
+diff --git a/spec/public/controller/base_spec.rb b/spec/public/controller/base_spec.rb
+index 1709e612629ed2c2b6af4579a8b89684aca9aa3c..5bcdb59948cc22592639b1aee9bd233ff2c306fa 100644
+--- a/spec/public/controller/base_spec.rb
++++ b/spec/public/controller/base_spec.rb
+@@ -10,11 +10,11 @@ describe Merb::Controller, " callable actions" do
+ end
+
+ it "should dispatch to callable actions" do
+- dispatch_to(Merb::Test::Fixtures::TestFoo, :index).body.should == "index"
++ dispatch_to(Merb::Test::Fixtures::TestBase, :index).body.should == "index"
+ end
+
+ it "should not dispatch to hidden actions" do
+- calling { dispatch_to(Merb::Test::Fixtures::TestFoo, :hidden) }.
++ calling { dispatch_to(Merb::Test::Fixtures::TestBase, :hidden) }.
+ should raise_error(Merb::ControllerExceptions::ActionNotFound)
+ end
+
+diff --git a/spec/public/controller/controllers/base.rb b/spec/public/controller/controllers/base.rb
+index a1b3beb27899df781d943427d9b23945f02e14de..c4b69a440a9da3c3486208d2cb95ccb8bdb974b9 100644
+--- a/spec/public/controller/controllers/base.rb
++++ b/spec/public/controller/controllers/base.rb
+@@ -3,7 +3,7 @@ module Merb::Test::Fixtures
+ self._template_root = File.dirname(__FILE__) / "views"
+ end
+
+- class TestFoo < ControllerTesting
++ class TestBase < ControllerTesting
+ def index
+ "index"
+ end
+diff --git a/spec/public/controller/controllers/responder.rb b/spec/public/controller/controllers/responder.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..867192e8f6e995a43fd5cd3daffa0ec11b3d31e5
+--- /dev/null
++++ b/spec/public/controller/controllers/responder.rb
+@@ -0,0 +1,25 @@
++module Merb::Test::Fixtures
++ class ControllerTesting < Merb::Controller
++ self._template_root = File.dirname(__FILE__) / "views"
++ end
++
++ class TestResponder < ControllerTesting
++ def index
++ render
++ end
++ end
++
++ class TestHtmlDefault < TestResponder; end
++
++ class TestClassProvides < TestResponder;
++ provides :xml
++ end
++
++ class TestLocalProvides < TestResponder;
++ def index
++ provides :xml
++ render
++ end
++ end
++
++end
+\ No newline at end of file
+diff --git a/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.html.erb b/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.html.erb
+new file mode 100644
+index 0000000000000000000000000000000000000000..1bfb77d4a44c444bba6888ae7740f7df4b074c58
+--- /dev/null
++++ b/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.html.erb
+@@ -0,0 +1 @@
++This should not be rendered
+\ No newline at end of file
+diff --git a/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.xml.erb b/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.xml.erb
+new file mode 100644
+index 0000000000000000000000000000000000000000..7c91f633987348e87e5e34e1d9e87d9dd0e5100c
+--- /dev/null
++++ b/spec/public/controller/controllers/views/merb/test/fixtures/test_class_provides/index.xml.erb
+@@ -0,0 +1 @@
++<XML:Class provides='true' />
+\ No newline at end of file
+diff --git a/spec/public/controller/controllers/views/merb/test/fixtures/test_html_default/index.html.erb b/spec/public/controller/controllers/views/merb/test/fixtures/test_html_default/index.html.erb
+new file mode 100644
+index 0000000000000000000000000000000000000000..eb4b52bf5a7aaba8f1706de419f42789c05684a2
+--- /dev/null
++++ b/spec/public/controller/controllers/views/merb/test/fixtures/test_html_default/index.html.erb
+@@ -0,0 +1 @@
++HTML: Default
+\ No newline at end of file
+diff --git a/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.html.erb b/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.html.erb
+new file mode 100644
+index 0000000000000000000000000000000000000000..a3a841a89c62e6174038935a42da9cd24ff54413
+--- /dev/null
++++ b/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.html.erb
+@@ -0,0 +1 @@
++This should not render
+\ No newline at end of file
+diff --git a/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.xml.erb b/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.xml.erb
+new file mode 100644
+index 0000000000000000000000000000000000000000..c1384ec6af0357b585cc367035d1bc3a30347ade
+--- /dev/null
++++ b/spec/public/controller/controllers/views/merb/test/fixtures/test_local_provides/index.xml.erb
+@@ -0,0 +1 @@
++<XML:Local provides='true' />
+\ No newline at end of file
+diff --git a/spec/public/controller/responder_spec.rb b/spec/public/controller/responder_spec.rb
+index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..bcf18532442e5965cf6ca8501770d7b7a1eb2429 100644
+--- a/spec/public/controller/responder_spec.rb
++++ b/spec/public/controller/responder_spec.rb
+@@ -0,0 +1,31 @@
++require File.join(File.dirname(__FILE__), "spec_helper")
++
++describe Merb::Controller, " responds" do
++
++ before do
++ Merb.push_path(:layout, File.dirname(__FILE__) / "controllers" / "views" / "layouts")
++ Merb::Router.prepare do |r|
++ r.default_routes
++ end
++ end
++
++ it "should default the mime-type to HTML" do
++ dispatch_to(Merb::Test::Fixtures::TestHtmlDefault, :index).body.should == "HTML: Default"
++ end
++
++ it "should use other mime-types if they are provided on the class level" do
++ controller = dispatch_to(Merb::Test::Fixtures::TestClassProvides, :index, {}, :http_accept => "application/xml")
++ controller.body.should == "<XML:Class provides='true' />"
++ end
++
++ it "should fail if none of the acceptable mime-types are available" do
++ calling { dispatch_to(Merb::Test::Fixtures::TestClassProvides, :index, {}, :http_accept => "application/json") }.
++ should raise_error(Merb::ControllerExceptions::NotAcceptable)
++ end
++
++ it "should use mime-types that are provided at the local level" do
++ controller = dispatch_to(Merb::Test::Fixtures::TestLocalProvides, :index, {}, :http_accept => "application/xml")
++ controller.body.should == "<XML:Local provides='true' />"
++ end
++
++end
+\ No newline at end of file
+diff --git a/spec/public/controller/spec_helper.rb b/spec/public/controller/spec_helper.rb
+index f68628a63740f4ce0235a15d71c5889e55ecaf78..e360194c1fbaf72c3298c61543c2d3a19b512b41 100644
+--- a/spec/public/controller/spec_helper.rb
++++ b/spec/public/controller/spec_helper.rb
+@@ -1,4 +1,10 @@
+ __DIR__ = File.dirname(__FILE__)
++require 'ruby-debug'
++
+ require File.join(__DIR__, "..", "..", "spec_helper")
+
+-require File.join(__DIR__, "controllers", "base")
+\ No newline at end of file
++require File.join(__DIR__, "controllers", "base")
++require File.join(__DIR__, "controllers", "responder")
++
++Merb::BootLoader::Templates.run
++Merb::BootLoader::MimeTypes.run
+\ No newline at end of file
diff --git a/git/test/fixtures/diff_new_mode b/git/test/fixtures/diff_new_mode
new file mode 100644
index 00000000..29705386
--- /dev/null
+++ b/git/test/fixtures/diff_new_mode
@@ -0,0 +1,14 @@
+diff --git a/conf/global_settings.py b/conf/global_settings.py
+old mode 100644
+new mode 100755
+index 9ec1bac000000000000000000000000000000000..1c4f83b000000000000000000000000000000000
+--- a/conf/global_settings.py
++++ b/conf/global_settings.py
+@@ -58,6 +58,7 @@ TEMPLATE_CONTEXT_PROCESSORS = (
+ )
+
+ MIDDLEWARE_CLASSES = (
++ "django.middleware.cache.CacheMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
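
This fixture exists to exercise parsing of the old mode/new mode header pair; a throwaway detection sketch over such text (variable names are illustrative, not GitPython's actual parser):

  old_mode = fixture[/^old mode (\d+)$/, 1]  # => "100644"
  new_mode = fixture[/^new mode (\d+)$/, 1]  # => "100755"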
diff --git a/git/test/fixtures/diff_numstat b/git/test/fixtures/diff_numstat
new file mode 100644
index 00000000..44c6ca2d
--- /dev/null
+++ b/git/test/fixtures/diff_numstat
@@ -0,0 +1,2 @@
+29 18 a.txt
+0 5 b.txt
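
Each numstat line carries insertions, deletions, then the path; a minimal parsing sketch (the separator is a tab in raw git output, so whitespace splitting covers both forms):

  fixture.each_line do |line|
    added, deleted, path = line.strip.split(nil, 3)
    # first line above => added "29", deleted "18", path "a.txt"
  end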
diff --git a/git/test/fixtures/diff_p b/git/test/fixtures/diff_p
new file mode 100644
index 00000000..af4759e5
--- /dev/null
+++ b/git/test/fixtures/diff_p
@@ -0,0 +1,610 @@
+diff --git a/.gitignore b/.gitignore
+index 4ebc8aea50e0a67e000ba29a30809d0a7b9b2666..2dd02534615434d88c51307beb0f0092f21fd103 100644
+--- a/.gitignore
++++ b/.gitignore
+@@ -1 +1,2 @@
+ coverage
++pkg
+diff --git a/Manifest.txt b/Manifest.txt
+index 641972d82c6d1b51122274ae8f6a0ecdfb56ee22..38bf80c54a526e76d74820a0f48606fe1ca7b1be 100644
+--- a/Manifest.txt
++++ b/Manifest.txt
+@@ -4,4 +4,31 @@ README.txt
+ Rakefile
+ bin/grit
+ lib/grit.rb
+-test/test_grit.rb
+\ No newline at end of file
++lib/grit/actor.rb
++lib/grit/blob.rb
++lib/grit/commit.rb
++lib/grit/errors.rb
++lib/grit/git.rb
++lib/grit/head.rb
++lib/grit/lazy.rb
++lib/grit/repo.rb
++lib/grit/tree.rb
++test/fixtures/blame
++test/fixtures/cat_file_blob
++test/fixtures/cat_file_blob_size
++test/fixtures/for_each_ref
++test/fixtures/ls_tree_a
++test/fixtures/ls_tree_b
++test/fixtures/rev_list
++test/fixtures/rev_list_single
++test/helper.rb
++test/profile.rb
++test/suite.rb
++test/test_actor.rb
++test/test_blob.rb
++test/test_commit.rb
++test/test_git.rb
++test/test_head.rb
++test/test_reality.rb
++test/test_repo.rb
++test/test_tree.rb
+diff --git a/README.txt b/README.txt
+index 8b1e02c0fb554eed2ce2ef737a68bb369d7527df..fca94f84afd7d749c62626011f972a509f6a5ac6 100644
+--- a/README.txt
++++ b/README.txt
+@@ -1,32 +1,185 @@
+ grit
+- by FIX (your name)
+- FIX (url)
++ by Tom Preston-Werner
++ grit.rubyforge.org
+
+ == DESCRIPTION:
++
++Grit is a Ruby library for extracting information from a git repository in an
++object oriented manner.
++
++== REQUIREMENTS:
++
++* git (http://git.or.cz) tested with 1.5.3.4
++
++== INSTALL:
++
++sudo gem install grit
++
++== USAGE:
++
++Grit gives you object model access to your git repository. Once you have
++created a repository object, you can traverse it to find parent commit(s),
++trees, blobs, etc.
++
++= Initialize a Repo object
++
++The first step is to create a GitPython.Repo object to represent your repo. I
++include the Grit module to reduce typing.
++
++ include Grit
++ repo = Repo.new("/Users/tom/dev/grit")
+
+-FIX (describe your package)
++In the above example, the directory /Users/tom/dev/grit is my working
++repo and contains the .git directory. You can also initialize Grit with a
++bare repo.
+
+-== FEATURES/PROBLEMS:
++ repo = Repo.new("/var/git/grit.git")
+
+-* FIX (list of features or problems)
++= Getting a list of commits
+
+-== SYNOPSIS:
++From the Repo object, you can get a list of commits as an array of Commit
++objects.
+
+- FIX (code sample of usage)
++ repo.commits
++ # => [#<GitPython.Commit "e80bbd2ce67651aa18e57fb0b43618ad4baf7750">,
++ #<GitPython.Commit "91169e1f5fa4de2eaea3f176461f5dc784796769">,
++ #<GitPython.Commit "038af8c329ef7c1bae4568b98bd5c58510465493">,
++ #<GitPython.Commit "40d3057d09a7a4d61059bca9dca5ae698de58cbe">,
++ #<GitPython.Commit "4ea50f4754937bf19461af58ce3b3d24c77311d9">]
++
++Called without arguments, Repo#commits returns a list of up to ten commits
++reachable by the master branch (starting at the latest commit). You can ask
++for commits beginning at a different branch, commit, tag, etc.
+
+-== REQUIREMENTS:
++ repo.commits('mybranch')
++ repo.commits('40d3057d09a7a4d61059bca9dca5ae698de58cbe')
++ repo.commits('v0.1')
++
++You can specify the maximum number of commits to return.
+
+-* FIX (list of requirements)
++ repo.commits('master', 100)
++
++If you need paging, you can specify a number of commits to skip.
+
+-== INSTALL:
++ repo.commits('master', 10, 20)
++
++The above will return commits 21-30 from the commit list.
++
++= The Commit object
++
++Commit objects contain information about that commit.
++
++ head = repo.commits.first
++
++ head.id
++ # => "e80bbd2ce67651aa18e57fb0b43618ad4baf7750"
++
++ head.parents
++ # => [#<GitPython.Commit "91169e1f5fa4de2eaea3f176461f5dc784796769">]
++
++ head.tree
++ # => #<GitPython.Tree "3536eb9abac69c3e4db583ad38f3d30f8db4771f">
++
++ head.author
++ # => #<GitPython.Actor "Tom Preston-Werner <tom@mojombo.com>">
++
++ head.authored_date
++ # => Wed Oct 24 22:02:31 -0700 2007
++
++ head.committer
++ # => #<GitPython.Actor "Tom Preston-Werner <tom@mojombo.com>">
++
++ head.committed_date
++ # => Wed Oct 24 22:02:31 -0700 2007
++
++ head.message
++ # => "add Actor inspect"
++
++You can traverse a commit's ancestry by chaining calls to #parents.
++
++ repo.commits.first.parents[0].parents[0].parents[0]
++
++The above corresponds to master^^^ or master~3 in git parlance.
++
++= The Tree object
++
++A tree records pointers to the contents of a directory. Let's say you want
++the root tree of the latest commit on the master branch.
++
++ tree = repo.commits.first.tree
++ # => #<GitPython.Tree "3536eb9abac69c3e4db583ad38f3d30f8db4771f">
++
++ tree.id
++ # => "3536eb9abac69c3e4db583ad38f3d30f8db4771f"
++
++Once you have a tree, you can get the contents.
++
++ contents = tree.contents
++ # => [#<GitPython.Blob "4ebc8aea50e0a67e000ba29a30809d0a7b9b2666">,
++ #<GitPython.Blob "81d2c27608b352814cbe979a6acd678d30219678">,
++ #<GitPython.Tree "c3d07b0083f01a6e1ac969a0f32b8d06f20c62e5">,
++ #<GitPython.Tree "4d00fe177a8407dbbc64a24dbfc564762c0922d8">]
++
++This tree contains two Blob objects and two Tree objects. The trees are
++subdirectories and the blobs are files. Trees below the root have additional
++attributes.
++
++ contents.last.name
++ # => "lib"
++
++ contents.last.mode
++ # => "040000"
++
++There is a convenience method that allows you to get a named sub-object
++from a tree.
++
++ tree/"lib"
++ # => #<GitPython.Tree "e74893a3d8a25cbb1367cf241cc741bfd503c4b2">
++
++You can also get a tree directly from the repo if you know its name.
++
++ repo.tree
++ # => #<GitPython.Tree "master">
++
++ repo.tree("91169e1f5fa4de2eaea3f176461f5dc784796769")
++ # => #<GitPython.Tree "91169e1f5fa4de2eaea3f176461f5dc784796769">
++
++= The Blob object
++
++A blob represents a file. Trees often contain blobs.
++
++ blob = tree.contents.first
++ # => #<GitPython.Blob "4ebc8aea50e0a67e000ba29a30809d0a7b9b2666">
++
++A blob has certain attributes.
++
++ blob.id
++ # => "4ebc8aea50e0a67e000ba29a30809d0a7b9b2666"
++
++ blob.name
++ # => "README.txt"
++
++ blob.mode
++ # => "100644"
++
++ blob.size
++ # => 7726
++
++You can get the data of a blob as a string.
++
++ blob.data
++ # => "Grit is a library to ..."
++
++You can also get a blob directly from the repo if you know its name.
+
+-* FIX (sudo gem install, anything else)
++ repo.blob("4ebc8aea50e0a67e000ba29a30809d0a7b9b2666")
++ # => #<GitPython.Blob "4ebc8aea50e0a67e000ba29a30809d0a7b9b2666">
+
+ == LICENSE:
+
+ (The MIT License)
+
+-Copyright (c) 2007 FIX
++Copyright (c) 2007 Tom Preston-Werner
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+diff --git a/Rakefile b/Rakefile
+index 5bfb62163af455ca54422fd0b2e723ba1021ad12..72fde8c9ca87a1c992ce992bab13c3c4f13cddb9 100644
+--- a/Rakefile
++++ b/Rakefile
+@@ -4,11 +4,11 @@ require './lib/grit.rb'
+
+ Hoe.new('grit', GitPython.VERSION) do |p|
+ p.rubyforge_name = 'grit'
+- # p.author = 'FIX'
+- # p.email = 'FIX'
+- # p.summary = 'FIX'
+- # p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
+- # p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[1..-1]
++ p.author = 'Tom Preston-Werner'
++ p.email = 'tom@rubyisawesome.com'
++ p.summary = 'Object model interface to a git repo'
++ p.description = p.paragraphs_of('README.txt', 2..2).join("\n\n")
++ p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[2..-1].map { |u| u.strip }
+ p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
+ end
+
+diff --git a/lib/grit.rb b/lib/grit.rb
+index ae0792ae39d4891ebc1af996102a4f9df703394d..ae55fd7961ac49233f6ca515622a61e90d516044 100644
+--- a/lib/grit.rb
++++ b/lib/grit.rb
+@@ -1,4 +1,4 @@
+-$:.unshift File.dirname(__FILE__) # For use/testing when no gem is installed
++$:.unshift File.dirname(__FILE__) # For use/testing when no gem is installed
+
+ # core
+
+@@ -12,6 +12,8 @@ require 'grit/head'
+ require 'grit/commit'
+ require 'grit/tree'
+ require 'grit/blob'
++require 'grit/actor'
++require 'grit/diff'
+ require 'grit/repo'
+
+ module Grit
+@@ -21,5 +23,5 @@ module Grit
+
+ self.debug = false
+
+- VERSION = '1.0.0'
++ VERSION = '0.1.0'
+ end
+\ No newline at end of file
+diff --git a/lib/grit/actor.rb b/lib/grit/actor.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..f733bce6b57c0e5e353206e692b0e3105c2527f4
+--- /dev/null
++++ b/lib/grit/actor.rb
+@@ -0,0 +1,35 @@
++module Grit
++
++ class Actor
++ attr_reader :name
++ attr_reader :email
++
++ def initialize(name, email)
++ @name = name
++ @email = email
++ end
++
++ # Create an Actor from a string.
++ # +str+ is the string, which is expected to be in regular git format
++ #
++ # Format
++ # John Doe <jdoe@example.com>
++ #
++ # Returns Actor
++ def self.from_string(str)
++ case str
++ when /<.+>/
++ m, name, email = *str.match(/(.*) <(.+?)>/)
++ return self.new(name, email)
++ else
++ return self.new(str, nil)
++ end
++ end
++
++ # Pretty object inspection
++ def inspect
++ %Q{#<GitPython.Actor "#{@name} <#{@email}>">}
++ end
++ end # Actor
++
++end # Grit
+\ No newline at end of file
+diff --git a/lib/grit/blob.rb b/lib/grit/blob.rb
+index c863646d4278bfee2a7bcb64caace6b31f89ef03..87d43fab37844afdc2f8814dba3abdaa791f1370 100644
+--- a/lib/grit/blob.rb
++++ b/lib/grit/blob.rb
+@@ -81,9 +81,9 @@ module Grit
+ c = commits[info[:id]]
+ unless c
+ c = Commit.create(repo, :id => info[:id],
+- :author => info[:author],
++ :author => Actor.from_string(info[:author] + ' ' + info[:author_email]),
+ :authored_date => info[:author_date],
+- :committer => info[:committer],
++ :committer => Actor.from_string(info[:committer] + ' ' + info[:committer_email]),
+ :committed_date => info[:committer_date],
+ :message => info[:summary])
+ commits[info[:id]] = c
+@@ -102,11 +102,6 @@ module Grit
+ def inspect
+ %Q{#<GitPython.Blob "#{@id}">}
+ end
+-
+- # private
+-
+- def self.read_
+- end
+ end # Blob
+
+ end # Grit
+\ No newline at end of file
+diff --git a/lib/grit/commit.rb b/lib/grit/commit.rb
+index c2a9e2f81657b19925fe9bab4bc5d7ac130e5880..cd9c3e3184c97e83a8982fab9499cad3aec339f6 100644
+--- a/lib/grit/commit.rb
++++ b/lib/grit/commit.rb
+@@ -136,6 +136,11 @@ module Grit
+ commits
+ end
+
++ def self.diff(repo, id)
++ text = repo.git.diff({:full_index => true}, id)
++ Diff.list_from_string(repo, text)
++ end
++
+ # Convert this Commit to a String which is just the SHA1 id
+ def to_s
+ @id
+@@ -153,7 +158,7 @@ module Grit
+ # Returns [String (actor name and email), Time (acted at time)]
+ def self.actor(line)
+ m, actor, epoch = *line.match(/^.+? (.*) (\d+) .*$/)
+- [actor, Time.at(epoch.to_i)]
++ [Actor.from_string(actor), Time.at(epoch.to_i)]
+ end
+ end # Commit
+
+diff --git a/lib/grit/git.rb b/lib/grit/git.rb
+index 1d5251d40fb65ac89184ec662a3e1b04d0c24861..98eeddda5ed2b0e215e21128112393bdc9bc9039 100644
+--- a/lib/grit/git.rb
++++ b/lib/grit/git.rb
+@@ -13,17 +13,6 @@ module Grit
+ self.git_dir = git_dir
+ end
+
+- # Converstion hash from Ruby style options to git command line
+- # style options
+- TRANSFORM = {:max_count => "--max-count=",
+- :skip => "--skip=",
+- :pretty => "--pretty=",
+- :sort => "--sort=",
+- :format => "--format=",
+- :since => "--since=",
+- :p => "-p",
+- :s => "-s"}
+-
+ # Run the given git command with the specified arguments and return
+ # the result as a String
+ # +cmd+ is the command
+@@ -52,12 +41,19 @@ module Grit
+ def transform_options(options)
+ args = []
+ options.keys.each do |opt|
+- if TRANSFORM[opt]
++ if opt.to_s.size == 1
++ if options[opt] == true
++ args << "-#{opt}"
++ else
++ val = options.delete(opt)
++ args << "-#{opt.to_s} #{val}"
++ end
++ else
+ if options[opt] == true
+- args << TRANSFORM[opt]
++ args << "--#{opt.to_s.gsub(/_/, '-')}"
+ else
+ val = options.delete(opt)
+- args << TRANSFORM[opt] + val.to_s
++ args << "--#{opt.to_s.gsub(/_/, '-')}=#{val}"
+ end
+ end
+ end
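
The rewrite above replaces the lookup table with a general rule: single-letter keys become short flags, longer keys become long options with underscores turned into dashes, and a value of true yields a bare flag. The test_git.rb changes further down pin this behaviour:

  @git.transform_options(:max_count => 5)  # => ["--max-count=5"]
  @git.transform_options(:p => true)       # => ["-p"]
  @git.transform_options(:s => 5)          # => ["-s 5"]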
+diff --git a/lib/grit/repo.rb b/lib/grit/repo.rb
+index 624991d07e240ae66ff2a0dc55e2f2b5e262c75b..63bf03b839374c96a3d42a07d56681a797f52a71 100644
+--- a/lib/grit/repo.rb
++++ b/lib/grit/repo.rb
+@@ -93,6 +93,17 @@ module Grit
+ def blob(id)
+ Blob.create(self, :id => id)
+ end
++
++ # The commit log for a treeish
++ #
++ # Returns GitPython.Commit[]
++ def log(commit = 'master', path = nil, options = {})
++ default_options = {:pretty => "raw"}
++ actual_options = default_options.merge(options)
++ arg = path ? "#{commit} -- #{path}" : commit
++ commits = self.git.log(actual_options, arg)
++ Commit.list_from_string(self, commits)
++ end
+
+ # The diff from commit +a+ to commit +b+, optionally restricted to the given file(s)
+ # +a+ is the base commit
+@@ -121,4 +132,4 @@ module Grit
+ end
+ end # Repo
+
+-end # Grit
+\ No newline at end of file
++end # Grit
+diff --git a/test/test_actor.rb b/test/test_actor.rb
+new file mode 100644
+index 0000000000000000000000000000000000000000..08391f12336831d048122c8d13bc8404f27e6b91
+--- /dev/null
++++ b/test/test_actor.rb
+@@ -0,0 +1,28 @@
++require File.dirname(__FILE__) + '/helper'
++
++class TestActor < Test::Unit::TestCase
++ def setup
++
++ end
++
++ # from_string
++
++ def test_from_string_should_separate_name_and_email
++ a = Actor.from_string("Tom Werner <tom@example.com>")
++ assert_equal "Tom Werner", a.name
++ assert_equal "tom@example.com", a.email
++ end
++
++ def test_from_string_should_handle_just_name
++ a = Actor.from_string("Tom Werner")
++ assert_equal "Tom Werner", a.name
++ assert_equal nil, a.email
++ end
++
++ # inspect
++
++ def test_inspect
++ a = Actor.from_string("Tom Werner <tom@example.com>")
++ assert_equal %Q{#<GitPython.Actor "Tom Werner <tom@example.com>">}, a.inspect
++ end
++end
+\ No newline at end of file
+diff --git a/test/test_blob.rb b/test/test_blob.rb
+index 6fa087d785661843034d03c7e0b917a8a80d5d8c..9ef84cc14266141b070771706b8aeebc3dfbef82 100644
+--- a/test/test_blob.rb
++++ b/test/test_blob.rb
+@@ -40,9 +40,11 @@ class TestBlob < Test::Unit::TestCase
+ c = b.first.first
+ c.expects(:__bake__).times(0)
+ assert_equal '634396b2f541a9f2d58b00be1a07f0c358b999b3', c.id
+- assert_equal 'Tom Preston-Werner', c.author
++ assert_equal 'Tom Preston-Werner', c.author.name
++ assert_equal 'tom@mojombo.com', c.author.email
+ assert_equal Time.at(1191997100), c.authored_date
+- assert_equal 'Tom Preston-Werner', c.committer
++ assert_equal 'Tom Preston-Werner', c.committer.name
++ assert_equal 'tom@mojombo.com', c.committer.email
+ assert_equal Time.at(1191997100), c.committed_date
+ assert_equal 'initial grit setup', c.message
+ # c.expects(:__bake__).times(1)
+diff --git a/test/test_commit.rb b/test/test_commit.rb
+index 3bd6af75deda05725900eb7fd06e8107df14c655..0936c90e5b29ede2b5214d6dc26d256a8c6646f4 100644
+--- a/test/test_commit.rb
++++ b/test/test_commit.rb
+@@ -10,9 +10,28 @@ class TestCommit < Test::Unit::TestCase
+ def test_bake
+ Git.any_instance.expects(:rev_list).returns(fixture('rev_list_single'))
+ @c = Commit.create(@r, :id => '4c8124ffcf4039d292442eeccabdeca5af5c5017')
+- @c.author # cause bake-age
++ @c.author # bake
+
+- assert_equal "Tom Preston-Werner <tom@mojombo.com>", @c.author
++ assert_equal "Tom Preston-Werner", @c.author.name
++ assert_equal "tom@mojombo.com", @c.author.email
++ end
++
++ # diff
++
++ def test_diff
++ Git.any_instance.expects(:diff).returns(fixture('diff_p'))
++ diffs = Commit.diff(@r, 'master')
++
++ assert_equal 15, diffs.size
++
++ assert_equal '.gitignore', diffs.first.a_path
++ assert_equal '.gitignore', diffs.first.b_path
++ assert_equal '4ebc8ae', diffs.first.a_commit
++ assert_equal '2dd0253', diffs.first.b_commit
++ assert_equal '100644', diffs.first.mode
++ assert_equal false, diffs.first.new_file
++ assert_equal false, diffs.first.deleted_file
++ assert_equal "--- a/.gitignore\n+++ b/.gitignore\n@@ -1 +1,2 @@\n coverage\n+pkg", diffs.first.diff
+ end
+
+ # to_s
+diff --git a/test/test_git.rb b/test/test_git.rb
+index e615a035d096b6cbc984e2f4213c06d0ac785321..72a18ec424f078f6daee75dbc62265c02ba7a892 100644
+--- a/test/test_git.rb
++++ b/test/test_git.rb
+@@ -10,6 +10,12 @@ class TestGit < Test::Unit::TestCase
+ end
+
+ def test_transform_options
++ assert_equal ["-s"], @git.transform_options({:s => true})
++ assert_equal ["-s 5"], @git.transform_options({:s => 5})
++
++ assert_equal ["--max-count"], @git.transform_options({:max_count => true})
+ assert_equal ["--max-count=5"], @git.transform_options({:max_count => 5})
++
++ assert_equal ["-t", "-s"], @git.transform_options({:s => true, :t => true})
+ end
+ end
+\ No newline at end of file
+diff --git a/test/test_repo.rb b/test/test_repo.rb
+index d53476a51e3286be270c7b515ec1d65e5c1716e0..114a4464fa248550be10cc4abe0735d6025b5fca 100644
+--- a/test/test_repo.rb
++++ b/test/test_repo.rb
+@@ -59,9 +59,11 @@ class TestRepo < Test::Unit::TestCase
+ assert_equal '4c8124ffcf4039d292442eeccabdeca5af5c5017', c.id
+ assert_equal ["634396b2f541a9f2d58b00be1a07f0c358b999b3"], c.parents.map { |p| p.id }
+ assert_equal "672eca9b7f9e09c22dcb128c283e8c3c8d7697a4", c.tree.id
+- assert_equal "Tom Preston-Werner <tom@mojombo.com>", c.author
++ assert_equal "Tom Preston-Werner", c.author.name
++ assert_equal "tom@mojombo.com", c.author.email
+ assert_equal Time.at(1191999972), c.authored_date
+- assert_equal "Tom Preston-Werner <tom@mojombo.com>", c.committer
++ assert_equal "Tom Preston-Werner", c.committer.name
++ assert_equal "tom@mojombo.com", c.committer.email
+ assert_equal Time.at(1191999972), c.committed_date
+ assert_equal "implement Grit#heads", c.message
+
+@@ -125,4 +127,18 @@ class TestRepo < Test::Unit::TestCase
+ def test_inspect
+ assert_equal %Q{#<GitPython.Repo "#{File.expand_path(GRIT_REPO)}/.git">}, @r.inspect
+ end
+-end
+\ No newline at end of file
++
++ # log
++
++ def test_log
++ Git.any_instance.expects(:log).times(2).with({:pretty => 'raw'}, 'master').returns(fixture('rev_list'))
++
++ assert_equal '4c8124ffcf4039d292442eeccabdeca5af5c5017', @r.log.first.id
++ assert_equal 'ab25fd8483882c3bda8a458ad2965d2248654335', @r.log.last.id
++ end
++
++ def test_log_with_path_and_options
++ Git.any_instance.expects(:log).with({:pretty => 'raw', :max_count => 1}, 'master -- file.rb').returns(fixture('rev_list'))
++ @r.log('master', 'file.rb', :max_count => 1)
++ end
++end
diff --git a/git/test/fixtures/diff_rename b/git/test/fixtures/diff_rename
new file mode 100644
index 00000000..13abae0e
--- /dev/null
+++ b/git/test/fixtures/diff_rename
@@ -0,0 +1,12 @@
+commit 2524c44334a8ba6b2ab8f3f0a478f04c5b073cc8
+tree e126e7b4203dadf083f5eb8e2f34c255b51d8bee
+parent d789e23b9ea8d90221d13c46f7c228d729385f92
+author Michael Trier <mtrier@gmail.com> 1229389391 -0500
+committer Michael Trier <mtrier@gmail.com> 1229389391 -0500
+
+ Renamed AUTHORS to CONTRIBUTORS because it's cooler.
+
+diff --git a/AUTHORS b/CONTRIBUTORS
+similarity index 100%
+rename from AUTHORS
+rename to CONTRIBUTORS
diff --git a/git/test/fixtures/diff_tree_numstat_root b/git/test/fixtures/diff_tree_numstat_root
new file mode 100644
index 00000000..bebdaa6d
--- /dev/null
+++ b/git/test/fixtures/diff_tree_numstat_root
@@ -0,0 +1,3 @@
+634396b2f541a9f2d58b00be1a07f0c358b999b3
+18 29 a.txt
+5 0 b.txt
diff --git a/git/test/fixtures/for_each_ref_with_path_component b/git/test/fixtures/for_each_ref_with_path_component
new file mode 100644
index 00000000..e723b4ae
--- /dev/null
+++ b/git/test/fixtures/for_each_ref_with_path_component
Binary files differ
diff --git a/git/test/fixtures/git_config b/git/test/fixtures/git_config
new file mode 100644
index 00000000..3c91985f
--- /dev/null
+++ b/git/test/fixtures/git_config
@@ -0,0 +1,23 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = false
+ logallrefupdates = true
+[remote "origin"]
+ fetch = +refs/heads/*:refs/remotes/origin/*
+ url = git://gitorious.org/~byron/git-python/byrons-clone.git
+ pushurl = git@gitorious.org:~byron/git-python/byrons-clone.git
+[branch "master"]
+ remote = origin
+ merge = refs/heads/master
+[remote "mainline"]
+ url = git://gitorious.org/git-python/mainline.git
+ fetch = +refs/heads/*:refs/remotes/mainline/*
+[remote "MartinMarcher"]
+ url = git://gitorious.org/~martin.marcher/git-python/serverhorror.git
+ fetch = +refs/heads/*:refs/remotes/MartinMarcher/*
+[gui]
+ geometry = 1316x820+219+243 207 192
+[branch "mainline_performance"]
+ remote = mainline
+ merge = refs/heads/master
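
The format here is the usual git config shape: [section "subsection"] headers followed by indented key = value pairs. GitPython's real reader lives in git/config.py; a throwaway sketch of the format itself:

  section = nil
  values  = {}
  fixture.each_line do |line|
    case line
    when /^\[(.+)\]/              then section = $1  # e.g. 'remote "origin"'
    when /^\s+(\S+)\s*=\s*(.+)$/  then values[[section, $1]] = $2.strip
    end
  end
  values[['remote "origin"', "url"]]  # => "git://gitorious.org/~byron/git-python/byrons-clone.git"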
diff --git a/git/test/fixtures/git_config_global b/git/test/fixtures/git_config_global
new file mode 100644
index 00000000..1a55397f
--- /dev/null
+++ b/git/test/fixtures/git_config_global
@@ -0,0 +1,24 @@
+[alias]
+ st = status
+ ci = commit
+ co = checkout
+ br = branch
+[color]
+ branch = auto
+ diff = auto
+ interactive = auto
+ status = auto
+[user]
+ name = Sebastian Thiel
+ email = byronimo@gmail.com
+[core]
+ editor = vim
+ autocrlf = false
+ packedGitLimit = 1g
+ packedGitWindowSize = 512m
+[pack]
+ windowMemory = 512m
+[merge]
+ tool = meld
+[diff]
+ tool = meld
diff --git a/git/test/fixtures/index b/git/test/fixtures/index
new file mode 100644
index 00000000..40914bac
--- /dev/null
+++ b/git/test/fixtures/index
Binary files differ
diff --git a/git/test/fixtures/index_merge b/git/test/fixtures/index_merge
new file mode 100644
index 00000000..2a743455
--- /dev/null
+++ b/git/test/fixtures/index_merge
Binary files differ
diff --git a/git/test/fixtures/ls_tree_a b/git/test/fixtures/ls_tree_a
new file mode 100644
index 00000000..69b76f4a
--- /dev/null
+++ b/git/test/fixtures/ls_tree_a
@@ -0,0 +1,7 @@
+100644 blob 81d2c27608b352814cbe979a6acd678d30219678 History.txt
+100644 blob 641972d82c6d1b51122274ae8f6a0ecdfb56ee22 Manifest.txt
+100644 blob 8b1e02c0fb554eed2ce2ef737a68bb369d7527df README.txt
+100644 blob 735d7338b7cb208563aa282f0376c5c4049453a7 Rakefile
+040000 tree c3d07b0083f01a6e1ac969a0f32b8d06f20c62e5 bin
+040000 tree aa06ba24b4e3f463b3c4a85469d0fb9e5b421cf8 lib
+040000 tree 650fa3f0c17f1edb4ae53d8dcca4ac59d86e6c44 test
diff --git a/git/test/fixtures/ls_tree_b b/git/test/fixtures/ls_tree_b
new file mode 100644
index 00000000..329aff39
--- /dev/null
+++ b/git/test/fixtures/ls_tree_b
@@ -0,0 +1,2 @@
+100644 blob aa94e396335d2957ca92606f909e53e7beaf3fbb grit.rb
+040000 tree 34868e6e7384cb5ee51c543a8187fdff2675b5a7 grit
diff --git a/git/test/fixtures/ls_tree_commit b/git/test/fixtures/ls_tree_commit
new file mode 100644
index 00000000..d97aca04
--- /dev/null
+++ b/git/test/fixtures/ls_tree_commit
@@ -0,0 +1,3 @@
+040000 tree 2afb47bcedf21663580d5e6d2f406f08f3f65f19 foo
+160000 commit d35b34c6e931b9da8f6941007a92c9c9a9b0141a bar
+040000 tree f623ee576a09ca491c4a27e48c0dfe04be5f4a2e baz
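
ls-tree lines are mode, type, sha, then the entry name (tab-separated from the rest in raw output); whitespace splitting with a limit handles both forms. Note the 160000/commit entry, which is how a submodule-style pointer shows up:

  fixture.each_line do |line|
    mode, type, sha, name = line.strip.split(nil, 4)
    # second line above => mode "160000", type "commit", name "bar"
  end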
diff --git a/git/test/fixtures/reflog_HEAD b/git/test/fixtures/reflog_HEAD
new file mode 100644
index 00000000..7b2272ac
--- /dev/null
+++ b/git/test/fixtures/reflog_HEAD
@@ -0,0 +1,460 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
+69361d96a59381fde0ac34d19df2d4aff05fb9a9 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1272612605 +0200 checkout: moving from 69361d96a59381fde0ac34d19df2d4aff05fb9a9 to integration
+69361d96a59381fde0ac34d19df2d4aff05fb9a9 b75c3103a700ac65b6cd18f66e2d0a07cfc09797 Sebastian Thiel <byronimo@gmail.com> 1272612605 +0200 pull git://gitorious.org/git-python/mainline.git refs/merge-requests/14: Merge made by recursive.
+b75c3103a700ac65b6cd18f66e2d0a07cfc09797 0d6ceabf5b90e7c0690360fc30774d36644f563c Sebastian Thiel <byronimo@gmail.com> 1272614223 +0200 commit: Added additional tz_offset testing in performance test to call it more often.
+0d6ceabf5b90e7c0690360fc30774d36644f563c 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1272614242 +0200 checkout: moving from integration to master
+69361d96a59381fde0ac34d19df2d4aff05fb9a9 0d6ceabf5b90e7c0690360fc30774d36644f563c Sebastian Thiel <byronimo@gmail.com> 1272614247 +0200 merge integration: Fast-forward
+0d6ceabf5b90e7c0690360fc30774d36644f563c 997b7611dc5ec41d0e3860e237b530f387f3524a Sebastian Thiel <byronimo@gmail.com> 1272874921 +0200 checkout: moving from master to 997b7611dc5ec41d0e3860e237b530f387f3524a
+997b7611dc5ec41d0e3860e237b530f387f3524a 0d6ceabf5b90e7c0690360fc30774d36644f563c Sebastian Thiel <byronimo@gmail.com> 1272919096 +0200 checkout: moving from 997b7611dc5ec41d0e3860e237b530f387f3524a to master
+22a0289972b365b7912340501b52ca3dd98be289 143b927307d46ccb8f1cc095739e9625c03c82ff Sebastian Thiel <byronimo@gmail.com> 1272988814 +0200 commit: TODO: Removed all entries but left a mesage about where to find the issuee on lighthouse.
+143b927307d46ccb8f1cc095739e9625c03c82ff e41c727be8dbf8f663e67624b109d9f8b135a4ab Sebastian Thiel <byronimo@gmail.com> 1273140152 +0200 commit: README: Added mailing list and issue tracker information
+c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba Sebastian Thiel <byronimo@gmail.com> 1273522280 +0200 checkout: moving from c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba to integration
+c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba de5bc8f7076c5736ef1efa57345564fbc563bd19 Sebastian Thiel <byronimo@gmail.com> 1273522570 +0200 commit: Handle filenames with embedded spaces when generating diffs
+de5bc8f7076c5736ef1efa57345564fbc563bd19 8caeec1b15645fa53ec5ddc6e990e7030ffb7c5a Sebastian Thiel <byronimo@gmail.com> 1273529174 +0200 commit: IndexFile.add: Fixed incorrect path handling if path rewriting was desired and absolute paths were given
+600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c de5bc8f7076c5736ef1efa57345564fbc563bd19 Sebastian Thiel <byronimo@gmail.com> 1274808939 +0200 checkout: moving from master to master~2
+de5bc8f7076c5736ef1efa57345564fbc563bd19 600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c Sebastian Thiel <byronimo@gmail.com> 1274808999 +0200 checkout: moving from de5bc8f7076c5736ef1efa57345564fbc563bd19 to master
+600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba Sebastian Thiel <byronimo@gmail.com> 1274809635 +0200 checkout: moving from master to HEAD~3
+c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba 600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c Sebastian Thiel <byronimo@gmail.com> 1274809694 +0200 checkout: moving from c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba to master
+600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1274811103 +0200 commit: diff: by limiting the splitcount to 5, a subtle bug was introduced as the newline at the end of the split line was not split away automatically. Added test for this, and the trivial fix
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af 17af1f64d5f1e62d40e11b75b1dd48e843748b49 Sebastian Thiel <byronimo@gmail.com> 1274877948 +0200 commit: BlockingLockFile: added sanity check that raises IOError if the directory containing the lock was removed. This is unlikely to happen in a production envrironment, but may happen during testing, as folders are moved/deleted once the test is complete. Daemons might still be waiting for something, and they should be allowed to terminate instead of waiting for a possibly long time
+17af1f64d5f1e62d40e11b75b1dd48e843748b49 34ba8ffba0b3b4d21da7bcea594cc3631e422142 Sebastian Thiel <byronimo@gmail.com> 1274906080 +0200 commit: refs: a Reference can now be created by assigning a commit or object (for convenience)
+34ba8ffba0b3b4d21da7bcea594cc3631e422142 11dc82538cc1ebb537c866c8e76146e384cdfe24 Sebastian Thiel <byronimo@gmail.com> 1274906333 +0200 commit: refs: a Reference can now be created by assigning a commit or object (for convenience)
+11dc82538cc1ebb537c866c8e76146e384cdfe24 34ba8ffba0b3b4d21da7bcea594cc3631e422142 Sebastian Thiel <byronimo@gmail.com> 1274906338 +0200 HEAD~1: updating HEAD
+34ba8ffba0b3b4d21da7bcea594cc3631e422142 de84cbdd0f9ef97fcd3477b31b040c57192e28d9 Sebastian Thiel <byronimo@gmail.com> 1274906431 +0200 commit (amend): refs: a Reference can now be created by assigning a commit or object (for convenience)
+de84cbdd0f9ef97fcd3477b31b040c57192e28d9 ecf37a1b4c2f70f1fc62a6852f40178bf08b9859 Sebastian Thiel <byronimo@gmail.com> 1274910053 +0200 commit: index: index-add fixed to always append a newline after each item. In git has unified its way it reads from stdin, now it wants all items to be terminated by a newline usually. Previously, it could have been that it really didn't want to have a termination character when the last item was written to the file. Bumped the minimum requirements to 1.7.0 to be sure it is working as I think it will.
+ecf37a1b4c2f70f1fc62a6852f40178bf08b9859 1ee2afb00afaf77c883501eac8cd614c8229a444 Sebastian Thiel <byronimo@gmail.com> 1274914700 +0200 commit: cmd: By default, on linux, the parent file handles will be closed to leave the child less cluttered, and make it easier to debug as it will only have the file descriptors we set. It appears to be more stable regarding the stdin-is-closed-but-child-doesn't-realize-this issue
+1ee2afb00afaf77c883501eac8cd614c8229a444 bd45e9267ab0d3f37e59ecc8b87d0ad19abad4ad Sebastian Thiel <byronimo@gmail.com> 1275324366 +0200 commit: gitcmd: may now receive extra keyword arguments to be passed directly to the subproces.Popen invocation. It could be used to pass custom environments, without changing the own one
+bd45e9267ab0d3f37e59ecc8b87d0ad19abad4ad 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275324409 +0200 commit (amend): gitcmd: may now receive extra keyword arguments to be passed directly to the subproces.Popen invocation. It could be used to pass custom environments, without changing the own one (#26)
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275417756 +0200 checkout: moving from master to commit
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e 14212649c0c48d0a7e5a83430873cae20aad4c83 Sebastian Thiel <byronimo@gmail.com> 1275432496 +0200 commit: commit: initial version of commit_from_tree which could create commit objects if it could serialize itself
+14212649c0c48d0a7e5a83430873cae20aad4c83 7f6aa55077819e04dace82bc3ffbdea641b3e9ce Sebastian Thiel <byronimo@gmail.com> 1275432507 +0200 commit: commit: initial version of commit_from_tree which could create commit objects if it could serialize itself
+7f6aa55077819e04dace82bc3ffbdea641b3e9ce 14212649c0c48d0a7e5a83430873cae20aad4c83 Sebastian Thiel <byronimo@gmail.com> 1275432513 +0200 HEAD~1: updating HEAD
+14212649c0c48d0a7e5a83430873cae20aad4c83 1a0ec7154ea961d68ecfd4dec50f9fc1718686a2 Sebastian Thiel <byronimo@gmail.com> 1275433336 +0200 commit (amend): commit: initial version of commit_from_tree which could create commit objects if it could serialize itself
+1a0ec7154ea961d68ecfd4dec50f9fc1718686a2 df0892351a394d768489b5647d47b73c24d3ef5f Sebastian Thiel <byronimo@gmail.com> 1275433456 +0200 commit (amend): commit: initial version of commit_from_tree which could create commit objects if it could serialize itself
+df0892351a394d768489b5647d47b73c24d3ef5f df0892351a394d768489b5647d47b73c24d3ef5f Sebastian Thiel <byronimo@gmail.com> 1275474032 +0200 checkout: moving from commit to odb
+df0892351a394d768489b5647d47b73c24d3ef5f 0e88ee839eaa5966f0d652372247fd14d80f9bb3 Sebastian Thiel <byronimo@gmail.com> 1275474633 +0200 commit: commit: refactored existing code to decode commits from streams - unfortunately this involves a little bit more python involvement currently, so performance might be slightly worse than before atm
+0e88ee839eaa5966f0d652372247fd14d80f9bb3 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275474676 +0200 checkout: moving from odb to master
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e 0e88ee839eaa5966f0d652372247fd14d80f9bb3 Sebastian Thiel <byronimo@gmail.com> 1275474732 +0200 checkout: moving from master to odb
+0e88ee839eaa5966f0d652372247fd14d80f9bb3 714e42d6315806dff61d39d8750ef8b250fb8d82 Sebastian Thiel <byronimo@gmail.com> 1275475614 +0200 commit (amend): commit: refactored existing code to decode commits from streams - performance is slightly better
+714e42d6315806dff61d39d8750ef8b250fb8d82 8c1a87d11df666d308d14e4ae7ee0e9d614296b6 Sebastian Thiel <byronimo@gmail.com> 1275475865 +0200 commit (amend): commit: refactored existing code to decode commits from streams - performance is slightly better
+8c1a87d11df666d308d14e4ae7ee0e9d614296b6 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275475911 +0200 checkout: moving from odb to master
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e 8c1a87d11df666d308d14e4ae7ee0e9d614296b6 Sebastian Thiel <byronimo@gmail.com> 1275475929 +0200 checkout: moving from master to odb
+8c1a87d11df666d308d14e4ae7ee0e9d614296b6 8c1a87d11df666d308d14e4ae7ee0e9d614296b6 Sebastian Thiel <byronimo@gmail.com> 1275476474 +0200 checkout: moving from odb to perftest
+8c1a87d11df666d308d14e4ae7ee0e9d614296b6 4a25347d7f4c371345da2348ac6cceec7a143da2 Sebastian Thiel <byronimo@gmail.com> 1275476487 +0200 commit: Added commit-iteration test
+4a25347d7f4c371345da2348ac6cceec7a143da2 4e1c89ec97ec90037583e85d0e9e71e9c845a19b Sebastian Thiel <byronimo@gmail.com> 1275488012 +0200 commit: Added performance testing foundation library, reworked existing performance tests to work on larger repositories
+4e1c89ec97ec90037583e85d0e9e71e9c845a19b ae5a69f67822d81bbbd8f4af93be68703e730b37 Sebastian Thiel <byronimo@gmail.com> 1275489688 +0200 commit: commit: redesigned revlist and commit parsing, commits are always retrieved from their object information directly. This is faster, and resolves issues with the rev-list format and empty commit messages
+ae5a69f67822d81bbbd8f4af93be68703e730b37 02004a7ea4d26dc45f194d3a34780a50634ef497 Sebastian Thiel <byronimo@gmail.com> 1275493157 +0200 commit: git.cmd: added test for stream section constraint used in git command, found bug of course which just didn't kick in yet
+02004a7ea4d26dc45f194d3a34780a50634ef497 ae5a69f67822d81bbbd8f4af93be68703e730b37 Sebastian Thiel <byronimo@gmail.com> 1275493169 +0200 HEAD~1: updating HEAD
+ae5a69f67822d81bbbd8f4af93be68703e730b37 538820055ce1bf9dd07ecda48210832f96194504 Sebastian Thiel <byronimo@gmail.com> 1275493189 +0200 commit: git.cmd: added test for stream section constraint used in git command, found bug of course which just didn't kick in yet
+538820055ce1bf9dd07ecda48210832f96194504 282018b79cc8df078381097cb3aeb29ff56e83c6 Sebastian Thiel <byronimo@gmail.com> 1275502260 +0200 commit: Added first design and frame for object database. In a first step, loose objects will be written using our utilities, and certain object retrieval functionality moves into the GitObjectDatabase which is used by the repo instance
+282018b79cc8df078381097cb3aeb29ff56e83c6 8c1a87d11df666d308d14e4ae7ee0e9d614296b6 Sebastian Thiel <byronimo@gmail.com> 1275502285 +0200 checkout: moving from perftest to odb
+8c1a87d11df666d308d14e4ae7ee0e9d614296b6 282018b79cc8df078381097cb3aeb29ff56e83c6 Sebastian Thiel <byronimo@gmail.com> 1275502288 +0200 merge perftest: Fast-forward
+282018b79cc8df078381097cb3aeb29ff56e83c6 8b86f9b399a8f5af792a04025fdeefc02883f3e5 Sebastian Thiel <byronimo@gmail.com> 1275511252 +0200 commit: initial version of loose object writing and simple cached object lookup appears to be working
+8b86f9b399a8f5af792a04025fdeefc02883f3e5 6f8ce8901e21587cd2320562df412e05b5ab1731 Sebastian Thiel <byronimo@gmail.com> 1275515609 +0200 commit: added frame for object reading, including simple test
+6f8ce8901e21587cd2320562df412e05b5ab1731 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275549198 +0200 checkout: moving from odb to master
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e e79999c956e2260c37449139080d351db4aa3627 Sebastian Thiel <byronimo@gmail.com> 1275549608 +0200 commit: git.cmd: moved hardcoded chunksize when duplicating stream data into easy-to-change class member variable
+e79999c956e2260c37449139080d351db4aa3627 6f8ce8901e21587cd2320562df412e05b5ab1731 Sebastian Thiel <byronimo@gmail.com> 1275549707 +0200 checkout: moving from master to odb
+6f8ce8901e21587cd2320562df412e05b5ab1731 e79999c956e2260c37449139080d351db4aa3627 Sebastian Thiel <byronimo@gmail.com> 1275550120 +0200 checkout: moving from odb to master
+e79999c956e2260c37449139080d351db4aa3627 412632599479a8e5991a07ecb67bc52b85c60755 Sebastian Thiel <byronimo@gmail.com> 1275550524 +0200 commit: git.cmd: using communicate in the main branch of execution, which might not make a big difference, but perhaps its smarter about broken pipes.
+412632599479a8e5991a07ecb67bc52b85c60755 25dca42bac17d511b7e2ebdd9d1d679e7626db5f Sebastian Thiel <byronimo@gmail.com> 1275550670 +0200 commit (amend): git.cmd: using communicate in the main branch of execution, which might not make a big difference, but perhaps its smarter about broken pipes.
+25dca42bac17d511b7e2ebdd9d1d679e7626db5f 6f8ce8901e21587cd2320562df412e05b5ab1731 Sebastian Thiel <byronimo@gmail.com> 1275551315 +0200 checkout: moving from master to odb
+6f8ce8901e21587cd2320562df412e05b5ab1731 38d59fc8ccccae8882fa48671377bf40a27915a7 Sebastian Thiel <byronimo@gmail.com> 1275575735 +0200 commit: odb: implemented loose object streaming, which is impossible to do efficiently considering that it copies string buffers all the time
+38d59fc8ccccae8882fa48671377bf40a27915a7 26e138cb47dccc859ff219f108ce9b7d96cbcbcd Sebastian Thiel <byronimo@gmail.com> 1275582065 +0200 commit: odb: fixed streamed decompression reader ( specific tests would still be missing ) and added performance tests which are extremely promising
+26e138cb47dccc859ff219f108ce9b7d96cbcbcd 4295787b65d4a85ac1e0e20741aa59ec19a97353 Sebastian Thiel <byronimo@gmail.com> 1275584658 +0200 commit: Added performance comparison to cgit ... and yes, git-python is faster :)
+4295787b65d4a85ac1e0e20741aa59ec19a97353 4b4a514e51fbc7dc6ddcb27c188159d57b5d1fa9 Sebastian Thiel <byronimo@gmail.com> 1275590443 +0200 commit (amend): Added performance comparison to cgit ... and yes, git-python is faster :)
+4b4a514e51fbc7dc6ddcb27c188159d57b5d1fa9 1e2b46138ba58033738a24dadccc265748fce2ca Sebastian Thiel <byronimo@gmail.com> 1275600034 +0200 commit: commit.create_from_tree now uses pure python implementation, fixed message parsing which truncated newlines although it was ilegitimate. Its up to the reader to truncate therse, nowhere in the git code I could find anyone adding newlines to commits where it is written
+1e2b46138ba58033738a24dadccc265748fce2ca 1906ee4df9ae4e734288c5203cf79894dff76cab Sebastian Thiel <byronimo@gmail.com> 1275600429 +0200 commit: Fixed compatability issues with python 2.5, made sure all tests run
+1906ee4df9ae4e734288c5203cf79894dff76cab b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 Sebastian Thiel <byronimo@gmail.com> 1275602940 +0200 commit: db: implemented GitObjectDB using the git command to make sure we can lookup everything. Next is to implement pack-file reading, then alternates which should allow to resolve everything
+b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 Sebastian Thiel <byronimo@gmail.com> 1275643039 +0200 checkout: moving from odb to asyncredesign
+b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 a1e80445ad5cb6da4c0070d7cb8af89da3b0803b Sebastian Thiel <byronimo@gmail.com> 1275655275 +0200 commit: initial version of new odb design to facilitate a channel based multi-threading implementation of all odb functions
+a1e80445ad5cb6da4c0070d7cb8af89da3b0803b e746f96bcc29238b79118123028ca170adc4ff0f Sebastian Thiel <byronimo@gmail.com> 1275664928 +0200 commit: Fixed implementation after design change to deal with it - all tests run, but next there will have to be more through testing
+e746f96bcc29238b79118123028ca170adc4ff0f b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 Sebastian Thiel <byronimo@gmail.com> 1275664939 +0200 checkout: moving from asyncredesign to odb
+b01ca6a3e4ae9d944d799743c8ff774e2a7a82b6 e746f96bcc29238b79118123028ca170adc4ff0f Sebastian Thiel <byronimo@gmail.com> 1275664943 +0200 merge asyncredesign: Fast-forward
+e746f96bcc29238b79118123028ca170adc4ff0f 25dca42bac17d511b7e2ebdd9d1d679e7626db5f Sebastian Thiel <byronimo@gmail.com> 1275664990 +0200 checkout: moving from odb to master
+25dca42bac17d511b7e2ebdd9d1d679e7626db5f 6fbb69306c0e14bacb8dcb92a89af27d3d5d631f Sebastian Thiel <byronimo@gmail.com> 1275665431 +0200 commit (merge): Merge branch 'odb'
+6fbb69306c0e14bacb8dcb92a89af27d3d5d631f a243827ab3346e188e99db2f9fc1f916941c9b1a Sebastian Thiel <byronimo@gmail.com> 1275685591 +0200 commit: Implemented stream tests, found a bug on the way, slowly a test-framework for streams starts to show up, but its not yet there
+a243827ab3346e188e99db2f9fc1f916941c9b1a 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1275690001 +0200 commit: Removed compression flag from IStream and OStream types, as a valid object will always be compressed if generated by the system ( even future memory db's will compress it )
+7c1169f6ea406fec1e26e99821e18e66437e65eb c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca Sebastian Thiel <byronimo@gmail.com> 1275746174 +0200 commit: Added basic channel implementation including test
+c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca Sebastian Thiel <byronimo@gmail.com> 1275746191 +0200 checkout: moving from master to async
+c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca Sebastian Thiel <byronimo@gmail.com> 1275746194 +0200 checkout: moving from async to master
+c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1275746196 +0200 HEAD~1: updating HEAD
+7c1169f6ea406fec1e26e99821e18e66437e65eb c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca Sebastian Thiel <byronimo@gmail.com> 1275746213 +0200 checkout: moving from master to async
+c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca 65c9fe0baa579173afa5a2d463ac198d06ef4993 Sebastian Thiel <byronimo@gmail.com> 1275746839 +0200 commit: A code donation: Donating a worker thread implementation inclduding tests to Git-Python. I have the feeling it can do much good here :)
+65c9fe0baa579173afa5a2d463ac198d06ef4993 50e469109eed3a752d9a1b0297f16466ad92f8d2 Sebastian Thiel <byronimo@gmail.com> 1275755186 +0200 commit: Initial pool design added, allowing for lazy channel based evaluation of inter-dependent tasks
+50e469109eed3a752d9a1b0297f16466ad92f8d2 61138f2ece0cb864b933698174315c34a78835d1 Sebastian Thiel <byronimo@gmail.com> 1275760757 +0200 commit: Moved multiprocessing modules into own package, as they in fact have nothing to do with the object db. If that really works the way I want, it will become an own project, called async
+61138f2ece0cb864b933698174315c34a78835d1 ab59f78341f1dd188aaf4c30526f6295c63438b1 Sebastian Thiel <byronimo@gmail.com> 1275760989 +0200 commit: Renamed mp to async, as this is a much better name for what is actually going on. The default implementation uses threads, which ends up being nothing more than async, as they are all locked down by internal and the global interpreter lock
+ab59f78341f1dd188aaf4c30526f6295c63438b1 b72e2704022d889f116e49abf3e1e5d3e3192d3b Sebastian Thiel <byronimo@gmail.com> 1275778812 +0200 commit: Improved pool design and started rough implementation, top down to learn while going. Tests will be written soon for verification, its still quite theoretical
+b72e2704022d889f116e49abf3e1e5d3e3192d3b ec28ad575ce1d7bb6a616ffc404f32bbb1af67b2 Sebastian Thiel <byronimo@gmail.com> 1275821305 +0200 commit: thread: adjusted worker thread not to provide an output queue anymore - this is handled by the task system
+ec28ad575ce1d7bb6a616ffc404f32bbb1af67b2 b3cde0ee162b8f0cb67da981311c8f9c16050a62 Sebastian Thiel <byronimo@gmail.com> 1275840801 +0200 commit: First step of testing the pool - tasks have been separated into a new module including own tests, their design improved to prepare them for some specifics that would be needed for multiprocessing support
+b3cde0ee162b8f0cb67da981311c8f9c16050a62 8d74950510bbd74aa06afe4ec4c19e4739462d6a Sebastian Thiel <byronimo@gmail.com> 1275851713 +0200 commit: Plenty of fixes in the chunking routine, made possible by a serialized chunking test. Next up, actual async processing
+8d74950510bbd74aa06afe4ec4c19e4739462d6a 1b27292936c81637f6b9a7141dafaad1126f268e Sebastian Thiel <byronimo@gmail.com> 1275852711 +0200 commit (amend): Plenty of fixes in the chunking routine, made possible by a serialized chunking test. Next up, actual async processing
+1b27292936c81637f6b9a7141dafaad1126f268e 867129e2950458ab75523b920a5e227e3efa8bbc Sebastian Thiel <byronimo@gmail.com> 1275858486 +0200 commit: channel.read: enhanced to be sure we don't run into non-atomicity issues related to our channel closed flag, which is the only way not to block forever on read(0) channels which were closed by a thread 'in the meanwhile'
+867129e2950458ab75523b920a5e227e3efa8bbc 6335fe0abcedc99145dd1400509b7540568ac2cc Sebastian Thiel <byronimo@gmail.com> 1275860480 +0200 commit: pool: First version which works as expected in async mode. The task model is very simple still, but it's getting there
+6335fe0abcedc99145dd1400509b7540568ac2cc 320c5329995cc8d364a88ba83103e1db584410ce Sebastian Thiel <byronimo@gmail.com> 1275860784 +0200 commit (amend): pool: First version which works as expected in async mode. The task model is very simple still, but it's getting there
+320c5329995cc8d364a88ba83103e1db584410ce d759d0b97aaf5fd60a1df0ea0f60e67863a6c3d7 Sebastian Thiel <byronimo@gmail.com> 1275861899 +0200 commit (amend): pool: First version which works as expected in async mode. The task model is very simple still, but it's getting there
+d759d0b97aaf5fd60a1df0ea0f60e67863a6c3d7 6a252661c3bf4202a4d571f9c41d2afa48d9d75f Sebastian Thiel <byronimo@gmail.com> 1275861909 +0200 commit (amend): pool: First version which works as expected in async mode. It's just using a single task for now, but next up are dependent tasks
+6a252661c3bf4202a4d571f9c41d2afa48d9d75f a8a448b7864e21db46184eab0f0a21d7725d074f Sebastian Thiel <byronimo@gmail.com> 1275899902 +0200 commit: pool.consumed_tasks: is now a queue to be thread safe, in preparation for multiple connected pools
+a8a448b7864e21db46184eab0f0a21d7725d074f 856af48fbffaf1b935d513429afeb319e4795d2d Sebastian Thiel <byronimo@gmail.com> 1275905456 +0200 commit: changed scheduling and chunksize calculation with respect to the task.min_count. Previously, it would possibly not produce enough items in case T1 wants to produce fewer items than T2 needs ... in fact, it would work even then, committing this anyway
+856af48fbffaf1b935d513429afeb319e4795d2d 619662a9138fd78df02c52cae6dc89db1d70a0e5 Sebastian Thiel <byronimo@gmail.com> 1275905984 +0200 commit (amend): changed scheduling and chunksize calculation with respect to the task.min_count, to fix a theoretical option for a deadlock in serial mode, and unnecessary blocking in async mode
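
[Editor's note] The chunksize arithmetic the two entries above describe boils down to rounding a read request up to the consumer's min_count before splitting it across workers. A minimal sketch of that idea in Python; the function and parameter names here are illustrative, not the project's actual API:

    def chunk_size(requested, min_count, num_workers):
        # round the request up so a producer never emits fewer
        # items than a downstream task needs ...
        total = max(requested, min_count)
        # ... then spread the work evenly across the workers
        return max(1, total // num_workers)

    # e.g. a consumer needing at least 20 items served by 4 workers
    # reads in chunks of 5, even if only 8 items were requested
    assert chunk_size(8, min_count=20, num_workers=4) == 5
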
+619662a9138fd78df02c52cae6dc89db1d70a0e5 8c3c271b0d6b5f56b86e3f177caf3e916b509b52 Sebastian Thiel <byronimo@gmail.com> 1275908735 +0200 commit: Added task order cache, and a lock to prevent us walking the graph while changing tasks
+8c3c271b0d6b5f56b86e3f177caf3e916b509b52 edd9e23c766cfd51b3a6f6eee5aac0b791ef2fd0 Sebastian Thiel <byronimo@gmail.com> 1275923808 +0200 commit: added high-speed locking facilities, allowing our Queue to be faster, at least in tests, and with multiple threads. There is still a sync bug in regard to closed channels to be fixed, as the Task.set_done handling is incorrect
+edd9e23c766cfd51b3a6f6eee5aac0b791ef2fd0 583cd8807259a69fc01874b798f657c1f9ab7828 Sebastian Thiel <byronimo@gmail.com> 1275930764 +0200 commit: Moved pool utilities into util module, fixed critical issue that caused havoc - let's call this a safe state
+583cd8807259a69fc01874b798f657c1f9ab7828 654e54d200135e665e07e9f0097d913a77f169da Sebastian Thiel <byronimo@gmail.com> 1275933662 +0200 commit: task: Fixed incorrect handling of channel closure. Performance is alright for up to 2 threads, but 4 are killing the queue
+654e54d200135e665e07e9f0097d913a77f169da be06e87433685b5ea9cfcc131ab89c56cf8292f2 Sebastian Thiel <byronimo@gmail.com> 1275940847 +0200 commit: improved testing to test the actual async handling of the pool. There are still inconsistencies that need to be fixed, but it has already improved, especially the 4-thread performance, which is now as fast as the dual-threaded performance
+be06e87433685b5ea9cfcc131ab89c56cf8292f2 be06e87433685b5ea9cfcc131ab89c56cf8292f2 Sebastian Thiel <byronimo@gmail.com> 1275945495 +0200 checkout: moving from async to stasks
+be06e87433685b5ea9cfcc131ab89c56cf8292f2 223701e19722afb0f57fc0de6e366ade542efdc0 Sebastian Thiel <byronimo@gmail.com> 1275945637 +0200 commit: introduced a new counter keeping track of the scheduled tasks - this prevents unnecessary tasks from being scheduled, as we keep track of how many items will be produced for the task at hand. This introduces additional locking, but performs well in multithreaded mode. Performance of the master queue is still a huge issue; it's currently the limiting factor, as bypassing the master queue in serial mode gives 15x performance, which is what I would need
+223701e19722afb0f57fc0de6e366ade542efdc0 def0f73989047c4ddf9b11da05ad2c9c8e387331 Sebastian Thiel <byronimo@gmail.com> 1275946081 +0200 commit (amend): introduced a new counter keeping track of the scheduled tasks - this prevents unnecessary tasks from being scheduled, as we keep track of how many items will be produced for the task at hand. This introduces additional locking, but performs well in multithreaded mode. Performance of the master queue is still a huge issue; it's currently the limiting factor, as bypassing the master queue in serial mode gives 15x performance, which is what I would need
+def0f73989047c4ddf9b11da05ad2c9c8e387331 be06e87433685b5ea9cfcc131ab89c56cf8292f2 Sebastian Thiel <byronimo@gmail.com> 1275946086 +0200 checkout: moving from stasks to async
+be06e87433685b5ea9cfcc131ab89c56cf8292f2 be06e87433685b5ea9cfcc131ab89c56cf8292f2 Sebastian Thiel <byronimo@gmail.com> 1275946311 +0200 checkout: moving from async to brute
+be06e87433685b5ea9cfcc131ab89c56cf8292f2 293fa4de92c789d67de6a663d7b14a6897b14181 Sebastian Thiel <byronimo@gmail.com> 1275946483 +0200 commit: Removed qsize dependency when reading; now it puts onto the queue every time someone reads. This does not appear very stable for now, as one can, for some reason, deplete the channel, which can only happen if it's closed before all tasks finish, which should already be fixed
+293fa4de92c789d67de6a663d7b14a6897b14181 def0f73989047c4ddf9b11da05ad2c9c8e387331 Sebastian Thiel <byronimo@gmail.com> 1275946494 +0200 checkout: moving from brute to stasks
+def0f73989047c4ddf9b11da05ad2c9c8e387331 e825f8b69760e269218b1bf1991018baf3c16b04 Sebastian Thiel <byronimo@gmail.com> 1275946688 +0200 commit: Channel now uses the AsyncQueue, boosting performance by a factor of 4; it's a start
+e825f8b69760e269218b1bf1991018baf3c16b04 898d47d1711accdfded8ee470520fdb96fb12d46 Sebastian Thiel <byronimo@gmail.com> 1275947226 +0200 commit: Task scheduled items lock now uses a dummy lock in serial mode, improving its performance considerably.
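
[Editor's note] The 'dummy lock in serial mode' trick in the entry above is a small, general pattern: when no worker threads run, a no-op lock replaces the real one so the hot path pays nothing for synchronization. A minimal sketch under that assumption; the names are illustrative:

    import threading

    class DummyLock(object):
        """No-op stand-in used when only one thread touches the data."""
        def acquire(self):
            pass
        def release(self):
            pass
        def __enter__(self):
            return self
        def __exit__(self, *exc_info):
            return False

    def make_lock(num_worker_threads):
        # serial mode (zero workers) needs no real synchronization
        return threading.Lock() if num_worker_threads else DummyLock()
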
+898d47d1711accdfded8ee470520fdb96fb12d46 be06e87433685b5ea9cfcc131ab89c56cf8292f2 Sebastian Thiel <byronimo@gmail.com> 1275947355 +0200 checkout: moving from stasks to async
+be06e87433685b5ea9cfcc131ab89c56cf8292f2 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1275947360 +0200 merge stasks: Merge made by recursive.
+3e2ba9c2028f21d11988558f3557905d21e93808 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1275948503 +0200 checkout: moving from async to master
+7c1169f6ea406fec1e26e99821e18e66437e65eb 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1275948509 +0200 checkout: moving from master to async
+3e2ba9c2028f21d11988558f3557905d21e93808 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1275948864 +0200 checkout: moving from async to queue
+3e2ba9c2028f21d11988558f3557905d21e93808 5d996892ac76199886ba3e2754ff9c9fac2456d6 Sebastian Thiel <byronimo@gmail.com> 1275949953 +0200 commit: test implementation of async-queue with everything stripped from it that didn't seem necessary - it's a failure, something is wrong - performance is not much better than the original one; it actually depends on the condition's performance, which I can't get any faster
+5d996892ac76199886ba3e2754ff9c9fac2456d6 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1275949960 +0200 checkout: moving from queue to async
+3e2ba9c2028f21d11988558f3557905d21e93808 5d996892ac76199886ba3e2754ff9c9fac2456d6 Sebastian Thiel <byronimo@gmail.com> 1275979377 +0200 checkout: moving from async to queue
+5d996892ac76199886ba3e2754ff9c9fac2456d6 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1275979426 +0200 checkout: moving from queue to async
+3e2ba9c2028f21d11988558f3557905d21e93808 5d996892ac76199886ba3e2754ff9c9fac2456d6 Sebastian Thiel <byronimo@gmail.com> 1275979446 +0200 checkout: moving from async to queue
+5d996892ac76199886ba3e2754ff9c9fac2456d6 f32ef32960ae8aa8a20c00cd3f7e78b441ee664b Sebastian Thiel <byronimo@gmail.com> 1275986714 +0200 commit: both versions of the async queue still have trouble in certain situations, at least with my totally rewritten version of the condition - the previous one was somewhat more stable, it seems
+f32ef32960ae8aa8a20c00cd3f7e78b441ee664b 09c3f39ceb545e1198ad7a3f470d4ec896ce1add Sebastian Thiel <byronimo@gmail.com> 1275986721 +0200 commit (amend): both versions of the async queue still have trouble in certain situations, at least with my totally rewritten version of the condition - the previous one was somewhat more stable, it seems. Nonetheless, this is the fastest version so far
+09c3f39ceb545e1198ad7a3f470d4ec896ce1add 3776f7a766851058f6435b9f606b16766425d7ca Sebastian Thiel <byronimo@gmail.com> 1275996284 +0200 commit: The new channel design actually works, but it also shows that it's located at the wrong spot. The channel is nothing more than an adapter allowing multiple items to be read from a thread-safe queue; the queue itself though must be 'closable' for writing, or needs something like a writable flag.
+3776f7a766851058f6435b9f606b16766425d7ca 53152a824f5186452504f0b68306d10ebebee416 Sebastian Thiel <byronimo@gmail.com> 1275999838 +0200 commit: queue: adjusted queue to be closable ( without its own tests yet, except for the pool which runs it ) - it's not yet stable, but should be solvable.
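
[Editor's note] The 'closable queue' the two entries above circle around can be sketched independently of the project's real class: a queue whose writers can declare end-of-stream, so blocked readers drain the remainder instead of waiting forever. A minimal illustration:

    import collections
    import threading

    class ClosableQueue(object):
        """Queue that can be closed for writing; readers never block
        forever once close() has been called."""
        def __init__(self):
            self._items = collections.deque()
            self._not_empty = threading.Condition()
            self._closed = False

        def put(self, item):
            with self._not_empty:
                if self._closed:
                    raise ValueError("queue closed for writing")
                self._items.append(item)
                self._not_empty.notify()

        def close(self):
            with self._not_empty:
                self._closed = True
                self._not_empty.notify_all()   # wake all blocked readers

        def get(self):
            with self._not_empty:
                while not self._items and not self._closed:
                    self._not_empty.wait()
                if self._items:
                    return self._items.popleft()
                raise EOFError("queue depleted and closed")
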
+53152a824f5186452504f0b68306d10ebebee416 619c11787742ce00a0ee8f841cec075897873c79 Sebastian Thiel <byronimo@gmail.com> 1276008468 +0200 commit: It's getting better already - intermediate commit before further changing the task class
+619c11787742ce00a0ee8f841cec075897873c79 13dd59ba5b3228820841682b59bad6c22476ff66 Sebastian Thiel <byronimo@gmail.com> 1276010743 +0200 commit: task: now deletes itself once it's done - for the test this doesn't change a thing, as the task deletes itself too late - it's time for a paradigm change: the task should be deleted with its RPoolChannel or explicitly by the user. The test needs to adapt, and shouldn't assume anything unless the RPoolChannel is gone
+13dd59ba5b3228820841682b59bad6c22476ff66 e5c0002d069382db1768349bf0c5ff40aafbf140 Sebastian Thiel <byronimo@gmail.com> 1276014012 +0200 commit: Revised task deletion works well; adjusted test to create new tasks all the time instead of reusing its own one, as it was somewhat hard to manage its state over time and could cause bugs. It works okay, but it occasionally hangs on what appears to be an empty queue; I have to gradually put certain things back in, although in the current mode of operation it should never have empty queues from the pool to the user
+e5c0002d069382db1768349bf0c5ff40aafbf140 772b95631916223e472989b43f3a31f61e237f31 Sebastian Thiel <byronimo@gmail.com> 1276017933 +0200 commit: workerthread: adjusted to use a blocking queue; it will receive termination events only via its queue, which boosts performance into bright green levels
+772b95631916223e472989b43f3a31f61e237f31 3e2ba9c2028f21d11988558f3557905d21e93808 Sebastian Thiel <byronimo@gmail.com> 1276017957 +0200 checkout: moving from queue to async
+3e2ba9c2028f21d11988558f3557905d21e93808 f78d4a28f307a9d7943a06be9f919304c25ac2d9 Sebastian Thiel <byronimo@gmail.com> 1276017963 +0200 merge queue: Merge made by recursive.
+f78d4a28f307a9d7943a06be9f919304c25ac2d9 15941ca090a2c3c987324fc911bbc6f89e941c47 Sebastian Thiel <byronimo@gmail.com> 1276072452 +0200 commit: queue: fixed critical bug in the notify method, as it was not at all thread-safe, causing locks to be released multiple times. Now it runs very fast, and very stable apparently.
+15941ca090a2c3c987324fc911bbc6f89e941c47 f2c8d26d3b25b864ad48e6de018757266b59f708 Sebastian Thiel <byronimo@gmail.com> 1276075717 +0200 commit: thread: fixed initialization problem if an empty iterable was handed in
+f2c8d26d3b25b864ad48e6de018757266b59f708 2054561da184955c4be4a92f0b4fa5c5c1c01350 Sebastian Thiel <byronimo@gmail.com> 1276075884 +0200 commit: HSCondition: using a deque to store waiters, for further speedup
+2054561da184955c4be4a92f0b4fa5c5c1c01350 1090701721888474d34f8a4af28ee1bb1c3fdaaa Sebastian Thiel <byronimo@gmail.com> 1276076141 +0200 commit: HSCondition: now deriving from deque, as the AsyncQueue does, to eliminate one more level of indirection. Clearly this is not good from a design standpoint, as a Condition is not a Deque, but it helps speed things up, which is what this is about. Could make it a hidden class to indicate how 'special' it is
+1090701721888474d34f8a4af28ee1bb1c3fdaaa a988e6985849e4f6a561b4a5468d525c25ce74fe Sebastian Thiel <byronimo@gmail.com> 1276076725 +0200 commit: HSCondition: now gets a lock even in the single-notify case, as it was required due to the non-atomicity of the involved operation. Removed one level of indirection for the lock by refraining from calling my own 'wrapper' methods, which brought it back to the performance it had before the locking was introduced for the n==1 case
+a988e6985849e4f6a561b4a5468d525c25ce74fe 4e6bece08aea01859a232e99a1e1ad8cc1eb7d36 Sebastian Thiel <byronimo@gmail.com> 1276084911 +0200 commit: HSCondition: Fixed terrible bug which it inherited from its default python Condition implementation, related to the notify method not being thread-safe. Although I was aware of it, I missed the first check, which tests for the size - the result could be incorrect if the whole method wasn't locked.
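
[Editor's note] The HSCondition bug described above is worth a sketch: if notify() checks the waiter list outside the lock, two notifiers can race and release the same waiter twice. The essence of the fix, in illustrative form (not the project's actual class):

    import threading

    class SafeNotifyCondition(object):
        """Each waiter blocks on its own one-shot lock; the waiter-list
        check and the wake-up happen under one shared lock."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = []

        def wait(self):
            waiter = threading.Lock()
            waiter.acquire()                 # locked once: next acquire blocks
            with self._lock:
                self._waiters.append(waiter)
            waiter.acquire()                 # blocks until notify() releases it
            waiter.release()

        def notify(self):
            with self._lock:                 # size check and pop are atomic
                if not self._waiters:
                    return
                self._waiters.pop(0).release()
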
+4e6bece08aea01859a232e99a1e1ad8cc1eb7d36 ffb5b95cb2cec5c5a79234dfc47c3fcf1f724101 Sebastian Thiel <byronimo@gmail.com> 1276087661 +0200 commit: Channel: Read method revised - now it really doesn't block anymore, and it runs faster as well, at about 2/3 of the performance we have in serial mode
+ffb5b95cb2cec5c5a79234dfc47c3fcf1f724101 0974f8737a3c56a7c076f9d0b757c6cb106324fb Sebastian Thiel <byronimo@gmail.com> 1276087839 +0200 commit (amend): Channel: Read method revised - now it really doesn't block anymore, and it runs faster as well, at about 2/3 of the performance we have in serial mode
+0974f8737a3c56a7c076f9d0b757c6cb106324fb 57a4e09294230a36cc874a6272c71757c48139f2 Sebastian Thiel <byronimo@gmail.com> 1276090187 +0200 commit: Channel: removed pseudoconstructor, which clearly improves the design and makes it easier to customize
+57a4e09294230a36cc874a6272c71757c48139f2 07996a1a1e53ffdd2680d4bfbc2f4059687859a5 Sebastian Thiel <byronimo@gmail.com> 1276090851 +0200 commit: task: removed scheduled task support, which at some point was introduced to improve performance, but which now hinders performance, besides being unnecessary ;)
+07996a1a1e53ffdd2680d4bfbc2f4059687859a5 ea81f14dafbfb24d70373c74b5f8dabf3f2225d9 Sebastian Thiel <byronimo@gmail.com> 1276094301 +0200 commit: Channel: Callbacks reviewed - they are now part of subclasses of the default channel implementation, one of which is used as base by the Pool read channel, relieving it of the duty to call these itself. The write channel with callback subclass allows the transformation of the item to be written
+ea81f14dafbfb24d70373c74b5f8dabf3f2225d9 365fb14ced88a5571d3287ff1698582ceacd80d6 Sebastian Thiel <byronimo@gmail.com> 1276095557 +0200 commit: task: redesigned write channel access to allow the task creator to set their own write channels, possibly some with callbacks installed etc. Pool.add_task will respect the user's choice now, but provide defaults which are optimized for performance
+365fb14ced88a5571d3287ff1698582ceacd80d6 257a8a9441fca9a9bc384f673ba86ef5c3f1715d Sebastian Thiel <byronimo@gmail.com> 1276111194 +0200 commit: test: prepared task dependency test, which already helped to find a bug in the reference counting mechanism that caused references to the pool to be kept via cycles
+257a8a9441fca9a9bc384f673ba86ef5c3f1715d 3323464f85b986cba23176271da92a478b33ab9c Sebastian Thiel <byronimo@gmail.com> 1276122289 +0200 commit: messy first version of a properly working depth-first graph method, which allows the pool to work as expected. Many more tests need to be added, and there still is a problem with shutdown as sometimes it won't kill all threads, mainly because the process came up with worker threads started, which cannot be
+3323464f85b986cba23176271da92a478b33ab9c 3323464f85b986cba23176271da92a478b33ab9c Sebastian Thiel <byronimo@gmail.com> 1276122375 +0200 checkout: moving from async to async
+3323464f85b986cba23176271da92a478b33ab9c 257a8a9441fca9a9bc384f673ba86ef5c3f1715d Sebastian Thiel <byronimo@gmail.com> 1276122387 +0200 HEAD~1: updating HEAD
+257a8a9441fca9a9bc384f673ba86ef5c3f1715d 3323464f85b986cba23176271da92a478b33ab9c Sebastian Thiel <byronimo@gmail.com> 1276122419 +0200 checkout: moving from async to taskdep
+3323464f85b986cba23176271da92a478b33ab9c cfb278d74ad01f3f1edf5e0ad113974a9555038d Sebastian Thiel <byronimo@gmail.com> 1276157672 +0200 commit: InputChannelTask now has an interface for properly handling reading from the same and from different pools
+cfb278d74ad01f3f1edf5e0ad113974a9555038d 01eac1a959c1fa5894a86bf11e6b92f96762bdd8 Sebastian Thiel <byronimo@gmail.com> 1276164376 +0200 commit: Added more dependency task tests; especially the single-reads are not yet fully deterministic, as tasks still run into the problem that they try to write into a closed channel - it was closed by one of their task-mates which didn't know someone else was still computing
+01eac1a959c1fa5894a86bf11e6b92f96762bdd8 01eac1a959c1fa5894a86bf11e6b92f96762bdd8 Sebastian Thiel <byronimo@gmail.com> 1276166920 +0200 checkout: moving from taskdep to channel
+01eac1a959c1fa5894a86bf11e6b92f96762bdd8 01eac1a959c1fa5894a86bf11e6b92f96762bdd8 Sebastian Thiel <byronimo@gmail.com> 1276167802 +0200 checkout: moving from channel to taskdep
+01eac1a959c1fa5894a86bf11e6b92f96762bdd8 01eac1a959c1fa5894a86bf11e6b92f96762bdd8 Sebastian Thiel <byronimo@gmail.com> 1276169390 +0200 checkout: moving from taskdep to channel
+01eac1a959c1fa5894a86bf11e6b92f96762bdd8 55e757928e493ce93056822d510482e4ffcaac2d Sebastian Thiel <byronimo@gmail.com> 1276173597 +0200 commit: channel: Changed design to be more logical - a channel now has any amount of readers and writers, and a reader is not connected to its writer anymore. This changes the refcounting of course, which is why the auto-cleanup for the pool is currently broken.
+55e757928e493ce93056822d510482e4ffcaac2d 7c36f3648e39ace752c67c71867693ce1eee52a3 Sebastian Thiel <byronimo@gmail.com> 1276177120 +0200 commit: Now tracking the amount of concurrent writers to assure the channel is closed only when there is no one else writing to it. This assures that all tasks can continue working, and put their results accordingly. Shutdown is still not working correctly, but that should be solvable as well. It's still not perfect though ...
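
[Editor's note] The writer tracking described above follows a common refcount pattern: a close request only takes effect once the number of concurrent writers drops to zero. An illustrative sketch with hypothetical names; `channel` is assumed to expose a close() method:

    import threading

    class WriterGuard(object):
        """Close the channel only after the last concurrent writer leaves."""
        def __init__(self, channel):
            self.channel = channel
            self._lock = threading.Lock()
            self._writers = 0
            self._close_requested = False

        def writer_entered(self):
            with self._lock:
                self._writers += 1

        def writer_left(self):
            with self._lock:
                self._writers -= 1
                if self._close_requested and self._writers == 0:
                    self.channel.close()

        def request_close(self):
            with self._lock:
                if self._writers == 0:
                    self.channel.close()       # no one writing: close now
                else:
                    self._close_requested = True
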
+7c36f3648e39ace752c67c71867693ce1eee52a3 c34343d0b714d2c4657972020afea034a167a682 Sebastian Thiel <byronimo@gmail.com> 1276177952 +0200 commit: tasks can now terminate faster when no items were read, without neglecting their duty to close the channel if required. Code is a little less maintainable now, but faster, it appears
+c34343d0b714d2c4657972020afea034a167a682 4c2fa54d8122e9e5bc20a938ff8ccc5caf96dafe Sebastian Thiel <byronimo@gmail.com> 1276206673 +0200 commit: tasks can now terminate faster when no items were read, without neglecting their duty to close the channel if required. Code is a little less maintainable now, but faster, it appears
+4c2fa54d8122e9e5bc20a938ff8ccc5caf96dafe c34343d0b714d2c4657972020afea034a167a682 Sebastian Thiel <byronimo@gmail.com> 1276206682 +0200 HEAD~1: updating HEAD
+c34343d0b714d2c4657972020afea034a167a682 fbe062bf6dacd3ad63dd827d898337fa542931ac Sebastian Thiel <byronimo@gmail.com> 1276206950 +0200 commit: Added dependency-task tests, and fixed plenty of ref-count related bugs, as well as concurrency issues. Now it works okay, but the thread-shutdown is still an issue, as it causes incorrect behaviour making the tests fail. It's good, as it hints at additional issues that need to be solved. There is just a little more left on the feature side, but it's nearly there
+fbe062bf6dacd3ad63dd827d898337fa542931ac 6d1212e8c412b0b4802bc1080d38d54907db879d Sebastian Thiel <byronimo@gmail.com> 1276249921 +0200 commit: IMPORTANT: sometimes, when notifying waiters by releasing their lock, the lock is not actually released or they are not actually notified, staying in a beauty sleep. This glitch is probably caused by some detail not treated correctly in the python thread module, which is something we cannot fix. It works most of the time as expected though - maybe some cleanup is not done correctly, which causes this
+6d1212e8c412b0b4802bc1080d38d54907db879d 01eac1a959c1fa5894a86bf11e6b92f96762bdd8 Sebastian Thiel <byronimo@gmail.com> 1276249936 +0200 checkout: moving from channel to taskdep
+01eac1a959c1fa5894a86bf11e6b92f96762bdd8 18b14ade522c71000a69ff80569eda7335d6f64c Sebastian Thiel <byronimo@gmail.com> 1276249944 +0200 merge channel: Merge made by recursive.
+18b14ade522c71000a69ff80569eda7335d6f64c 583e6a25b0d891a2f531a81029f2bac0c237cbf9 Sebastian Thiel <byronimo@gmail.com> 1276258825 +0200 commit (amend): Merge branch 'channel' into taskdep
+583e6a25b0d891a2f531a81029f2bac0c237cbf9 5ff864138cd1e680a78522c26b583639f8f5e313 Sebastian Thiel <byronimo@gmail.com> 1276259871 +0200 commit: test.async: split test_pool up into task implementations and related utilities, as well as the tests themselves. The file became too large
+5ff864138cd1e680a78522c26b583639f8f5e313 18e3252a1f655f09093a4cffd5125342a8f94f3b Sebastian Thiel <byronimo@gmail.com> 1276261131 +0200 commit: Finished dependent task testing according to the features we would currently like to see
+18e3252a1f655f09093a4cffd5125342a8f94f3b 257a8a9441fca9a9bc384f673ba86ef5c3f1715d Sebastian Thiel <byronimo@gmail.com> 1276261137 +0200 checkout: moving from taskdep to async
+257a8a9441fca9a9bc384f673ba86ef5c3f1715d f606937a7a21237c866efafcad33675e6539c103 Sebastian Thiel <byronimo@gmail.com> 1276261142 +0200 merge taskdep: Merge made by recursive.
+f606937a7a21237c866efafcad33675e6539c103 f606937a7a21237c866efafcad33675e6539c103 Sebastian Thiel <byronimo@gmail.com> 1276261390 +0200 checkout: moving from async to cleanup
+f606937a7a21237c866efafcad33675e6539c103 02de401bc0b5ab5b97736265b18c7d1d6f53b9c1 Sebastian Thiel <byronimo@gmail.com> 1276266327 +0200 commit: Improved shutdown handling - although it's impossible to prevent some stderr printing thanks to the underlying threading implementation, we can at least make sure that the interpreter doesn't block during shutdown. Now it appears to be running smoothly
+02de401bc0b5ab5b97736265b18c7d1d6f53b9c1 1873db442dc7511fc2c92fbaeb8d998d3e62723d Sebastian Thiel <byronimo@gmail.com> 1276266595 +0200 commit (amend): Improved shutdown handling - although it's impossible to prevent some stderr printing thanks to the underlying threading implementation, we can at least make sure that the interpreter doesn't block during shutdown. Now it appears to be running smoothly
+1873db442dc7511fc2c92fbaeb8d998d3e62723d e14e3f143e7260de9581aee27e5a9b2645db72de Sebastian Thiel <byronimo@gmail.com> 1276267329 +0200 commit: Removed commented-out debug code and additional debug printings. Verified it works on py2.4, 2.5 and 2.6
+e14e3f143e7260de9581aee27e5a9b2645db72de f606937a7a21237c866efafcad33675e6539c103 Sebastian Thiel <byronimo@gmail.com> 1276267656 +0200 checkout: moving from cleanup to async
+f606937a7a21237c866efafcad33675e6539c103 29eb123beb1c55e5db4aa652d843adccbd09ae18 Sebastian Thiel <byronimo@gmail.com> 1276267660 +0200 merge cleanup: Merge made by recursive.
+29eb123beb1c55e5db4aa652d843adccbd09ae18 cac6e06cc9ef2903a15e594186445f3baa989a1a Sebastian Thiel <byronimo@gmail.com> 1276268324 +0200 commit: test_task: fixed import error, made all modules 'from x import *' safe
+cac6e06cc9ef2903a15e594186445f3baa989a1a a28942bdf01f4ddb9d0b5a0489bd6f4e101dd775 Sebastian Thiel <byronimo@gmail.com> 1276280001 +0200 commit: Added performance test, improved iterator task which will now be usable by default. It shows that there must be the notion of a producer, which can work if there are no items read
+a28942bdf01f4ddb9d0b5a0489bd6f4e101dd775 9e7fbc06cbcb51efb8c88fedaeb257a435c0c162 Sebastian Thiel <byronimo@gmail.com> 1276334358 +0200 commit: Cleaned up channel design, Reader and Writer bases don't require a channel anymore, but are abstract.
+9e7fbc06cbcb51efb8c88fedaeb257a435c0c162 be8955a0fbb77d673587974b763f17c214904b57 Sebastian Thiel <byronimo@gmail.com> 1276334369 +0200 commit (amend): Cleaned up channel design, Reader and Writer bases don't require a channel anymore, but are abstract.
+be8955a0fbb77d673587974b763f17c214904b57 1d8a577ffc6ad7ce1465001ddebdc157aecc1617 Sebastian Thiel <byronimo@gmail.com> 1276335670 +0200 commit: channel: cleaned up inheritance hierarchy, adding mixing for callback functionality - previously the callback functionality was bound to channel based readers/writers
+1d8a577ffc6ad7ce1465001ddebdc157aecc1617 eded3fb0c820761d51d462e0d96187371c1758dc Sebastian Thiel <byronimo@gmail.com> 1276339082 +0200 commit: task: improved naming of task types, improved pool test to be less dependent on starting with just the main thread
+eded3fb0c820761d51d462e0d96187371c1758dc 7a0b79ee574999ecbc76696506352e4a5a0d7159 Sebastian Thiel <byronimo@gmail.com> 1276339207 +0200 commit (amend): task: improved naming of task types, improved pool test to be less dependent on starting with just the main thread
+7a0b79ee574999ecbc76696506352e4a5a0d7159 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1276339273 +0200 checkout: moving from async to master
+7c1169f6ea406fec1e26e99821e18e66437e65eb f91495e271597034226f1b9651345091083172c4 Sebastian Thiel <byronimo@gmail.com> 1276339280 +0200 merge async: Merge made by recursive.
+f91495e271597034226f1b9651345091083172c4 5c631ca192848fed3068b31b1389cd92a0c0cdca Sebastian Thiel <byronimo@gmail.com> 1276340638 +0200 commit: Removed async from this repository, put it into its own one, which now comes in as an external, using a git-submodule
+5c631ca192848fed3068b31b1389cd92a0c0cdca f91495e271597034226f1b9651345091083172c4 Sebastian Thiel <byronimo@gmail.com> 1276345979 +0200 HEAD~1: updating HEAD
+f91495e271597034226f1b9651345091083172c4 86ea63504f3e8a74cfb1d533be9d9602d2d17e27 Sebastian Thiel <byronimo@gmail.com> 1276346049 +0200 commit: Removed async from tree
+86ea63504f3e8a74cfb1d533be9d9602d2d17e27 6c1faef799095f3990e9970bc2cb10aa0221cf9c Sebastian Thiel <byronimo@gmail.com> 1276356043 +0200 commit: Removed odb from project, it is now used as a submodule named gitdb, which was added instead
+6c1faef799095f3990e9970bc2cb10aa0221cf9c 28ed48c93f4cc8b6dd23c951363e5bd4e6880992 Sebastian Thiel <byronimo@gmail.com> 1276503381 +0200 commit: Implemented initial version of tree serialization which appears to work according to a simple test
+28ed48c93f4cc8b6dd23c951363e5bd4e6880992 fe5289ed8311fecf39913ce3ae86b1011eafe5f7 Sebastian Thiel <byronimo@gmail.com> 1276506168 +0200 commit: tree now uses less memory for its cache as it stores the bare deserialized information - this also speeds up later serialization after changes. It's clear though that retrieving actual objects is currently slower, as these are not cached anymore. It's worth thinking about moving these encoding/decoding routines to gitdb
+fe5289ed8311fecf39913ce3ae86b1011eafe5f7 f8dabbf4f92a7023181777e9d40355562474f71a Sebastian Thiel <byronimo@gmail.com> 1276512508 +0200 commit: tree: added TreeModifier, allowing existing trees to be adjusted safely and/or fast, while staying compatible with serialization, which requires them to be sorted
+f8dabbf4f92a7023181777e9d40355562474f71a d9240918aa03e49feabe43af619019805ac76786 Sebastian Thiel <byronimo@gmail.com> 1276512707 +0200 commit (amend): tree: added TreeModifier, allowing existing trees to be adjusted safely and/or fast, while staying compatible with serialization, which requires them to be sorted
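
[Editor's note] For context on why the TreeModifier must keep entries sorted: git serializes tree entries ordered by name, with subtrees compared as if their name ended in '/'. A small helper showing that ordering rule; the (binsha, mode, name) entry layout is a simplifying assumption for the sketch:

    def serialize_order(entries):
        """Return tree entries in git's serialization order."""
        def key(entry):
            _, mode, name = entry
            # 0o040000 marks a subtree, whose name compares as name + '/'
            return name + '/' if (mode & 0o170000) == 0o040000 else name
        return sorted(entries, key=key)

This is why a modifier that inserts or replaces entries must re-establish the sort before the tree can be written back out.
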
+d9240918aa03e49feabe43af619019805ac76786 d9240918aa03e49feabe43af619019805ac76786 Sebastian Thiel <byronimo@gmail.com> 1276520481 +0200 checkout: moving from master to index
+d9240918aa03e49feabe43af619019805ac76786 af32b6e0ad4ab244dc70a5ade0f8a27ab45942f8 Sebastian Thiel <byronimo@gmail.com> 1276524270 +0200 commit: index: split index file into multiple files of a single package. This didn't reduce the file size as much as I would have liked, but certainly is a start for further 'outsourcing'
+af32b6e0ad4ab244dc70a5ade0f8a27ab45942f8 0ad4af53d4704489f2fd8bd067241bf12c8ee35a Sebastian Thiel <byronimo@gmail.com> 1276525421 +0200 commit: Implemented the serializable interface - by refactoring code
+0ad4af53d4704489f2fd8bd067241bf12c8ee35a abaefc59a7f2986ab344a65ef2a3653ce7dd339f Sebastian Thiel <byronimo@gmail.com> 1276527582 +0200 commit (amend): Implemented the serializable interface - by refactoring code
+abaefc59a7f2986ab344a65ef2a3653ce7dd339f d9240918aa03e49feabe43af619019805ac76786 Sebastian Thiel <byronimo@gmail.com> 1276527605 +0200 checkout: moving from index to master
+d9240918aa03e49feabe43af619019805ac76786 38b3cfb9b24a108e0720f7a3f8d6355f7e0bb1a9 Sebastian Thiel <byronimo@gmail.com> 1276527612 +0200 merge index: Merge made by recursive.
+38b3cfb9b24a108e0720f7a3f8d6355f7e0bb1a9 c9dbf201b4f0b3c2b299464618cb4ecb624d272c Sebastian Thiel <byronimo@gmail.com> 1276529105 +0200 commit: Moved small types that had their own module into the utils module
+c9dbf201b4f0b3c2b299464618cb4ecb624d272c 45e87305bd4f050c2d0309c32fe5de499fc38df3 Sebastian Thiel <byronimo@gmail.com> 1276554725 +0200 commit: Reimplemented Lock handling to conform to the git lock protocol, which is actually more efficient than the previous implementation
+45e87305bd4f050c2d0309c32fe5de499fc38df3 06590aee389f4466e02407f39af1674366a74705 Sebastian Thiel <byronimo@gmail.com> 1276555536 +0200 commit (amend): Reimplemented Lock handling to conform to the git lock protocol, which is actually more efficient than the previous implementation
+06590aee389f4466e02407f39af1674366a74705 1d2307532d679393ae067326e4b6fa1a2ba5cc06 Sebastian Thiel <byronimo@gmail.com> 1276556905 +0200 commit: Moved LockedFD and its test into the gitdb project
+1d2307532d679393ae067326e4b6fa1a2ba5cc06 e837b901dcfac82e864f806c80f4a9cbfdb9c9f3 Sebastian Thiel <byronimo@gmail.com> 1276607908 +0200 commit: Move LazyMixin type to gitdb, index reading now uses file_contents_ro from gitdb as well
+e837b901dcfac82e864f806c80f4a9cbfdb9c9f3 b82dbf538ac0d03968a0f5b7e2318891abefafaa Sebastian Thiel <byronimo@gmail.com> 1276870827 +0200 commit: GitCmd implementation of gitdb base moved to git-python where it belongs. Previously it was located in gitdb, which doesn't have any facilities to use the git command
+b82dbf538ac0d03968a0f5b7e2318891abefafaa f164627a85ed7b816759871a76db258515b85678 Sebastian Thiel <byronimo@gmail.com> 1277057845 +0200 commit: db: added pure python git database
+f164627a85ed7b816759871a76db258515b85678 ac62760c52abf28d1fd863f0c0dd48bc4a23d223 Sebastian Thiel <byronimo@gmail.com> 1277117506 +0200 commit: index.add: now uses gitdb.store functionality instead of git-hash-file. The python version is about as fast, but could support multithreading using async
+ac62760c52abf28d1fd863f0c0dd48bc4a23d223 0fdf6c3aaff49494c47aaeb0caa04b3016e10a26 Sebastian Thiel <byronimo@gmail.com> 1277127929 +0200 commit: index: Entries are now using flags internally, instead of reducing the flag information to just the stage ( just to be closer to the git-original )
+0fdf6c3aaff49494c47aaeb0caa04b3016e10a26 0aeb491d3d8f53e07fb21f36251be4880170c5ab Sebastian Thiel <byronimo@gmail.com> 1277129321 +0200 commit: index.add does not need the git command line tool anymore
+0aeb491d3d8f53e07fb21f36251be4880170c5ab 91725f0fc59aa05ef68ab96e9b29009ce84668a5 Sebastian Thiel <byronimo@gmail.com> 1277129385 +0200 commit (amend): index.add does not need the git command line tool anymore
+91725f0fc59aa05ef68ab96e9b29009ce84668a5 91725f0fc59aa05ef68ab96e9b29009ce84668a5 Sebastian Thiel <byronimo@gmail.com> 1277132483 +0200 checkout: moving from master to writetree
+91725f0fc59aa05ef68ab96e9b29009ce84668a5 1044116d25f0311033e0951d2ab30579bba4b051 Sebastian Thiel <byronimo@gmail.com> 1277144182 +0200 commit: index: put serialization methods into new 'fun' module, this makes the calls faster as it removes one level of indirection, and makes the main file smaller, improving maintainability
+1044116d25f0311033e0951d2ab30579bba4b051 69dd8750be1fbf55010a738dc1ced4655e727f23 Sebastian Thiel <byronimo@gmail.com> 1277157937 +0200 commit: index.write_tree: initial version implemented, although it's not yet working correctly; a test to explicitly compare the git version with the python implementation is still missing
+69dd8750be1fbf55010a738dc1ced4655e727f23 cadce432d8ef07e7293f8b760342d717bd350671 Sebastian Thiel <byronimo@gmail.com> 1277188932 +0200 commit: intermediate commit, rollback
+cadce432d8ef07e7293f8b760342d717bd350671 69dd8750be1fbf55010a738dc1ced4655e727f23 Sebastian Thiel <byronimo@gmail.com> 1277189427 +0200 HEAD~1: updating HEAD
+69dd8750be1fbf55010a738dc1ced4655e727f23 d2d9197cfe5d3b43cb8aee182b2e65c73ef9ab7b Sebastian Thiel <byronimo@gmail.com> 1277193172 +0200 commit: Tree-Writing now works after fixing an off-by-one error
+d2d9197cfe5d3b43cb8aee182b2e65c73ef9ab7b c4f49fb232acb2c02761a82acc12c4040699685d Sebastian Thiel <byronimo@gmail.com> 1277201017 +0200 commit: index.write_tree: now uses MemoryDB, making tree handling more efficient as IO will only be done when required. A possible disadvantage though is that time is spent on compressing the trees, although only the raw data and their shas would theoretically be needed. On the other hand, compressing their data uses less memory. An optimal implementation would just sha the data, check for existence, and compress it to write it to the database right away. This would mean more specialized code though, introducing redundancy. If IStreams knew whether they contain compressed or uncompressed data, and if there was a method to get a sha from data, this would work nicely in the existing framework though
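
[Editor's note] The 'method to get a sha from data' mused about above already has a well-defined form in git: an object's sha1 is taken over a '<type> <size>\0' header plus the raw payload, so hashing needs no compression at all. A sketch of the optimization described; the db interface (has_object/store_compressed) is hypothetical:

    import hashlib
    import zlib

    def object_sha(obj_type, data):
        """sha1 of a git object, computed without compressing anything."""
        header = ("%s %d\0" % (obj_type, len(data))).encode("ascii")
        return hashlib.sha1(header + data).hexdigest()

    def store_if_missing(db, obj_type, data):
        # sha first, compress only when the object is actually new
        sha = object_sha(obj_type, data)
        if not db.has_object(sha):
            db.store_compressed(sha, zlib.compress(data))
        return sha

    # e.g. object_sha("blob", b"hello") hashes b"blob 5\x00hello"
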
+c4f49fb232acb2c02761a82acc12c4040699685d 91725f0fc59aa05ef68ab96e9b29009ce84668a5 Sebastian Thiel <byronimo@gmail.com> 1277201030 +0200 checkout: moving from writetree to master
+91725f0fc59aa05ef68ab96e9b29009ce84668a5 778234d544b3f58dd415aaf10679d15b01a5281f Sebastian Thiel <byronimo@gmail.com> 1277201033 +0200 merge writetree: Merge made by recursive.
+778234d544b3f58dd415aaf10679d15b01a5281f 778234d544b3f58dd415aaf10679d15b01a5281f Sebastian Thiel <byronimo@gmail.com> 1277209257 +0200 checkout: moving from master to fromtree
+778234d544b3f58dd415aaf10679d15b01a5281f be97c4558992a437cde235aafc7ae2bd6df84ac8 Sebastian Thiel <byronimo@gmail.com> 1277234627 +0200 commit: Initial frame for implementing read_tree using pure python. As git-read-tree can do much more than we can ( and presumably faster ), the .new method is used to create new index instances from up to 3 trees.
+be97c4558992a437cde235aafc7ae2bd6df84ac8 c0ef65b43688b1a4615a1e7332f6215f9a8abb19 Sebastian Thiel <byronimo@gmail.com> 1277245716 +0200 commit: Implemented simple tree merging and a simple test, more elaborate testing is in progress
+c0ef65b43688b1a4615a1e7332f6215f9a8abb19 aea0243840a46021e6f77c759c960a06151d91c9 Sebastian Thiel <byronimo@gmail.com> 1277293745 +0200 commit: Added test for aggressive_tree_merge
+aea0243840a46021e6f77c759c960a06151d91c9 1e2265a23ecec4e4d9ad60d788462e7f124f1bb7 Sebastian Thiel <byronimo@gmail.com> 1277300937 +0200 commit: fixed critical bug in traverse_trees_recursive, implemented IndexFile.new including a simple test; it may stay simple as the methods it uses are thoroughly tested
+1e2265a23ecec4e4d9ad60d788462e7f124f1bb7 778234d544b3f58dd415aaf10679d15b01a5281f Sebastian Thiel <byronimo@gmail.com> 1277300988 +0200 checkout: moving from fromtree to master
+778234d544b3f58dd415aaf10679d15b01a5281f 57050184f3d962bf91511271af59ee20f3686c3f Sebastian Thiel <byronimo@gmail.com> 1277301014 +0200 merge fromtree: Merge made by recursive.
+57050184f3d962bf91511271af59ee20f3686c3f 129f90aa8d83d9b250c87b0ba790605c4a2bb06a Sebastian Thiel <byronimo@gmail.com> 1277334478 +0200 commit: Multiple partly critical bugfixes related to index handling
+129f90aa8d83d9b250c87b0ba790605c4a2bb06a a1adb421c2ee3e4868ea70d440dd82896219ed8f Sebastian Thiel <byronimo@gmail.com> 1277388148 +0200 commit: aggressive_tree_merge: fixed incorrect handling of one branch; it was just not implemented, causing incorrect merge results. Added test to cover this issue
+a1adb421c2ee3e4868ea70d440dd82896219ed8f 55dcc17c331f580b3beeb4d5decf64d3baf94f2e Sebastian Thiel <byronimo@gmail.com> 1277395720 +0200 commit (amend): aggressive_tree_merge: fixed incorrect handling of one branch; it was just not implemented, causing incorrect merge results. Added test to cover this issue
+55dcc17c331f580b3beeb4d5decf64d3baf94f2e ca131dd61e26f46f49ee3f70763f994cf9512665 Sebastian Thiel <byronimo@gmail.com> 1277401303 +0200 commit: GitCmdStreamReader: fixed terrible bug which only kicked in if the stream was actually empty. This is a rare case that can happen during stream testing. Theoretically there shouldn't be any empty streams of course, but practically they do exist sometimes ;)
+ca131dd61e26f46f49ee3f70763f994cf9512665 feb1ea0f4aacb9ea6dc4133900e65bf34c0ee02d Sebastian Thiel <byronimo@gmail.com> 1277401306 +0200 commit (amend): GitCmdStreamReader: fixed terrible bug which only kicked in if the stream was actually empty. This is a rare case that can happen during stream testing. Theoretically there shouldn't be any empty streams of course, but practically they do exist sometimes ;); fixed stream.seek implementation, which previously used seek on standard output
+feb1ea0f4aacb9ea6dc4133900e65bf34c0ee02d 402a6c2808db4333217aa300d0312836fd7923bd Sebastian Thiel <byronimo@gmail.com> 1277407147 +0200 commit: IndexFile.add: writing of the index file can now optionally be turned off. The default is to write the physical index, which is the behaviour you would expect
+402a6c2808db4333217aa300d0312836fd7923bd 402a6c2808db4333217aa300d0312836fd7923bd Sebastian Thiel <byronimo@gmail.com> 1277417929 +0200 checkout: moving from master to index
+402a6c2808db4333217aa300d0312836fd7923bd 4d30dfb07f78517b1ba20b88506e01678edd527c Sebastian Thiel <byronimo@gmail.com> 1277417979 +0200 commit: index.reset is now partly implemented using python, but in fact it resorts to using git-read-tree to keep the stat information when merging one tree in. After all, this is what needed to be implemented in python as well
+4d30dfb07f78517b1ba20b88506e01678edd527c 58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 Sebastian Thiel <byronimo@gmail.com> 1277473186 +0200 commit (amend): index.reset is now partly implemented using python, but in fact it resorts to using git-read-tree to keep the stat information when merging one tree in. After all, this is what needed to be implemented in python as well
+58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 402a6c2808db4333217aa300d0312836fd7923bd Sebastian Thiel <byronimo@gmail.com> 1277473192 +0200 checkout: moving from index to master
+402a6c2808db4333217aa300d0312836fd7923bd 58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 Sebastian Thiel <byronimo@gmail.com> 1277473196 +0200 merge index: Fast-forward
+58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 Sebastian Thiel <byronimo@gmail.com> 1277473218 +0200 checkout: moving from master to sha20
+58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 47e3138ee978ce708a41f38a0d874376d7ae5c78 Sebastian Thiel <byronimo@gmail.com> 1277503104 +0200 commit: Adjusted all files to (hopefully) deal with the fact that all objects now use 20-byte shas internally, as it is closer to the GitDB implementation
+47e3138ee978ce708a41f38a0d874376d7ae5c78 7abe9065aab9dec56015ede5f2b0082837c5dc2e Sebastian Thiel <byronimo@gmail.com> 1277745342 +0200 commit: All tests adjusted to work with the changed internal sha representation
+7abe9065aab9dec56015ede5f2b0082837c5dc2e 1fe889ea0cb2547584075dc1eb77f52c54b9a8c4 Sebastian Thiel <byronimo@gmail.com> 1277745354 +0200 commit (amend): All tests adjusted to work with the changed internal sha representation
+1fe889ea0cb2547584075dc1eb77f52c54b9a8c4 58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 Sebastian Thiel <byronimo@gmail.com> 1277745377 +0200 checkout: moving from sha20 to master
+58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 8d2239f24f6a54d98201413d4f46256df0d6a5f3 Sebastian Thiel <byronimo@gmail.com> 1277745383 +0200 merge sha20: Merge made by recursive.
+8d2239f24f6a54d98201413d4f46256df0d6a5f3 f1401803ccf7db5d897a5ef4b27e2176627c430e Sebastian Thiel <byronimo@gmail.com> 1277756712 +0200 commit: Fixed performance tests which broke in the course of the sha1-20 byte changes
+f1401803ccf7db5d897a5ef4b27e2176627c430e 6917ae4ce9eaa0f5ea91592988c1ea830626ac3a Sebastian Thiel <byronimo@gmail.com> 1277806256 +0200 commit: Diff: fixed bug that caused a string to end up as a blob mode
+6917ae4ce9eaa0f5ea91592988c1ea830626ac3a 6917ae4ce9eaa0f5ea91592988c1ea830626ac3a Sebastian Thiel <byronimo@gmail.com> 1277819766 +0200 checkout: moving from master to docs
+6917ae4ce9eaa0f5ea91592988c1ea830626ac3a 160081b9a7ca191afbec077c4bf970cfd9070d2c Sebastian Thiel <byronimo@gmail.com> 1277828911 +0200 commit: Updated and fixed sphinx API docs, which included one quick skim-through
+160081b9a7ca191afbec077c4bf970cfd9070d2c 791765c0dc2d00a9ffa4bc857d09f615cfe3a759 Sebastian Thiel <byronimo@gmail.com> 1277830741 +0200 commit: Removed repo tests which for some reason left the 'repos' directory around, replaced them by a real test which actually executes code, and puts everything into the tmp directory
+791765c0dc2d00a9ffa4bc857d09f615cfe3a759 77cd6659b64cb1950a82e6a3cccdda94f15ae739 Sebastian Thiel <byronimo@gmail.com> 1277834446 +0200 commit: Renamed modules utils to util, and errors to exc, to conform better to the submodule's naming conventions
+77cd6659b64cb1950a82e6a3cccdda94f15ae739 18be0972304dc7f1a2a509595de7da689bddbefa Sebastian Thiel <byronimo@gmail.com> 1277835397 +0200 commit: Removed blob.data property as there is no real reason for an exception to the rule of trying not to cache possibly heavy data. The data_stream method should be used instead
+18be0972304dc7f1a2a509595de7da689bddbefa 0369384c8b79c44c5369f1b6c05046899f8886da Sebastian Thiel <byronimo@gmail.com> 1277840971 +0200 commit: revised tutorial to match the changed usage, added basic information about object databases
+0369384c8b79c44c5369f1b6c05046899f8886da ee58d55133c571db6384acf916e4a1c3592be07b Sebastian Thiel <byronimo@gmail.com> 1277848046 +0200 commit: Added whatsnew and put it into the index
+ee58d55133c571db6384acf916e4a1c3592be07b 77d083040248deeccb3ac1ad125eb2969b5cb370 Sebastian Thiel <byronimo@gmail.com> 1277848184 +0200 commit (amend): Added whatsnew and put it into the index
+77d083040248deeccb3ac1ad125eb2969b5cb370 fde6522c40a346c8b1d588a2b8d4dd362ae1f58f Sebastian Thiel <byronimo@gmail.com> 1277848539 +0200 commit (amend): Added whatsnew and put it into the index
+fde6522c40a346c8b1d588a2b8d4dd362ae1f58f 28a33ca17ac5e0816a3e24febb47ffcefa663980 Sebastian Thiel <byronimo@gmail.com> 1277991925 +0200 commit: Added further information about the required submodules, and how to install them. Incremented version to 0.3.0 beta1
+28a33ca17ac5e0816a3e24febb47ffcefa663980 586aa65f772f69eb6d08224968c74dda1ca45612 Sebastian Thiel <byronimo@gmail.com> 1277993503 +0200 commit: Moved all source files into the source folder, separating the build from the source directory
+586aa65f772f69eb6d08224968c74dda1ca45612 d2ebc6193f7205fd1686678a5707262cb1c59bb0 Sebastian Thiel <byronimo@gmail.com> 1277993585 +0200 commit (amend): Moved all source files into the source folder, separating the build from the source directory
+d2ebc6193f7205fd1686678a5707262cb1c59bb0 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1277994361 +0200 checkout: moving from docs to hub/0.2
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af 7bfca5efce8988e6070e7284bd409d1a731a3fbc Sebastian Thiel <byronimo@gmail.com> 1277994419 +0200 checkout: moving from 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af to hub/0.1
+7bfca5efce8988e6070e7284bd409d1a731a3fbc d2ebc6193f7205fd1686678a5707262cb1c59bb0 Sebastian Thiel <byronimo@gmail.com> 1277994469 +0200 checkout: moving from 7bfca5efce8988e6070e7284bd409d1a731a3fbc to docs
+d2ebc6193f7205fd1686678a5707262cb1c59bb0 c3bd05b426a0e3dec8224244c3c9c0431d1ff130 Sebastian Thiel <byronimo@gmail.com> 1277994911 +0200 commit: Added doc-index, which helps to keep documentation of prior but still somewhat supported versions alive
+c3bd05b426a0e3dec8224244c3c9c0431d1ff130 6917ae4ce9eaa0f5ea91592988c1ea830626ac3a Sebastian Thiel <byronimo@gmail.com> 1277999895 +0200 checkout: moving from docs to master
+6917ae4ce9eaa0f5ea91592988c1ea830626ac3a fd96cceded27d1372bdc1a851448d2d8613f60f3 Sebastian Thiel <byronimo@gmail.com> 1277999899 +0200 merge docs: Merge made by recursive.
+fd96cceded27d1372bdc1a851448d2d8613f60f3 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1278082024 +0200 checkout: moving from master to 0.2.0-beta1
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af fd96cceded27d1372bdc1a851448d2d8613f60f3 Sebastian Thiel <byronimo@gmail.com> 1278082288 +0200 checkout: moving from 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af to master
+fd96cceded27d1372bdc1a851448d2d8613f60f3 f683c6623f73252645bb2819673046c9d397c567 Sebastian Thiel <byronimo@gmail.com> 1278082451 +0200 commit: Fixed broken 0.2 documentation, it didn't contain the API reference previously due to import errors and a somewhat inconsistent working tree that occurred when switching branches ...
+f683c6623f73252645bb2819673046c9d397c567 a4287f65878000b42d11704692f9ea3734014b4c Sebastian Thiel <byronimo@gmail.com> 1278092317 +0200 commit: win32 compatibility adjustments
+a4287f65878000b42d11704692f9ea3734014b4c a4287f65878000b42d11704692f9ea3734014b4c Sebastian Thiel <byronimo@gmail.com> 1278315351 +0200 checkout: moving from master to revparse
+a4287f65878000b42d11704692f9ea3734014b4c f963881e53a9f0a2746a11cb9cdfa82eb1f90d8c Sebastian Thiel <byronimo@gmail.com> 1278369330 +0200 commit: Initial version of the rev-parse routine, which doesn't work too badly, but it's still rather slow and many tests are not yet implemented
+f963881e53a9f0a2746a11cb9cdfa82eb1f90d8c 1c6d7830d9b87f47a0bfe82b3b5424a32e3164ad Sebastian Thiel <byronimo@gmail.com> 1278405962 +0200 commit: RevParse now generally works, but there are still some more specialized tests missing
+1c6d7830d9b87f47a0bfe82b3b5424a32e3164ad a32a6bcd784fca9cb2b17365591c29d15c2f638e Sebastian Thiel <byronimo@gmail.com> 1278407809 +0200 commit: Refs now use object.new_from_sha where possible, preventing git-batch-check from being started up for sha resolution
+a32a6bcd784fca9cb2b17365591c29d15c2f638e 355aa879cff8630c9eedaf151f90a229f2ba5135 Sebastian Thiel <byronimo@gmail.com> 1278410951 +0200 commit: Implemented main rev-parsing, including long hexshas, tags and refs. Short Shas still to be done
+355aa879cff8630c9eedaf151f90a229f2ba5135 73959f3a2d4f224fbda03c8a8850f66f53d8cb3b Sebastian Thiel <byronimo@gmail.com> 1278418771 +0200 commit (amend): Implemented main rev-parsing, including long hexshas, tags and refs. Short Shas still to be done
+73959f3a2d4f224fbda03c8a8850f66f53d8cb3b 9059525a75b91e6eb6a425f1edcc608739727168 Sebastian Thiel <byronimo@gmail.com> 1278440512 +0200 commit: Made repo.py a package to allow better localization of functions and utilities - the repo module got rather large
+9059525a75b91e6eb6a425f1edcc608739727168 f068cdc5a1a13539c4a1d756ae950aab65f5348b Sebastian Thiel <byronimo@gmail.com> 1278497773 +0200 commit: Initially working implementation of short-sha parsing and interpretation, thanks to new gitdb functionality
+f068cdc5a1a13539c4a1d756ae950aab65f5348b bc31651674648f026464fd4110858c4ffeac3c18 Sebastian Thiel <byronimo@gmail.com> 1278516647 +0200 commit: Adjusted previous object creators to use the rev_parse method directly. rev_parse could be adjusted not to return Objects anymore, providing better performance for those who just want a sha. On the other hand, the method is high-level and should be convenient to use as well; it's usually a starting point for more, hence it's unlikely to be called in tight loops
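
[Editor's note] To make the resolution order discussed in these rev-parse entries concrete, here is a toy dispatcher: hexshas (full, then short) first, then refs tried under a few common prefixes. All repo helpers here are hypothetical stand-ins, not GitPython's actual API, and the prefix order is illustrative only:

    import re

    HEXSHA = re.compile(r'^[0-9a-fA-F]{4,40}$')

    def toy_rev_parse(repo, rev):
        if HEXSHA.match(rev):
            if len(rev) == 40:
                return repo.object_from_hexsha(rev)
            # short shas need the object database's
            # partial-to-complete sha lookup
            return repo.object_from_short_sha(rev)
        # try the usual ref locations
        for fmt in ('refs/tags/%s', 'refs/heads/%s', 'refs/%s', '%s'):
            obj = repo.object_at_ref(fmt % rev)
            if obj is not None:
                return obj
        raise ValueError('could not resolve %r' % rev)
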
+bc31651674648f026464fd4110858c4ffeac3c18 8a73805d9b26b5a6c54f2e8d53f948df7db8b3d4 Sebastian Thiel <byronimo@gmail.com> 1278517362 +0200 commit: Added test for GitCmdObjectDB in order to verify the method is working as expected with different input
+8a73805d9b26b5a6c54f2e8d53f948df7db8b3d4 01ab5b96e68657892695c99a93ef909165456689 Sebastian Thiel <byronimo@gmail.com> 1278517373 +0200 commit (amend): Added test for GitCmdObjectDB in order to verify the partial_to_complete_sha_hex is working as expected with different input ( it wasn't, of course ;) )
+01ab5b96e68657892695c99a93ef909165456689 a4287f65878000b42d11704692f9ea3734014b4c Sebastian Thiel <byronimo@gmail.com> 1278517411 +0200 checkout: moving from revparse to master
+a4287f65878000b42d11704692f9ea3734014b4c ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d Sebastian Thiel <byronimo@gmail.com> 1278517416 +0200 merge revparse: Merge made by recursive.
+ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d a4287f65878000b42d11704692f9ea3734014b4c Sebastian Thiel <byronimo@gmail.com> 1278525786 +0200 checkout: moving from master to master~1
+a4287f65878000b42d11704692f9ea3734014b4c ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d Sebastian Thiel <byronimo@gmail.com> 1278525793 +0200 checkout: moving from a4287f65878000b42d11704692f9ea3734014b4c to master
+ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d 01ab5b96e68657892695c99a93ef909165456689 Sebastian Thiel <byronimo@gmail.com> 1278525803 +0200 checkout: moving from master to master^2
+01ab5b96e68657892695c99a93ef909165456689 bc31651674648f026464fd4110858c4ffeac3c18 Sebastian Thiel <byronimo@gmail.com> 1278525815 +0200 checkout: moving from 01ab5b96e68657892695c99a93ef909165456689 to master^2~1
+bc31651674648f026464fd4110858c4ffeac3c18 f068cdc5a1a13539c4a1d756ae950aab65f5348b Sebastian Thiel <byronimo@gmail.com> 1278525821 +0200 checkout: moving from bc31651674648f026464fd4110858c4ffeac3c18 to master^2~2
+f068cdc5a1a13539c4a1d756ae950aab65f5348b 73959f3a2d4f224fbda03c8a8850f66f53d8cb3b Sebastian Thiel <byronimo@gmail.com> 1278525826 +0200 checkout: moving from f068cdc5a1a13539c4a1d756ae950aab65f5348b to master^2~4
+73959f3a2d4f224fbda03c8a8850f66f53d8cb3b ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d Sebastian Thiel <byronimo@gmail.com> 1278525829 +0200 checkout: moving from 73959f3a2d4f224fbda03c8a8850f66f53d8cb3b to master
+ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d 5fd6cc37fd07c25cb921b77b4f658b7e8fc132b3 Sebastian Thiel <byronimo@gmail.com> 1278536545 +0200 commit: Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
+5fd6cc37fd07c25cb921b77b4f658b7e8fc132b3 76af62b3c5a26638fcad9a3fe401fba566fb7037 Sebastian Thiel <byronimo@gmail.com> 1278538933 +0200 commit (amend): Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
+76af62b3c5a26638fcad9a3fe401fba566fb7037 b425301ad16f265157abdaf47f7af1c1ea879068 Sebastian Thiel <byronimo@gmail.com> 1278539147 +0200 commit (amend): Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
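
[Editor's note] The compatibility arrangement in the entry above is a standard pattern: a new classmethod entry point and the old instance method both delegate to one shared worker. A sketch under that assumption; the subprocess call stands in for whatever the real clone does:

    import subprocess

    class Repo(object):
        def __init__(self, path):
            self.path = path

        @classmethod
        def clone_from(cls, url, to_path):
            # new static-style entry point: no Repo instance needed
            return cls._clone(url, to_path)

        def clone(self, to_path):
            # previous instance API keeps working via the same worker
            return type(self)._clone(self.path, to_path)

        @classmethod
        def _clone(cls, url, to_path):
            subprocess.check_call(['git', 'clone', url, to_path])
            return cls(to_path)
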
+b425301ad16f265157abdaf47f7af1c1ea879068 3288a244428751208394d8137437878277ceb71f Sebastian Thiel <byronimo@gmail.com> 1278582561 +0200 commit: setup.py: fixed requirement - it's interesting to see that there are two different keywords for distutils and setuptools; the latter doesn't read the ones of the former, unfortunately
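
[Editor's note] The two keywords meant above are, to my understanding, distutils' `requires` (recorded as metadata only) and setuptools' `install_requires` (actually acted on at install time); setuptools does not fall back to the former. A sketch of passing both - the package list is a placeholder, not necessarily the project's actual requirement set:

    from setuptools import setup

    setup(
        name='GitPython',
        version='0.3.0-beta2',
        # distutils records `requires` as pure metadata, while
        # setuptools installs from `install_requires`; neither
        # reads the other's keyword, so both are supplied
        requires=('gitdb',),
        install_requires=['gitdb'],
    )
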
+3288a244428751208394d8137437878277ceb71f 08457a7a6b6ad4f518fad0d5bca094a2b5b38fbe Sebastian Thiel <byronimo@gmail.com> 1278670718 +0200 commit: Added python 2.4 support: Repo will now use the original GitCmdObjectDB in python 2.4, as the pure python implementation cannot work without memory maps
+08457a7a6b6ad4f518fad0d5bca094a2b5b38fbe 258403da9c2a087b10082d26466528fce3de38d4 Sebastian Thiel <byronimo@gmail.com> 1278671744 +0200 commit: bumped version to 0.3.0 beta2
+258403da9c2a087b10082d26466528fce3de38d4 55b67e8194b8b4d9e73e27feadbf9af6593e4600 Sebastian Thiel <byronimo@gmail.com> 1278927490 +0200 pull gitorious master: Fast-forward
+55b67e8194b8b4d9e73e27feadbf9af6593e4600 bcd37b68533d0cceb7e73dd1ed1428fa09f7dc17 Sebastian Thiel <byronimo@gmail.com> 1279007300 +0200 commit: Fixed incorrect use of Blob.data in performance test
+bcd37b68533d0cceb7e73dd1ed1428fa09f7dc17 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279110447 +0200 commit: Added performance test to compare inst.__class__() vs type(inst)() instantiation. The first one is faster, although I would have expected the latter to be faster
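
[Editor's note] The comparison in the entry above is easy to reproduce with timeit; both expressions construct a new instance of the object's class, differing only in how the class is looked up:

    import timeit

    class Foo(object):
        pass

    inst = Foo()
    n = 1000000
    t_attr = timeit.timeit(lambda: inst.__class__(), number=n)
    t_type = timeit.timeit(lambda: type(inst)(), number=n)
    # the reflog entry reports the attribute lookup winning
    print('inst.__class__(): %.3fs  type(inst)(): %.3fs' % (t_attr, t_type))
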
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279130572 +0200 checkout: moving from master to integration
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 2603363b435a695f9bf1bbbd5c1c59c353ec0450 Sebastian Thiel <byronimo@gmail.com> 1279130575 +0200 pull git://gitorious.org/git-python/mainline.git refs/merge-requests/2104: Merge made by recursive.
+2603363b435a695f9bf1bbbd5c1c59c353ec0450 c9c8d48a42d45d22a120e2191ae2f838483caccc Sebastian Thiel <byronimo@gmail.com> 1279130613 +0200 HEAD^2: updating HEAD
+c9c8d48a42d45d22a120e2191ae2f838483caccc 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279130629 +0200 checkout: moving from integration to master
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 a64c5f23c38f06c37a0ca64796368019dfb0d818 Sebastian Thiel <byronimo@gmail.com> 1279130658 +0200 commit: Correct Repo documentation in API reference
+a64c5f23c38f06c37a0ca64796368019dfb0d818 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279130719 +0200 HEAD~1: updating HEAD
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 17a172920fde8c6688c8a1a39f258629b8b73757 Sebastian Thiel <byronimo@gmail.com> 1279131269 +0200 commit: Fixed the missing Repo type documentation thanks to a hint by gasche; fixed a few more reST syntax errors on the way
+17a172920fde8c6688c8a1a39f258629b8b73757 cbb58869063fe803d232f099888fe9c23510de7b Sebastian Thiel <byronimo@gmail.com> 1279197254 +0200 merge gitorious/master: Fast-forward
+cbb58869063fe803d232f099888fe9c23510de7b 89422841e46efa99bda49acfbe33ee1ca5122845 Sebastian Thiel <byronimo@gmail.com> 1279533542 +0200 pull gitorious master: Fast-forward
+89422841e46efa99bda49acfbe33ee1ca5122845 192472f9673b18c91ce618e64e935f91769c50e7 Sebastian Thiel <byronimo@gmail.com> 1279814094 +0200 merge gitorious/master: Fast-forward
+192472f9673b18c91ce618e64e935f91769c50e7 fa3fce80aeb57299633535b9e1c9d08db710ff05 Sebastian Thiel <byronimo@gmail.com> 1281701051 +0200 commit: unicode handling in messages and trees was improved. Messages are now written according to the encoding of the commit object, and decoded using that information as well. Trees will encode and decode their names with utf8
+fa3fce80aeb57299633535b9e1c9d08db710ff05 394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 Sebastian Thiel <byronimo@gmail.com> 1281702078 +0200 commit (amend): unicode handling in messages and trees was improved. Messages are now written according to the encoding of the commit object, and decoded using that information as well. Trees will encode and decode their names with utf8
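
[Editor's note] The rule in the two entries above - serialize a message with the commit object's own encoding, and decode it with the same information - can be sketched as a pair of helpers; the error-handling policy here is an assumption for illustration, not necessarily what the library does:

    def encode_message(message, encoding='UTF-8'):
        # write side: honour the commit's declared encoding
        if isinstance(message, bytes):
            return message
        return message.encode(encoding)

    def decode_message(raw, encoding='UTF-8'):
        # read side: decode with the declared encoding, but never
        # fail outright on stray bytes (assumed fallback behaviour)
        return raw.decode(encoding, 'replace')
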
+394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 Sebastian Thiel <byronimo@gmail.com> 1283969667 +0200 checkout: moving from master to integration
+394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a Sebastian Thiel <byronimo@gmail.com> 1283969669 +0200 pull git://gitorious.org/git-python/mainline.git refs/merge-requests/14: Fast-forward
+c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a 394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 Sebastian Thiel <byronimo@gmail.com> 1283969687 +0200 checkout: moving from integration to master
+394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a Sebastian Thiel <byronimo@gmail.com> 1283969691 +0200 merge integration: Fast-forward
+c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287134990 +0200 commit: Fixed bug in http://byronimo.lighthouseapp.com/projects/51787/tickets/44-remoteref-fails-when-there-is-character-in-the-name using supplied patch ( which was manually applied ).
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a fc650aa6869639548435ce2760d42c9cdd909d99 Sebastian Thiel <byronimo@gmail.com> 1287135891 +0200 commit: Added test to verify the actor class can handle unicode names correctly. This works because regex can handle unicode, and will return unicode instances instead of strings if required. Its quite amazing actually.
+fc650aa6869639548435ce2760d42c9cdd909d99 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287136504 +0200 HEAD~1: updating HEAD
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287136515 +0200 checkout: moving from master to unicode
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a 0f88fb96869b6ac3ed4dac7d23310a9327d3c89c Sebastian Thiel <byronimo@gmail.com> 1287136588 +0200 commit: Added test to verify the actor type can handle and parse unicode if it is passed in
+0f88fb96869b6ac3ed4dac7d23310a9327d3c89c d39889bcac1735e429ac640ac6838d0e56835afb Sebastian Thiel <byronimo@gmail.com> 1287138883 +0200 commit: Added unicode handling for author names. They will now be properly encoded into the byte stream, as well as decoded from it
+d39889bcac1735e429ac640ac6838d0e56835afb 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287138889 +0200 checkout: moving from unicode to master
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a a88173281ec56cb378a293d0170e11a1bda96a55 Sebastian Thiel <byronimo@gmail.com> 1287138898 +0200 merge unicode: Merge made by recursive.
+a88173281ec56cb378a293d0170e11a1bda96a55 d39889bcac1735e429ac640ac6838d0e56835afb Sebastian Thiel <byronimo@gmail.com> 1287139063 +0200 checkout: moving from master to unicode
+d39889bcac1735e429ac640ac6838d0e56835afb a88173281ec56cb378a293d0170e11a1bda96a55 Sebastian Thiel <byronimo@gmail.com> 1287139078 +0200 checkout: moving from unicode to master
+a88173281ec56cb378a293d0170e11a1bda96a55 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287139082 +0200 HEAD~1: updating HEAD
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a d39889bcac1735e429ac640ac6838d0e56835afb Sebastian Thiel <byronimo@gmail.com> 1287139092 +0200 checkout: moving from master to unicode
+d39889bcac1735e429ac640ac6838d0e56835afb 0019d7dc8c72839d238065473a62b137c3c350f5 Sebastian Thiel <byronimo@gmail.com> 1287139239 +0200 commit (amend): Added unicode handling for author names. They will now be properly encoded into the byte stream, as well as decoded from it
+0019d7dc8c72839d238065473a62b137c3c350f5 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287139252 +0200 checkout: moving from unicode to master
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a 13647590f96fb5a22cb60f12c5a70e00065a7f3a Sebastian Thiel <byronimo@gmail.com> 1287139254 +0200 merge unicode: Merge made by recursive.
+13647590f96fb5a22cb60f12c5a70e00065a7f3a 94029ce1420ced83c3e5dcd181a2280b26574bc9 Sebastian Thiel <byronimo@gmail.com> 1287139994 +0200 commit: Adjusted regex to support whitespace - it was a little restrictive previously, although there was absolutely no need for that.
+94029ce1420ced83c3e5dcd181a2280b26574bc9 cfd2121eda5fadd18fba6819f90efb8868fad14a Sebastian Thiel <byronimo@gmail.com> 1287729183 +0200 checkout: moving from master to dotsenko/stable
+cfd2121eda5fadd18fba6819f90efb8868fad14a 7bfca5efce8988e6070e7284bd409d1a731a3fbc Sebastian Thiel <byronimo@gmail.com> 1287729293 +0200 checkout: moving from cfd2121eda5fadd18fba6819f90efb8868fad14a to HEAD~1
+7bfca5efce8988e6070e7284bd409d1a731a3fbc 94029ce1420ced83c3e5dcd181a2280b26574bc9 Sebastian Thiel <byronimo@gmail.com> 1287729331 +0200 checkout: moving from 7bfca5efce8988e6070e7284bd409d1a731a3fbc to master
+94029ce1420ced83c3e5dcd181a2280b26574bc9 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288000691 +0200 commit: .gitignore will now ignore netbeans projects
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 a2b9ded87baf0f32ae94c10c5851a0468a45f003 Sebastian Thiel <byronimo@gmail.com> 1288198935 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+a2b9ded87baf0f32ae94c10c5851a0468a45f003 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1288198941 +0200 checkout: moving from master to 0.2.0-beta1
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af a2b9ded87baf0f32ae94c10c5851a0468a45f003 Sebastian Thiel <byronimo@gmail.com> 1288198964 +0200 checkout: moving from 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af to master
+a2b9ded87baf0f32ae94c10c5851a0468a45f003 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288198984 +0200 HEAD~1: updating HEAD
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 Sebastian Thiel <byronimo@gmail.com> 1288198991 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1288198995 +0200 checkout: moving from master to 0.2.0-beta1
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af 148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 Sebastian Thiel <byronimo@gmail.com> 1288199017 +0200 checkout: moving from 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af to master
+148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288199023 +0200 HEAD~1: updating HEAD
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 538e8265e04f69bb9bd73a10ddb4e8e9677fb140 Sebastian Thiel <byronimo@gmail.com> 1288199049 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+538e8265e04f69bb9bd73a10ddb4e8e9677fb140 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1288199066 +0200 checkout: moving from master to 0.2.0-beta1
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af ec97ed84d114ef131fa98acee4ce7be32f8c591f Sebastian Thiel <byronimo@gmail.com> 1288199163 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+ec97ed84d114ef131fa98acee4ce7be32f8c591f ec97ed84d114ef131fa98acee4ce7be32f8c591f Sebastian Thiel <byronimo@gmail.com> 1288199260 +0200 checkout: moving from ec97ed84d114ef131fa98acee4ce7be32f8c591f to 0.2fixes
+ec97ed84d114ef131fa98acee4ce7be32f8c591f 538e8265e04f69bb9bd73a10ddb4e8e9677fb140 Sebastian Thiel <byronimo@gmail.com> 1288199399 +0200 checkout: moving from 0.2 to master
+538e8265e04f69bb9bd73a10ddb4e8e9677fb140 97ab197140b16027975c7465a5e8786e6cc8fea1 Sebastian Thiel <byronimo@gmail.com> 1288203452 +0200 commit (amend): docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+97ab197140b16027975c7465a5e8786e6cc8fea1 3da3837fe2ec8152e1460f747d18290b52304868 Sebastian Thiel <byronimo@gmail.com> 1288203532 +0200 commit: cmd: improved error handling and debug printing
+3da3837fe2ec8152e1460f747d18290b52304868 2c0b92e40ece170b59bced0cea752904823e06e7 Sebastian Thiel <byronimo@gmail.com> 1288203543 +0200 commit (amend): cmd: improved error handling and debug printing
+2c0b92e40ece170b59bced0cea752904823e06e7 1b6b9510e0724bfcb4250f703ddf99d1e4020bbc Sebastian Thiel <byronimo@gmail.com> 1288205467 +0200 commit: Fixed bug that would cause the author's email to be a generic default one, instead of the existing and valid. The rest of the ConfigParser handling is correct, as it reads all configuration files available to git
+1b6b9510e0724bfcb4250f703ddf99d1e4020bbc 0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f Sebastian Thiel <byronimo@gmail.com> 1288208986 +0200 commit: index.reset: updated parameter docs, but most importantly, the method now has better testing for the use of paths during reset. The IndexFile now implements this on its own, which also allows for something equivalent to git-reset --hard -- <paths>, which is not possible in the git command for some probably very good reason
+0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f 702f3909520d39e8c343dece7b1e2d72e1479bbe Sebastian Thiel <byronimo@gmail.com> 1289032478 +0100 checkout: moving from master to integration
+702f3909520d39e8c343dece7b1e2d72e1479bbe 0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f Sebastian Thiel <byronimo@gmail.com> 1289032798 +0100 checkout: moving from integration to master
+0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f 735b28bc65964da5b181dffcccb1d05555b5acab Sebastian Thiel <byronimo@gmail.com> 1289033220 +0100 commit: test_refs: fixed failing tests just by making it less strict. It is dependent on the setup of the surrounding repository, hence the amount of ref-types found is actually variable, as long as they get more
+735b28bc65964da5b181dffcccb1d05555b5acab 702f3909520d39e8c343dece7b1e2d72e1479bbe Sebastian Thiel <byronimo@gmail.com> 1289033228 +0100 checkout: moving from master to integration
+702f3909520d39e8c343dece7b1e2d72e1479bbe b14a52ae2f8d6e38f57c34f8be150541cf00dafa Sebastian Thiel <byronimo@gmail.com> 1289034355 +0100 commit: Some cleanup of Daniels contribution. Now it appears to be ready for a merge
+b14a52ae2f8d6e38f57c34f8be150541cf00dafa 90d73cd6ffa6e848da59cb2a35dec74e0fabd00a Sebastian Thiel <byronimo@gmail.com> 1289034615 +0100 commit (amend): Some cleanup of Daniels contribution. Now it appears to be ready for a merge
+90d73cd6ffa6e848da59cb2a35dec74e0fabd00a 7bfca5efce8988e6070e7284bd409d1a731a3fbc Sebastian Thiel <byronimo@gmail.com> 1289034647 +0100 checkout: moving from integration to 0.1
+7bfca5efce8988e6070e7284bd409d1a731a3fbc 90d73cd6ffa6e848da59cb2a35dec74e0fabd00a Sebastian Thiel <byronimo@gmail.com> 1289034656 +0100 merge integration: Fast-forward
+90d73cd6ffa6e848da59cb2a35dec74e0fabd00a 735b28bc65964da5b181dffcccb1d05555b5acab Sebastian Thiel <byronimo@gmail.com> 1289034706 +0100 checkout: moving from 0.1 to master
+735b28bc65964da5b181dffcccb1d05555b5acab bd7fb976ab0607592875b5697dc76c117a18dc73 Sebastian Thiel <byronimo@gmail.com> 1289034759 +0100 commit (amend): test_refs: fixed failing tests just by making it less strict. It is dependent on the setup of the surrounding repository, hence the amount of ref-types found is actually variable, as long as they get more
+bd7fb976ab0607592875b5697dc76c117a18dc73 258403da9c2a087b10082d26466528fce3de38d4 Sebastian Thiel <byronimo@gmail.com> 1289338336 +0100 checkout: moving from master to 0.3.0-beta2
+258403da9c2a087b10082d26466528fce3de38d4 bd7fb976ab0607592875b5697dc76c117a18dc73 Sebastian Thiel <byronimo@gmail.com> 1289338796 +0100 checkout: moving from 258403da9c2a087b10082d26466528fce3de38d4 to master
+bd7fb976ab0607592875b5697dc76c117a18dc73 a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 Sebastian Thiel <byronimo@gmail.com> 1289379557 +0100 commit: tutorial: Fixed incorrect initialization code for bare repo, thank you, Bryan Bishop
+a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 Sebastian Thiel <byronimo@gmail.com> 1289385325 +0100 checkout: moving from master to submodule
+a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 a1e2f63e64875a29e8c01a7ae17f5744680167a5 Sebastian Thiel <byronimo@gmail.com> 1289817434 +0100 commit: submodule: Fleshed out interface, and a partial test which is not yet usable. It showed that the ConfigParser needs some work. If the root is set, it also needs to refer to the root_commit instead of to the root-tree, as it will have to decide whether it works on the working tree's version of the .gitmodules file or the one in the repository
+a1e2f63e64875a29e8c01a7ae17f5744680167a5 4d36f8ff4d1274a8815e932285ad6dbd6b2888af Sebastian Thiel <byronimo@gmail.com> 1289819639 +0100 commit: Improved GitConfigurationParser to better deal with streams and the corresponding locks. Submodule class now operates on parent_commits, the configuration is either streamed from the repository or written directly into a blob ( or file ) dependending on whether we have a working tree checkout or not which matches our parent_commit
+4d36f8ff4d1274a8815e932285ad6dbd6b2888af 00ce31ad308ff4c7ef874d2fa64374f47980c85c Sebastian Thiel <byronimo@gmail.com> 1289836392 +0100 commit: Objects: Constructor now manually checks and sets the input arguments to the local cache - previously a procedural approach was used, which was less code, but slower too. Especially in case of CommitObjects unrolling the loop manually makes a difference.
+00ce31ad308ff4c7ef874d2fa64374f47980c85c f97653aa06cf84bcf160be3786b6fce49ef52961 Sebastian Thiel <byronimo@gmail.com> 1289842964 +0100 commit: Repo: added submodule query and iteration methods similar to the ones provided for Remotes, including test
+f97653aa06cf84bcf160be3786b6fce49ef52961 624556eae1c292a1dc283d9dca1557e28abe8ee3 Sebastian Thiel <byronimo@gmail.com> 1289844233 +0100 commit: Optimized test-decorators, by completely removing with_bare_rw_repo, which was mainly copy-paste from with_rw_repo, what a shame
+624556eae1c292a1dc283d9dca1557e28abe8ee3 ceee7d7e0d98db12067744ac3cd0ab3a49602457 Sebastian Thiel <byronimo@gmail.com> 1289855525 +0100 commit: Added partial implementation of update, but realized that using refs in general may be contradicting if a tag is given there, as well as a commit sha of the submodule. Hence it should really be only a branch
+ceee7d7e0d98db12067744ac3cd0ab3a49602457 d923bce2a8964541cf804428ccf3953ebbbdcf7d Sebastian Thiel <byronimo@gmail.com> 1289863093 +0100 commit: Submodule now only supports branches to be given as hint that will svn-external like behaviour. Implemented first version of update, which works for now, but probably needs to see more features
+d923bce2a8964541cf804428ccf3953ebbbdcf7d d4fd7fca515ba9b088a7c811292f76f47d16cd7b Sebastian Thiel <byronimo@gmail.com> 1289863587 +0100 commit (amend): Submodule now only supports branches to be given as hint that will svn-external like behaviour. Implemented first version of update, which works for now, but probably needs to see more features
+d4fd7fca515ba9b088a7c811292f76f47d16cd7b af5abca21b56fcf641ff916bd567680888c364aa Sebastian Thiel <byronimo@gmail.com> 1289896210 +0100 commit: Added a few utility methods and improved the test. Refs need an improvement though to allow easy configuration of branch-specific settings
+af5abca21b56fcf641ff916bd567680888c364aa 38b81ad137e5f5486ce97a35702c84b9f869ccef Sebastian Thiel <byronimo@gmail.com> 1289901931 +0100 commit: remote: added methods to set and query the tracking branch status of normal heads, including test.
+38b81ad137e5f5486ce97a35702c84b9f869ccef 9f73e8ba55f33394161b403bf7b8c2e0e05f47b0 Sebastian Thiel <byronimo@gmail.com> 1289901972 +0100 commit (amend): remote: added methods to set and query the tracking branch status of normal heads, including test.
+9f73e8ba55f33394161b403bf7b8c2e0e05f47b0 3035781875f3004734ff5fe3be77f66b3cef299e Sebastian Thiel <byronimo@gmail.com> 1289903243 +0100 commit: Improved efficiency of the submodule.update process, improved test
+3035781875f3004734ff5fe3be77f66b3cef299e 21b4db556619db2ef25f0e0d90fef7e38e6713e5 Sebastian Thiel <byronimo@gmail.com> 1289903575 +0100 commit (amend): Improved efficiency of the submodule.update process, improved test
+21b4db556619db2ef25f0e0d90fef7e38e6713e5 78d2cd65b8b778f3b0cfef5268b0684314ca22ef Sebastian Thiel <byronimo@gmail.com> 1289905889 +0100 commit: implemented update to_last_revision option including test. Its now possible to update submodules such as svn-externals
+78d2cd65b8b778f3b0cfef5268b0684314ca22ef c750f599a1b05ac855b55abc771729a704119833 Sebastian Thiel <byronimo@gmail.com> 1289924204 +0100 commit: Implemented deletion of submodules including proper tests
+c750f599a1b05ac855b55abc771729a704119833 3d061a1a506b71234f783628ba54a7bdf79bbce9 Sebastian Thiel <byronimo@gmail.com> 1289924802 +0100 commit (amend): Implemented deletion of submodules including proper tests
+3d061a1a506b71234f783628ba54a7bdf79bbce9 98e6edb546116cd98abdc3b37c6744e859bbde5c Sebastian Thiel <byronimo@gmail.com> 1289930487 +0100 commit: Initial implementation of submodule.add without any tests. These are to come next
+98e6edb546116cd98abdc3b37c6744e859bbde5c 33964afb47ce3af8a32e6613b0834e5f94bdfe68 Sebastian Thiel <byronimo@gmail.com> 1289938053 +0100 commit: Added tests for all failure modes of submodule add ( except for one ), and fixed a few issues on the way
+33964afb47ce3af8a32e6613b0834e5f94bdfe68 7b3ef45167e1c2f7d1b7507c13fcedd914f87da9 Sebastian Thiel <byronimo@gmail.com> 1289938869 +0100 commit: The submodule's branch is now a branch instance, not a plain string anymore
+7b3ef45167e1c2f7d1b7507c13fcedd914f87da9 ef48ca5f54fe31536920ec4171596ff8468db5fe Sebastian Thiel <byronimo@gmail.com> 1289950137 +0100 commit: Added rest of submodule.add test code which should be pretty much 100% coverage for it
+ef48ca5f54fe31536920ec4171596ff8468db5fe e84d05f4bbf7090a9802e9cd198d1c383974cb12 Sebastian Thiel <byronimo@gmail.com> 1289989025 +0100 commit: Repo: scetched out submodule_update
+e84d05f4bbf7090a9802e9cd198d1c383974cb12 403c31fe3c7075652c892ecd3b6dc6d321bb1226 Sebastian Thiel <byronimo@gmail.com> 1290001921 +0100 commit: index: Sped up reading and writing of the index file by reducing the amount of attribute lookups considerably
+403c31fe3c7075652c892ecd3b6dc6d321bb1226 b03933057df80ea9f860cc616eb7733f140f866e Sebastian Thiel <byronimo@gmail.com> 1290001937 +0100 commit (amend): index: Sped up reading and writing of the index file by reducing the amount of attribute lookups considerably
+b03933057df80ea9f860cc616eb7733f140f866e a1e6234c27abf041e4c8cd1a799950e7cd9104f6 Sebastian Thiel <byronimo@gmail.com> 1290003888 +0100 commit: Inital implementation of Submodule.move including a very simple and to-be-improved test
+a1e6234c27abf041e4c8cd1a799950e7cd9104f6 abda960de327e922fd9eaa429bef9e92918c8387 Sebastian Thiel <byronimo@gmail.com> 1290010524 +0100 commit: submodule: removed module_path method as it is implemented in the abspath property alrdeady
+abda960de327e922fd9eaa429bef9e92918c8387 609a46a72764dc71104aa5d7b1ca5f53d4237a75 Sebastian Thiel <byronimo@gmail.com> 1290011090 +0100 commit (amend): submodule: removed module_path method as it is implemented in the abspath property alrdeady
+609a46a72764dc71104aa5d7b1ca5f53d4237a75 6066f04d529b04e96295b37b5cceb2556414a472 Sebastian Thiel <byronimo@gmail.com> 1290025995 +0100 commit: repo: Added create_submodule method which fits into the tradition of offering a create_* method for most important entities.
+6066f04d529b04e96295b37b5cceb2556414a472 609a46a72764dc71104aa5d7b1ca5f53d4237a75 Sebastian Thiel <byronimo@gmail.com> 1290026006 +0100 HEAD~1: updating HEAD
+609a46a72764dc71104aa5d7b1ca5f53d4237a75 7cc4d748a132377ffe63534e9777d7541a3253c5 Sebastian Thiel <byronimo@gmail.com> 1290026013 +0100 commit: repo: Added create_submodule method which fits into the tradition of offering a create_* method for most important entities.
+7cc4d748a132377ffe63534e9777d7541a3253c5 1687283c13caf7ff8d1959591541dff6a171ca1e Sebastian Thiel <byronimo@gmail.com> 1290029890 +0100 commit: RootModule.update: initial implementation of update method, which should be able to handle submodule removals, additions, path changes and branch changes. All this still needs to be tested though
+1687283c13caf7ff8d1959591541dff6a171ca1e 9a0c4009edb35bb81d7e41d7d235675ccdfcffba Sebastian Thiel <byronimo@gmail.com> 1290068415 +0100 commit: commit: when creating a new commit and advancing the head, it will now write the ORIG_HEAD reference as well
+9a0c4009edb35bb81d7e41d7d235675ccdfcffba 7a320abc52307b4d4010166bd899ac75024ec9a7 Sebastian Thiel <byronimo@gmail.com> 1290068612 +0100 commit (amend): commit: when creating a new commit and advancing the head, it will now write the ORIG_HEAD reference as well
+7a320abc52307b4d4010166bd899ac75024ec9a7 1185ee2c9cda379bada7e08694f13dc124b27e93 Sebastian Thiel <byronimo@gmail.com> 1290073216 +0100 commit: ORIG_HEAD handling is now implemented in the ref-class itself, instead of being a special case of the commit method; includes tests
+1185ee2c9cda379bada7e08694f13dc124b27e93 82849578e61a7dfb47fc76dcbe18b1e3b6a36951 Sebastian Thiel <byronimo@gmail.com> 1290073599 +0100 commit (amend): ORIG_HEAD handling is now implemented in the ref-class itself, instead of being a special case of the commit method; includes tests
+82849578e61a7dfb47fc76dcbe18b1e3b6a36951 0c1834134ce177cdbd30a56994fcc4bf8f5be8b2 Sebastian Thiel <byronimo@gmail.com> 1290076876 +0100 commit: Added test-setup which can test all aspects of the (smart) update method
+0c1834134ce177cdbd30a56994fcc4bf8f5be8b2 50095005bab7ffae14cb7d2ec8faeee7eed579ee Sebastian Thiel <byronimo@gmail.com> 1290096572 +0100 commit: first update test succeeds, so it verifies that existing repositories can be moved later if the configuration changed
+50095005bab7ffae14cb7d2ec8faeee7eed579ee ca86a09651674d4bd2c22fd7f2f2ae6ca1c1d3d6 Sebastian Thiel <byronimo@gmail.com> 1290097988 +0100 commit (amend): first update test succeeds, so it verifies that existing repositories can be moved later if the configuration changed
+ca86a09651674d4bd2c22fd7f2f2ae6ca1c1d3d6 bf2f7449beafcfb4578d08c90370d3953ff5f073 Sebastian Thiel <byronimo@gmail.com> 1290098006 +0100 commit (amend): first update test succeeds, so it verifies that existing repositories can be moved later if the configuration changed, and actually it also verifies that the url-change is handled correctly (as we changed the url from the default to the local path)
+bf2f7449beafcfb4578d08c90370d3953ff5f073 c0990b2a6dd2e777b46c1685ddb985b3c0ef59a2 Sebastian Thiel <byronimo@gmail.com> 1290098151 +0100 commit (amend): first update test succeeds, so it verifies that existing repositories can be moved later if the configuration changed, and actually it also verifies that the url-change is handled correctly (as we changed the url from the default to the local path)
+c0990b2a6dd2e777b46c1685ddb985b3c0ef59a2 cf5eaddde33e983bc7b496f458bdd49154f6f498 Sebastian Thiel <byronimo@gmail.com> 1290109461 +0100 commit: Updated tests and implementation to verify functionality for handling submodule removals, as well as url changes
+cf5eaddde33e983bc7b496f458bdd49154f6f498 3f2d76ba8e6d004ff5849ed8c7c34f6a4ac2e1e3 Sebastian Thiel <byronimo@gmail.com> 1290112561 +0100 commit: Added test for branch changes - it appears to work well, at least as far as the restricted tests are concerned
+3f2d76ba8e6d004ff5849ed8c7c34f6a4ac2e1e3 ebe8f644e751c1b2115301c1a961bef14d2cce89 Sebastian Thiel <byronimo@gmail.com> 1290114666 +0100 commit: Added test for the recursive code path.
+ebe8f644e751c1b2115301c1a961bef14d2cce89 ebe8f644e751c1b2115301c1a961bef14d2cce89 Sebastian Thiel <byronimo@gmail.com> 1290114835 +0100 checkout: moving from submodule to refactor
+ebe8f644e751c1b2115301c1a961bef14d2cce89 4c34d5c3f2a4ed7194276a026e0ec6437d339c67 Sebastian Thiel <byronimo@gmail.com> 1290116693 +0100 commit: Split big submodule file into smaller files. Tried to manually get imports right, but its not yet tested
+4c34d5c3f2a4ed7194276a026e0ec6437d339c67 9519f186ce757cdba217f222c95c20033d00f91d Sebastian Thiel <byronimo@gmail.com> 1290118223 +0100 commit: Fixed all imports, refactoring appears to be complete
+9519f186ce757cdba217f222c95c20033d00f91d ebe8f644e751c1b2115301c1a961bef14d2cce89 Sebastian Thiel <byronimo@gmail.com> 1290118281 +0100 checkout: moving from refactor to submodule
+ebe8f644e751c1b2115301c1a961bef14d2cce89 7cf2d5fcf0a3db793678dd6ba9fc1c24d4eeb36a Sebastian Thiel <byronimo@gmail.com> 1290118286 +0100 merge refactor: Merge made by recursive.
+7cf2d5fcf0a3db793678dd6ba9fc1c24d4eeb36a a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 Sebastian Thiel <byronimo@gmail.com> 1290118288 +0100 checkout: moving from submodule to master
+a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 f1545bd9cd6953c5b39c488bf7fe179676060499 Sebastian Thiel <byronimo@gmail.com> 1290118292 +0100 merge submodule: Merge made by recursive.
+f1545bd9cd6953c5b39c488bf7fe179676060499 f1545bd9cd6953c5b39c488bf7fe179676060499 Sebastian Thiel <byronimo@gmail.com> 1290119272 +0100 checkout: moving from master to docs
+f1545bd9cd6953c5b39c488bf7fe179676060499 75c75fa136f6181f6ba2e52b8b85a98d3fe1718e Sebastian Thiel <byronimo@gmail.com> 1290120222 +0100 commit: Changed name/id of gitdb submodule to something that doesn't look like a path
+75c75fa136f6181f6ba2e52b8b85a98d3fe1718e a25e1d4aa7e5898ab1224d0e5cc5ecfbe8ed8821 Sebastian Thiel <byronimo@gmail.com> 1290122817 +0100 commit: Updated tutorial with a brief introduction to submodules
+a25e1d4aa7e5898ab1224d0e5cc5ecfbe8ed8821 a8014d2ec56fd684dc81478dee73ca7eda0ab8a7 Sebastian Thiel <byronimo@gmail.com> 1290122851 +0100 commit: Updated gitdb submodule, and added note about how the submodule package manages its dependencies
+a8014d2ec56fd684dc81478dee73ca7eda0ab8a7 f1545bd9cd6953c5b39c488bf7fe179676060499 Sebastian Thiel <byronimo@gmail.com> 1290122855 +0100 checkout: moving from docs to master
+f1545bd9cd6953c5b39c488bf7fe179676060499 45c0f285a6d9d9214f8167742d12af2855f527fb Sebastian Thiel <byronimo@gmail.com> 1290122860 +0100 merge docs: Merge made by recursive.
+45c0f285a6d9d9214f8167742d12af2855f527fb 315c303214cef855499f0c7eda46b7ed82dceecb Sebastian Thiel <byronimo@gmail.com> 1290158850 +0100 commit: test_submodule: fixed failures that arose due to changes of the original submodule names. Also, a major bug was fixed that cased submodules to always being updated recursively when using the RootModule.update method
+315c303214cef855499f0c7eda46b7ed82dceecb 7dd618655c96ff32b5c30e41a5406c512bcbb65f Sebastian Thiel <byronimo@gmail.com> 1290158895 +0100 commit (amend): test_submodule: fixed failures that arose due to changes of the original submodule names. Also, a major bug was fixed that cased submodules to always being updated recursively when using the RootModule.update method
+7dd618655c96ff32b5c30e41a5406c512bcbb65f 2ab454f0ccf09773a4f51045329a69fd73559414 Sebastian Thiel <byronimo@gmail.com> 1290188727 +0100 commit: remote: parsing of fetch information now reacts to fatal errors. Previously it would just bump into an assertion
+2ab454f0ccf09773a4f51045329a69fd73559414 b00ad00130389f5b00da9dbfd89c3e02319d2999 Sebastian Thiel <byronimo@gmail.com> 1290196658 +0100 commit: submodule: When adding an existing submodule, when retrieving the binsha, we will now consider not only the tree, but the index too
+b00ad00130389f5b00da9dbfd89c3e02319d2999 8867348ca772cdce7434e76eed141f035b63e928 Sebastian Thiel <byronimo@gmail.com> 1290196804 +0100 commit: Bumped version number to 0.3.1
+8867348ca772cdce7434e76eed141f035b63e928 8d0aa1ef19e2c3babee458bd4504820f415148e0 Sebastian Thiel <byronimo@gmail.com> 1290271885 +0100 commit: Fixed performance tests which broke in the meanwhile - they definitely don't run often enough, which is because they intentionally don't have a package initialization file
+8d0aa1ef19e2c3babee458bd4504820f415148e0 8e0e315a371cdfc80993a1532f938d56ed7acee4 Sebastian Thiel <byronimo@gmail.com> 1290280591 +0100 commit: submodule: Fixed capital error when handling the submodule's branch, which was returned in the submodules super repository, not in the submodule's module
+8e0e315a371cdfc80993a1532f938d56ed7acee4 7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e Sebastian Thiel <byronimo@gmail.com> 1290286993 +0100 merge gitorious/win32: Merge made by recursive.
+7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e 7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e Sebastian Thiel <byronimo@gmail.com> 1290287857 +0100 checkout: moving from master to osx
+7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e 6c6223b854e4c7985b08493a027b47f668493832 Sebastian Thiel <byronimo@gmail.com> 1290287908 +0100 commit: testing:added special case for osx to solve a special issue with the temp directory
+6c6223b854e4c7985b08493a027b47f668493832 20b07fa542d2376a287435a26c967a5ee104667f Sebastian Thiel <byronimo@gmail.com> 1290288287 +0100 commit (amend): testing:added special case for osx to solve a special issue with the temp directory
+20b07fa542d2376a287435a26c967a5ee104667f 7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e Sebastian Thiel <byronimo@gmail.com> 1290288327 +0100 checkout: moving from osx to master
+7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e 517ae56f517f5e7253f878dd1dc3c7c49f53df1a Sebastian Thiel <byronimo@gmail.com> 1290288333 +0100 merge osx: Merge made by recursive.
+517ae56f517f5e7253f878dd1dc3c7c49f53df1a 22a88a7ec38e29827264f558f0c1691b99102e23 Sebastian Thiel <byronimo@gmail.com> 1290289085 +0100 commit: fixed performance tests ... again, previously I was just working on an incorrect repository
+22a88a7ec38e29827264f558f0c1691b99102e23 685760ab33b8f9d7455b18a9ecb8c4c5b3315d66 Sebastian Thiel <byronimo@gmail.com> 1290342054 +0100 commit: Added zip_safe info to setup.py file
+685760ab33b8f9d7455b18a9ecb8c4c5b3315d66 9d6310db456de9952453361c860c3ae61b8674ea Sebastian Thiel <byronimo@gmail.com> 1290342681 +0100 commit: docs: added final docs for version 0.3.0, started new release 0.3.1
+9d6310db456de9952453361c860c3ae61b8674ea 0b813371f5a8af95152cae109d28c7c97bfaf79f Sebastian Thiel <byronimo@gmail.com> 1290358083 +0100 commit: Fixed API reference docs as far as possible
+0b813371f5a8af95152cae109d28c7c97bfaf79f 0b813371f5a8af95152cae109d28c7c97bfaf79f Sebastian Thiel <byronimo@gmail.com> 1290363019 +0100 checkout: moving from master to structure
+0b813371f5a8af95152cae109d28c7c97bfaf79f fafe8a77db75083de3e7af92185ecdb7f2d542d3 Sebastian Thiel <byronimo@gmail.com> 1290363468 +0100 commit: moved all contents, incl. submodule gitdb, up to the root directory
+fafe8a77db75083de3e7af92185ecdb7f2d542d3 e6019e16d5a74dc49eb7129ee7fd78b4de51dac2 Sebastian Thiel <byronimo@gmail.com> 1290363544 +0100 commit: flattened test folder structure, didn't adjust any file content yet
+e6019e16d5a74dc49eb7129ee7fd78b4de51dac2 83a9e4a0dad595188ff3fb35bc3dfc4d931eff6d Sebastian Thiel <byronimo@gmail.com> 1290366107 +0100 commit: Fixed setup script to work with changed folder structure
+83a9e4a0dad595188ff3fb35bc3dfc4d931eff6d 557800a1eba3b9cface0f319d8e6aa6bd2ec188d Sebastian Thiel <byronimo@gmail.com> 1290367011 +0100 commit: Updated MANIFEST and setup to include fixtures. Adjusted includes in all tests to work with the new directory structure
+557800a1eba3b9cface0f319d8e6aa6bd2ec188d 9ca0f897376e765989e92e44628228514f431458 Sebastian Thiel <byronimo@gmail.com> 1290371673 +0100 commit (amend): Updated MANIFEST and setup to include fixtures. Adjusted includes in all tests to work with the new directory structure
+9ca0f897376e765989e92e44628228514f431458 0b813371f5a8af95152cae109d28c7c97bfaf79f Sebastian Thiel <byronimo@gmail.com> 1290372294 +0100 checkout: moving from structure to master
+0b813371f5a8af95152cae109d28c7c97bfaf79f 9ca0f897376e765989e92e44628228514f431458 Sebastian Thiel <byronimo@gmail.com> 1290372302 +0100 checkout: moving from master to structure
+9ca0f897376e765989e92e44628228514f431458 6befb28efd86556e45bb0b213bcfbfa866cac379 Sebastian Thiel <byronimo@gmail.com> 1290372430 +0100 commit: updated changelog
+6befb28efd86556e45bb0b213bcfbfa866cac379 0b813371f5a8af95152cae109d28c7c97bfaf79f Sebastian Thiel <byronimo@gmail.com> 1290372432 +0100 checkout: moving from structure to master
+0b813371f5a8af95152cae109d28c7c97bfaf79f 94140bbfc523ae13e1e8045ebfed8a76fe0a1872 Sebastian Thiel <byronimo@gmail.com> 1290372438 +0100 merge structure: Merge made by recursive.
+94140bbfc523ae13e1e8045ebfed8a76fe0a1872 d01b428dbac4103b4f7d7b8fca32e01f70746c53 Sebastian Thiel <byronimo@gmail.com> 1290372442 +0100 commit (amend): !!WARNING!!: Directory structure changed, see commit message for instructions
+d01b428dbac4103b4f7d7b8fca32e01f70746c53 db3423d1eab11d00c5475e36eae8952512b07f4e Sebastian Thiel <byronimo@gmail.com> 1290373147 +0100 commit (amend): !**WARNING**!: Directory structure changed, see commit message for instructions
+db3423d1eab11d00c5475e36eae8952512b07f4e 5ed5b2011ec7cf72f19e6d53b588eea4adca68e5 Sebastian Thiel <byronimo@gmail.com> 1290373168 +0100 commit (amend): *!*WARNING*!*: Directory structure changed, see commit message for instructions
+5ed5b2011ec7cf72f19e6d53b588eea4adca68e5 470d4a7cc865d2702c326d9d1d1b0ab7afb49f0e Sebastian Thiel <byronimo@gmail.com> 1290373186 +0100 commit (amend): !##WARNING##!: Directory structure changed, see commit message for instructions
+470d4a7cc865d2702c326d9d1d1b0ab7afb49f0e e088424eb01bd47c6f0d313f465a21ee742e6f4a Sebastian Thiel <byronimo@gmail.com> 1290373209 +0100 commit (amend): If you use git-python as a submodule of your own project, which alters the sys.path to import it,
+e088424eb01bd47c6f0d313f465a21ee742e6f4a 48a17c87c15b2fa7ce2e84afa09484f354d57a39 Sebastian Thiel <byronimo@gmail.com> 1290373245 +0100 commit (amend): -#######->WARNING<-####### Directory structure changed, see commit message
+48a17c87c15b2fa7ce2e84afa09484f354d57a39 fca367548e365f93c58c47dea45507025269f59a Sebastian Thiel <byronimo@gmail.com> 1290374761 +0100 commit: Changed version to 0.3.1 (removed beta1) so that other projects can actually depend on git-python using the setuptools. Previously it would claim the version did not exist, probably because the setuptools are just comparing strings
+fca367548e365f93c58c47dea45507025269f59a fca367548e365f93c58c47dea45507025269f59a Sebastian Thiel <byronimo@gmail.com> 1290435685 +0100 checkout: moving from master to refs
+fca367548e365f93c58c47dea45507025269f59a dec4663129f72321a14efd6de63f14a7419e3ed2 Sebastian Thiel <byronimo@gmail.com> 1290500057 +0100 commit: Split ref implementation up into multiple files, to make room for the log implementation
+dec4663129f72321a14efd6de63f14a7419e3ed2 fca367548e365f93c58c47dea45507025269f59a Sebastian Thiel <byronimo@gmail.com> 1290500094 +0100 checkout: moving from refs to master
+fca367548e365f93c58c47dea45507025269f59a dec4663129f72321a14efd6de63f14a7419e3ed2 Sebastian Thiel <byronimo@gmail.com> 1290500104 +0100 checkout: moving from master to refs
+dec4663129f72321a14efd6de63f14a7419e3ed2 739fa140235cc9d65c632eaf1f5cacc944d87cfb Sebastian Thiel <byronimo@gmail.com> 1290501284 +0100 commit: Fixed remaining tests - lets hope that everything is indeed working correctly - as imports changed, every line of code needs to be run to assure all names can be resolved
+739fa140235cc9d65c632eaf1f5cacc944d87cfb d89b2cbcd57ecd5600ecf0202b396141c1a856a3 Sebastian Thiel <byronimo@gmail.com> 1290512134 +0100 commit: Initial interface including some of the implementation of the RefLog. TestCase scetched out for now
+d89b2cbcd57ecd5600ecf0202b396141c1a856a3 6e5aae2fc8c3832bdae1cd5e0a269405fb059231 Sebastian Thiel <byronimo@gmail.com> 1290512159 +0100 commit (amend): Initial interface including some of the implementation of the RefLog. TestCase scetched out for now
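
The five fixtures added next each corrupt a single field of an otherwise well-formed reflog entry, so a parser's error handling can be exercised. For orientation: a reflog line has the shape <old-sha> <new-sha> <name> <email> <unix-time> <tz><TAB><message>. The following is a minimal illustrative sketch of such a parser, assuming exactly that shape; it is not GitPython's actual RefLog code, but it is strict enough that each reflog_invalid_* fixture below should fail on its second line:

    import re

    # Minimal sketch, not GitPython's parser. A reflog line is assumed to be:
    #   <old-sha> <new-sha> <name> <<email>> <unix-time> <tz>\t<message>
    # Real reflog files separate the message with a tab (rendered as a space
    # in this listing); reflog_invalid_sep presumably breaks that separator.
    REFLOG_LINE = re.compile(
        r'^(?P<old>[0-9a-fA-F]{40}) '    # old sha: exactly 40 hex digits
        r'(?P<new>[0-9a-fA-F]{40}) '     # new sha: exactly 40 hex digits
        r'(?P<name>.+) '                 # committer name, may contain spaces
        r'<(?P<email>[^>]+)> '           # email enclosed in angle brackets
        r'(?P<time>\d+) '                # unix timestamp in seconds
        r'(?P<tz>[+-]\d{4})\t'           # utc offset, then the tab separator
        r'(?P<message>.*)$')             # free-form message

    def parse_reflog_line(line):
        """Split one reflog line into its fields; raise ValueError on any of
        the malformations carried by the reflog_invalid_* fixtures."""
        match = REFLOG_LINE.match(line.rstrip('\n'))
        if match is None:
            raise ValueError('malformed reflog line: %r' % line)
        return match.groupdict()

Applied to the first line of any fixture in this diff, the sketch returns the old and new hexshas, the actor, the timestamp, the offset and the message as a dict.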
diff --git a/git/test/fixtures/reflog_invalid_date b/git/test/fixtures/reflog_invalid_date
new file mode 100644
index 00000000..938e4f75
--- /dev/null
+++ b/git/test/fixtures/reflog_invalid_date
@@ -0,0 +1,2 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1271229940 commit: conf.py: Adjusted version to match with the actual version
diff --git a/git/test/fixtures/reflog_invalid_email b/git/test/fixtures/reflog_invalid_email
new file mode 100644
index 00000000..121096aa
--- /dev/null
+++ b/git/test/fixtures/reflog_invalid_email
@@ -0,0 +1,2 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail. 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
diff --git a/git/test/fixtures/reflog_invalid_newsha b/git/test/fixtures/reflog_invalid_newsha
new file mode 100644
index 00000000..0d45bb7a
--- /dev/null
+++ b/git/test/fixtures/reflog_invalid_newsha
@@ -0,0 +1,2 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d Sebastian Thiel <byronimo@gmail.com> 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
diff --git a/git/test/fixtures/reflog_invalid_oldsha b/git/test/fixtures/reflog_invalid_oldsha
new file mode 100644
index 00000000..b78605ff
--- /dev/null
+++ b/git/test/fixtures/reflog_invalid_oldsha
@@ -0,0 +1,2 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb3 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
diff --git a/git/test/fixtures/reflog_invalid_sep b/git/test/fixtures/reflog_invalid_sep
new file mode 100644
index 00000000..fddcd6e5
--- /dev/null
+++ b/git/test/fixtures/reflog_invalid_sep
@@ -0,0 +1,2 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
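
To summarize the deliberate defects: reflog_invalid_date drops the timezone offset, reflog_invalid_email truncates the address before its closing bracket, reflog_invalid_newsha and reflog_invalid_oldsha shorten one of the 40-character hexshas, and reflog_invalid_sep (whose damage is whitespace and therefore invisible in this listing) presumably replaces the tab before the message. A hedged sketch of a sweep over all five, reusing the illustrative parse_reflog_line above and assuming the fixtures live under git/test/fixtures as this diff creates them:

    import os

    FIXTURES = os.path.join('git', 'test', 'fixtures')
    INVALID = ('reflog_invalid_date', 'reflog_invalid_email',
               'reflog_invalid_newsha', 'reflog_invalid_oldsha',
               'reflog_invalid_sep')

    for name in INVALID:
        with open(os.path.join(FIXTURES, name)) as fh:
            first, second = fh.readlines()   # every fixture holds two lines
        parse_reflog_line(first)             # line one is kept valid
        try:
            parse_reflog_line(second)        # line two carries the defect
        except ValueError:
            pass                             # expected: the parser rejects it
        else:
            raise AssertionError('%s line 2 should not parse' % name)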
diff --git a/git/test/fixtures/reflog_master b/git/test/fixtures/reflog_master
new file mode 100644
index 00000000..2fa13e21
--- /dev/null
+++ b/git/test/fixtures/reflog_master
@@ -0,0 +1,124 @@
+501bf602abea7d21c3dbb409b435976e92033145 82b8902e033430000481eb355733cd7065342037 Sebastian Thiel <byronimo@gmail.com> 1270634931 +0200 commit: Used this release for a first beta of the 0.2 branch of development
+82b8902e033430000481eb355733cd7065342037 69361d96a59381fde0ac34d19df2d4aff05fb9a9 Sebastian Thiel <byronimo@gmail.com> 1271229940 +0200 commit: conf.py: Adjusted version to match with the actual version
+69361d96a59381fde0ac34d19df2d4aff05fb9a9 0d6ceabf5b90e7c0690360fc30774d36644f563c Sebastian Thiel <byronimo@gmail.com> 1272614247 +0200 merge integration: Fast-forward
+22a0289972b365b7912340501b52ca3dd98be289 143b927307d46ccb8f1cc095739e9625c03c82ff Sebastian Thiel <byronimo@gmail.com> 1272988814 +0200 commit: TODO: Removed all entries but left a mesage about where to find the issuee on lighthouse.
+143b927307d46ccb8f1cc095739e9625c03c82ff e41c727be8dbf8f663e67624b109d9f8b135a4ab Sebastian Thiel <byronimo@gmail.com> 1273140152 +0200 commit: README: Added mailing list and issue tracker information
+c083f3d0b853e723d0d4b00ff2f1ec5f65f05cba de5bc8f7076c5736ef1efa57345564fbc563bd19 Sebastian Thiel <byronimo@gmail.com> 1273522570 +0200 commit: Handle filenames with embedded spaces when generating diffs
+de5bc8f7076c5736ef1efa57345564fbc563bd19 8caeec1b15645fa53ec5ddc6e990e7030ffb7c5a Sebastian Thiel <byronimo@gmail.com> 1273529174 +0200 commit: IndexFile.add: Fixed incorrect path handling if path rewriting was desired and absolute paths were given
+600fcbc1a2d723f8d51e5f5ab6d9e4c389010e1c 1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af Sebastian Thiel <byronimo@gmail.com> 1274811103 +0200 commit: diff: by limiting the splitcount to 5, a subtle bug was introduced as the newline at the end of the split line was not split away automatically. Added test for this, and the trivial fix
+1019d4cf68d1acdbb4d6c1abb7e71ac9c0f581af 17af1f64d5f1e62d40e11b75b1dd48e843748b49 Sebastian Thiel <byronimo@gmail.com> 1274877948 +0200 commit: BlockingLockFile: added sanity check that raises IOError if the directory containing the lock was removed. This is unlikely to happen in a production envrironment, but may happen during testing, as folders are moved/deleted once the test is complete. Daemons might still be waiting for something, and they should be allowed to terminate instead of waiting for a possibly long time
+17af1f64d5f1e62d40e11b75b1dd48e843748b49 34ba8ffba0b3b4d21da7bcea594cc3631e422142 Sebastian Thiel <byronimo@gmail.com> 1274906080 +0200 commit: refs: a Reference can now be created by assigning a commit or object (for convenience)
+34ba8ffba0b3b4d21da7bcea594cc3631e422142 11dc82538cc1ebb537c866c8e76146e384cdfe24 Sebastian Thiel <byronimo@gmail.com> 1274906333 +0200 commit: refs: a Reference can now be created by assigning a commit or object (for convenience)
+11dc82538cc1ebb537c866c8e76146e384cdfe24 34ba8ffba0b3b4d21da7bcea594cc3631e422142 Sebastian Thiel <byronimo@gmail.com> 1274906338 +0200 HEAD~1: updating HEAD
+34ba8ffba0b3b4d21da7bcea594cc3631e422142 de84cbdd0f9ef97fcd3477b31b040c57192e28d9 Sebastian Thiel <byronimo@gmail.com> 1274906431 +0200 commit (amend): refs: a Reference can now be created by assigning a commit or object (for convenience)
+de84cbdd0f9ef97fcd3477b31b040c57192e28d9 ecf37a1b4c2f70f1fc62a6852f40178bf08b9859 Sebastian Thiel <byronimo@gmail.com> 1274910053 +0200 commit: index: index-add fixed to always append a newline after each item. In git has unified its way it reads from stdin, now it wants all items to be terminated by a newline usually. Previously, it could have been that it really didn't want to have a termination character when the last item was written to the file. Bumped the minimum requirements to 1.7.0 to be sure it is working as I think it will.
+ecf37a1b4c2f70f1fc62a6852f40178bf08b9859 1ee2afb00afaf77c883501eac8cd614c8229a444 Sebastian Thiel <byronimo@gmail.com> 1274914700 +0200 commit: cmd: By default, on linux, the parent file handles will be closed to leave the child less cluttered, and make it easier to debug as it will only have the file descriptors we set. It appears to be more stable regarding the stdin-is-closed-but-child-doesn't-realize-this issue
+1ee2afb00afaf77c883501eac8cd614c8229a444 bd45e9267ab0d3f37e59ecc8b87d0ad19abad4ad Sebastian Thiel <byronimo@gmail.com> 1275324366 +0200 commit: gitcmd: may now receive extra keyword arguments to be passed directly to the subproces.Popen invocation. It could be used to pass custom environments, without changing the own one
+bd45e9267ab0d3f37e59ecc8b87d0ad19abad4ad 6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e Sebastian Thiel <byronimo@gmail.com> 1275324409 +0200 commit (amend): gitcmd: may now receive extra keyword arguments to be passed directly to the subproces.Popen invocation. It could be used to pass custom environments, without changing the own one (#26)
+6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e e79999c956e2260c37449139080d351db4aa3627 Sebastian Thiel <byronimo@gmail.com> 1275549608 +0200 commit: git.cmd: moved hardcoded chunksize when duplicating stream data into easy-to-change class member variable
+e79999c956e2260c37449139080d351db4aa3627 412632599479a8e5991a07ecb67bc52b85c60755 Sebastian Thiel <byronimo@gmail.com> 1275550524 +0200 commit: git.cmd: using communicate in the main branch of execution, which might not make a big difference, but perhaps its smarter about broken pipes.
+412632599479a8e5991a07ecb67bc52b85c60755 25dca42bac17d511b7e2ebdd9d1d679e7626db5f Sebastian Thiel <byronimo@gmail.com> 1275550670 +0200 commit (amend): git.cmd: using communicate in the main branch of execution, which might not make a big difference, but perhaps its smarter about broken pipes.
+25dca42bac17d511b7e2ebdd9d1d679e7626db5f 6fbb69306c0e14bacb8dcb92a89af27d3d5d631f Sebastian Thiel <byronimo@gmail.com> 1275665431 +0200 commit (merge): Merge branch 'odb'
+6fbb69306c0e14bacb8dcb92a89af27d3d5d631f a243827ab3346e188e99db2f9fc1f916941c9b1a Sebastian Thiel <byronimo@gmail.com> 1275685591 +0200 commit: Implemented stream tests, found a bug on the way, slowly a test-framework for streams starts to show up, but its not yet there
+a243827ab3346e188e99db2f9fc1f916941c9b1a 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1275690001 +0200 commit: Removed compression flag from IStream and OStream types, as a valid object will always be compressed if generated by the system ( even future memory db's will compress it )
+7c1169f6ea406fec1e26e99821e18e66437e65eb c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca Sebastian Thiel <byronimo@gmail.com> 1275746174 +0200 commit: Added basic channel implementation including test
+c69b6b979e3d6bd01ec40e75b92b21f7a391f0ca 7c1169f6ea406fec1e26e99821e18e66437e65eb Sebastian Thiel <byronimo@gmail.com> 1275746196 +0200 HEAD~1: updating HEAD
+7c1169f6ea406fec1e26e99821e18e66437e65eb f91495e271597034226f1b9651345091083172c4 Sebastian Thiel <byronimo@gmail.com> 1276339280 +0200 merge async: Merge made by recursive.
+f91495e271597034226f1b9651345091083172c4 5c631ca192848fed3068b31b1389cd92a0c0cdca Sebastian Thiel <byronimo@gmail.com> 1276340638 +0200 commit: Removed async from this repository, put it into own one which now comes in as external, using a git-submodule
+5c631ca192848fed3068b31b1389cd92a0c0cdca f91495e271597034226f1b9651345091083172c4 Sebastian Thiel <byronimo@gmail.com> 1276345979 +0200 HEAD~1: updating HEAD
+f91495e271597034226f1b9651345091083172c4 86ea63504f3e8a74cfb1d533be9d9602d2d17e27 Sebastian Thiel <byronimo@gmail.com> 1276346049 +0200 commit: Removed async from tree
+86ea63504f3e8a74cfb1d533be9d9602d2d17e27 6c1faef799095f3990e9970bc2cb10aa0221cf9c Sebastian Thiel <byronimo@gmail.com> 1276356043 +0200 commit: Removed odb from project, it is now used as a submodule named gitdb, which was added instead
+6c1faef799095f3990e9970bc2cb10aa0221cf9c 28ed48c93f4cc8b6dd23c951363e5bd4e6880992 Sebastian Thiel <byronimo@gmail.com> 1276503381 +0200 commit: Implemented initial version of tree serialization which appears to work according to a simple test
+28ed48c93f4cc8b6dd23c951363e5bd4e6880992 fe5289ed8311fecf39913ce3ae86b1011eafe5f7 Sebastian Thiel <byronimo@gmail.com> 1276506168 +0200 commit: tree now uses less memory for its cache as it stores the bare deserialized information - this also speeds up later serialization after changes. its clear though that retrieving actual objects is slower currently as these are not cached anymore. Its worth thinking about moving these encoding, decoding routines to gitdb
+fe5289ed8311fecf39913ce3ae86b1011eafe5f7 f8dabbf4f92a7023181777e9d40355562474f71a Sebastian Thiel <byronimo@gmail.com> 1276512508 +0200 commit: tree: added TreeModifier, allowing to adjust existing trees safely and or fast, while staying compatible with serialization which requires it to be sorted
+f8dabbf4f92a7023181777e9d40355562474f71a d9240918aa03e49feabe43af619019805ac76786 Sebastian Thiel <byronimo@gmail.com> 1276512707 +0200 commit (amend): tree: added TreeModifier, allowing to adjust existing trees safely and or fast, while staying compatible with serialization which requires it to be sorted
+d9240918aa03e49feabe43af619019805ac76786 38b3cfb9b24a108e0720f7a3f8d6355f7e0bb1a9 Sebastian Thiel <byronimo@gmail.com> 1276527612 +0200 merge index: Merge made by recursive.
+38b3cfb9b24a108e0720f7a3f8d6355f7e0bb1a9 c9dbf201b4f0b3c2b299464618cb4ecb624d272c Sebastian Thiel <byronimo@gmail.com> 1276529105 +0200 commit: Moved small types that had their own module into the utils module
+c9dbf201b4f0b3c2b299464618cb4ecb624d272c 45e87305bd4f050c2d0309c32fe5de499fc38df3 Sebastian Thiel <byronimo@gmail.com> 1276554725 +0200 commit: Reimplemented Lock handling to be conforming to the git lock protocol, which is actually more efficient than the previous implementation
+45e87305bd4f050c2d0309c32fe5de499fc38df3 06590aee389f4466e02407f39af1674366a74705 Sebastian Thiel <byronimo@gmail.com> 1276555536 +0200 commit (amend): Reimplemented Lock handling to be conforming to the git lock protocol, which is actually more efficient than the previous implementation
+06590aee389f4466e02407f39af1674366a74705 1d2307532d679393ae067326e4b6fa1a2ba5cc06 Sebastian Thiel <byronimo@gmail.com> 1276556905 +0200 commit: Moved LockedFD and its test into the gitdb project
+1d2307532d679393ae067326e4b6fa1a2ba5cc06 e837b901dcfac82e864f806c80f4a9cbfdb9c9f3 Sebastian Thiel <byronimo@gmail.com> 1276607908 +0200 commit: Move LazyMixin type to gitdb, index reading now uses file_contents_ro from gitdb as well
+e837b901dcfac82e864f806c80f4a9cbfdb9c9f3 b82dbf538ac0d03968a0f5b7e2318891abefafaa Sebastian Thiel <byronimo@gmail.com> 1276870827 +0200 commit: GitCmd implementation of gitdb base moved to git-python where it belongs. Previously it was located in gitdb, which doesn't have any facilities to use the git command
+b82dbf538ac0d03968a0f5b7e2318891abefafaa f164627a85ed7b816759871a76db258515b85678 Sebastian Thiel <byronimo@gmail.com> 1277057845 +0200 commit: db: added pure python git database
+f164627a85ed7b816759871a76db258515b85678 ac62760c52abf28d1fd863f0c0dd48bc4a23d223 Sebastian Thiel <byronimo@gmail.com> 1277117506 +0200 commit: index.add: now uses gitdb.store functionality instead of git-hash-file. The python version is about as fast, but could support multithreading using async
+ac62760c52abf28d1fd863f0c0dd48bc4a23d223 0fdf6c3aaff49494c47aaeb0caa04b3016e10a26 Sebastian Thiel <byronimo@gmail.com> 1277127929 +0200 commit: index: Entries are now using flags internally, instead of reducing the flag information to just the stage ( just to be closer to the git-original )
+0fdf6c3aaff49494c47aaeb0caa04b3016e10a26 0aeb491d3d8f53e07fb21f36251be4880170c5ab Sebastian Thiel <byronimo@gmail.com> 1277129321 +0200 commit: index.add does not need the git clt anymore
+0aeb491d3d8f53e07fb21f36251be4880170c5ab 91725f0fc59aa05ef68ab96e9b29009ce84668a5 Sebastian Thiel <byronimo@gmail.com> 1277129385 +0200 commit (amend): index.add does not need the git clt anymore
+91725f0fc59aa05ef68ab96e9b29009ce84668a5 778234d544b3f58dd415aaf10679d15b01a5281f Sebastian Thiel <byronimo@gmail.com> 1277201033 +0200 merge writetree: Merge made by recursive.
+778234d544b3f58dd415aaf10679d15b01a5281f 57050184f3d962bf91511271af59ee20f3686c3f Sebastian Thiel <byronimo@gmail.com> 1277301014 +0200 merge fromtree: Merge made by recursive.
+57050184f3d962bf91511271af59ee20f3686c3f 129f90aa8d83d9b250c87b0ba790605c4a2bb06a Sebastian Thiel <byronimo@gmail.com> 1277334478 +0200 commit: Multiple partly critical bugfixes related to index handling
+129f90aa8d83d9b250c87b0ba790605c4a2bb06a a1adb421c2ee3e4868ea70d440dd82896219ed8f Sebastian Thiel <byronimo@gmail.com> 1277388148 +0200 commit: aggressive_tree_merge: fixed incorrect handling of one branch, it was just not implemented causing incorrect merge results. Added test to cover this issue
+a1adb421c2ee3e4868ea70d440dd82896219ed8f 55dcc17c331f580b3beeb4d5decf64d3baf94f2e Sebastian Thiel <byronimo@gmail.com> 1277395720 +0200 commit (amend): aggressive_tree_merge: fixed incorrect handling of one branch, it was just not implemented causing incorrect merge results. Added test to cover this issue
+55dcc17c331f580b3beeb4d5decf64d3baf94f2e ca131dd61e26f46f49ee3f70763f994cf9512665 Sebastian Thiel <byronimo@gmail.com> 1277401303 +0200 commit: GitCmdStreamReader: fixed terrible bug which only kicked in if the stream was actually empty. This is a rare case that can happen during stream testing. Theoretically there shouldn't be any empty streams of course, but practically they do exist sometimes ;)
+ca131dd61e26f46f49ee3f70763f994cf9512665 feb1ea0f4aacb9ea6dc4133900e65bf34c0ee02d Sebastian Thiel <byronimo@gmail.com> 1277401306 +0200 commit (amend): GitCmdStreamReader: fixed terrible bug which only kicked in if the stream was actually empty. This is a rare case that can happen during stream testing. Theoretically there shouldn't be any empty streams of course, but practically they do exist sometimes ;); fixed stream.seek implementation, which previously used seek on standard output
+feb1ea0f4aacb9ea6dc4133900e65bf34c0ee02d 402a6c2808db4333217aa300d0312836fd7923bd Sebastian Thiel <byronimo@gmail.com> 1277407147 +0200 commit: IndexFile.add: writing of the index file can now optionally be turned off. The default is to write the physical index, which is the behaviour you would expect
+402a6c2808db4333217aa300d0312836fd7923bd 58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 Sebastian Thiel <byronimo@gmail.com> 1277473196 +0200 merge index: Fast-forward
+58fb1187b7b8f1e62d3930bdba9be5aba47a52c6 8d2239f24f6a54d98201413d4f46256df0d6a5f3 Sebastian Thiel <byronimo@gmail.com> 1277745383 +0200 merge sha20: Merge made by recursive.
+8d2239f24f6a54d98201413d4f46256df0d6a5f3 f1401803ccf7db5d897a5ef4b27e2176627c430e Sebastian Thiel <byronimo@gmail.com> 1277756712 +0200 commit: Fixed performance tests which broke in the course of the sha1-20 byte changes
+f1401803ccf7db5d897a5ef4b27e2176627c430e 6917ae4ce9eaa0f5ea91592988c1ea830626ac3a Sebastian Thiel <byronimo@gmail.com> 1277806256 +0200 commit: Diff: fixed bug that caused a string to end up as a blob mode
+6917ae4ce9eaa0f5ea91592988c1ea830626ac3a fd96cceded27d1372bdc1a851448d2d8613f60f3 Sebastian Thiel <byronimo@gmail.com> 1277999899 +0200 merge docs: Merge made by recursive.
+fd96cceded27d1372bdc1a851448d2d8613f60f3 f683c6623f73252645bb2819673046c9d397c567 Sebastian Thiel <byronimo@gmail.com> 1278082451 +0200 commit: Fixed broken 0.2 documentation, it didn't contain the API reference previously due to import errors and a somewhat inconsistent working tree that occurred when switching branches ...
+f683c6623f73252645bb2819673046c9d397c567 a4287f65878000b42d11704692f9ea3734014b4c Sebastian Thiel <byronimo@gmail.com> 1278092317 +0200 commit: win32 compatability adjustments
+a4287f65878000b42d11704692f9ea3734014b4c ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d Sebastian Thiel <byronimo@gmail.com> 1278517416 +0200 merge revparse: Merge made by recursive.
+ca288d443f4fc9d790eecb6e1cdf82b6cdd8dc0d 5fd6cc37fd07c25cb921b77b4f658b7e8fc132b3 Sebastian Thiel <byronimo@gmail.com> 1278536545 +0200 commit: Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
+5fd6cc37fd07c25cb921b77b4f658b7e8fc132b3 76af62b3c5a26638fcad9a3fe401fba566fb7037 Sebastian Thiel <byronimo@gmail.com> 1278538933 +0200 commit (amend): Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
+76af62b3c5a26638fcad9a3fe401fba566fb7037 b425301ad16f265157abdaf47f7af1c1ea879068 Sebastian Thiel <byronimo@gmail.com> 1278539147 +0200 commit (amend): Adjusted clone method to allow static classmethod clone ( using clone_from ) as well as the previous instance method clone to keep it compatible
+b425301ad16f265157abdaf47f7af1c1ea879068 3288a244428751208394d8137437878277ceb71f Sebastian Thiel <byronimo@gmail.com> 1278582561 +0200 commit: setup.py: fixed requirement - its interesting to see that there are two different keywords for distutils and setuptools, the latter one doesn't read the ones of the first one, unfortunately
+3288a244428751208394d8137437878277ceb71f 08457a7a6b6ad4f518fad0d5bca094a2b5b38fbe Sebastian Thiel <byronimo@gmail.com> 1278670718 +0200 commit: Added python 2.4 support: Repo will now use the original GitCmdObjectDB in python 2.4, as the pure python implementation cannot work without memory maps
+08457a7a6b6ad4f518fad0d5bca094a2b5b38fbe 258403da9c2a087b10082d26466528fce3de38d4 Sebastian Thiel <byronimo@gmail.com> 1278671744 +0200 commit: bumped verison to 0.3.0 beta2
+258403da9c2a087b10082d26466528fce3de38d4 55b67e8194b8b4d9e73e27feadbf9af6593e4600 Sebastian Thiel <byronimo@gmail.com> 1278927490 +0200 pull gitorious master: Fast-forward
+55b67e8194b8b4d9e73e27feadbf9af6593e4600 bcd37b68533d0cceb7e73dd1ed1428fa09f7dc17 Sebastian Thiel <byronimo@gmail.com> 1279007300 +0200 commit: Fixed incorrect use of Blob.data in performance test
+bcd37b68533d0cceb7e73dd1ed1428fa09f7dc17 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279110447 +0200 commit: Added performance test to compare inst.__class__() vs type(inst)() class. The first one is faster, although I would have expected the latter one to be faster
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 a64c5f23c38f06c37a0ca64796368019dfb0d818 Sebastian Thiel <byronimo@gmail.com> 1279130658 +0200 commit: Correct Repo documentation in API reference
+a64c5f23c38f06c37a0ca64796368019dfb0d818 24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 Sebastian Thiel <byronimo@gmail.com> 1279130719 +0200 HEAD~1: updating HEAD
+24740f22c59c3bcafa7b2c1f2ec997e4e14f3615 17a172920fde8c6688c8a1a39f258629b8b73757 Sebastian Thiel <byronimo@gmail.com> 1279131269 +0200 commit: Fixed the missing Repo type documentation thanks to a hint of gasche, fixed a few more resT syntax errors on the way
+17a172920fde8c6688c8a1a39f258629b8b73757 cbb58869063fe803d232f099888fe9c23510de7b Sebastian Thiel <byronimo@gmail.com> 1279197254 +0200 merge gitorious/master: Fast-forward
+cbb58869063fe803d232f099888fe9c23510de7b 89422841e46efa99bda49acfbe33ee1ca5122845 Sebastian Thiel <byronimo@gmail.com> 1279533542 +0200 pull gitorious master: Fast-forward
+89422841e46efa99bda49acfbe33ee1ca5122845 192472f9673b18c91ce618e64e935f91769c50e7 Sebastian Thiel <byronimo@gmail.com> 1279814094 +0200 merge gitorious/master: Fast-forward
+192472f9673b18c91ce618e64e935f91769c50e7 fa3fce80aeb57299633535b9e1c9d08db710ff05 Sebastian Thiel <byronimo@gmail.com> 1281701051 +0200 commit: unicode handling in messages and trees was improved. Messages are now written according to the encoding of the commit object, and decoded using that information as well. Trees will encode and decode their names with utf8
+fa3fce80aeb57299633535b9e1c9d08db710ff05 394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 Sebastian Thiel <byronimo@gmail.com> 1281702078 +0200 commit (amend): unicode handling in messages and trees was improved. Messages are now written according to the encoding of the commit object, and decoded using that information as well. Trees will encode and decode their names with utf8
+394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3 c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a Sebastian Thiel <byronimo@gmail.com> 1283969691 +0200 merge integration: Fast-forward
+c4d5caa79e6d88bb3f98bfbefa3bfa039c7e157a 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287134990 +0200 commit: Fixed bug in http://byronimo.lighthouseapp.com/projects/51787/tickets/44-remoteref-fails-when-there-is-character-in-the-name using supplied patch ( which was manually applied ).
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a fc650aa6869639548435ce2760d42c9cdd909d99 Sebastian Thiel <byronimo@gmail.com> 1287135891 +0200 commit: Added test to verify the actor class can handle unicode names correctly. This works because regex can handle unicode, and will return unicode instances instead of strings if required. Its quite amazing actually.
+fc650aa6869639548435ce2760d42c9cdd909d99 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287136504 +0200 HEAD~1: updating HEAD
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a a88173281ec56cb378a293d0170e11a1bda96a55 Sebastian Thiel <byronimo@gmail.com> 1287138898 +0200 merge unicode: Merge made by recursive.
+a88173281ec56cb378a293d0170e11a1bda96a55 741dfaadf732d4a2a897250c006d5ef3d3cd9f3a Sebastian Thiel <byronimo@gmail.com> 1287139082 +0200 HEAD~1: updating HEAD
+741dfaadf732d4a2a897250c006d5ef3d3cd9f3a 13647590f96fb5a22cb60f12c5a70e00065a7f3a Sebastian Thiel <byronimo@gmail.com> 1287139254 +0200 merge unicode: Merge made by recursive.
+13647590f96fb5a22cb60f12c5a70e00065a7f3a 94029ce1420ced83c3e5dcd181a2280b26574bc9 Sebastian Thiel <byronimo@gmail.com> 1287139994 +0200 commit: Adjusted regex to support whitespace - it was a little restrictive previously, although there was absolutely no need for that.
+94029ce1420ced83c3e5dcd181a2280b26574bc9 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288000691 +0200 commit: .gitignore will now ignore netbeans projects
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 a2b9ded87baf0f32ae94c10c5851a0468a45f003 Sebastian Thiel <byronimo@gmail.com> 1288198935 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+a2b9ded87baf0f32ae94c10c5851a0468a45f003 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288198984 +0200 HEAD~1: updating HEAD
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 Sebastian Thiel <byronimo@gmail.com> 1288198991 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+148eb761aeaa4c3913e1766db0a7df0a5b5c8b20 8858a63cb33319f3e739edcbfafdae3ec0fefa33 Sebastian Thiel <byronimo@gmail.com> 1288199023 +0200 HEAD~1: updating HEAD
+8858a63cb33319f3e739edcbfafdae3ec0fefa33 538e8265e04f69bb9bd73a10ddb4e8e9677fb140 Sebastian Thiel <byronimo@gmail.com> 1288199049 +0200 commit: docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+538e8265e04f69bb9bd73a10ddb4e8e9677fb140 97ab197140b16027975c7465a5e8786e6cc8fea1 Sebastian Thiel <byronimo@gmail.com> 1288203452 +0200 commit (amend): docs: untracked_files is a property, but was used like a function, see http://groups.google.com/group/git-python/browse_thread/thread/84ed1835e26a5296?hl=en
+97ab197140b16027975c7465a5e8786e6cc8fea1 3da3837fe2ec8152e1460f747d18290b52304868 Sebastian Thiel <byronimo@gmail.com> 1288203532 +0200 commit: cmd: improved error handling and debug printing
+3da3837fe2ec8152e1460f747d18290b52304868 2c0b92e40ece170b59bced0cea752904823e06e7 Sebastian Thiel <byronimo@gmail.com> 1288203543 +0200 commit (amend): cmd: improved error handling and debug printing
+2c0b92e40ece170b59bced0cea752904823e06e7 1b6b9510e0724bfcb4250f703ddf99d1e4020bbc Sebastian Thiel <byronimo@gmail.com> 1288205467 +0200 commit: Fixed bug that would cause the author's email to be a generic default one, instead of the existing and valid. The rest of the ConfigParser handling is correct, as it reads all configuration files available to git
+1b6b9510e0724bfcb4250f703ddf99d1e4020bbc 0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f Sebastian Thiel <byronimo@gmail.com> 1288208986 +0200 commit: index.reset: updated parameter docs, but most importantly, the method now has better testing for the use of paths during reset. The IndexFile now implements this on its own, which also allows for something equivalent to git-reset --hard -- <paths>, which is not possible in the git command for some probably very good reason
+0d5bfb5d6d22f8fe8c940f36e1fbe16738965d5f 735b28bc65964da5b181dffcccb1d05555b5acab Sebastian Thiel <byronimo@gmail.com> 1289033220 +0100 commit: test_refs: fixed failing tests just by making it less strict. It is dependent on the setup of the surrounding repository, hence the amount of ref-types found is actually variable, as long as they get more
+735b28bc65964da5b181dffcccb1d05555b5acab bd7fb976ab0607592875b5697dc76c117a18dc73 Sebastian Thiel <byronimo@gmail.com> 1289034759 +0100 commit (amend): test_refs: fixed failing tests just by making it less strict. It is dependent on the setup of the surrounding repository, hence the amount of ref-types found is actually variable, as long as they get more
+bd7fb976ab0607592875b5697dc76c117a18dc73 a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 Sebastian Thiel <byronimo@gmail.com> 1289379557 +0100 commit: tutorial: Fixed incorrect initialization code for bare repo, thank you, Bryan Bishop
+a1d1d2cb421f16bd277d7c4ce88398ff0f5afb29 f1545bd9cd6953c5b39c488bf7fe179676060499 Sebastian Thiel <byronimo@gmail.com> 1290118292 +0100 merge submodule: Merge made by recursive.
+f1545bd9cd6953c5b39c488bf7fe179676060499 45c0f285a6d9d9214f8167742d12af2855f527fb Sebastian Thiel <byronimo@gmail.com> 1290122860 +0100 merge docs: Merge made by recursive.
+45c0f285a6d9d9214f8167742d12af2855f527fb 315c303214cef855499f0c7eda46b7ed82dceecb Sebastian Thiel <byronimo@gmail.com> 1290158850 +0100 commit: test_submodule: fixed failures that arose due to changes of the original submodule names. Also, a major bug was fixed that cased submodules to always being updated recursively when using the RootModule.update method
+315c303214cef855499f0c7eda46b7ed82dceecb 7dd618655c96ff32b5c30e41a5406c512bcbb65f Sebastian Thiel <byronimo@gmail.com> 1290158895 +0100 commit (amend): test_submodule: fixed failures that arose due to changes of the original submodule names. Also, a major bug was fixed that cased submodules to always being updated recursively when using the RootModule.update method
+7dd618655c96ff32b5c30e41a5406c512bcbb65f 2ab454f0ccf09773a4f51045329a69fd73559414 Sebastian Thiel <byronimo@gmail.com> 1290188727 +0100 commit: remote: parsing of fetch information now reacts to fatal errors. Previously it would just bump into an assertion
+2ab454f0ccf09773a4f51045329a69fd73559414 b00ad00130389f5b00da9dbfd89c3e02319d2999 Sebastian Thiel <byronimo@gmail.com> 1290196658 +0100 commit: submodule: When adding an existing submodule, when retrieving the binsha, we will now consider not only the tree, but the index too
+b00ad00130389f5b00da9dbfd89c3e02319d2999 8867348ca772cdce7434e76eed141f035b63e928 Sebastian Thiel <byronimo@gmail.com> 1290196804 +0100 commit: Bumped version number to 0.3.1
+8867348ca772cdce7434e76eed141f035b63e928 8d0aa1ef19e2c3babee458bd4504820f415148e0 Sebastian Thiel <byronimo@gmail.com> 1290271885 +0100 commit: Fixed performance tests which broke in the meanwhile - they definitely don't run often enough, which is because they intentionally don't have a package initialization file
+8d0aa1ef19e2c3babee458bd4504820f415148e0 8e0e315a371cdfc80993a1532f938d56ed7acee4 Sebastian Thiel <byronimo@gmail.com> 1290280591 +0100 commit: submodule: Fixed capital error when handling the submodule's branch, which was returned in the submodules super repository, not in the submodule's module
+8e0e315a371cdfc80993a1532f938d56ed7acee4 7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e Sebastian Thiel <byronimo@gmail.com> 1290286993 +0100 merge gitorious/win32: Merge made by recursive.
+7c72b9a3eaabbe927ba77d4f69a62f35fbe60e2e 517ae56f517f5e7253f878dd1dc3c7c49f53df1a Sebastian Thiel <byronimo@gmail.com> 1290288333 +0100 merge osx: Merge made by recursive.
+517ae56f517f5e7253f878dd1dc3c7c49f53df1a 22a88a7ec38e29827264f558f0c1691b99102e23 Sebastian Thiel <byronimo@gmail.com> 1290289085 +0100 commit: fixed performance tests ... again, previously I was just working on an incorrect repository
+22a88a7ec38e29827264f558f0c1691b99102e23 685760ab33b8f9d7455b18a9ecb8c4c5b3315d66 Sebastian Thiel <byronimo@gmail.com> 1290342054 +0100 commit: Added zip_safe info to setup.py file
+685760ab33b8f9d7455b18a9ecb8c4c5b3315d66 9d6310db456de9952453361c860c3ae61b8674ea Sebastian Thiel <byronimo@gmail.com> 1290342681 +0100 commit: docs: added final docs for version 0.3.0, started new release 0.3.1
+9d6310db456de9952453361c860c3ae61b8674ea 0b813371f5a8af95152cae109d28c7c97bfaf79f Sebastian Thiel <byronimo@gmail.com> 1290358083 +0100 commit: Fixed API reference docs as far as possible
+0b813371f5a8af95152cae109d28c7c97bfaf79f 94140bbfc523ae13e1e8045ebfed8a76fe0a1872 Sebastian Thiel <byronimo@gmail.com> 1290372438 +0100 merge structure: Merge made by recursive.
+94140bbfc523ae13e1e8045ebfed8a76fe0a1872 d01b428dbac4103b4f7d7b8fca32e01f70746c53 Sebastian Thiel <byronimo@gmail.com> 1290372442 +0100 commit (amend): !!WARNING!!: Directory structure changed, see commit message for instructions
+d01b428dbac4103b4f7d7b8fca32e01f70746c53 db3423d1eab11d00c5475e36eae8952512b07f4e Sebastian Thiel <byronimo@gmail.com> 1290373147 +0100 commit (amend): !**WARNING**!: Directory structure changed, see commit message for instructions
+db3423d1eab11d00c5475e36eae8952512b07f4e 5ed5b2011ec7cf72f19e6d53b588eea4adca68e5 Sebastian Thiel <byronimo@gmail.com> 1290373168 +0100 commit (amend): *!*WARNING*!*: Directory structure changed, see commit message for instructions
+5ed5b2011ec7cf72f19e6d53b588eea4adca68e5 470d4a7cc865d2702c326d9d1d1b0ab7afb49f0e Sebastian Thiel <byronimo@gmail.com> 1290373186 +0100 commit (amend): !##WARNING##!: Directory structure changed, see commit message for instructions
+470d4a7cc865d2702c326d9d1d1b0ab7afb49f0e e088424eb01bd47c6f0d313f465a21ee742e6f4a Sebastian Thiel <byronimo@gmail.com> 1290373209 +0100 commit (amend): If you use git-python as a submodule of your own project, which alters the sys.path to import it,
+e088424eb01bd47c6f0d313f465a21ee742e6f4a 48a17c87c15b2fa7ce2e84afa09484f354d57a39 Sebastian Thiel <byronimo@gmail.com> 1290373245 +0100 commit (amend): -#######->WARNING<-####### Directory structure changed, see commit message
+48a17c87c15b2fa7ce2e84afa09484f354d57a39 fca367548e365f93c58c47dea45507025269f59a Sebastian Thiel <byronimo@gmail.com> 1290374761 +0100 commit: Changed version to 0.3.1 (removed beta1) so that other projects can actually depend on git-python using the setuptools. Previously it would claim the version did not exist, probably because the setuptools are just comparing strings
diff --git a/git/test/fixtures/rev_list b/git/test/fixtures/rev_list
new file mode 100644
index 00000000..1a576118
--- /dev/null
+++ b/git/test/fixtures/rev_list
@@ -0,0 +1,3 @@
+4c8124ffcf4039d292442eeccabdeca5af5c5017
+634396b2f541a9f2d58b00be1a07f0c358b999b3
+ab25fd8483882c3bda8a458ad2965d2248654335
diff --git a/git/test/fixtures/rev_list_bisect_all b/git/test/fixtures/rev_list_bisect_all
new file mode 100644
index 00000000..810b6609
--- /dev/null
+++ b/git/test/fixtures/rev_list_bisect_all
@@ -0,0 +1,51 @@
+commit cf37099ea8d1d8c7fbf9b6d12d7ec0249d3acb8b (dist=2)
+tree 01fb5ddba393df486d850c37f40c9a87f4a28a14
+parent bfdc8e26d36833b3a7106c306fdbe6d38dec817e
+author Florian Apolloner <florian@apolloner.eu> 1218480521 +0200
+committer Florian Apolloner <florian@apolloner.eu> 1218480521 +0200
+
+ use shell=True in windows (git.exe needs to be on %PATH%)
+ One bug remaining: git on windows is returning status 0 for `git this-does-not-exist`, so no GitCommandError is raised.
+
+commit 33ebe7acec14b25c5f84f35a664803fcab2f7781 (dist=1)
+tree 960b40fe368a9882221bcdd8635b9080dec01ec6
+author Michael Trier <mtrier@gmail.com> 1210193388 -0400
+committer Michael Trier <mtrier@gmail.com> 1210193388 -0400
+
+ initial project
+
+commit a6604a00a652e754cb8b6b0b9f194f839fc38d7c (dist=1)
+tree 547e8af2f10ffa77c4ed4d0a8381e64141f986b4
+parent cf37099ea8d1d8c7fbf9b6d12d7ec0249d3acb8b
+author Florian Apolloner <florian@apolloner.eu> 1219330141 +0200
+committer Florian Apolloner <florian@apolloner.eu> 1219330141 +0200
+
+ fixed unneeded list unpacking
+
+commit 8df638c22c75ddc9a43ecdde90c0c9939f5009e7 (dist=0)
+tree 43a63b045e538a38161c8da5e154ff1c9436ea4e
+parent a6604a00a652e754cb8b6b0b9f194f839fc38d7c
+parent 127e511ea2e22f3bd9a0279e747e9cfa9509986d
+author Florian Apolloner <florian@apolloner.eu> 1219330182 +0200
+committer Florian Apolloner <florian@apolloner.eu> 1219330182 +0200
+
+ Merge branch 'master' of git@gitorious.org:git-python/mainline
+
+commit c231551328faa864848bde6ff8127f59c9566e90 (dist=-1)
+tree 991ed402b4f6562209ea56550a3c5050d1aa0118
+parent 8df638c22c75ddc9a43ecdde90c0c9939f5009e7
+author David Aguilar <davvid@gmail.com> 1220418344 -0700
+committer David Aguilar <davvid@gmail.com> 1220418344 -0700
+
+ commit: handle --bisect-all output in Commit.list_from_string
+
+ Rui Abreu Ferrerira pointed out that "git rev-list --bisect-all"
+ returns a slightly different format which we can easily accomodate
+ by changing the way we parse rev-list output.
+
+ http://groups.google.com/group/git-python/browse_thread/thread/aed1d5c4b31d5027
+
+ This resolves the issue mentioned in that thread.
+
+ Signed-off-by: David Aguilar <davvid@gmail.com>
+
diff --git a/git/test/fixtures/rev_list_commit_diffs b/git/test/fixtures/rev_list_commit_diffs
new file mode 100644
index 00000000..20397e2e
--- /dev/null
+++ b/git/test/fixtures/rev_list_commit_diffs
@@ -0,0 +1,8 @@
+commit 91169e1f5fa4de2eaea3f176461f5dc784796769
+tree 802ed53edbf6f02ad664af3f7e5900f514024b2f
+parent 038af8c329ef7c1bae4568b98bd5c58510465493
+author Tom Preston-Werner <tom@mojombo.com> 1193200199 -0700
+committer Tom Preston-Werner <tom@mojombo.com> 1193200199 -0700
+
+ fix some initialization warnings
+
diff --git a/git/test/fixtures/rev_list_commit_idabbrev b/git/test/fixtures/rev_list_commit_idabbrev
new file mode 100644
index 00000000..9385ba71
--- /dev/null
+++ b/git/test/fixtures/rev_list_commit_idabbrev
@@ -0,0 +1,8 @@
+commit 80f136f500dfdb8c3e8abf4ae716f875f0a1b57f
+tree 3fffd0fce0655433c945e6bdc5e9f338b087b211
+parent 44f82e5ac93ba322161019dce44b78c5bd1fdce2
+author tom <tom@taco.(none)> 1195608462 -0800
+committer tom <tom@taco.(none)> 1195608462 -0800
+
+ fix tests on other machines
+
diff --git a/git/test/fixtures/rev_list_commit_stats b/git/test/fixtures/rev_list_commit_stats
new file mode 100644
index 00000000..60aa8cf5
--- /dev/null
+++ b/git/test/fixtures/rev_list_commit_stats
@@ -0,0 +1,7 @@
+commit 634396b2f541a9f2d58b00be1a07f0c358b999b3
+tree b35b4bf642d667fdd613eebcfe4e17efd420fb8a
+author Tom Preston-Werner <tom@mojombo.com> 1191997100 -0700
+committer Tom Preston-Werner <tom@mojombo.com> 1191997100 -0700
+
+ initial grit setup
+
diff --git a/git/test/fixtures/rev_list_count b/git/test/fixtures/rev_list_count
new file mode 100644
index 00000000..a802c139
--- /dev/null
+++ b/git/test/fixtures/rev_list_count
@@ -0,0 +1,655 @@
+72223ed47d7792924083f1966e550694a0259d36
+f7cd338ee316482c478805aa8b636a33df3e4299
+994566139b90fffdc449c3f1104f42626e90f89f
+e34590b7a2d186b3bb9a1170d02d52b36c791c78
+8977833d74f8681aa0d9a5e84b0dd3d81519774d
+6f5561530cb3a94e4c86454e84732197325be172
+ee419e04a961543444be6db66aef52e6e37936d6
+d845de9d438e1a249a0c2fcb778e8ea3b7e06cef
+0bba4a6c10060405a94d52533af2f9bdacd4f29c
+77711c0722964ead965e0ba2ee9ed4a03cb3d292
+501d23cac6dd911511f15d091ee031a15b90ebde
+07c9bd0abcd47cf9ca68af5d2403e28de33154f1
+103ca320fc8bd48fed16e074df6ace6562bed4b5
+55544624fb9be54a3b9f9e2ec85ef59e08bd0376
+e5c8246dec64eccad0c095c67f5a8bbea7f11aca
+1b54d9f82ee6f3f2129294c85fad910178bef185
+36062a1634fb25de2c4b8f6b406ae3643805baf5
+0896bb9b8d2217163e78b5f1f75022a330d9ddc8
+6646dfce607b043ab7bbe36e51321422673b7c56
+f0bad592abc255fabe6c6d6c62b604b3de5cdce2
+5705e15c71f9e10ca617c0a234b37609cfb74d23
+b006d8b73912eb028355c49e7bfe53a29f97ce7c
+b21eb6173dbe07cac63f4571e353188dde46f049
+a3256f3150ccec73c50b61b85d54e30e39a65848
+c5a32e937166d65757f3dd4c1b6fd4f5ecc10970
+1e90e2c654aab0d81088f615c090d6d46f03ca4c
+924e7685fcd62d83aac19480837e4edd9c4bae5e
+489e1468aea658a333481132957069708127c69f
+970b6b13726889f6883e4347e34d8f9d66deb7c9
+df74c45e8fdb4d29a7de265ac08d0bff76b78a83
+936aa0b946155b2d47528073433fc08b17a4c7cc
+3b6a5e8f12b6269a0a3e0eaeede81abfb3fc4896
+8e0f306dae96d78aa1ea9a08e82647fd95fc1a74
+5eb099e5e274be44c0fd27ce8928d2dc8483dab7
+050fbed693d4806ac6c03460103777b2a4befcf8
+c5d4b6dac74e323d474fa8878a7ea0c233d57019
+8e5daf911943d5ef025025c137fcf97164467141
+bcdf7c2421302b15f4ee4ebbdeae7b644a4518e7
+e2874a42835cbb2fe8856a398f5c4b49a9cd8d30
+f50ea97159e4ae7132e057fbf5ea1e75ec961282
+5dbd614c20e9473240082239894d99c24de42282
+0490e1ac1ffafcb9117029286b224ab39671a015
+ad3620d47f0ea96f24904227d3c7a7f9548c34dd
+fd37e7191ae3d312ced0877a1492cd2ea4578275
+b7f8cc51c9056a469006b5601a4993b67c07e099
+1d849af5083073b8864487938a9a2a8e21d71529
+26d0bb4c9ee3d8591fe291c86f495b2d1900bf9b
+7a25e3056a7225c1ff8542c2c2c1cf6f3a8e13d4
+d0e0de0b13b9c81d2bcf9d54eecdb20591fd6d2f
+0bf82343ade1e07c0aebd14ee66df688a4cc0e87
+d81de0fb6a19342a90cdba9a664659da66296162
+9105667175797bbadea80879e98a5cf849a40065
+12f5af2a169c658cfae1677ceafd527d3775873f
+00ae94689600b5949bd0fcf68205f31f95a36aa4
+8f5d34224e4620c51c16c01578786e76567d025d
+3385eb31651c84384b4c7e93d82bc5b592edf9fb
+eda9179b9af0275d62c4074480e7a0103d356435
+982c2d1e55165fddb4f4c69065e2c4ac39542c84
+7117495ef012719769582939ea59a5533077fc8f
+b7dae75dab5b59a320b8df8a67060d238fed3a8e
+37c684e1a46599fe4d34d1601875685a70b1b879
+0a694fa0cb044a31bb844564776b490c151ac317
+e77c6b459f01ce078aa59183189226a6d48fdf38
+dd0c0eaefdebc38610bb1986e51324a0392e829a
+d8bc2414e9504172da640f29db1b2d29d834a94b
+a9f1119667dd0f5aa9413dec23965a747d1dac05
+f52775f6bc21d999382f4b9b38b108b814114ea1
+e82c77ac679887140867e471a9f47fd3b2d95039
+2db3fff5673bbd4bfcc8139d8398727d243c9efd
+c1805c000c6233a20ac0824cad21c1fe40f93100
+83f7807585cf70018a9a06179af9d89d4a8204b2
+730c326beb29cc6d2624915b125633792a40ca36
+bea422b653d74dd03ec068dcce938169149aa822
+586a57666618299464519c450033eecc3ce89664
+82fba8cf4796f2adbec5ad336bd750ad60a075fd
+9d9b899f836a199fe854075e99091d1ef083de24
+4670357c662596aa2c2922d826de84abd9f877ea
+9b562567430544c74009ea4a6173f44ddb4a44e5
+013d51fbb5f3a60bc748449b1ab73158da9a3203
+3fe67cb90fca9ea76292deb793cb480f4eb5e8d6
+91c80e489fee08e71a79bfbea79fcc28e1aa27f2
+dd9104095bdb08fe399af46d91b334e760986ddd
+a9198904586546a038f855bc6fc0e7cc413722fb
+574a7bad1017d9ed466474881e1f068f892207f4
+f95acec9297b7816284d8b24e984cd5c82104c89
+3907dac65a125b7759172a8eae959b0e70220299
+e5b44576eb2182b16c7b6770fab5977eedbc03c6
+9f4aad9833d0f9a609dd2556e7db784ba813d8fa
+579309c96651a1fed75fdd18f80019db8e6624ec
+5e1a9a48e6c96099d6a0c3aff1e31c9be16b7b8d
+cae4b811038f4e0dd4a8e68122c3db955e10ae81
+fccee1c818f5af5fce593de0949f5a8ecd35b443
+d4187d5a5f9ffe1f882c74f6ced7e0ba1c260ff2
+02ff197aa41d892e623dc668b0055806294bd6c0
+3f81af24214761a6ed77fd4dcd6e45a651dd8f84
+5cb08c5232a669a881606a6d8c4a4cd23aad6755
+5212b25869e0b9aff485af6f5371a159e89f8f07
+a778322bb60f8438a68112a73df78e05a97093ff
+b55c30c3992a766628dfc4a7e22db4d8d9e46b5f
+1d3e4a32e0407f16f641be316c745c1a48f16e2b
+7f35ca3333944165e0ec82a3a95c185f67fba904
+ef6c5bbe2dffe76e4a9698df03b8ee08af702033
+aeb90405ed696c1efcb29d0664b43a52a2bf824f
+e0b8bebd78172229666dfd425933f9bc21863928
+2a71a55154edf75ab51dcb4f2f7dc63592410e16
+a5d25352d326c77d083a2e089c2d80b4ea923624
+a3fbc38b9f1b86bb5f5e6540048083fc1dc6062b
+bbe67e1bdadf4aff186769145a40727f78e39e01
+a02a58c6c6d04001873ba91ac3dc902275879d0f
+eb5281d4f40e18b0e21d275ee5c5964bbbcc855c
+19e9939a098b9cb93c8c1d0d069d46861afb507a
+7a72471f9a4587cc4a7d37da0d26122b0eadaddc
+c6a043eb057cd544130b77bf99f39b7738e0a204
+723b6223726c6772e034d9f4ba5c710e66a1991f
+25b4ff1a26dd3694a98c1ef2eba04a5a500c0b28
+7c571ac2c35a7e1f399651242e904596c93beeb0
+0c90015733521720688bfcb59ad2a3978b2fbbc3
+d6b99183122a97a590e4e54f4617b58f13b90df8
+6b663271af39d69082422866e61ff7801c2b3fa7
+2e9e6ab7651e4c215110eb381678e0ea2bc0f7d8
+967b91e045661c9b6d2a5f011ec152da391db7ec
+7fda8d15bdb3d3d61fce49413153a216849721f9
+f7d7e83ee1cec103a768ddc9f68b6d5075849894
+925953da542a9c21a3fde1ab0891175fb6212a12
+ea2f54455427506583437391cbaf470a1ef4edeb
+f0bdb2cdddefb3a391ec2e3fa9b78ed06d7c874a
+8d289566fa92a96a83ff3c2e24c1f3d12b1718ed
+7fb102615532952c6833e87389668831b37a13d6
+7f7bbe8473158ab606a89ad66d607ffd0e5ba1f7
+a98ea5a00d19406f3e644448039f13db496cefd5
+39f03072d9d84d622ae974b09dd11cf7a2515a7c
+e2050a1c488fff4b114614d7f75096dd0a149f5b
+d2851f113530fbe211b3e948b6181152d30d1fa3
+1eef0fe740f6db35a91e790fe77d4ba1c9065e99
+9608403b012908cc58223db44962553704cab8af
+4911a005ea6b55f34f8b0f504a6a0934c0df896a
+a4400fb8e7d0f1261634dbb89588da86b8b6c93f
+f310729583f6733ee60f534a9732b7a3a9e414d4
+49e78793487ce4d8d7e624b5245fca8a9cc1ba66
+2f2501ce5d28e5ada6018504ee8dcecbbee70428
+f1e127253e1eb07b537b221e9cc96beb16333790
+8bf1684ca9b5a37d91671dd0d63d0ac59bea987a
+24838a6042a134b11fe945bbaa5ab1b2b3fc6eb0
+f53c57af21fded3735fd479b3785fcf7adf80268
+aa8d0a63d61d13524b1395972563b516cd263f05
+16803d64332412a66121ef3fd10cd0d88598d3be
+5f2715ed4d9416fa4940c2cd29b5ca18b6a79b8c
+851ede1f8dceef7d681f35e4769e5693160c0a04
+5264588c6c20c38d54394059eef0a854683aa3fc
+111800d8e66ff86f0757df7eb6533fc62040a22f
+b04de89d31003e468c191cd08dd2a4629d99c38e
+6aef629094e9ee6b4fac2431897844c4dddf2f57
+d1168c999fdae7d1eaac8c163b2b1190afb1815c
+6afc3257929528d9f4de964e8828822d2fa2c93a
+436f30ce1b562efe4f34696def45b0145eb98304
+9afbf904be0e6154f6c424377ad596e86ea38807
+a3cf657305d9283525711e867e03684a2e4b39cc
+5813b4d04b25c385359af4437027b4fe763cd2ba
+0fa594594c97a0d3579312f4ec073304c1436106
+cb7b36c28adb38b1e597fa3f3b5c24c988a25b0e
+5b0c867cbda81ce34df1b5fb67557b556ea24e9e
+44090e9c550c7c5ded01dc2a681a7c934ba901b6
+9ccc89b61736c4a9c02faaa679e97a9ec063dd29
+7828d6d18115b0720888a45e3b547b697910c59e
+618497e48e46fdc00dee67c07cd0f40860e819f9
+69a14ed4f36d880e8322a530d8c5bfd9888a8c13
+0a0cd655e40903abff4840c23b57628fb1a88122
+cb262098646f47e1d80a89662f1480c216bfd81b
+d60e59fce6f698a8bb97e2b4a935c069584621b1
+ca77ba0d6d301cee1d45edb24742dc5cdabd4b83
+17b598510967922690f5181903f20ddae5758e86
+30ad3d9f3164966afb2974640f772387fb796b7d
+48964c5dcc94234dea1737d7fa23220f9eab0fb7
+0fe241f4db12f455c2f5976c6bf6497cc485f503
+04953aca41bd372d990da7f68cc795f4a8b78d94
+2dc9a061595a291d8c53168c42da8d14da52d610
+68b15d34903038e3f2e327f03f0486b2d38784bc
+30ceaaf39b10f9f9c7b4362505144d1650035a40
+e75891a5760f6a51f54a33b671951c16fbce1558
+b2a35989ad3392f26e070b158f89d1d8b75327f2
+8468830b8b37f7c1cdda926a966c0aba2893a7c0
+6a6112e8cde1bafebfa12e4c863dab5834c38e12
+eafcd2ffc25d17fce41eff2afd5c4521730a23ab
+f7eda0752f45c3a4eb13e075b24b86d7e7dd5016
+b634d0d48d0a113bc060a47972b10c9353621428
+49f95235a174f0a056e44bb5c704fea4ab345353
+6eec70a31a6376ffd7d6b143be0968a195ad59d6
+7c9ae1a71aa39efe28a678c18c8a03d759deabed
+a19fd6f13c16720dc38a1f572eebf960022015ad
+87052ac2cbaec195094e1d1a2bad4ac480bd111e
+2cde1b0e69f97a8a67bb47d729c53af3ba8e5700
+91a06d1a4efb6376959c3b444a536fe6b4fd4d6b
+07f73b465b6c509b337c2776fe7a73b56ee178ec
+15218bab55236d62fb8b911c2ae1ee05dde1ee60
+900180ff2aa70e7d857403412753df6384553d26
+a9c43cbeb0542cf6538fe8025edc8863d2526c68
+d7d8f0c9b7d56f554d5a5cf5759f30cc3d36771c
+d703e5d9ac82b8601b8f4bfe402567b5ce3ebbf9
+3905a12ad511ffe157cb893e7769f79335e64181
+73a933454b09ee48ffc873b0ee767e0229f2d609
+c2c91403aa9d95efa09843bffe83ace4d52d3446
+c90f480010097efa3fb7046abe7fac3c9b8b3975
+13e888d5624e8087ea63638de7f4375f5c13ac55
+19344e551c8c5e56e91de14253b4f08ca05f9e69
+b1b8f098bb1e2f0f08cf82805d7bd29d2931f63e
+3a3e025bbb2f3e49facac00e270d8afa5d31b962
+195116405307f7bd9575f9621fd93344304341d1
+31252094210748399f7e43e7b6149190990f4e8c
+357e549bf43126e371a1f85c393d2560597cb32d
+df1f8ab23f915420e9c48744decbc45375f180d3
+f96c2eedf6800b8fc31032a02caf0d2b469ba9ec
+73405f0505813ec1bd25f03f2825315f3520bcca
+7e2447536c35ae67e3737a031fa1ac57026475a0
+970d4c4854dbcc3b0bf9b16edf1d47eabf3be242
+3c73519e6b54d3559555ffac40072957041f62d4
+46d461676fc1fb16fd7dee027065441d9a8b87d5
+f11f64bb55240dcc1767a1ec823aecd3531f1d20
+038e91a424078c5d81cba6c820cd981f0be6086b
+157d6e98ba894cba5582caeb15b429ca0dcbf2d9
+2c768cf9d1bdb6d3d84f662a847966b69c898f59
+4fd0f29459ec3ea65625b943b147df85e5826cd9
+c7e90c64e580ce5f95147eb4e117b56b5cda254b
+cd4f2496b274b0d55b7c48388c2ec0365d9bc266
+68b5e288a29ebbcd65e6d0a8eed47702ee4e689c
+22abd4a7ed7b061364e002f1fe08857850a309ad
+4c3b38be6fda8ba32fe6f29226539e03bd0c55ce
+355e946ca8b8a5e4c17317446b12fc374399810a
+1fc5c0122fffdada1630febc1f2e42952cdc7e2e
+8db042e1faef7be24d62b9287fd3b9add7a1b4cb
+1cbea023ce354939ae9082a62810b46f38ab1cd8
+f5edf5b99d1bae09314b9680e58766a4e3c1bbc0
+58a5ef79958b58736603f47cf211494fe5819601
+8f2038bec169ae6d62885f522202d8171e3f5f5c
+5488e29e68684648b4d733e90c6e3188d3bd5bad
+84c88e813117db46c6ac68b16a7739018eb99e24
+789c3655197585ba8771ce68c0117cbdd41ea390
+0510404a3c0d337763e90e5315548043bac65b06
+2a665d7c6cab59ea8e3bb7fc65249ee947e51fec
+d53423de534d3b5e68a7644d4218d835a8bfe6ce
+73f2a3f332f23579a29e090f70825dcf84dcdbac
+f79ae7f27e750c97c139cdbdd7c3223b39ed1a70
+c84a75f7a4b274c5c133b1df3648a5a24ed9f687
+cdf8e5a49192b81bcd39d9f4e39aa4812b58b80c
+1180461f564674e373222fec3b4fe8c2861ea6a6
+150d93bd910597b85500e74b97b96e7eb4bce2f6
+ec3b819ffe3392bf193483fea94d4404c88966b1
+729fc8ffd38c02a9576640b56376c36b49edf52e
+2ee31128fbd86244d547e3ff66b802dda699210f
+f87f28c563ad602cba605e84bee95693b77b8840
+9e92c5fb59af58867acf5512e95138fc368f7dae
+76b1489042e1bb45909832f7064f9a5437b68b18
+66f5d86face564c095b3c95848f070f50fe4688a
+f9b2b3ec52b88dbd68b2f2c6b246bf07f632b40c
+14f689f05c4fae52ac8bb95762ff43b9f7f4e567
+5ca84af5f7a3f4533b353c43a332b552cb2fc5e4
+c5f33e9eb55201c41691e14fff0d45e32c989a42
+9f83cf471949164a6352cb9e3a201b8bb317b89a
+5532c7b06a2f02e9cafd6673d5099798c4144690
+0d28c20ab4f03b5d8579132048c060affc36c466
+cddce1dfd9d4d7f1fc49003aa211f018bf8fad2a
+169617e3672bac804a271c0aaff9cdbac7b4b45e
+fdebb28d6ae398ccba88f3e2e63ef6d7f10f62f4
+0651bfe384a8d5865d6cab808ca0ce803af93878
+de89eb007459fd5400cd344dddf240fc33fd0b65
+c6a14beb887170d8c901e522f2f4dce3bf0b9ed8
+13dd0647b3ee39fae1140f8eff2b15d7f63ee546
+9f89105c1462f2a80e620ada1b95c3d08a121c3e
+1ed6496751273cf472538779266dcc3dc9797192
+4e8dffa66fc7be8f864cb48cf26abcef4cc32379
+5543fce145ff28a1c424b730b376fd4e3cfa0956
+bd951a4a8574baac21b7e1f3a09d1265aa51850a
+3fd1c12fa880ee45b0ff7b794238a8894306a790
+830ec14bc9edbd2c6522ff46ed0acfe477e7e32a
+e68c3109a709e2b732d0945f860495de161754a2
+1e0f4fda735167ff6d27c76a67b8b4a4ab31aaf6
+c6c40dd0ff4420708c2e0f5a0e0dadde93eae336
+baf0c18ac24acb9ac3d1a7c0030ad5675eeb64d7
+8d30906e9f2f68024eb716be9f482de5cec5b302
+ec9fce551828795e1dace26a11f57f9aaf1af37a
+28fb918d7e9840a7118b7aa0b6151b496f1fc1f0
+b9e58c5b98f7c89054ed5c0a0226066ba9d93c8d
+0c5db457cdd3852182ce70b96cb376337b8ad7ad
+36a48168274cbb6f31c35777a74ee16c06e1a853
+07ef3ad40bb01bc7798b241c88fda2eaba7aad19
+02aa9f2ba871e9639891986a97618e0917955fc8
+5f776d3c74ad532f36ab75a71bcbece6a62c831d
+f31ea9eeea91106481e1b2d30026b601555b6699
+c3d7f6bac18fbf8041662fbdda4f04e3f3b25e3a
+6280c4bcf1195c011d7a7abb5bf689df11d66419
+45fc4ef9adaf514bbe21f496cdea8869a147c81b
+fa1160786e34c057cf1212efd59a72c3931eb2a3
+09b285cc7d7c8768917c7d4e5513e3e73d752b68
+a8da5db6094c887f1087162c5ddfddf601560523
+b6134a31d236c376193e969a2df65c8427d280a0
+793e0d19fef38f8a151622257b13edf6786e469e
+e40e6a17b4df5be46a2cafaa3fca5f4c3cec5001
+4d82e160cb874da6dbddc27af7dfd1036772b8f6
+745ee8e3e74dc0674dd8018999707f025a9634f5
+f507baf298549096f08dc33de22f7301e9799814
+bd7ebd663da867692f2316b94db73c42c0f9a5d1
+697f07726d209cac519b528018559f8957c56069
+2297b5172c0c1c83f2d78fc726fac0803be6eeb9
+91e3543f82039a446c5be8293d5a79ec767d1444
+e997169214440256b5b759f6e7e255a302838c97
+77d174ae14afbc6e212eb7d957b11a231a036d96
+3e81ee29892006f16d5f1f26d9d6b341a8958fb1
+59957e1d84f8fe8117d9697154c3951ba2959480
+96c6fa03962edb98a9b6aa7793be4ff54e79bfd5
+068a293fd6b4fcf216fb84ca982699095613af37
+b3b1804ffad1b7d274bc3f8f5aa11b15049ac030
+63e394c13a50de0d9f6cef55a8c91830200c3dac
+e7ed33eba96d590bbc7179fd26db707c910d1dc5
+6b2084340a988f4123e71c6e30817806ec4cf3a3
+da721d3f48f821faa90d1a4778d77b03fd3dcdeb
+a433cb8d56a4fcd50bfc74b0204c916e08c9d5e6
+067fae6fd778d5b1d6b6436aedc0d25db58334d1
+e34c192a5aef80c7e83c78c2372602830671ca5a
+861a44dc56a983262caebf909be96c62254930cf
+417ed493a824863e30922deda64b9729b1c6d6e7
+2df6a0d803ac21f0d20ae9fce0a970b35b3663ec
+44bedcfc59292d3ff6b36759b324812fcb779b2f
+c620f7e60c8ce4ddee8fc1072b2a161fee862545
+82ce5a39b422aec7572d9a773f85be8eaecb1618
+dc0ea6defad83a0569896a9c23f11f5052a48107
+e1c15f1da71a3aabdd43a8ad669d2a755f315c77
+c78ee1aaa5c499019948c9a3dfca3aaa2f897860
+e66d0d34c541c6588da3ae06c6aff7e7c9ef5745
+c24d513d46b3db5b4c53b36b7e43ce5fdfd5a2e5
+a75d0a4bf6d2e1a9c4026586cb707f254691eb3b
+41e98ebf4526e78d78ab16182b503f237e77fbd7
+2182dce8c27c33f9452e7c910f59750d1e58b1e3
+6b7aa9fdbdd0160ec29b6b3b591169c627fd0f01
+b39470063e41ee5773f47de325a845666d0721ce
+c7941bdc8822ae1842d2a2f42924f31d2d37e864
+fad6e836009429e88c788ab7e7a679d422d8cae1
+a478917ebcf70c5dd6f56c7cd139832108696189
+4101b1ae3b17e229c1a80d9c302b74d215d98f04
+b051ec4e69a99e26d6a6e5d7a393014f841eed6a
+5298ce551a104605b7d5d9872387f3eb704fe5e9
+b14a12bed26d53eaccd1a2c172ca4a38773e1d45
+4ae0790397d05a758013e0496ba2c2b23363361f
+431f01bf3aea6f8baaf06669172561a3ec9e82db
+12476263aa193c7e921ad4b183fc648bd73d2a1e
+8e937050fb12a62e99b0cf685578213552774cc4
+b85a487787454f6dac84be59f905b8c929f0ee94
+dbb2116e0f03fe6d84d2158d67ddd02761938bda
+57186ad57242ad0bcd737c4ca4ecb7c063979a95
+cdb4a295593cb3ad424b4ab86d74154d7bbb97bd
+8e9e1ae0edb776f0a490005b838f8ef82b368be7
+73f8f21a69a03cdc2b1031bca214a6b84f4c867c
+b913c6d878ac5cf570e6f8ba9b5ff022ed601a8b
+b98879530fd51f328441d33c64c6c5f311097e15
+325b21b5370a0c179b40fd596b9daea00b3615c3
+6e722a5c5393dda24172de6f8e08138bcbfb10b8
+44b396caab82c97a6270eb7391d6f96502c9fcf9
+4e6ed6d22079b68551bbb83e5dd797517796a438
+b611fe79daa20893683475cc459dff98b2d4892b
+017d40f9b23f4a4379c74ceeb89ce7b4bccb7460
+a31b0a7fc7190218136d6ff6ffb3ee6af3244135
+861bd42abb90a61ed757728e1fef7cee2d6aa081
+6e9ff586de744d166a9f6f213b609e7386692472
+a790ca7384982e872092766c036d6faa86bff71a
+13485c50ca4dfd885d516154421bdb23cb034230
+c5471e696f3166942a245e77796bcddafe6a607c
+600308daf62d0d651fcfc874110e7bd4f5de648a
+bada607744ec7f37ba9d05c09bb8f41e7fc3d06a
+d3b230b209fd7c3f4a39db965b239ab600fac1fe
+6d730b7ae0b662b1f987101e8ccf9c1828554d69
+f0757668fcd3f8d1f2fe83ce9f0e2355b6be75f9
+40819d9a5631a184a17d38e36240d1171a6fc923
+8a6847ca68ec998df0543c4b5bd5c709c05d5f12
+d8eb0646ae1360b5b984ba7d99bc64e00dd67016
+761bf1cc1e2b86437e71c9a106fc9c341097c3bd
+3b620d960d29fa7719f95cf945163b04e43d2dad
+6be8590f72c2ef158202486e75f273d8598be6bc
+d7f22a15d66139efd65bae28ba780b0bf8d1a914
+e1ebbf612cf9d49cb08d0e0770ac1678ce1436ab
+4db9912f07ce63e4519053f52dbe521ec95c0fba
+b9fd4f4760ef65934b5d38e8b7c0eb2f77822861
+0e0178ecfacd553526afd221734607971b6911f1
+8cd4823a8ac9f846930408ad1759da4496384f9d
+e96cf22a972cc3185739ef1c1ce74a978ab71d11
+a9d63829aa54049801d37429b597eb04c9e1412d
+2519d617e18fa35974e20d10414f1262013501bb
+d02fc8d8483903871d9f65261b32c6acd2e4362d
+569456505d5c97934344d4f989a08fdcdb522de9
+f56d4c60ffb8df8fc1516d32a0512def0b6f8296
+745e899452ec746d3ffbb7b082995b7939a85387
+8c11f9ca2433bf9381840696218c245ec700666c
+bc2a868d1ba12b485a6eac460cefee67bd9ee899
+e628b072d054d982ecbbd7aa7fec628e0d9ee8d6
+e3390afd65e721dd8ef228f48fd4244228de2986
+35102507bd653296eaaa5e7d475405cc1feafbe3
+e2e5342f92148238391665fba101b1ca7dad5582
+621f4743f0165c6ca3f1571773867d2e0da67961
+5f558819695a49bbb92d5d1e07b9f12072874024
+eb45e9da84875e2d1325b78157d2f9e96374bdae
+bc0ab7e4f643e779cf9554f03e567d4f4708bd4d
+fd55e896d6df035cba49a20e26ed6ddd2d7b6024
+dcb9d95840c9a0514f8fd0a7b3b17cc228950c7e
+0bedc3d7a01f9819171c0b664e16900d9965c3ae
+94f6e372fd90e96cfd9a393a5952aa850485de66
+0b889a9cf37997c58a9f8979850da1f4bc84de9b
+b70ec5facdea7fc681c2a10dfb14ca0d8fed6f1d
+03e0192fb34134f25784a2b14791fbfdf69461bf
+9266cc52df3725107edf513aea4a02c131aa153c
+0820a412fdc9941567d86cba02793ca6a6378275
+f1a72254956f63393f6039a7d5da5fca943fcd2c
+abeb9e16d924c1a87c5b525ed12c43031ef1cb2f
+d5813fad322c97bc31d7dd37f838c7442aa68f35
+428b26fdca0ba98a3a01e89629bdb778dec9e8ab
+19ff672db65a7ee25ee0d48baa3f9bbf2d145ecd
+d1eb6283ece7d9c814b0d3d5223207b905d3d720
+9ba934b83a40d26ebc5e8d7304ff29c32541e82d
+9b600cbf0209ad6079d00dd5d6a5270d858d5929
+0f22868d790bfab8a41894fc7eca161256ab6854
+fba092070b6e03432f6d47154f5ae4734e935a05
+11b1bf011fc24c2ca6dd8c81206c7338ac2b2915
+d93c82b17d7416e4c57ed036d6b75a323859d837
+27f762e8d3f1ed8bd0254800c121d0f16e914c2e
+e252d9d270330072e9e5e91257e90f255e7e968d
+e55c3c30785eb50b5dc36f9568e6b6ae39e6de11
+63491807090d814bd7ffccfe44cb05795830eb3b
+8111dbaeb71c53132229c4064c34247746a3769e
+8fe37ca0d79dd1f8132e9add06aa206d371964e3
+eb32fae4665b9f11ffd06a342e763b9d212e1353
+4e923698ee5566143fc6d32fdcc6fb46fcda2d23
+2e3910e29142382f9bbd1705ab9c605d1937a1ae
+533cc5f884885f771d3f6df4164fbfa29bae0e6d
+3fea0404fc58822cfc60d4f10ca404e3223f82a4
+733404a081eda804707c3dde1d6b8161e7a34b3d
+d2be0ea2923344abed57aa21f13dc816d4537eda
+7884465bb9da51c8b6e95a1cbc9888ed696ff68d
+6e63e5a03bfbba52dc3b4f504e6bc41951f56707
+44a9d3ed75c44e817a6e4b56e30be06a15f453c9
+91e12aff0f988bf414e64b97a8c20b9699440309
+008119e510f6a7f8714e63d2ec33ca7cd7776ea2
+27822b01ad020374ff6169428649fd667abf7f8b
+0c972fb8903c656cb7e750b1d5c1ea1f26bd8c50
+3d8f3e1fae697a905e87250aa5c0ae1f6c60ad66
+744421b6f1d3aa30c7558570da8aa1d52f11d39d
+ac017796cd3a5558dd78f73ecb82a6b961d8a3ec
+e11f534a2fd666ecd841f657faf0751d5fe02034
+eca5d275376911916c3e018c2d163cb8eb914263
+a3144ddce360b6ac1b55fc27d19a318be1f224c4
+84fc7d68bf3a309b3687da768f0dc206e647e653
+fd5132bf8e99230a9074ce9bb3d950cd26b3d25b
+720ceb5e566d26803db85af3ef69fc4fa14d355e
+e97f338a79e2248afd3a2b9077d8ac1c334cdf38
+0173ccf8d04014bcc4cc53df4d6574540f4231e4
+52da09b8812d96c14d3e57a77784d56e5749a8ae
+5169648c7429788c777947e21527e121d35aebe9
+41c8c94cdd1c646296946a00dc72dff8fcb6556f
+9b341f77b72b55674a030ad0209ac297e41c5570
+6aacd7b9b8fc571e930d18da63efc8be46e31bdc
+9875e5d15c0750b6ee4c41b0e1321e1dc0bb7810
+fd60909d92b0e124957aa0783ea03471c73fd732
+2f299011d707ffd8502e5a597f38f0d25ab3099b
+6c10423816abd3b0f327863c9b8fcf55cd6265bf
+14cc60568455ac2210f00ccb238ae41ddb473fcb
+74cf0e9a42bf241d3f76f25aaed46e4b6550d842
+9a0eacdab0398ced7d729f5c7a9b173eada2dcaf
+3057f2e5ac8cd11cd018780c062da7c2bb11d2f7
+dab224a6b259d9d7e16af4cf7e2718af8ba4a74c
+fe6dc165cde8c826a3935b536c8cfd1c10ba7d62
+7f3572bca7fd48b66649d761a054412b8369deba
+2ea30dde468795a3ccb307343cd50eb7041f5ee3
+5d4099ededa31d823a355d4ef0e53bed6b833539
+69eb5257143b2de63c8c7471216ba6f025b6d7ef
+e4c7387b32e314cca7e0ee2b1df197340272fad1
+01f14dd38700098d97f933008327c8456c75af34
+94040e25d5aacae0e55c3e9a91fe24d7daaaaaca
+cd64f093886bf092b8d88c75ccd2e2f9118d3ba9
+ceb96f9512f80188fafc61ec8d8d61c93d51a5c2
+9a4e9bf98bd371cee2b69ef62a1189c24cd8baa4
+dd861f56b65404a625538978d50819924f384a60
+b2960c129e39d30f446d27e38f726975bef7b4f0
+8351c6b1293bb0cc4a2e1235995c16433c84c463
+008ba61116504d01558fe8afea0d5b3e90944b76
+cce20d2824a877ffed6a912e3f22d7db3d8e5043
+5e02e12edf58e1dfe37ed770fb32171e64993a81
+7966a56b3a3c9c9ac6db5b9355ba5e96558ea7b6
+5dea2f86730665894cf03f2b1fac98c1217a9fb4
+451a4d8118d2c9c746c687efceaacac799e67ad9
+059dfb5adcde569a19a9260c2ff85c7b47f8c516
+da7449db2898c567fcfb40c595c0c21536c901b8
+db97ce996b09b15049a9f818ce27a680e585bd11
+e1f95b9a8fe2394e1cfb41fe83f130bdb68fe6b4
+fc2c03e29a331cafc8b08abd5eade336904f40dc
+385b11a95469f7477bdcf5b9c743982c4a866c65
+d7e31d19b9ed766048ccf9129723ebe36b4842dc
+9c9af56fb29f510ef75221a39964c128448526bd
+83e3c642af5648aaaa119cce34dfef6ef3c560bc
+a831fc506ca30a11c9d9b33c9cb2c43f6f01a446
+62c5ebf183a0cc2332f04c1ee3323005a9878438
+6bb31edda343bbbc4410e2f780c432129e610b47
+846ef94e8af8f09340a740d11c93157c81079bc0
+47aec581139d8a3ab4f2969b481868c1485e2ac6
+e3f68d2cd84e15063c4f73c8420a444f9fb64a7a
+3db1240470361a7314ea096f63c0fde74810caba
+ae951371c666cc605ef69b5ca3f5f31d0cd30298
+8ec035e739f01aeaa09742a92154f02ab3dbfe93
+4737a65f7c1e125ba37ef35acbc6e99c4db2bed6
+7005d4cae81a16a5a860fcd3c259d6ec07597072
+d98807cb107ad2e9bf95138ee4bfb566bf75cb50
+1e8cbd548f12e1ec861f3aed5fa9f080cf2782c4
+25c2b2cad9cf873edc80747cd2df5874034282aa
+676749cf8f76eadb469289b1d918cb5e485cd56b
+8cba76ab8a5034ee21e95a99196f257b7e527b49
+0151aa85f5a178da21ddf7d5e81398fff87604dc
+f881500552171b5a8a8c3ec7a2dc06e493a1ebbe
+8d39edf2ae13ed33d0529164d4e172bd4d060d7a
+b5c3f29c81e524e860e5f9ebefdc573f83fc600d
+b686bc7a882e461987ffb7bf1a25bdc6f82ccdd3
+ebc1f42a059e7863adb57890562878f652922b56
+b30835cea58d0b827cb56aaf9e4d5f6e673a1bf1
+a2cf1028df49cbf53c57d0f599083fec59cc38b7
+6efa045dbdfb4272f075255411f54fe436c31b8a
+0c3f085a4044e9231287c11e34504624b04ee7cf
+b8e628fdc2a7627283e0601ebfe8e978e91dfc00
+d84e30103d59d6bace53223fc0d5787f03d7f028
+2e0e70d0466bde79d134a215a399b20c2a9d0981
+142de640101e2bee71fe2dc98e567d688c7e3aa7
+8b02a5e91092f7363443a1cf96933dc445f0ce51
+753c065260b1659c0d8d247b62f6b0fbe986c7b2
+1113b6978475c9941be9b140e8cd6bc267469657
+0a01d10b21c039484410c7898250afc4079db28d
+b9bd23fa584a8f1900ada4addb96eeb750ef0a68
+5ecc9b675c4cc5c1bdcd8f84e1a52457ad30144d
+d91b0a31122b251998915b4eb274350fd42a841e
+a829cb9c850cc75546547aa95fa3ca6100ce16f7
+4b9bba5d1063d986be6463e4c5740eb18befc7b6
+ffb2f17926143e242efc18b32ee0c630b5447687
+3feb18fbff52f17a541abb1ebbb4894beec18d55
+4acbde9bdb24bd802ba5bb0ebe19d71c8d753240
+c9dba689c67ad7b16c8f6b1bf1bd382369fdec4e
+ff956cafd71e4787e9ef7b64725142fe8838a65a
+e2c090f1ca171b51d08e6ecbb74b27410bdfa7eb
+73aa4812a2effb88bb64a42f93713a54a88e1ccb
+8e0e0c69b0adb9a65098b18a7b96d6ed3a43940a
+5dc8620cb17c3e606b635f8f95ecebdd66af04ee
+18f8afd6fc87b3731145f61818f23b4b766da703
+0d2d0bd0680557dc28f4f7b23562495cdbb3afc0
+94da53667213590ad9767b335a9f2e51fe1e2c5d
+c6cb97a42dcea5461a2931b097ddfd53b9cc5870
+62a3d5192232ed847f3c7810344c43607a361e68
+aa6992567e763a0b081e6bce753cc42bc287e9d3
+1d67358d33250d456040091d8b29083b1b47d9bb
+65d399a4ac7dc36df20b8b2bc773bbc6fa67f43b
+acf7ea014fd1b7eb351dc6946b199ad2cc98f845
+7e4dcbb7f0fc2b051e33b555c4fdc67796dbbab9
+a07916245a9c21f3874a7b8c898638ca3b65df42
+bb7368d9b07b02aecfbca6d01788a7327743ffed
+60454c29275aac27c450323f0141d60ea8202842
+c4d0ff10c85ca4c12ddfda1830cee475408205d5
+a5da3671524fb761552a4eb5c1e27dd433f80fe4
+43142e711f392ae1bcdade749dbaa9dd98664228
+7aa0bdd118c78d8929e737392457d14f87d625ae
+be921331245c4e04ef9f0ff7e359907e2d101cac
+d6f654de1b8c27f84e34fbff12aadffb30342465
+fef2680b335ffd861021ceff2a2637f5a360f037
+79de53d3b87469e21d510ed6ddb33d809c05a3f6
+475b10017d25db725e73eef11ca789ad7dfcf4ac
+d14f3734dc27ecccfdb4683cf7ef3334a5a70b3f
+f0c394dd6a109b97ba4a9ab16cc71b789d9ee38b
+a57cd5c8278e1fd6fae6f02947c13880be4f3b62
+83c6e4b636f3bf115955b6eeb3f91a5689e7f00b
+b881752a8cc16f49ca605bf6a35af106e7e19c9a
+8362e4bcc30e73460ae1b9731bc545fd2b12d8f0
+b01216229149bed7c110221551353b54ff8e4704
+10ad0e68785b27bab975868b83bc463b9c9c9153
+7a66612abaa223ef0410fae66727a8abac3add03
+8a7bdba957536b078f0421faf5dfaf8d65ff5add
+defd6d03526345a410437eda15cbd067124f9c2c
+f7e6c29aa4d1f7a607e0c87ea20105afeee0372a
+751363e461257a4036a8f2aa740195401883c1ea
+a8d66b5855eda5abf699ebf9c6dd721928007fb8
+35ca716114bdf87a89857f2d633be3f4b13cbc70
+cf319abfba8fc1b33de4c6a6f99e21864cc72563
+4fd36e634e762ff2f94e9d66f24ceabe164f9e26
+d0364113a1b57ed5017dbea6126b0cc5a5c2886d
+9b3d7bf551d20acae4ee943a86c3cf898b6280ba
+b35351d566efdde005747503c7f121d49e864848
+57b1dc2b20f2e67c3313f0c6127b05041d125fb4
+fadcdf4c98e9167f8f06a45dafa08d3acce7a741
+3bcfcb7717bfc0e50c5b8f5c7beaed9f3ddf5478
+b8388b7b5973dd3e84902c25c5378f9a412d6147
+814f07ea363eb0464380ccfce7b4cf5209f1dcb2
+b33315c8551bede3fb867efb3fdb1134cdff5115
+c7bade1e7cc239e8fceb2c0b06f880e60eb8ebec
+bb193f4f0f5b1b8bdc9cc72967f8fa6387faf7c4
+b727e8d9f4a4987cbad41c75c630cfdb445c37a0
+a2103d7fe328871d8231f8e07ba5dc9182f637b3
+e36d269d16660db5bba028746564b5699721def5
+35f9c486cc26bdff903241f4ab2b1dac2536059f
+cd5314af7e8e120bceda896a3c17daa8eeedd528
+200e09df8f0f7b94eb8941136482cf7c60fffb0d
+17a618f241a6236c93af5ba2e09238369fc7d784
+15aeb2bb0401d428cb7058e1d6554e20369ed352
+40b0a406cc23467af8bb63d9a62378fa871e2031
+7abd7f4cb237ef33b9e019f4529b6fb05b84284f
+ac614b7506e820457417c3ea15ba99fbc8146155
+8afd5a714da3f45389e0e4edeb64f49576c57c76
+77d10571047d8b4153180e7a89d5c9aae6a84060
+35479ce1706725f73bfe99428c43e8fe2e3f9157
+360a0864ece712571d3df95e86251d6883bcdf7d
+b5cd910848f592e33efb6de3226c07ae545a2aad
+f5a9c28ca029ec5d1c5d3c594afa09374adf04e5
+b9fce5928a1c5056f66706b67c01cd564e6c0a90
+e5a2250e35706127304cd5ed86b81575f2636b5b
+f30cffe4cef93aa190bcb1caf407ca0767107d06
+45535f6e0af6785676531c81b4a2a3c480a98e70
+740bd201b23beded9ade92a93301cddc67c4d106
+70460e9e601171276dd6844cd6addd8db5eb2465
+44dff2c35acb4736b183cef9e46155386f579716
+46ea31f673bc9365fcca558f15c862ef6a899018
+34556caf76c2422a76be3d1cecd223fcf435d93c
+fea67ed9483b5cc76dc55eb4dd6f52baf445394d
+31b1897ece6222826f379c1aebda891384b4b63a
+80dcf3713b85b78979d4eb443fce9e992675b5c0
+11993c742658321c0c5c200f48231583216d636c
+7b5a089ed3007252e61df0aee3fc17c14d051745
+890881c9a552c22f4be01dee16ee902c88f6700f
+401ba79da09dded82a73996c8e0609a87cbd728b
+e06313f41971de730085dcddf640a4549fc54fc3
+054d52e86a954a615ed1f5add7f9d6842737d965
+d8a60982c456a9cae3de745a37dc3f5985814f7f
+2b39f575a510cf581aa828df494e633cc76fafa6
+e11d353191175b329b3c9f9af7fa33e3ef9f837d
+32ac2659ce98765aaae9c10cc7216d1f1faf155e
+5f7f801227868c7abcce7e58dee3eff855011955
+a013eaf0fe38d8689e27278bddd4ebf87ac5476b
+401b3f3d2d96fa785c5321bb64c97cfb17c509e3
+1fa4fd4321fa708b3db5cfb514e2192b00672aff
+77976b24ff839c59c3b20d80cb28351ccb5e59a8
+09b76d2966e2370a78ed37a31c2f7c23d08609c3
+7000b24511618a21d40b39ee213d397e1d29497d
+c2a6adfcd18c0d95dbed6ea62ac9c9a912d18123
+6ba3609953d5c46a76ca1d0d3d83018be61454e6
+3dff6074fe205e36fae219f277ef87aab097e236
+1cdc8437fa6c621d96c4dfa5f6370c8fdb9cbc3d
+d471720bc8f7ce7109276b49dd9c76b6163007d9
+a67b1bdd027629dfc38601b21dc564272e28712c
+20125a6d37d5c1614ffe1de94ca064095968e7f0
+2b642751ef86265a1c953186810e118740f8bd2d
+e562c1d74e2b6744572184e66a0673e55f9ba0b8
+ba9687b5d746dda28d4a19c5c96d0679d7c77b15
+f39d7d293c3e342b4f447bb440a9b6f72d2d20cc
+95750ad9e700efd15d137963ba0dc443e6c9b6b0
+0f76d8445048dc0bfcaf05e30b61b338a08f0e48
+1a9a4c61d6a371d9e95eaef44fa2452d17a09d22
+912b41aad5983d9735379d322eae8f6d40d8bdca
+eea0b559472874ff48c34f16bb805108967e6489
+ad4e7ba4032e6b1c047230b3144848dbcf66a127
+b6d93107393dee6eebb05376a67f2e4dfcb44311
diff --git a/git/test/fixtures/rev_list_delta_a b/git/test/fixtures/rev_list_delta_a
new file mode 100644
index 00000000..023c5515
--- /dev/null
+++ b/git/test/fixtures/rev_list_delta_a
@@ -0,0 +1,8 @@
+e34590b7a2d186b3bb9a1170d02d52b36c791c78
+8977833d74f8681aa0d9a5e84b0dd3d81519774d
+6f5561530cb3a94e4c86454e84732197325be172
+ee419e04a961543444be6db66aef52e6e37936d6
+d845de9d438e1a249a0c2fcb778e8ea3b7e06cef
+0bba4a6c10060405a94d52533af2f9bdacd4f29c
+77711c0722964ead965e0ba2ee9ed4a03cb3d292
+501d23cac6dd911511f15d091ee031a15b90ebde
diff --git a/git/test/fixtures/rev_list_delta_b b/git/test/fixtures/rev_list_delta_b
new file mode 100644
index 00000000..aea7187f
--- /dev/null
+++ b/git/test/fixtures/rev_list_delta_b
@@ -0,0 +1,11 @@
+4c8124ffcf4039d292442eeccabdeca5af5c5017
+634396b2f541a9f2d58b00be1a07f0c358b999b3
+ab25fd8483882c3bda8a458ad2965d2248654335
+e34590b7a2d186b3bb9a1170d02d52b36c791c78
+8977833d74f8681aa0d9a5e84b0dd3d81519774d
+6f5561530cb3a94e4c86454e84732197325be172
+ee419e04a961543444be6db66aef52e6e37936d6
+d845de9d438e1a249a0c2fcb778e8ea3b7e06cef
+0bba4a6c10060405a94d52533af2f9bdacd4f29c
+77711c0722964ead965e0ba2ee9ed4a03cb3d292
+501d23cac6dd911511f15d091ee031a15b90ebde
diff --git a/git/test/fixtures/rev_list_single b/git/test/fixtures/rev_list_single
new file mode 100644
index 00000000..d8c6431e
--- /dev/null
+++ b/git/test/fixtures/rev_list_single
@@ -0,0 +1,7 @@
+commit 4c8124ffcf4039d292442eeccabdeca5af5c5017
+tree 672eca9b7f9e09c22dcb128c283e8c3c8d7697a4
+parent 634396b2f541a9f2d58b00be1a07f0c358b999b3
+author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700
+committer Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700
+
+ implement Grit#heads
diff --git a/git/test/fixtures/rev_parse b/git/test/fixtures/rev_parse
new file mode 100644
index 00000000..a639d89e
--- /dev/null
+++ b/git/test/fixtures/rev_parse
@@ -0,0 +1 @@
+80f136f
diff --git a/git/test/fixtures/show_empty_commit b/git/test/fixtures/show_empty_commit
new file mode 100644
index 00000000..ea25e32a
--- /dev/null
+++ b/git/test/fixtures/show_empty_commit
@@ -0,0 +1,6 @@
+commit 1e3824339762bd48316fe87bfafc853732d43264
+tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904
+author Tom Preston-Werner <tom@mojombo.com> 1157392833 +0000
+committer Tom Preston-Werner <tom@mojombo.com> 1157392833 +0000
+
+ initial directory structure
diff --git a/git/test/lib/__init__.py b/git/test/lib/__init__.py
new file mode 100644
index 00000000..77512794
--- /dev/null
+++ b/git/test/lib/__init__.py
@@ -0,0 +1,13 @@
+# __init__.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import inspect
+from mock import *
+from asserts import *
+from helper import *
+
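+# Re-export every public name imported above, skipping private names and modules.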
+__all__ = [ name for name, obj in locals().items()
+ if not (name.startswith('_') or inspect.ismodule(obj)) ]
diff --git a/git/test/lib/asserts.py b/git/test/lib/asserts.py
new file mode 100644
index 00000000..fa754b92
--- /dev/null
+++ b/git/test/lib/asserts.py
@@ -0,0 +1,50 @@
+# asserts.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import re
+import unittest
+from nose import tools
+from nose.tools import *
+import stat
+
+__all__ = ['assert_instance_of', 'assert_not_instance_of',
+ 'assert_none', 'assert_not_none',
+ 'assert_match', 'assert_not_match', 'assert_mode_644',
+ 'assert_mode_755'] + tools.__all__
+
+def assert_instance_of(expected, actual, msg=None):
+ """Verify that object is an instance of expected """
+ assert isinstance(actual, expected), msg
+
+def assert_not_instance_of(expected, actual, msg=None):
+ """Verify that object is not an instance of expected"""
+ assert not isinstance(actual, expected), msg
+
+def assert_none(actual, msg=None):
+ """verify that item is None"""
+ assert actual is None, msg
+
+def assert_not_none(actual, msg=None):
+ """verify that item is not None"""
+ assert actual is not None, msg
+
+def assert_match(pattern, string, msg=None):
+ """verify that the pattern matches the string"""
+ assert_not_none(re.search(pattern, string), msg)
+
+def assert_not_match(pattern, string, msg=None):
+ """verify that the pattern does not match the string"""
+ assert_none(re.search(pattern, string), msg)
+
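+# Usage sketch (illustrative values only; the sha is taken from the rev_list fixture):
+#
+#   assert_match(r'^[0-9a-f]{40}$', '4c8124ffcf4039d292442eeccabdeca5af5c5017')
+#   assert_not_match(r'\s', 'no-whitespace-here')
+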
+def assert_mode_644(mode):
+ """Verify given mode is 644"""
+ assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP)
+ assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and not (mode & stat.S_IXUSR)
+
+def assert_mode_755(mode):
+ """Verify given mode is 755"""
+ assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP) and (mode & stat.S_IXOTH) and (mode & stat.S_IXGRP)
+ assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and (mode & stat.S_IXUSR) \ No newline at end of file
diff --git a/git/test/lib/helper.py b/git/test/lib/helper.py
new file mode 100644
index 00000000..4e7b5cf6
--- /dev/null
+++ b/git/test/lib/helper.py
@@ -0,0 +1,245 @@
+# helper.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+import sys
+from git import Repo, Remote, GitCommandError
+from unittest import TestCase
+import tempfile
+import shutil
+import cStringIO
+
+GIT_REPO = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
+__all__ = (
+ 'fixture_path', 'fixture', 'absolute_project_path', 'StringProcessAdapter',
+ 'with_rw_repo', 'with_rw_and_rw_remote_repo', 'TestBase', 'TestCase', 'GIT_REPO'
+ )
+
+#{ Routines
+
+def fixture_path(name):
+ test_dir = os.path.dirname(os.path.dirname(__file__))
+ return os.path.join(test_dir, "fixtures", name)
+
+def fixture(name):
+ return open(fixture_path(name), 'rb').read()
+
+def absolute_project_path():
+ return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+
+#} END routines
+
+#{ Adapters
+
+class StringProcessAdapter(object):
+ """Allows to use strings as Process object as returned by SubProcess.Popen.
+ Its tailored to work with the test system only"""
+
+ def __init__(self, input_string):
+ self.stdout = cStringIO.StringIO(input_string)
+ self.stderr = cStringIO.StringIO()
+
+ def wait(self):
+ return 0
+
+ poll = wait
+
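+# Usage sketch (illustrative, not executed): the adapter stands in for a real
+# process object, e.g. when replaying fixture output through parsing code:
+#
+#   proc = StringProcessAdapter(fixture('rev_list'))
+#   assert proc.wait() == 0
+#   first_sha = proc.stdout.readline().strip()
+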
+#} END adapters
+
+#{ Decorators
+
+def _mktemp(*args):
+ """Wrapper around default tempfile.mktemp to fix an osx issue"""
+ tdir = tempfile.mktemp(*args)
+ if sys.platform == 'darwin':
+ tdir = '/private' + tdir
+ return tdir
+
+def _rmtree_onerror(osremove, fullpath, exec_info):
+ """
+ Handle the case on Windows where read-only files cannot be deleted by
+ os.remove: set the file to mode 777, then retry the deletion.
+ """
+ if os.name != 'nt' or osremove is not os.remove:
+ raise
+
+ os.chmod(fullpath, 0777)
+ os.remove(fullpath)
+
+def with_rw_repo(working_tree_ref, bare=False):
+ """
+ Same as with_bare_repo, but clones the rorepo as a non-bare repository, checking
+ out the working tree at the given working_tree_ref.
+
+ This repository type is more costly due to the working copy checkout.
+
+ To make working with relative paths easier, the cwd will be set to the working
+ dir of the repository.
+ """
+ assert isinstance(working_tree_ref, basestring), "Decorator requires ref name for working tree checkout"
+ def argument_passer(func):
+ def repo_creator(self):
+ prefix = 'non_'
+ if bare:
+ prefix = ''
+ #END handle prefix
+ repo_dir = _mktemp("%sbare_%s" % (prefix, func.__name__))
+ rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=bare, n=True)
+
+ rw_repo.head.commit = rw_repo.commit(working_tree_ref)
+ if not bare:
+ rw_repo.head.reference.checkout()
+ # END handle checkout
+
+ prev_cwd = os.getcwd()
+ os.chdir(rw_repo.working_dir)
+ try:
+ try:
+ return func(self, rw_repo)
+ except:
+ print >> sys.stderr, "Keeping repo after failure: %s" % repo_dir
+ repo_dir = None
+ raise
+ finally:
+ os.chdir(prev_cwd)
+ rw_repo.git.clear_cache()
+ if repo_dir is not None:
+ shutil.rmtree(repo_dir, onerror=_rmtree_onerror)
+ # END rm test repo if possible
+ # END cleanup
+ # END rw repo creator
+ repo_creator.__name__ = func.__name__
+ return repo_creator
+ # END argument passer
+ return argument_passer
+
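+# Usage sketch (hypothetical test case; 'master' is an assumed ref name):
+#
+#   class TestExample(TestBase):
+#       @with_rw_repo('master')
+#       def test_something(self, rw_repo):
+#           assert not rw_repo.bare
+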
+def with_rw_and_rw_remote_repo(working_tree_ref):
+ """
+ Same as with_rw_repo, but also provides a writable remote repository from which the
+ rw_repo has been forked as well as a handle for a git-daemon that may be started to
+ run the remote_repo.
+ The remote repository was cloned as a bare repository from the rorepo, whereas
+ the rw repo has a working tree and was cloned from the remote repository.
+
+ remote_repo has two remotes: origin and daemon_origin. One uses a local url,
+ the other uses a server url. The daemon setup must be done at system level
+ and should be an inetd service that serves tempfile.gettempdir() and all
+ directories in it.
+
+ The following sketch demonstrates this::
+ rorepo ---<bare clone>---> rw_remote_repo ---<clone>---> rw_repo
+
+ The test case needs to support the following signature::
+ def case(self, rw_repo, rw_remote_repo)
+
+ This setup allows you to test push and pull scenarios and hooks nicely.
+
+ See working dir info in with_rw_repo
+ """
+ assert isinstance(working_tree_ref, basestring), "Decorator requires ref name for working tree checkout"
+ def argument_passer(func):
+ def remote_repo_creator(self):
+ remote_repo_dir = _mktemp("remote_repo_%s" % func.__name__)
+ repo_dir = _mktemp("remote_clone_non_bare_repo")
+
+ rw_remote_repo = self.rorepo.clone(remote_repo_dir, shared=True, bare=True)
+ rw_repo = rw_remote_repo.clone(repo_dir, shared=True, bare=False, n=True) # recursive alternates info ?
+ rw_repo.head.commit = working_tree_ref
+ rw_repo.head.reference.checkout()
+
+ # prepare for git-daemon
+ rw_remote_repo.daemon_export = True
+
+ # this thing is just annoying !
+ crw = rw_remote_repo.config_writer()
+ section = "daemon"
+ try:
+ crw.add_section(section)
+ except Exception:
+ pass
+ crw.set(section, "receivepack", True)
+ # release lock
+ del(crw)
+
+ # initialize the remote - first do it as a local remote and fetch, then
+ # we change the url to point to the daemon. The daemon should be started
+ # by the user, not by us
+ d_remote = Remote.create(rw_repo, "daemon_origin", remote_repo_dir)
+ d_remote.fetch()
+ remote_repo_url = "git://localhost%s" % remote_repo_dir
+
+ d_remote.config_writer.set('url', remote_repo_url)
+
+ # try to list remotes to diagnoes whether the server is up
+ try:
+ rw_repo.git.ls_remote(d_remote)
+ except GitCommandError, e:
+ print str(e)
+ if os.name == 'nt':
+ raise AssertionError('git-daemon is required to run this test, but windows does not provide one. Otherwise, run: git-daemon "%s"' % tempfile.gettempdir())
+ else:
+ raise AssertionError('Please start a git-daemon to run this test, execute: git-daemon "%s"' % tempfile.gettempdir())
+
+ # adjust working dir
+ prev_cwd = os.getcwd()
+ os.chdir(rw_repo.working_dir)
+ try:
+ return func(self, rw_repo, rw_remote_repo)
+ finally:
+ os.chdir(prev_cwd)
+ rw_repo.git.clear_cache()
+ rw_remote_repo.git.clear_cache()
+ shutil.rmtree(repo_dir, onerror=_rmtree_onerror)
+ shutil.rmtree(remote_repo_dir, onerror=_rmtree_onerror)
+ # END cleanup
+ # END bare repo creator
+ remote_repo_creator.__name__ = func.__name__
+ return remote_repo_creator
+ # END remote repo creator
+ # END argument passer
+
+ return argument_passer
+
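+# Usage sketch (hypothetical; assumes a git-daemon serving the temp directory
+# as described in the docstring above):
+#
+#   class TestPush(TestBase):
+#       @with_rw_and_rw_remote_repo('0.1.6')
+#       def test_push(self, rw_repo, rw_remote_repo):
+#           rw_repo.remotes.daemon_origin.fetch()
+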
+#} END decorators
+
+class TestBase(TestCase):
+ """
+ Base Class providing default functionality to all tests such as:
+
+ - Utility functions provided by the TestCase base of the unittest module, such as::
+ self.fail("todo")
+ self.failUnlessRaises(...)
+
+ - Class level repository which is considered read-only as it is shared among
+ all test cases in your type.
+ Access it using::
+ self.rorepo # 'ro' stands for read-only
+
+ The rorepo is in fact your current project's git repo. If you refer to specific
+ shas for your objects, be sure you choose some that are part of the immutable portion
+ of the project history (to ensure tests don't fail for others).
+ """
+
+ @classmethod
+ def setUpAll(cls):
+ """
+ Dynamically add a read-only repository to our actual type. This way
+ each test type has its own repository.
+ """
+ cls.rorepo = Repo(GIT_REPO)
+
+ def _make_file(self, rela_path, data, repo=None):
+ """
+ Create a file at the given path relative to our repository, filled
+ with the given data. Returns absolute path to created file.
+ """
+ repo = repo or self.rorepo
+ abs_path = os.path.join(repo.working_tree_dir, rela_path)
+ fp = open(abs_path, "w")
+ fp.write(data)
+ fp.close()
+ return abs_path
diff --git a/git/test/performance/lib.py b/git/test/performance/lib.py
new file mode 100644
index 00000000..d0727b60
--- /dev/null
+++ b/git/test/performance/lib.py
@@ -0,0 +1,78 @@
+"""Contains library functions"""
+import os
+from git.test.lib import *
+import shutil
+import tempfile
+
+from git.db import (
+ GitCmdObjectDB,
+ GitDB
+ )
+
+from git import (
+ Repo
+ )
+
+#{ Invariants
+k_env_git_repo = "GIT_PYTHON_TEST_GIT_REPO_BASE"
+#} END invariants
+
+
+#{ Utilities
+def resolve_or_fail(env_var):
+ """:return: resolved environment variable or raise EnvironmentError"""
+ try:
+ return os.environ[env_var]
+ except KeyError:
+ raise EnvironmentError("Please set the %r environment variable and retry" % env_var)
+ # END exception handling
+
+#} END utilities
+
+
+#{ Base Classes
+
+class TestBigRepoR(TestBase):
+ """TestCase providing access to readonly 'big' repositories using the following
+ member variables:
+
+ * gitrorepo
+
+ * Read-Only git repository - actually the repo of git itself
+
+ * puregitrorepo
+
+ * As gitrorepo, but uses the pure python implementation
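+
+ The repository path is read from the environment variable named by
+ k_env_git_repo. A hypothetical setup, assuming the git sources were
+ cloned to /tmp/git, would be::
+ export GIT_PYTHON_TEST_GIT_REPO_BASE=/tmp/git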
+ """
+
+ #{ Invariants
+ head_sha_2k = '235d521da60e4699e5bd59ac658b5b48bd76ddca'
+ head_sha_50 = '32347c375250fd470973a5d76185cac718955fd5'
+ #} END invariants
+
+ @classmethod
+ def setUpAll(cls):
+ super(TestBigRepoR, cls).setUpAll()
+ repo_path = resolve_or_fail(k_env_git_repo)
+ cls.gitrorepo = Repo(repo_path, odbt=GitCmdObjectDB)
+ cls.puregitrorepo = Repo(repo_path, odbt=GitDB)
+
+
+class TestBigRepoRW(TestBigRepoR):
+ """As above, but provides a big repository that we can write to.
+
+ Provides ``self.gitrwrepo`` and ``self.puregitrwrepo``"""
+
+ @classmethod
+ def setUpAll(cls):
+ super(TestBigRepoRW, cls).setUpAll()
+ dirname = tempfile.mktemp()
+ os.mkdir(dirname)
+ cls.gitrwrepo = cls.gitrorepo.clone(dirname, shared=True, bare=True, odbt=GitCmdObjectDB)
+ cls.puregitrwrepo = Repo(dirname, odbt=GitDB)
+
+ @classmethod
+ def tearDownAll(cls):
+ shutil.rmtree(cls.gitrwrepo.working_dir)
+
+#} END base classes
diff --git a/git/test/performance/test_commit.py b/git/test/performance/test_commit.py
new file mode 100644
index 00000000..80421aa2
--- /dev/null
+++ b/git/test/performance/test_commit.py
@@ -0,0 +1,99 @@
+# test_performance.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from lib import *
+from git import *
+from gitdb import IStream
+from git.test.test_commit import assert_commit_serialization
+from cStringIO import StringIO
+from time import time
+import sys
+
+class TestPerformance(TestBigRepoRW):
+
+ # ref with about 100 commits in its history
+ ref_100 = '0.1.6'
+
+ def _query_commit_info(self, c):
+ c.author
+ c.authored_date
+ c.author_tz_offset
+ c.committer
+ c.committed_date
+ c.committer_tz_offset
+ c.message
+ c.parents
+
+ def test_iteration(self):
+ no = 0
+ nc = 0
+
+ # iterate all commits reachable from ref_100, querying their metadata and
+ # traversing each commit's tree, touching every object's size once
+
+ st = time()
+ for c in self.rorepo.iter_commits(self.ref_100):
+ nc += 1
+ self._query_commit_info(c)
+ for obj in c.tree.traverse():
+ obj.size
+ no += 1
+ # END for each object
+ # END for each commit
+ elapsed_time = time() - st
+ print >> sys.stderr, "Traversed %i Trees and a total of %i uncached objects in %s [s] ( %f objs/s )" % (nc, no, elapsed_time, no/elapsed_time)
+
+ def test_commit_traversal(self):
+ # bound to cat-file parsing performance
+ nc = 0
+ st = time()
+ for c in self.gitrorepo.commit(self.head_sha_2k).traverse(branch_first=False):
+ nc += 1
+ self._query_commit_info(c)
+ # END for each traversed commit
+ elapsed_time = time() - st
+ print >> sys.stderr, "Traversed %i Commits in %s [s] ( %f commits/s )" % (nc, elapsed_time, nc/elapsed_time)
+
+ def test_commit_iteration(self):
+ # bound to stream parsing performance
+ nc = 0
+ st = time()
+ for c in Commit.iter_items(self.gitrorepo, self.head_sha_2k):
+ nc += 1
+ self._query_commit_info(c)
+ # END for each traversed commit
+ elapsed_time = time() - st
+ print >> sys.stderr, "Iterated %i Commits in %s [s] ( %f commits/s )" % (nc, elapsed_time, nc/elapsed_time)
+
+ def test_commit_serialization(self):
+ assert_commit_serialization(self.gitrwrepo, self.head_sha_2k, True)
+
+ rwrepo = self.gitrwrepo
+ make_object = rwrepo.odb.store
+ # direct serialization - deserialization can be tested afterwards
+ # serialization is probably limited on IO
+ hc = rwrepo.commit(self.head_sha_2k)
+
+ commits = list()
+ nc = 5000
+ st = time()
+ for i in xrange(nc):
+ cm = Commit( rwrepo, Commit.NULL_BIN_SHA, hc.tree,
+ hc.author, hc.authored_date, hc.author_tz_offset,
+ hc.committer, hc.committed_date, hc.committer_tz_offset,
+ str(i), parents=hc.parents, encoding=hc.encoding)
+
+ stream = StringIO()
+ cm._serialize(stream)
+ slen = stream.tell()
+ stream.seek(0)
+
+ cm.binsha = make_object(IStream(Commit.type, slen, stream)).binsha
+ # END commit creation
+ elapsed = time() - st
+
+ print >> sys.stderr, "Serialized %i commits to loose objects in %f s ( %f commits / s )" % (nc, elapsed, nc / elapsed)
diff --git a/git/test/performance/test_odb.py b/git/test/performance/test_odb.py
new file mode 100644
index 00000000..32b70f69
--- /dev/null
+++ b/git/test/performance/test_odb.py
@@ -0,0 +1,70 @@
+"""Performance tests for object store"""
+
+from time import time
+import sys
+import stat
+
+from lib import (
+ TestBigRepoR
+ )
+
+
+class TestObjDBPerformance(TestBigRepoR):
+
+ def test_random_access(self):
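+ # each row collects the test name, then one elapsed time per repo type:
+ # [name, elapsed with GitCmdObjectDB, elapsed with GitDB]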
+ results = [ ["Iterate Commits"], ["Iterate Blobs"], ["Retrieve Blob Data"] ]
+ for repo in (self.gitrorepo, self.puregitrorepo):
+ # GET COMMITS
+ st = time()
+ root_commit = repo.commit(self.head_sha_2k)
+ commits = list(root_commit.traverse())
+ nc = len(commits)
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i commits from ObjectStore in %g s ( %f commits / s )" % (type(repo.odb), nc, elapsed, nc / elapsed)
+ results[0].append(elapsed)
+
+ # GET TREES
+ # walk all trees of all commits
+ st = time()
+ blobs_per_commit = list()
+ nt = 0
+ for commit in commits:
+ tree = commit.tree
+ blobs = list()
+ for item in tree.traverse():
+ nt += 1
+ if item.type == 'blob':
+ blobs.append(item)
+ # direct access for speed
+ # END while trees are there for walking
+ blobs_per_commit.append(blobs)
+ # END for each commit
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i objects from %i commits in %g s ( %f objects / s )" % (type(repo.odb), nt, len(commits), elapsed, nt / elapsed)
+ results[1].append(elapsed)
+
+ # GET BLOBS
+ st = time()
+ nb = 0
+ too_many = 15000
+ data_bytes = 0
+ for blob_list in blobs_per_commit:
+ for blob in blob_list:
+ data_bytes += len(blob.data_stream.read())
+ # END for each blobsha
+ nb += len(blob_list)
+ if nb > too_many:
+ break
+ # END for each bloblist
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i blobs (%i KiB) and their data in %g s ( %f blobs / s, %f KiB / s )" % (type(repo.odb), nb, data_bytes/1000, elapsed, nb / elapsed, (data_bytes / 1000) / elapsed)
+ results[2].append(elapsed)
+ # END for each repo type
+
+ # final results
+ for test_name, a, b in results:
+ print >> sys.stderr, "%s: %f s vs %f s, pure is %f times slower" % (test_name, a, b, b / a)
+ # END for each result
diff --git a/git/test/performance/test_streams.py b/git/test/performance/test_streams.py
new file mode 100644
index 00000000..7f17d722
--- /dev/null
+++ b/git/test/performance/test_streams.py
@@ -0,0 +1,131 @@
+"""Performance tests for data streaming"""
+
+from git.test.lib import *
+from gitdb import *
+from gitdb.util import bin_to_hex
+
+from time import time
+import os
+import sys
+import stat
+import subprocess
+
+from gitdb.test.lib import make_memory_file
+
+from lib import (
+ TestBigRepoR
+ )
+
+
+class TestObjDBPerformance(TestBigRepoR):
+
+ large_data_size_bytes = 1000*1000*10 # about 10 MB should do it
+ moderate_data_size_bytes = 1000*1000*1 # just 1 MB
+
+ @with_rw_repo('HEAD', bare=True)
+ def test_large_data_streaming(self, rwrepo):
+ # TODO: This part overlaps with the same file in gitdb.test.performance.test_stream
+ # It should be shared if possible
+ ldb = LooseObjectDB(os.path.join(rwrepo.git_dir, 'objects'))
+
+ for randomize in range(2):
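+ # two passes: constant ( well compressible ) data first, then random data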
+ desc = (randomize and 'random ') or ''
+ print >> sys.stderr, "Creating %s data ..." % desc
+ st = time()
+ size, stream = make_memory_file(self.large_data_size_bytes, randomize)
+ elapsed = time() - st
+ print >> sys.stderr, "Done (in %f s)" % elapsed
+
+ # writing - due to the compression it will seem faster than it is
+ st = time()
+ binsha = ldb.store(IStream('blob', size, stream)).binsha
+ elapsed_add = time() - st
+ assert ldb.has_object(binsha)
+ db_file = ldb.readable_db_object_path(bin_to_hex(binsha))
+ fsize_kib = os.path.getsize(db_file) / 1000
+
+
+ size_kib = size / 1000
+ print >> sys.stderr, "Added %i KiB (filesize = %i KiB) of %s data to loose odb in %f s ( %f Write KiB / s)" % (size_kib, fsize_kib, desc, elapsed_add, size_kib / elapsed_add)
+
+ # reading all at once
+ st = time()
+ ostream = ldb.stream(binsha)
+ shadata = ostream.read()
+ elapsed_readall = time() - st
+
+ stream.seek(0)
+ assert shadata == stream.getvalue()
+ print >> sys.stderr, "Read %i KiB of %s data at once from loose odb in %f s ( %f Read KiB / s)" % (size_kib, desc, elapsed_readall, size_kib / elapsed_readall)
+
+
+ # reading in chunks of 512 kB
+ cs = 512*1000
+ chunks = list()
+ st = time()
+ ostream = ldb.stream(binsha)
+ while True:
+ data = ostream.read(cs)
+ chunks.append(data)
+ if len(data) < cs:
+ break
+ # END read in chunks
+ elapsed_readchunks = time() - st
+
+ stream.seek(0)
+ assert ''.join(chunks) == stream.getvalue()
+
+ cs_kib = cs / 1000
+ print >> sys.stderr, "Read %i KiB of %s data in %i KiB chunks from loose odb in %f s ( %f Read KiB / s)" % (size_kib, desc, cs_kib, elapsed_readchunks, size_kib / elapsed_readchunks)
+
+ # del db file so git has something to do
+ os.remove(db_file)
+
+ # VS. CGIT
+ ##########
+ # CGIT ! Can using the cgit programs be faster ?
+ proc = rwrepo.git.hash_object('-w', '--stdin', as_process=True, istream=subprocess.PIPE)
+
+ # write file - pump everything in at once to be as fast as possible
+ data = stream.getvalue() # cache it
+ st = time()
+ proc.stdin.write(data)
+ proc.stdin.close()
+ gitsha = proc.stdout.read().strip()
+ proc.wait()
+ gelapsed_add = time() - st
+ del(data)
+ assert gitsha == bin_to_hex(binsha) # we do it the same way, right ?
+
+ # as it's the same sha, we reuse our path
+ fsize_kib = os.path.getsize(db_file) / 1000
+ print >> sys.stderr, "Added %i KiB (filesize = %i KiB) of %s data using git-hash-object in %f s ( %f Write KiB / s)" % (size_kib, fsize_kib, desc, gelapsed_add, size_kib / gelapsed_add)
+
+ # compare ...
+ print >> sys.stderr, "Git-Python is %f %% faster than git when adding big %s files" % (100.0 - (elapsed_add / gelapsed_add) * 100, desc)
+
+
+ # read all
+ st = time()
+ s, t, size, data = rwrepo.git.get_object_data(gitsha)
+ gelapsed_readall = time() - st
+ print >> sys.stderr, "Read %i KiB of %s data at once using git-cat-file in %f s ( %f Read KiB / s)" % (size_kib, desc, gelapsed_readall, size_kib / gelapsed_readall)
+
+ # compare
+ print >> sys.stderr, "Git-Python is %f %% faster than git when reading big %s files" % (100.0 - (elapsed_readall / gelapsed_readall) * 100, desc)
+
+
+ # read chunks
+ st = time()
+ s, t, size, stream = rwrepo.git.stream_object_data(gitsha)
+ while True:
+ data = stream.read(cs)
+ if len(data) < cs:
+ break
+ # END read stream
+ gelapsed_readchunks = time() - st
+ print >> sys.stderr, "Read %i KiB of %s data in %i KiB chunks from git-cat-file in %f s ( %f Read KiB / s)" % (size_kib, desc, cs_kib, gelapsed_readchunks, size_kib / gelapsed_readchunks)
+
+ # compare
+ print >> sys.stderr, "Git-Python is %f %% faster than git when reading big %s files in chunks" % (100.0 - (elapsed_readchunks / gelapsed_readchunks) * 100, desc)
+ # END for each randomization factor
diff --git a/git/test/performance/test_utils.py b/git/test/performance/test_utils.py
new file mode 100644
index 00000000..19c1e84a
--- /dev/null
+++ b/git/test/performance/test_utils.py
@@ -0,0 +1,174 @@
+"""Performance of utilities"""
+from time import time
+import sys
+import stat
+
+from lib import (
+ TestBigRepoR
+ )
+
+
+class TestUtilPerformance(TestBigRepoR):
+
+ def test_access(self):
+ # compare dict vs. slot access
+ class Slotty(object):
+ __slots__ = "attr"
+ def __init__(self):
+ self.attr = 1
+
+ class Dicty(object):
+ def __init__(self):
+ self.attr = 1
+
+ class BigSlotty(object):
+ __slots__ = ('attr', ) + tuple('abcdefghijk')
+ def __init__(self):
+ for attr in self.__slots__:
+ setattr(self, attr, 1)
+
+ class BigDicty(object):
+ def __init__(self):
+ for attr in BigSlotty.__slots__:
+ setattr(self, attr, 1)
+
+ ni = 1000000
+ for cls in (Slotty, Dicty, BigSlotty, BigDicty):
+ cli = cls()
+ st = time()
+ for i in xrange(ni):
+ cli.attr
+ # END for each access
+ elapsed = time() - st
+ print >> sys.stderr, "Accessed %s.attr %i times in %s s ( %f acc / s)" % (cls.__name__, ni, elapsed, ni / elapsed)
+ # END for each class type
+
+ # check the number of sequence accesses
+ for cls in (list, tuple):
+ x = 10
+ st = time()
+ s = cls(range(x))
+ for i in xrange(ni):
+ s[0]
+ s[1]
+ s[2]
+ # END for
+ elapsed = time() - st
+ na = ni * 3
+ print >> sys.stderr, "Accessed %s[x] %i times in %s s ( %f acc / s)" % (cls.__name__, na, elapsed, na / elapsed)
+ # END for each sequence
+
+ def test_instantiation(self):
+ ni = 100000
+ max_num_items = 4
+ for mni in range(max_num_items+1):
+ for cls in (tuple, list):
+ st = time()
+ for i in xrange(ni):
+ if mni == 0:
+ cls()
+ elif mni == 1:
+ cls((1,))
+ elif mni == 2:
+ cls((1,2))
+ elif mni == 3:
+ cls((1,2,3))
+ elif mni == 4:
+ cls((1,2,3,4))
+ else:
+ cls(x for x in xrange(mni))
+ # END handle empty cls
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Created %i %ss of size %i in %f s ( %f inst / s)" % (ni, cls.__name__, mni, elapsed, ni / elapsed)
+ # END for each type
+ # END for each item count
+
+ # tuple and tuple direct
+ st = time()
+ for i in xrange(ni):
+ t = (1,2,3,4)
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Created %i tuples (1,2,3,4) in %f s ( %f tuples / s)" % (ni, elapsed, ni / elapsed)
+
+ st = time()
+ for i in xrange(ni):
+ t = tuple((1,2,3,4))
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Created %i tuples tuple((1,2,3,4)) in %f s ( %f tuples / s)" % (ni, elapsed, ni / elapsed)
+
+ def test_unpacking_vs_indexing(self):
+ ni = 1000000
+ list_items = [1,2,3,4]
+ tuple_items = (1,2,3,4)
+
+ for sequence in (list_items, tuple_items):
+ st = time()
+ for i in xrange(ni):
+ one, two, three, four = sequence
+ # END for each iteration
+ elapsed = time() - st
+ print >> sys.stderr, "Unpacked %i %ss of size %i in %f s ( %f acc / s)" % (ni, type(sequence).__name__, len(sequence), elapsed, ni / elapsed)
+
+ st = time()
+ for i in xrange(ni):
+ one, two, three, four = sequence[0], sequence[1], sequence[2], sequence[3]
+ # END for each iteration
+ elapsed = time() - st
+ print >> sys.stderr, "Unpacked %i %ss of size %i individually in %f s ( %f acc / s)" % (ni, type(sequence).__name__, len(sequence), elapsed, ni / elapsed)
+
+ st = time()
+ for i in xrange(ni):
+ one, two = sequence[0], sequence[1]
+ # END for each iteration
+ elapsed = time() - st
+ print >> sys.stderr, "Unpacked %i %ss of size %i individually (2 of 4) in %f s ( %f acc / s)" % (ni, type(sequence).__name__, len(sequence), elapsed, ni / elapsed)
+ # END for each sequence
+
+ def test_large_list_vs_iteration(self):
+ # what costs more: alloc/realloc of lists, or the cpu strain of iterators ?
+ def slow_iter(ni):
+ for i in xrange(ni):
+ yield i
+ # END slow iter - be closer to the real world
+
+ # alloc doesn't play a role here it seems
+ for ni in (500, 1000, 10000, 20000, 40000):
+ st = time()
+ for i in list(xrange(ni)):
+ i
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Iterated %i items from list in %f s ( %f acc / s)" % (ni, elapsed, ni / elapsed)
+
+ st = time()
+ for i in slow_iter(ni):
+ i
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Iterated %i items from iterator in %f s ( %f acc / s)" % (ni, elapsed, ni / elapsed)
+ # END for each number of iterations
+
+ def test_type_vs_inst_class(self):
+ class NewType(object):
+ pass
+
+ # let's see which way is faster
+ inst = NewType()
+
+ ni = 1000000
+ st = time()
+ for i in xrange(ni):
+ inst.__class__()
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Created %i items using inst.__class__ in %f s ( %f items / s)" % (ni, elapsed, ni / elapsed)
+
+ st = time()
+ for i in xrange(ni):
+ type(inst)()
+ # END for each item
+ elapsed = time() - st
+ print >> sys.stderr, "Created %i items using type(inst)() in %f s ( %f items / s)" % (ni, elapsed, ni / elapsed)
diff --git a/git/test/test_actor.py b/git/test/test_actor.py
new file mode 100644
index 00000000..b8e5ba3b
--- /dev/null
+++ b/git/test/test_actor.py
@@ -0,0 +1,36 @@
+# test_actor.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+from git.test.lib import *
+from git import *
+
+class TestActor(object):
+ def test_from_string_should_separate_name_and_email(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert_equal("Michael Trier", a.name)
+ assert_equal("mtrier@example.com", a.email)
+
+ # base type capabilities
+ assert a == a
+ assert not ( a != a )
+ m = set()
+ m.add(a)
+ m.add(a)
+ assert len(m) == 1
+
+ def test_from_string_should_handle_just_name(self):
+ a = Actor._from_string("Michael Trier")
+ assert_equal("Michael Trier", a.name)
+ assert_equal(None, a.email)
+
+ def test_should_display_representation(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert_equal('<git.Actor "Michael Trier <mtrier@example.com>">', repr(a))
+
+ def test_str_should_alias_name(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert_equal(a.name, str(a)) \ No newline at end of file
diff --git a/git/test/test_base.py b/git/test/test_base.py
new file mode 100644
index 00000000..e630d151
--- /dev/null
+++ b/git/test/test_base.py
@@ -0,0 +1,100 @@
+# test_base.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import git.objects.base as base
+import git.refs as refs
+import os
+
+from git.test.lib import *
+from git import *
+from itertools import chain
+from git.objects.util import get_object_type_by_name
+from gitdb.util import hex_to_bin
+import tempfile
+
+class TestBase(TestBase):
+
+ type_tuples = ( ("blob", "8741fc1d09d61f02ffd8cded15ff603eff1ec070", "blob.py"),
+ ("tree", "3a6a5e3eeed3723c09f1ef0399f81ed6b8d82e79", "directory"),
+ ("commit", "4251bd59fb8e11e40c40548cba38180a9536118c", None),
+ ("tag", "e56a60e8e9cd333cfba0140a77cd12b0d9398f10", None) )
+
+ def test_base_object(self):
+ # test interface of base object classes
+ types = (Blob, Tree, Commit, TagObject)
+ assert len(types) == len(self.type_tuples)
+
+ s = set()
+ num_objs = 0
+ num_index_objs = 0
+ for obj_type, (typename, hexsha, path) in zip(types, self.type_tuples):
+ binsha = hex_to_bin(hexsha)
+ item = None
+ if path is None:
+ item = obj_type(self.rorepo,binsha)
+ else:
+ item = obj_type(self.rorepo,binsha, 0, path)
+ # END handle index objects
+ num_objs += 1
+ assert item.hexsha == hexsha
+ assert item.type == typename
+ assert item.size
+ assert item == item
+ assert not item != item
+ assert str(item) == item.hexsha
+ assert repr(item)
+ s.add(item)
+
+ if isinstance(item, base.IndexObject):
+ num_index_objs += 1
+ if hasattr(item,'path'): # never runs here
+ assert not item.path.startswith("/") # must be relative
+ assert isinstance(item.mode, int)
+ # END index object check
+
+ # read from stream
+ data_stream = item.data_stream
+ data = data_stream.read()
+ assert data
+
+ tmpfile = os.tmpfile()
+ assert item == item.stream_data(tmpfile)
+ tmpfile.seek(0)
+ assert tmpfile.read() == data
+ # END stream to file directly
+ # END for each object type to create
+
+ # each has a unique sha
+ assert len(s) == num_objs
+ assert len(s|s) == num_objs
+ assert num_index_objs == 2
+
+ def test_get_object_type_by_name(self):
+ for tname in base.Object.TYPES:
+ assert base.Object in get_object_type_by_name(tname).mro()
+ # END for each known type
+
+ assert_raises( ValueError, get_object_type_by_name, "doesntexist" )
+
+ def test_object_resolution(self):
+ # objects must be resolved to shas so they compare equal
+ assert self.rorepo.head.reference.object == self.rorepo.active_branch.object
+
+ @with_rw_repo('HEAD', bare=True)
+ def test_with_bare_rw_repo(self, bare_rw_repo):
+ assert bare_rw_repo.config_reader("repository").getboolean("core", "bare")
+ assert os.path.isfile(os.path.join(bare_rw_repo.git_dir,'HEAD'))
+
+ @with_rw_repo('0.1.6')
+ def test_with_rw_repo(self, rw_repo):
+ assert not rw_repo.config_reader("repository").getboolean("core", "bare")
+ assert os.path.isdir(os.path.join(rw_repo.working_tree_dir,'lib'))
+
+ @with_rw_and_rw_remote_repo('0.1.6')
+ def test_with_rw_remote_and_rw_repo(self, rw_repo, rw_remote_repo):
+ assert not rw_repo.config_reader("repository").getboolean("core", "bare")
+ assert rw_remote_repo.config_reader("repository").getboolean("core", "bare")
+ assert os.path.isdir(os.path.join(rw_repo.working_tree_dir,'lib'))
diff --git a/git/test/test_blob.py b/git/test/test_blob.py
new file mode 100644
index 00000000..661c0501
--- /dev/null
+++ b/git/test/test_blob.py
@@ -0,0 +1,23 @@
+# test_blob.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+from gitdb.util import hex_to_bin
+
+class TestBlob(TestBase):
+
+ def test_mime_type_should_return_mime_type_for_known_types(self):
+ blob = Blob(self.rorepo, **{'binsha': Blob.NULL_BIN_SHA, 'path': 'foo.png'})
+ assert_equal("image/png", blob.mime_type)
+
+ def test_mime_type_should_return_text_plain_for_unknown_types(self):
+ blob = Blob(self.rorepo, **{'binsha': Blob.NULL_BIN_SHA,'path': 'something'})
+ assert_equal("text/plain", blob.mime_type)
+
+ def test_nodict(self):
+ self.failUnlessRaises(AttributeError, setattr, self.rorepo.tree()['AUTHORS'], 'someattr', 2)
+
diff --git a/git/test/test_commit.py b/git/test/test_commit.py
new file mode 100644
index 00000000..4a8d8b87
--- /dev/null
+++ b/git/test/test_commit.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+# test_commit.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+from gitdb import IStream
+from gitdb.util import hex_to_bin
+
+from cStringIO import StringIO
+import time
+import sys
+
+
+def assert_commit_serialization(rwrepo, commit_id, print_performance_info=False):
+ """traverse all commits in the history of commit identified by commit_id and check
+ if the serialization works.
+ :param print_performance_info: if True, we will show how fast we are"""
+ ns = 0 # num serializations
+ nds = 0 # num deserializations
+
+ st = time.time()
+ for cm in rwrepo.commit(commit_id).traverse():
+ nds += 1
+
+ # assert that we deserialize commits correctly, hence we get the same
+ # sha on serialization
+ stream = StringIO()
+ cm._serialize(stream)
+ ns += 1
+ streamlen = stream.tell()
+ stream.seek(0)
+
+ istream = rwrepo.odb.store(IStream(Commit.type, streamlen, stream))
+ assert istream.hexsha == cm.hexsha
+
+ nc = Commit(rwrepo, Commit.NULL_BIN_SHA, cm.tree,
+ cm.author, cm.authored_date, cm.author_tz_offset,
+ cm.committer, cm.committed_date, cm.committer_tz_offset,
+ cm.message, cm.parents, cm.encoding)
+
+ assert nc.parents == cm.parents
+ stream = StringIO()
+ nc._serialize(stream)
+ ns += 1
+ streamlen = stream.tell()
+ stream.seek(0)
+
+ # reuse istream
+ istream.size = streamlen
+ istream.stream = stream
+ istream.binsha = None
+ nc.binsha = rwrepo.odb.store(istream).binsha
+
+ # if it worked, we have exactly the same contents !
+ assert nc.hexsha == cm.hexsha
+ # END check commits
+ elapsed = time.time() - st
+
+ if print_performance_info:
+ print >> sys.stderr, "Serialized %i and deserialized %i commits in %f s ( (%f, %f) commits / s" % (ns, nds, elapsed, ns/elapsed, nds/elapsed)
+ # END handle performance info
+
+
+class TestCommit(TestBase):
+
+ def test_bake(self):
+
+ commit = self.rorepo.commit('2454ae89983a4496a445ce347d7a41c0bb0ea7ae')
+ # commits have no dict
+ self.failUnlessRaises(AttributeError, setattr, commit, 'someattr', 1)
+ commit.author # bake
+
+ assert_equal("Sebastian Thiel", commit.author.name)
+ assert_equal("byronimo@gmail.com", commit.author.email)
+ assert commit.author == commit.committer
+ assert isinstance(commit.authored_date, int) and isinstance(commit.committed_date, int)
+ assert isinstance(commit.author_tz_offset, int) and isinstance(commit.committer_tz_offset, int)
+ assert commit.message == "Added missing information to docstrings of commit and stats module\n"
+
+
+ def test_stats(self):
+ commit = self.rorepo.commit('33ebe7acec14b25c5f84f35a664803fcab2f7781')
+ stats = commit.stats
+
+ def check_entries(d):
+ assert isinstance(d, dict)
+ for key in ("insertions", "deletions", "lines"):
+ assert key in d
+ # END assertion helper
+ assert stats.files
+ assert stats.total
+
+ check_entries(stats.total)
+ assert "files" in stats.total
+
+ for filepath, d in stats.files.items():
+ check_entries(d)
+ # END for each stated file
+
+ # assure data is parsed properly
+ michael = Actor._from_string("Michael Trier <mtrier@gmail.com>")
+ assert commit.author == michael
+ assert commit.committer == michael
+ assert commit.authored_date == 1210193388
+ assert commit.committed_date == 1210193388
+ assert commit.author_tz_offset == 14400, commit.author_tz_offset
+ assert commit.committer_tz_offset == 14400, commit.committer_tz_offset
+ assert commit.message == "initial project\n"
+
+ def test_unicode_actor(self):
+ # assure we can parse unicode actors correctly
+ name = "Üäöß ÄußÉ".decode("utf-8")
+ assert len(name) == 9
+ special = Actor._from_string(u"%s <something@this.com>" % name)
+ assert special.name == name
+ assert isinstance(special.name, unicode)
+
+ def test_traversal(self):
+ start = self.rorepo.commit("a4d06724202afccd2b5c54f81bcf2bf26dea7fff")
+ first = self.rorepo.commit("33ebe7acec14b25c5f84f35a664803fcab2f7781")
+ p0 = start.parents[0]
+ p1 = start.parents[1]
+ p00 = p0.parents[0]
+ p10 = p1.parents[0]
+
+ # basic branch first, depth first
+ dfirst = start.traverse(branch_first=False)
+ bfirst = start.traverse(branch_first=True)
+ assert dfirst.next() == p0
+ assert dfirst.next() == p00
+
+ assert bfirst.next() == p0
+ assert bfirst.next() == p1
+ assert bfirst.next() == p00
+ assert bfirst.next() == p10
+
+ # at some point, both iterations should stop
+ assert list(bfirst)[-1] == first
+ stoptraverse = self.rorepo.commit("254d04aa3180eb8b8daf7b7ff25f010cd69b4e7d").traverse(as_edge=True)
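+ # with as_edge=True, each yielded item is a (source, destination) pair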
+ l = list(stoptraverse)
+ assert len(l[0]) == 2
+
+ # ignore self
+ assert start.traverse(ignore_self=False).next() == start
+
+ # depth
+ assert len(list(start.traverse(ignore_self=False, depth=0))) == 1
+
+ # prune
+ assert start.traverse(branch_first=1, prune=lambda i,d: i==p0).next() == p1
+
+ # predicate
+ assert start.traverse(branch_first=1, predicate=lambda i,d: i==p1).next() == p1
+
+ # traversal should stop when the beginning is reached
+ self.failUnlessRaises(StopIteration, first.traverse().next)
+
+ # parents of the first commit should be empty ( as the only parent has a null
+ # sha )
+ assert len(first.parents) == 0
+
+ def test_iteration(self):
+ # we can iterate commits
+ all_commits = Commit.list_items(self.rorepo, self.rorepo.head)
+ assert all_commits
+ assert all_commits == list(self.rorepo.iter_commits())
+
+ # this includes merge commits
+ mcomit = self.rorepo.commit('d884adc80c80300b4cc05321494713904ef1df2d')
+ assert mcomit in all_commits
+
+ # we can limit the result to paths
+ ltd_commits = list(self.rorepo.iter_commits(paths='CHANGES'))
+ assert ltd_commits and len(ltd_commits) < len(all_commits)
+
+ # show commits of multiple paths, resulting in a union of commits
+ less_ltd_commits = list(Commit.iter_items(self.rorepo, 'master', paths=('CHANGES', 'AUTHORS')))
+ assert len(ltd_commits) < len(less_ltd_commits)
+
+ def test_iter_items(self):
+ # pretty not allowed
+ self.failUnlessRaises(ValueError, Commit.iter_items, self.rorepo, 'master', pretty="raw")
+
+ def test_rev_list_bisect_all(self):
+ """
+ 'git rev-list --bisect-all' returns additional information
+ in the commit header. This test ensures that we properly parse it.
+ """
+ revs = self.rorepo.git.rev_list('933d23bf95a5bd1624fbcdf328d904e1fa173474',
+ first_parent=True,
+ bisect_all=True)
+
+ commits = Commit._iter_from_process_or_stream(self.rorepo, StringProcessAdapter(revs))
+ expected_ids = (
+ '7156cece3c49544abb6bf7a0c218eb36646fad6d',
+ '1f66cfbbce58b4b552b041707a12d437cc5f400a',
+ '33ebe7acec14b25c5f84f35a664803fcab2f7781',
+ '933d23bf95a5bd1624fbcdf328d904e1fa173474'
+ )
+ for sha1, commit in zip(expected_ids, commits):
+ assert_equal(sha1, commit.hexsha)
+
+ def test_count(self):
+ assert self.rorepo.tag('refs/tags/0.1.5').commit.count( ) == 143
+
+ def test_list(self):
+ assert isinstance(Commit.list_items(self.rorepo, '0.1.5', max_count=5)[hex_to_bin('5117c9c8a4d3af19a9958677e45cda9269de1541')], Commit)
+
+ def test_str(self):
+ commit = Commit(self.rorepo, Commit.NULL_BIN_SHA)
+ assert_equal(Commit.NULL_HEX_SHA, str(commit))
+
+ def test_repr(self):
+ commit = Commit(self.rorepo, Commit.NULL_BIN_SHA)
+ assert_equal('<git.Commit "%s">' % Commit.NULL_HEX_SHA, repr(commit))
+
+ def test_equality(self):
+ commit1 = Commit(self.rorepo, Commit.NULL_BIN_SHA)
+ commit2 = Commit(self.rorepo, Commit.NULL_BIN_SHA)
+ commit3 = Commit(self.rorepo, "\1"*20)
+ assert_equal(commit1, commit2)
+ assert_not_equal(commit2, commit3)
+
+ def test_iter_parents(self):
+ # should return all but ourselves, even if skip is defined
+ c = self.rorepo.commit('0.1.5')
+ for skip in (0, 1):
+ piter = c.iter_parents(skip=skip)
+ first_parent = piter.next()
+ assert first_parent != c
+ assert first_parent == c.parents[0]
+ # END for each
+
+ def test_base(self):
+ name_rev = self.rorepo.head.commit.name_rev
+ assert isinstance(name_rev, basestring)
+
+ @with_rw_repo('HEAD', bare=True)
+ def test_serialization(self, rwrepo):
+ # create all commits of our repo
+ assert_commit_serialization(rwrepo, '0.1.6')
+
+ def test_serialization_unicode_support(self):
+ assert Commit.default_encoding.lower() == 'utf-8'
+
+ # create a commit with unicode in the message, and the author's name
+ # Verify its serialization and deserialization
+ cmt = self.rorepo.commit('0.1.6')
+ assert isinstance(cmt.message, unicode) # it automatically decodes it as such
+ assert isinstance(cmt.author.name, unicode) # same here
+
+ cmt.message = "üäêèß".decode("utf-8")
+ assert len(cmt.message) == 5
+
+ cmt.author.name = "äüß".decode("utf-8")
+ assert len(cmt.author.name) == 3
+
+ cstream = StringIO()
+ cmt._serialize(cstream)
+ cstream.seek(0)
+ assert len(cstream.getvalue())
+
+ ncmt = Commit(self.rorepo, cmt.binsha)
+ ncmt._deserialize(cstream)
+
+ assert cmt.author.name == ncmt.author.name
+ assert cmt.message == ncmt.message
+ # actually, it can't be printed in a shell, as repr apparently wants
+ # to return ascii only
+ cmt.author.__repr__()
+
diff --git a/git/test/test_config.py b/git/test/test_config.py
new file mode 100644
index 00000000..173e380c
--- /dev/null
+++ b/git/test/test_config.py
@@ -0,0 +1,102 @@
+# test_config.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+import StringIO
+from copy import copy
+from ConfigParser import NoSectionError
+
+class TestBase(TestCase):
+
+ def _to_memcache(self, file_path):
+ fp = open(file_path, "r")
+ sio = StringIO.StringIO(fp.read())
+ sio.name = file_path
+ return sio
+
+ def _parsers_equal_or_raise(self, lhs, rhs):
+ pass
+
+ def test_read_write(self):
+ # writer must create the exact same file as the one read before
+ for filename in ("git_config", "git_config_global"):
+ file_obj = self._to_memcache(fixture_path(filename))
+ file_obj_orig = copy(file_obj)
+ w_config = GitConfigParser(file_obj, read_only = False)
+ w_config.read() # enforce reading
+ assert w_config._sections
+ w_config.write() # enforce writing
+ assert file_obj.getvalue() == file_obj_orig.getvalue()
+
+ # creating an additional config writer must fail due to exclusive access
+ self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only = False)
+
+ # should still have a lock and be able to make changes
+ assert w_config._lock._has_lock()
+
+ # changes should be written right away
+ sname = "my_section"
+ oname = "mykey"
+ val = "myvalue"
+ w_config.add_section(sname)
+ assert w_config.has_section(sname)
+ w_config.set(sname, oname, val)
+ assert w_config.has_option(sname,oname)
+ assert w_config.get(sname, oname) == val
+
+ sname_new = "new_section"
+ oname_new = "new_key"
+ ival = 10
+ w_config.set_value(sname_new, oname_new, ival)
+ assert w_config.get_value(sname_new, oname_new) == ival
+
+ file_obj.seek(0)
+ r_config = GitConfigParser(file_obj, read_only=True)
+ assert r_config.has_section(sname)
+ assert r_config.has_option(sname, oname)
+ assert r_config.get(sname, oname) == val
+
+ # END for each filename
+
+ def test_base(self):
+ path_repo = fixture_path("git_config")
+ path_global = fixture_path("git_config_global")
+ r_config = GitConfigParser([path_repo, path_global], read_only=True)
+ assert r_config.read_only
+ num_sections = 0
+ num_options = 0
+
+ # test reader methods
+ assert r_config._is_initialized == False
+ for section in r_config.sections():
+ num_sections += 1
+ for option in r_config.options(section):
+ num_options += 1
+ val = r_config.get(section, option)
+ val_typed = r_config.get_value(section, option)
+ assert isinstance(val_typed, (bool, long, float, basestring))
+ assert val
+ assert "\n" not in option
+ assert "\n" not in val
+
+ # writing must fail
+ self.failUnlessRaises(IOError, r_config.set, section, option, None)
+ self.failUnlessRaises(IOError, r_config.remove_option, section, option )
+ # END for each option
+ self.failUnlessRaises(IOError, r_config.remove_section, section)
+ # END for each section
+ assert num_sections and num_options
+ assert r_config._is_initialized == True
+
+ # get a value which doesn't exist, with a default
+ default = "my default value"
+ assert r_config.get_value("doesnt", "exist", default) == default
+
+ # it raises if there is no default though
+ self.failUnlessRaises(NoSectionError, r_config.get_value, "doesnt", "exist")
+
+
diff --git a/git/test/test_db.py b/git/test/test_db.py
new file mode 100644
index 00000000..db2d7983
--- /dev/null
+++ b/git/test/test_db.py
@@ -0,0 +1,25 @@
+# test_repo.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.lib import *
+from git.db import *
+from gitdb.util import bin_to_hex
+from git.exc import BadObject
+import os
+
+class TestDB(TestBase):
+
+ def test_base(self):
+ gdb = GitCmdObjectDB(os.path.join(self.rorepo.git_dir, 'objects'), self.rorepo.git)
+
+ # partial to complete - works with everything
+ hexsha = bin_to_hex(gdb.partial_to_complete_sha_hex("0.1.6"))
+ assert len(hexsha) == 40
+
+ assert bin_to_hex(gdb.partial_to_complete_sha_hex(hexsha[:20])) == hexsha
+
+ # fails with BadObject
+ for invalid_rev in ("0000", "bad/ref", "super bad"):
+ self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, invalid_rev)
diff --git a/git/test/test_diff.py b/git/test/test_diff.py
new file mode 100644
index 00000000..83db2df6
--- /dev/null
+++ b/git/test/test_diff.py
@@ -0,0 +1,108 @@
+# test_diff.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+
+class TestDiff(TestBase):
+
+ def _assert_diff_format(self, diffs):
+ # verify that the format of the diff is sane
+ for diff in diffs:
+ if diff.a_mode:
+ assert isinstance(diff.a_mode, int)
+ if diff.b_mode:
+ assert isinstance(diff.b_mode, int)
+
+ if diff.a_blob:
+ assert not diff.a_blob.path.endswith('\n')
+ if diff.b_blob:
+ assert not diff.b_blob.path.endswith('\n')
+ # END for each diff
+ return diffs
+
+ def test_list_from_string_new_mode(self):
+ output = StringProcessAdapter(fixture('diff_new_mode'))
+ diffs = Diff._index_from_patch_format(self.rorepo, output.stdout)
+ self._assert_diff_format(diffs)
+
+ assert_equal(1, len(diffs))
+ assert_equal(10, len(diffs[0].diff.splitlines()))
+
+ def test_diff_with_rename(self):
+ output = StringProcessAdapter(fixture('diff_rename'))
+ diffs = Diff._index_from_patch_format(self.rorepo, output.stdout)
+ self._assert_diff_format(diffs)
+
+ assert_equal(1, len(diffs))
+
+ diff = diffs[0]
+ assert_true(diff.renamed)
+ assert_equal(diff.rename_from, 'AUTHORS')
+ assert_equal(diff.rename_to, 'CONTRIBUTORS')
+
+ def test_diff_patch_format(self):
+ # test all of the 'old' format diffs for completeness - it should at least
+ # be able to deal with it
+ fixtures = ("diff_2", "diff_2f", "diff_f", "diff_i", "diff_mode_only",
+ "diff_new_mode", "diff_numstat", "diff_p", "diff_rename",
+ "diff_tree_numstat_root" )
+
+ for fixture_name in fixtures:
+ diff_proc = StringProcessAdapter(fixture(fixture_name))
+ diffs = Diff._index_from_patch_format(self.rorepo, diff_proc.stdout)
+ # END for each fixture
+
+ def test_diff_interface(self):
+ # test a few variations of the main diff routine
+ assertion_map = dict()
+ for i, commit in enumerate(self.rorepo.iter_commits('0.1.6', max_count=2)):
+ diff_item = commit
+ if i%2 == 0:
+ diff_item = commit.tree
+ # END use tree every second item
+
+ for other in (None, commit.Index, commit.parents[0]):
+ for paths in (None, "CHANGES", ("CHANGES", "lib")):
+ for create_patch in range(2):
+ diff_index = diff_item.diff(other, paths, create_patch)
+ assert isinstance(diff_index, DiffIndex)
+
+ if diff_index:
+ self._assert_diff_format(diff_index)
+ for ct in DiffIndex.change_type:
+ key = 'ct_%s'%ct
+ assertion_map.setdefault(key, 0)
+ assertion_map[key] = assertion_map[key]+len(list(diff_index.iter_change_type(ct)))
+ # END for each changetype
+
+ # check entries
+ diff_set = set()
+ diff_set.add(diff_index[0])
+ diff_set.add(diff_index[0])
+ assert len(diff_set) == 1
+ assert diff_index[0] == diff_index[0]
+ assert not (diff_index[0] != diff_index[0])
+ # END diff index checking
+ # END for each patch option
+ # END for each path option
+ # END for each other side
+ # END for each commit
+
+ # assert we could always find at least one instance of the members we
+ # can iterate in the diff index - if not, this indicates it's not working correctly
+ # or our test does not span the whole range of possibilities
+ for key,value in assertion_map.items():
+ assert value, "Did not find diff for %s" % key
+ # END for each iteration type
+
+ # test path not existing in the index - should be ignored
+ c = self.rorepo.head.commit
+ cp = c.parents[0]
+ diff_index = c.diff(cp, ["does/not/exist"])
+ assert len(diff_index) == 0
+
+
diff --git a/git/test/test_fun.py b/git/test/test_fun.py
new file mode 100644
index 00000000..b7991cdb
--- /dev/null
+++ b/git/test/test_fun.py
@@ -0,0 +1,251 @@
+from git.test.lib import *
+from git.objects.fun import (
+ traverse_tree_recursive,
+ traverse_trees_recursive,
+ tree_to_stream
+ )
+
+from git.index.fun import (
+ aggressive_tree_merge
+ )
+
+from gitdb.util import bin_to_hex
+from gitdb.base import IStream
+from gitdb.typ import str_tree_type
+
+from stat import (
+ S_IFDIR,
+ S_IFREG,
+ S_IFLNK
+ )
+
+from git.index import IndexFile
+from cStringIO import StringIO
+
+class TestFun(TestBase):
+
+ def _assert_index_entries(self, entries, trees):
+ index = IndexFile.from_tree(self.rorepo, *[self.rorepo.tree(bin_to_hex(t)) for t in trees])
+ assert entries
+ assert len(index.entries) == len(entries)
+ for entry in entries:
+ assert (entry.path, entry.stage) in index.entries
+ # END assert entry matches fully
+
+ def test_aggressive_tree_merge(self):
+ # head tree with additions, removals and modification compared to its predecessor
+ odb = self.rorepo.odb
+ HC = self.rorepo.commit("6c1faef799095f3990e9970bc2cb10aa0221cf9c")
+ H = HC.tree
+ B = HC.parents[0].tree
+
+ # entries from single tree
+ trees = [H.binsha]
+ self._assert_index_entries(aggressive_tree_merge(odb, trees), trees)
+
+ # from multiple trees
+ trees = [B.binsha, H.binsha]
+ self._assert_index_entries(aggressive_tree_merge(odb, trees), trees)
+
+ # three way, no conflict
+ tree = self.rorepo.tree
+ B = tree("35a09c0534e89b2d43ec4101a5fb54576b577905")
+ H = tree("4fe5cfa0e063a8d51a1eb6f014e2aaa994e5e7d4")
+ M = tree("1f2b19de3301e76ab3a6187a49c9c93ff78bafbd")
+ trees = [B.binsha, H.binsha, M.binsha]
+ self._assert_index_entries(aggressive_tree_merge(odb, trees), trees)
+
+ # three-way, conflict in at least one file, both modified
+ B = tree("a7a4388eeaa4b6b94192dce67257a34c4a6cbd26")
+ H = tree("f9cec00938d9059882bb8eabdaf2f775943e00e5")
+ M = tree("44a601a068f4f543f73fd9c49e264c931b1e1652")
+ trees = [B.binsha, H.binsha, M.binsha]
+ self._assert_index_entries(aggressive_tree_merge(odb, trees), trees)
+
+ # too many trees
+ self.failUnlessRaises(ValueError, aggressive_tree_merge, odb, trees*2)
+
+ def mktree(self, odb, entries):
+ """create a tree from the given tree entries and save it to the database"""
+ sio = StringIO()
+ tree_to_stream(entries, sio.write)
+ sio.seek(0)
+ istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
+ return istream.binsha
+
+ @with_rw_repo('0.1.6')
+ def test_three_way_merge(self, rwrepo):
+ def mkfile(name, sha, executable=0):
+ return (sha, S_IFREG | 0644 | executable*0111, name)
+ def mkcommit(name, sha):
+ return (sha, S_IFDIR | S_IFLNK, name)
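+ # S_IFDIR | S_IFLNK is the gitlink mode used for submodule ( commit ) entries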
+ def assert_entries(entries, num_entries, has_conflict=False):
+ assert len(entries) == num_entries
+ assert has_conflict == (len([e for e in entries if e.stage != 0]) > 0)
+ mktree = self.mktree
+
+ shaa = "\1"*20
+ shab = "\2"*20
+ shac = "\3"*20
+
+ odb = rwrepo.odb
+
+ # base tree
+ bfn = 'basefile'
+ fbase = mkfile(bfn, shaa)
+ tb = mktree(odb, [fbase])
+
+ # non-conflicting new files, same data
+ fa = mkfile('1', shab)
+ th = mktree(odb, [fbase, fa])
+ fb = mkfile('2', shac)
+ tm = mktree(odb, [fbase, fb])
+
+ # two new files, same base file
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 3)
+
+ # both delete same file, add own one
+ fa = mkfile('1', shab)
+ th = mktree(odb, [fa])
+ fb = mkfile('2', shac)
+ tm = mktree(odb, [fb])
+
+ # two new files
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 2)
+
+ # same file added in both, differently
+ fa = mkfile('1', shab)
+ th = mktree(odb, [fa])
+ fb = mkfile('1', shac)
+ tm = mktree(odb, [fb])
+
+ # expect conflict
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 2, True)
+
+ # same file added, different mode
+ fa = mkfile('1', shab)
+ th = mktree(odb, [fa])
+ fb = mkcommit('1', shab)
+ tm = mktree(odb, [fb])
+
+ # expect conflict
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 2, True)
+
+ # same file added in both
+ fa = mkfile('1', shab)
+ th = mktree(odb, [fa])
+ fb = mkfile('1', shab)
+ tm = mktree(odb, [fb])
+
+ # expect conflict
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 1)
+
+ # modify same base file, differently
+ fa = mkfile(bfn, shab)
+ th = mktree(odb, [fa])
+ fb = mkfile(bfn, shac)
+ tm = mktree(odb, [fb])
+
+ # conflict, 3 versions on 3 stages
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 3, True)
+
+
+ # change mode on same base file, by making one a commit, the other executable
+ # no content change ( this is totally unlikely to happen in the real world )
+ fa = mkcommit(bfn, shaa)
+ th = mktree(odb, [fa])
+ fb = mkfile(bfn, shaa, executable=1)
+ tm = mktree(odb, [fb])
+
+ # conflict, 3 versions on 3 stages, because of different mode
+ trees = [tb, th, tm]
+ assert_entries(aggressive_tree_merge(odb, trees), 3, True)
+
+ for is_them in range(2):
+ # only we/they change contents
+ fa = mkfile(bfn, shab)
+ th = mktree(odb, [fa])
+
+ trees = [tb, th, tb]
+ if is_them:
+ trees = [tb, tb, th]
+ entries = aggressive_tree_merge(odb, trees)
+ assert len(entries) == 1 and entries[0].binsha == shab
+
+ # only we/they change the mode
+ fa = mkcommit(bfn, shaa)
+ th = mktree(odb, [fa])
+
+ trees = [tb, th, tb]
+ if is_them:
+ trees = [tb, tb, th]
+ entries = aggressive_tree_merge(odb, trees)
+ assert len(entries) == 1 and entries[0].binsha == shaa and entries[0].mode == fa[1]
+
+ # one side deletes, the other changes = conflict
+ fa = mkfile(bfn, shab)
+ th = mktree(odb, [fa])
+ tm = mktree(odb, [])
+ trees = [tb, th, tm]
+ if is_them:
+ trees = [tb, tm, th]
+ # as one is deleted, there are only 2 entries
+ assert_entries(aggressive_tree_merge(odb, trees), 2, True)
+ # END handle ours, theirs
+
+ def _assert_tree_entries(self, entries, num_trees):
+ for entry in entries:
+ assert len(entry) == num_trees
+ paths = set(e[2] for e in entry if e)
+
+ # only one path per set of entries
+ assert len(paths) == 1
+ # END verify entry
+
+ def test_tree_traversal(self):
+ # low level tree traversal
+ odb = self.rorepo.odb
+ H = self.rorepo.tree('29eb123beb1c55e5db4aa652d843adccbd09ae18') # head tree
+ M = self.rorepo.tree('e14e3f143e7260de9581aee27e5a9b2645db72de') # merge tree
+ B = self.rorepo.tree('f606937a7a21237c866efafcad33675e6539c103') # base tree
+ B_old = self.rorepo.tree('1f66cfbbce58b4b552b041707a12d437cc5f400a') # old base tree
+
+ # two very different trees
+ entries = traverse_trees_recursive(odb, [B_old.binsha, H.binsha], '')
+ self._assert_tree_entries(entries, 2)
+
+ oentries = traverse_trees_recursive(odb, [H.binsha, B_old.binsha], '')
+ assert len(oentries) == len(entries)
+ self._assert_tree_entries(oentries, 2)
+
+ # single tree
+ is_no_tree = lambda i, d: i.type != 'tree'
+ entries = traverse_trees_recursive(odb, [B.binsha], '')
+ assert len(entries) == len(list(B.traverse(predicate=is_no_tree)))
+ self._assert_tree_entries(entries, 1)
+
+ # two trees
+ entries = traverse_trees_recursive(odb, [B.binsha, H.binsha], '')
+ self._assert_tree_entries(entries, 2)
+
+ # three trees
+ entries = traverse_trees_recursive(odb, [B.binsha, H.binsha, M.binsha], '')
+ self._assert_tree_entries(entries, 3)
+
+ def test_tree_traversal_single(self):
+ max_count = 50
+ count = 0
+ odb = self.rorepo.odb
+ for commit in self.rorepo.commit("29eb123beb1c55e5db4aa652d843adccbd09ae18").traverse():
+ if count >= max_count:
+ break
+ count += 1
+ entries = traverse_tree_recursive(odb, commit.tree.binsha, '')
+ assert entries
+ # END for each commit
diff --git a/git/test/test_git.py b/git/test/test_git.py
new file mode 100644
index 00000000..c92a642b
--- /dev/null
+++ b/git/test/test_git.py
@@ -0,0 +1,84 @@
+# test_git.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os, sys
+from git.test.lib import *
+from git import Git, GitCommandError
+
+class TestGit(TestCase):
+
+ @classmethod
+ def setUpAll(cls):
+ cls.git = Git(GIT_REPO)
+
+ @patch_object(Git, 'execute')
+ def test_call_process_calls_execute(self, git):
+ git.return_value = ''
+ self.git.version()
+ assert_true(git.called)
+ assert_equal(git.call_args, ((['git', 'version'],), {}))
+
+ @raises(GitCommandError)
+ def test_it_raises_errors(self):
+ self.git.this_does_not_exist()
+
+
+ def test_it_transforms_kwargs_into_git_command_arguments(self):
+ assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
+ assert_equal(["-s5"], self.git.transform_kwargs(**{'s': 5}))
+
+ assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
+ assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
+
+ assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True}))
+
+ def test_it_executes_git_to_shell_and_returns_result(self):
+ assert_match('^git version [\d\.]{2}.*$', self.git.execute(["git","version"]))
+
+ def test_it_accepts_stdin(self):
+ filename = fixture_path("cat_file_blob")
+ fh = open(filename, 'r')
+ assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
+ self.git.hash_object(istream=fh, stdin=True))
+ fh.close()
+
+ @patch_object(Git, 'execute')
+ def test_it_ignores_false_kwargs(self, git):
+ # pass_this_kwarg=False implies it *should* be ignored
+ output = self.git.version(pass_this_kwarg=False)
+ assert_true("pass_this_kwarg" not in git.call_args[1])
+
+ def test_persistent_cat_file_command(self):
+ # read header only
+ import subprocess as sp
+ hexsha = "b2339455342180c7cc1e9bba3e9f181f7baa5167"
+ g = self.git.cat_file(batch_check=True, istream=sp.PIPE,as_process=True)
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ obj_info = g.stdout.readline()
+
+ # read header + data
+ g = self.git.cat_file(batch=True, istream=sp.PIPE,as_process=True)
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ obj_info_two = g.stdout.readline()
+ assert obj_info == obj_info_two
+
+ # read data - have to read it in one large chunk
+ size = int(obj_info.split()[2])
+ data = g.stdout.read(size)
+ terminating_newline = g.stdout.read(1)
+
+ # now we should be able to read a new object
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ assert g.stdout.readline() == obj_info
+
+
+ # the same can be achieved using the respective command functions
+ hexsha, typename, size = self.git.get_object_header(hexsha)
+ hexsha, typename_two, size_two, data = self.git.get_object_data(hexsha)
+ assert typename == typename_two and size == size_two
diff --git a/git/test/test_index.py b/git/test/test_index.py
new file mode 100644
index 00000000..5d227897
--- /dev/null
+++ b/git/test/test_index.py
@@ -0,0 +1,669 @@
+# test_index.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+import inspect
+import os
+import sys
+import tempfile
+import glob
+import shutil
+from stat import *
+
+class TestIndex(TestBase):
+
+ def __init__(self, *args):
+ super(TestIndex, self).__init__(*args)
+ self._reset_progress()
+
+ def _assert_fprogress(self, entries):
+ assert len(entries) == len(self._fprogress_map)
+ for path, call_count in self._fprogress_map.iteritems():
+ assert call_count == 2
+ # END for each item in progress map
+ self._reset_progress()
+
+ def _fprogress(self, path, done, item):
+ self._fprogress_map.setdefault(path, 0)
+ curval = self._fprogress_map[path]
+ if curval == 0:
+ assert not done
+ if curval == 1:
+ assert done
+ self._fprogress_map[path] = curval + 1
+
+ def _fprogress_add(self, path, done, item):
+ """Called as progress func - we keep track of the proper
+ call order"""
+ assert item is not None
+ self._fprogress(path, done, item)
+
+ def _reset_progress(self):
+ # maps paths to the count of calls
+ self._fprogress_map = dict()
+
+ def _assert_entries(self, entries):
+ for entry in entries:
+ assert isinstance(entry, BaseIndexEntry)
+ assert not os.path.isabs(entry.path)
+ assert not "\\" in entry.path
+ # END for each entry
+
+ def test_index_file_base(self):
+ # read from file
+ index = IndexFile(self.rorepo, fixture_path("index"))
+ assert index.entries
+ assert index.version > 0
+
+ # test entry
+ last_val = None
+ entry = index.entries.itervalues().next()
+ for attr in ("path","ctime","mtime","dev","inode","mode","uid",
+ "gid","size","binsha", "hexsha", "stage"):
+ val = getattr(entry, attr)
+ # END for each method
+
+ # test update
+ entries = index.entries
+ assert isinstance(index.update(), IndexFile)
+ assert entries is not index.entries
+
+ # test stage
+ index_merge = IndexFile(self.rorepo, fixture_path("index_merge"))
+ assert len(index_merge.entries) == 106
+ assert len(list(e for e in index_merge.entries.itervalues() if e.stage != 0 ))
+
+ # write the data - it must match the original
+ tmpfile = tempfile.mktemp()
+ index_merge.write(tmpfile)
+ fp = open(tmpfile, 'rb')
+ assert fp.read() == fixture("index_merge")
+ fp.close()
+ os.remove(tmpfile)
+
+ def _cmp_tree_index(self, tree, index):
+ # fail unless both objects contain the same paths and blobs
+ if isinstance(tree, str):
+ tree = self.rorepo.commit(tree).tree
+
+ num_blobs = 0
+ blist = list()
+ for blob in tree.traverse(predicate = lambda e,d: e.type == "blob", branch_first=False):
+ assert (blob.path,0) in index.entries
+ blist.append(blob)
+ # END for each blob in tree
+ if len(blist) != len(index.entries):
+ iset = set(k[0] for k in index.entries.keys())
+ bset = set(b.path for b in blist)
+ raise AssertionError( "CMP Failed: Missing entries in index: %s, missing in tree: %s" % (bset-iset, iset-bset) )
+ # END assertion message
+
+ @with_rw_repo('0.1.6')
+ def test_index_file_from_tree(self, rw_repo):
+ common_ancestor_sha = "5117c9c8a4d3af19a9958677e45cda9269de1541"
+ cur_sha = "4b43ca7ff72d5f535134241e7c797ddc9c7a3573"
+ other_sha = "39f85c4358b7346fee22169da9cad93901ea9eb9"
+
+ # simple index from tree
+ base_index = IndexFile.from_tree(rw_repo, common_ancestor_sha)
+ assert base_index.entries
+ self._cmp_tree_index(common_ancestor_sha, base_index)
+
+ # merge two trees - it's like a fast-forward
+ two_way_index = IndexFile.from_tree(rw_repo, common_ancestor_sha, cur_sha)
+ assert two_way_index.entries
+ self._cmp_tree_index(cur_sha, two_way_index)
+
+ # merge three trees - here we have a merge conflict
+ three_way_index = IndexFile.from_tree(rw_repo, common_ancestor_sha, cur_sha, other_sha)
+ assert len(list(e for e in three_way_index.entries.values() if e.stage != 0))
+
+
+ # ITERATE BLOBS
+ merge_required = lambda t: t[0] != 0
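+ # iter_blobs yields (stage, blob) tuples - a non-zero stage marks an unmerged entry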
+ merge_blobs = list(three_way_index.iter_blobs(merge_required))
+ assert merge_blobs
+ assert merge_blobs[0][0] in (1,2,3)
+ assert isinstance(merge_blobs[0][1], Blob)
+
+ # test BlobFilter
+ prefix = 'lib/git'
+ for stage, blob in base_index.iter_blobs(BlobFilter([prefix])):
+ assert blob.path.startswith(prefix)
+
+
+ # writing a tree should fail with an unmerged index
+ self.failUnlessRaises(UnmergedEntriesError, three_way_index.write_tree)
+
+ # removed unmerged entries
+ unmerged_blob_map = three_way_index.unmerged_blobs()
+ assert unmerged_blob_map
+
+ # pick the first blob at the first stage we find and use it as resolved version
+ three_way_index.resolve_blobs( l[0][1] for l in unmerged_blob_map.itervalues() )
+ tree = three_way_index.write_tree()
+ assert isinstance(tree, Tree)
+ num_blobs = 0
+ for blob in tree.traverse(predicate=lambda item,d: item.type == "blob"):
+ assert (blob.path,0) in three_way_index.entries
+ num_blobs += 1
+ # END for each blob
+ assert num_blobs == len(three_way_index.entries)
+
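+ # Illustrative sketch ( not executed ) of the full resolution flow this
+ # test exercises, using only calls shown above:
+ #
+ # index = IndexFile.from_tree(repo, base, ours, theirs)
+ # unmerged = index.unmerged_blobs() # dict: path -> [(stage, Blob), ...]
+ # index.resolve_blobs(blobs[0][1] for blobs in unmerged.itervalues())
+ # tree = index.write_tree() # succeeds once nothing is unmerged
+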
+ @with_rw_repo('0.1.6')
+ def test_index_merge_tree(self, rw_repo):
+ # A bit out of place, but we need a different repo for this:
+ assert self.rorepo != rw_repo and not (self.rorepo == rw_repo)
+ assert len(set((self.rorepo, self.rorepo, rw_repo, rw_repo))) == 2
+
+ # SINGLE TREE MERGE
+ # current index is at the (virtual) cur_commit
+ next_commit = "4c39f9da792792d4e73fc3a5effde66576ae128c"
+ parent_commit = rw_repo.head.commit.parents[0]
+ manifest_key = IndexFile.entry_key('MANIFEST.in', 0)
+ manifest_entry = rw_repo.index.entries[manifest_key]
+ rw_repo.index.merge_tree(next_commit)
+ # only one change should be recorded
+ assert manifest_entry.binsha != rw_repo.index.entries[manifest_key].binsha
+
+ rw_repo.index.reset(rw_repo.head)
+ assert rw_repo.index.entries[manifest_key].binsha == manifest_entry.binsha
+
+ # FAKE MERGE
+ #############
+ # Add a change with a NULL sha that should conflict with next_commit. We
+ # pretend there was a change, but we do not even bother adding a proper
+ # sha for it ( which makes things faster of course )
+ manifest_fake_entry = BaseIndexEntry((manifest_entry[0], "\0"*20, 0, manifest_entry[3]))
+ # try write flag
+ self._assert_entries(rw_repo.index.add([manifest_fake_entry], write=False))
+ # add actually resolves the null-hex-sha for us as a feature, but we can
+ # edit the index manually
+ assert rw_repo.index.entries[manifest_key].binsha != Object.NULL_BIN_SHA
+ # we must operate on the same index for this! It's a bit problematic, as
+ # it might confuse people
+ index = rw_repo.index
+ index.entries[manifest_key] = IndexEntry.from_base(manifest_fake_entry)
+ index.write()
+ assert rw_repo.index.entries[manifest_key].hexsha == Diff.NULL_HEX_SHA
+
+ # write an unchanged index ( just for the fun of it )
+ rw_repo.index.write()
+
+ # a three-way merge would result in a conflict and fails, as the command will
+ # not overwrite any entries in our index and hence would leave them unmerged. This is
+ # mainly a protection feature, as the current index is not yet in a tree
+ self.failUnlessRaises(GitCommandError, index.merge_tree, next_commit, base=parent_commit)
+
+ # the only way to get the merged entries is to save the current index away into a tree,
+ # which is like a temporary commit for us. This fails as well, as the NULL sha does not
+ # have a corresponding object
+ # NOTE: missing_ok is not a kwarg anymore, missing_ok is always true
+ # self.failUnlessRaises(GitCommandError, index.write_tree)
+
+ # if missing objects are okay, this would work though ( they are always okay now )
+ tree = index.write_tree()
+
+ # now make a proper three way merge with unmerged entries
+ unmerged_tree = IndexFile.from_tree(rw_repo, parent_commit, tree, next_commit)
+ unmerged_blobs = unmerged_tree.unmerged_blobs()
+ assert len(unmerged_blobs) == 1 and unmerged_blobs.keys()[0] == manifest_key[0]
+
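+ # Condensed sketch ( illustrative only ) of the manual index manipulation
+ # used above - a BaseIndexEntry must be upgraded and written explicitly:
+ #
+ # key = IndexFile.entry_key(path, stage)
+ # index.entries[key] = IndexEntry.from_base(base_entry)
+ # index.write() # persist so the change is visible to git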
+
+ @with_rw_repo('0.1.6')
+ def test_index_file_diffing(self, rw_repo):
+ # default Index instance points to our index
+ index = IndexFile(rw_repo)
+ assert index.path is not None
+ assert len(index.entries)
+
+ # write the file back
+ index.write()
+
+ # could sha it, or check stats
+
+ # test diff
+ # resetting the head will leave the index in a different state, and the
+ # diff will yield a few changes
+ cur_head_commit = rw_repo.head.reference.commit
+ ref = rw_repo.head.reset('HEAD~6', index=True, working_tree=False)
+
+ # diff against same index is 0
+ diff = index.diff()
+ assert len(diff) == 0
+
+ # against HEAD as string, must be the same as it matches index
+ diff = index.diff('HEAD')
+ assert len(diff) == 0
+
+ # against previous head, there must be a difference
+ diff = index.diff(cur_head_commit)
+ assert len(diff)
+
+ # we reverse the result
+ adiff = index.diff(str(cur_head_commit), R=True)
+ odiff = index.diff(cur_head_commit, R=False) # now it's not reversed anymore
+ assert adiff != odiff
+ assert odiff == diff # both unreversed diffs against HEAD
+
+ # against working copy - it's still at cur_commit
+ wdiff = index.diff(None)
+ assert wdiff != adiff
+ assert wdiff != odiff
+
+ # against something unusual
+ self.failUnlessRaises(ValueError, index.diff, int)
+
+ # adjust the index to match an old revision
+ cur_branch = rw_repo.active_branch
+ cur_commit = cur_branch.commit
+ rev_head_parent = 'HEAD~1'
+ assert index.reset(rev_head_parent) is index
+
+ assert cur_branch == rw_repo.active_branch
+ assert cur_commit == rw_repo.head.commit
+
+ # there must be differences towards the working tree which is in the 'future'
+ assert index.diff(None)
+
+ # reset the working copy as well, pulling it 'back' to the current head
+ new_data = "will be reverted"
+ file_path = os.path.join(rw_repo.working_tree_dir, "CHANGES")
+ fp = open(file_path, "wb")
+ fp.write(new_data)
+ fp.close()
+ index.reset(rev_head_parent, working_tree=True)
+ assert not index.diff(None)
+ assert cur_branch == rw_repo.active_branch
+ assert cur_commit == rw_repo.head.commit
+ fp = open(file_path,'rb')
+ try:
+ assert fp.read() != new_data
+ finally:
+ fp.close()
+
+ # test full checkout
+ test_file = os.path.join(rw_repo.working_tree_dir, "CHANGES")
+ open(test_file, 'ab').write("some data")
+ rval = index.checkout(None, force=True, fprogress=self._fprogress)
+ assert 'CHANGES' in list(rval)
+ self._assert_fprogress([None])
+ assert os.path.isfile(test_file)
+
+ os.remove(test_file)
+ rval = index.checkout(None, force=False, fprogress=self._fprogress)
+ assert 'CHANGES' in list(rval)
+ self._assert_fprogress([None])
+ assert os.path.isfile(test_file)
+
+ # individual file
+ os.remove(test_file)
+ rval = index.checkout(test_file, fprogress=self._fprogress)
+ assert list(rval)[0] == 'CHANGES'
+ self._assert_fprogress([test_file])
+ assert os.path.exists(test_file)
+
+ # checking out non-existing file throws
+ self.failUnlessRaises(CheckoutError, index.checkout, "doesnt_exist_ever.txt.that")
+ self.failUnlessRaises(CheckoutError, index.checkout, paths=["doesnt/exist"])
+
+ # checkout file with modifications
+ append_data = "hello"
+ fp = open(test_file, "ab")
+ fp.write(append_data)
+ fp.close()
+ try:
+ index.checkout(test_file)
+ except CheckoutError, e:
+ assert len(e.failed_files) == 1 and e.failed_files[0] == os.path.basename(test_file)
+ assert (len(e.failed_files) == len(e.failed_reasons)) and isinstance(e.failed_reasons[0], basestring)
+ assert len(e.valid_files) == 0
+ assert open(test_file).read().endswith(append_data)
+ else:
+ raise AssertionError("Exception CheckoutError not thrown")
+
+ # if we force it, it should work
+ index.checkout(test_file, force=True)
+ assert not open(test_file).read().endswith(append_data)
+
+ # checkout directory
+ shutil.rmtree(os.path.join(rw_repo.working_tree_dir, "lib"))
+ rval = index.checkout('lib')
+ assert len(list(rval)) > 1
+
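+ # For reference, a sketch ( not executed ) of the diff targets exercised
+ # above - the other side may be the index itself, a treeish, or None for
+ # the working tree:
+ #
+ # index.diff() # against itself -> empty
+ # index.diff('HEAD') # against a treeish
+ # index.diff(commit, R=True) # reversed direction
+ # index.diff(None) # against the working tree
+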
+ def _count_existing(self, repo, files):
+ """
+ Returns count of files that actually exist in the repository directory.
+ """
+ existing = 0
+ basedir = repo.working_tree_dir
+ for f in files:
+ existing += os.path.isfile(os.path.join(basedir, f))
+ # END for each file
+ return existing
+ # END num existing helper
+
+ @with_rw_repo('0.1.6')
+ def test_index_mutation(self, rw_repo):
+ index = rw_repo.index
+ num_entries = len(index.entries)
+ cur_head = rw_repo.head
+
+ uname = "Some Developer"
+ umail = "sd@company.com"
+ rw_repo.config_writer().set_value("user", "name", uname)
+ rw_repo.config_writer().set_value("user", "email", umail)
+
+ # remove all of the files, provide a wild mix of paths, BaseIndexEntries,
+ # IndexEntries
+ def mixed_iterator():
+ count = 0
+ for entry in index.entries.itervalues():
+ type_id = count % 4
+ if type_id == 0: # path
+ yield entry.path
+ elif type_id == 1: # blob
+ yield Blob(rw_repo, entry.binsha, entry.mode, entry.path)
+ elif type_id == 2: # BaseIndexEntry
+ yield BaseIndexEntry(entry[:4])
+ elif type_id == 3: # IndexEntry
+ yield entry
+ else:
+ raise AssertionError("Invalid Type")
+ count += 1
+ # END for each entry
+ # END mixed iterator
+ deleted_files = index.remove(mixed_iterator(), working_tree=False)
+ assert deleted_files
+ assert self._count_existing(rw_repo, deleted_files) == len(deleted_files)
+ assert len(index.entries) == 0
+
+ # reset the index to undo our changes
+ index.reset()
+ assert len(index.entries) == num_entries
+
+ # remove with working copy
+ deleted_files = index.remove(mixed_iterator(), working_tree=True)
+ assert deleted_files
+ assert self._count_existing(rw_repo, deleted_files) == 0
+
+ # reset everything
+ index.reset(working_tree=True)
+ assert self._count_existing(rw_repo, deleted_files) == len(deleted_files)
+
+ # invalid type
+ self.failUnlessRaises(TypeError, index.remove, [1])
+
+ # absolute path
+ deleted_files = index.remove([os.path.join(rw_repo.working_tree_dir,"lib")], r=True)
+ assert len(deleted_files) > 1
+ self.failUnlessRaises(ValueError, index.remove, ["/doesnt/exists"])
+
+ # TEST COMMITTING
+ # commit changed index
+ cur_commit = cur_head.commit
+ commit_message = "commit default head"
+
+ new_commit = index.commit(commit_message, head=False)
+ assert cur_commit != new_commit
+ assert new_commit.author.name == uname
+ assert new_commit.author.email == umail
+ assert new_commit.committer.name == uname
+ assert new_commit.committer.email == umail
+ assert new_commit.message == commit_message
+ assert new_commit.parents[0] == cur_commit
+ assert len(new_commit.parents) == 1
+ assert cur_head.commit == cur_commit
+
+ # same index, no parents
+ commit_message = "index without parents"
+ commit_no_parents = index.commit(commit_message, parent_commits=list(), head=True)
+ assert commit_no_parents.message == commit_message
+ assert len(commit_no_parents.parents) == 0
+ assert cur_head.commit == commit_no_parents
+
+ # same index, multiple parents
+ commit_message = "Index with multiple parents\n commit with another line"
+ commit_multi_parent = index.commit(commit_message,parent_commits=(commit_no_parents, new_commit))
+ assert commit_multi_parent.message == commit_message
+ assert len(commit_multi_parent.parents) == 2
+ assert commit_multi_parent.parents[0] == commit_no_parents
+ assert commit_multi_parent.parents[1] == new_commit
+ assert cur_head.commit == commit_multi_parent
+
+ # re-add all files in lib
+ # get the lib folder back on disk, but get an index without it
+ index.reset(new_commit.parents[0], working_tree=True).reset(new_commit, working_tree=False)
+ lib_file_path = os.path.join("lib", "git", "__init__.py")
+ assert (lib_file_path, 0) not in index.entries
+ assert os.path.isfile(os.path.join(rw_repo.working_tree_dir, lib_file_path))
+
+ # directory
+ entries = index.add(['lib'], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert len(entries)>1
+
+ # glob
+ entries = index.reset(new_commit).add([os.path.join('lib', 'git', '*.py')], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert len(entries) == 14
+
+ # same file
+ entries = index.reset(new_commit).add([os.path.abspath(os.path.join('lib', 'git', 'head.py'))]*2, fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ assert entries[0].mode & 0644 == 0644
+ # would fail, test is too primitive to handle this case
+ # self._assert_fprogress(entries)
+ self._reset_progress()
+ assert len(entries) == 2
+
+ # missing path
+ self.failUnlessRaises(OSError, index.reset(new_commit).add, ['doesnt/exist/must/raise'])
+
+ # blob from older revision overrides current index revision
+ old_blob = new_commit.parents[0].tree.blobs[0]
+ entries = index.reset(new_commit).add([old_blob], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert index.entries[(old_blob.path,0)].hexsha == old_blob.hexsha and len(entries) == 1
+
+ # mode 0 not allowed
+ null_hex_sha = Diff.NULL_HEX_SHA
+ null_bin_sha = "\0" * 20
+ self.failUnlessRaises(ValueError, index.reset(new_commit).add, [BaseIndexEntry((0, null_bin_sha,0,"doesntmatter"))])
+
+ # add new file
+ new_file_relapath = "my_new_file"
+ new_file_path = self._make_file(new_file_relapath, "hello world", rw_repo)
+ entries = index.reset(new_commit).add([BaseIndexEntry((010644, null_bin_sha, 0, new_file_relapath))], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert len(entries) == 1 and entries[0].hexsha != null_hex_sha
+
+ # add symlink
+ if sys.platform != "win32":
+ basename = "my_real_symlink"
+ target = "/etc/that"
+ link_file = os.path.join(rw_repo.working_tree_dir, basename)
+ os.symlink(target, link_file)
+ entries = index.reset(new_commit).add([link_file], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert len(entries) == 1 and S_ISLNK(entries[0].mode)
+ assert S_ISLNK(index.entries[index.entry_key("my_real_symlink", 0)].mode)
+
+ # we expect only the target to be written
+ assert index.repo.odb.stream(entries[0].binsha).read() == target
+ # END real symlink test
+
+ # add fake symlink and assure it checks out as a symlink
+ fake_symlink_relapath = "my_fake_symlink"
+ link_target = "/etc/that"
+ fake_symlink_path = self._make_file(fake_symlink_relapath, link_target, rw_repo)
+ fake_entry = BaseIndexEntry((0120000, null_bin_sha, 0, fake_symlink_relapath))
+ entries = index.reset(new_commit).add([fake_entry], fprogress=self._fprogress_add)
+ self._assert_entries(entries)
+ self._assert_fprogress(entries)
+ assert entries[0].hexsha != null_hex_sha
+ assert len(entries) == 1 and S_ISLNK(entries[0].mode)
+
+ # assure this also works with an alternate method
+ full_index_entry = IndexEntry.from_base(BaseIndexEntry((0120000, entries[0].binsha, 0, entries[0].path)))
+ entry_key = index.entry_key(full_index_entry)
+ index.reset(new_commit)
+
+ assert entry_key not in index.entries
+ index.entries[entry_key] = full_index_entry
+ index.write()
+ index.update() # force reread of entries
+ new_entry = index.entries[entry_key]
+ assert S_ISLNK(new_entry.mode)
+
+ # a tree created from this should contain the symlink
+ tree = index.write_tree()
+ assert fake_symlink_relapath in tree
+ index.write() # flush our changes for the checkout
+
+ # checkout the fake link; it should then be a real symlink
+ assert not S_ISLNK(os.stat(fake_symlink_path)[ST_MODE])
+ os.remove(fake_symlink_path)
+ index.checkout(fake_symlink_path)
+
+ # on windows we will never get symlinks
+ if os.name == 'nt':
+ # fake symlinks should contain the link target as text ( which is what a
+ # symlink actually is )
+ assert open(fake_symlink_path,'rb').read() == link_target
+ else:
+ assert S_ISLNK(os.lstat(fake_symlink_path)[ST_MODE])
+
+ # TEST RENAMING
+ def assert_mv_rval(rval):
+ for source, dest in rval:
+ assert not os.path.exists(source) and os.path.exists(dest)
+ # END for each renamed item
+ # END move assertion utility
+
+ self.failUnlessRaises(ValueError, index.move, ['just_one_path'])
+ # file onto existing file
+ files = ['AUTHORS', 'LICENSE']
+ self.failUnlessRaises(GitCommandError, index.move, files)
+
+ # again, with force
+ assert_mv_rval(index.move(files, f=True))
+
+ # files into directory - dry run
+ paths = ['LICENSE', 'VERSION', 'doc']
+ rval = index.move(paths, dry_run=True)
+ assert len(rval) == 2
+ assert os.path.exists(paths[0])
+
+ # again, no dry run
+ rval = index.move(paths)
+ assert_mv_rval(rval)
+
+ # dir into dir
+ rval = index.move(['doc', 'test'])
+ assert_mv_rval(rval)
+
+
+ # TEST PATH REWRITING
+ ######################
+ count = [0]
+ def rewriter(entry):
+ rval = str(count[0])
+ count[0] += 1
+ return rval
+ # END rewriter
+
+ def make_paths():
+ # two existing paths, two existing index entries, and three new files
+ yield 'CHANGES'
+ yield 'ez_setup.py'
+ yield index.entries[index.entry_key('README', 0)]
+ yield index.entries[index.entry_key('.gitignore', 0)]
+
+ for fid in range(3):
+ fname = 'newfile%i' % fid
+ open(fname, 'wb').write("abcd")
+ yield Blob(rw_repo, Blob.NULL_BIN_SHA, 0100644, fname)
+ # END for each new file
+ # END path producer
+ paths = list(make_paths())
+ self._assert_entries(index.add(paths, path_rewriter=rewriter))
+
+ for filenum in range(len(paths)):
+ assert index.entry_key(str(filenum), 0) in index.entries
+
+
+ # TEST RESET ON PATHS
+ ######################
+ arela = "aa"
+ brela = "bb"
+ afile = self._make_file(arela, "adata", rw_repo)
+ bfile = self._make_file(brela, "bdata", rw_repo)
+ akey = index.entry_key(arela, 0)
+ bkey = index.entry_key(brela, 0)
+ keys = (akey, bkey)
+ absfiles = (afile, bfile)
+ files = (arela, brela)
+
+ for fkey in keys:
+ assert fkey not in index.entries
+
+ index.add(files, write=True)
+ nc = index.commit("2 files committed", head=False)
+
+ for fkey in keys:
+ assert fkey in index.entries
+
+ # just the index
+ index.reset(paths=(arela, afile))
+ assert akey not in index.entries
+ assert bkey in index.entries
+
+ # now with working tree - files on disk as well as entries must be recreated
+ rw_repo.head.commit = nc
+ for absfile in absfiles:
+ os.remove(absfile)
+
+ index.reset(working_tree=True, paths=files)
+
+ for fkey in keys:
+ assert fkey in index.entries
+ for absfile in absfiles:
+ assert os.path.isfile(absfile)
+
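+ # Summary sketch ( illustrative ) of the input types index.add() and
+ # index.remove() accept, as exercised throughout this test:
+ #
+ # index.add(['relative/path or glob'])
+ # index.add([Blob(repo, binsha, mode, path)])
+ # index.add([BaseIndexEntry((mode, binsha, stage, path))])
+ # index.remove(mixed_iterable, working_tree=True) # also delete on disk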
+
+ @with_rw_repo('HEAD')
+ def test_compare_write_tree(self, rw_repo):
+ # write all trees and compare them
+ # it's important to have a few submodules in there too
+ max_count = 25
+ count = 0
+ for commit in rw_repo.head.commit.traverse():
+ if count >= max_count:
+ break
+ count += 1
+ index = rw_repo.index.reset(commit)
+ orig_tree = commit.tree
+ assert index.write_tree() == orig_tree
+ # END for each commit
+
+ def test_index_new(self):
+ B = self.rorepo.tree("6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e")
+ H = self.rorepo.tree("25dca42bac17d511b7e2ebdd9d1d679e7626db5f")
+ M = self.rorepo.tree("e746f96bcc29238b79118123028ca170adc4ff0f")
+
+ for args in ((B,), (B,H), (B,H,M)):
+ index = IndexFile.new(self.rorepo, *args)
+ assert isinstance(index, IndexFile)
+ # END for each arg tuple
+
+
diff --git a/git/test/test_reflog.py b/git/test/test_reflog.py
new file mode 100644
index 00000000..3fdf1fae
--- /dev/null
+++ b/git/test/test_reflog.py
@@ -0,0 +1,102 @@
+from git.test.lib import *
+from git.objects import IndexObject
+from git.refs import *
+from git.util import Actor
+
+import tempfile
+import shutil
+import os
+
+class TestRefLog(TestBase):
+
+ def test_reflogentry(self):
+ nullhexsha = IndexObject.NULL_HEX_SHA
+ hexsha = 'F' * 40
+ actor = Actor('name', 'email')
+ msg = "message"
+
+ self.failUnlessRaises(ValueError, RefLogEntry.new, nullhexsha, hexsha, 'noactor', 0, 0, "")
+ e = RefLogEntry.new(nullhexsha, hexsha, actor, 0, 1, msg)
+
+ assert e.oldhexsha == nullhexsha
+ assert e.newhexsha == hexsha
+ assert e.actor == actor
+ assert e.time[0] == 0
+ assert e.time[1] == 1
+ assert e.message == msg
+
+ # check representation (roughly)
+ assert repr(e).startswith(nullhexsha)
+
+ def test_base(self):
+ rlp_head = fixture_path('reflog_HEAD')
+ rlp_master = fixture_path('reflog_master')
+ tdir = tempfile.mktemp(suffix="test_reflogs")
+ os.mkdir(tdir)
+
+ # verify we have a ref - with the creation of a new ref, the reflog
+ # will be created as well
+ rlp_master_ro = RefLog.path(self.rorepo.heads.master)
+ assert os.path.isfile(rlp_master_ro)
+
+ # simple read
+ reflog = RefLog.from_file(rlp_master_ro)
+ assert reflog._path is not None
+ assert isinstance(reflog, RefLog)
+ assert len(reflog)
+
+ # iter_entries works with path and with stream
+ assert len(list(RefLog.iter_entries(open(rlp_master))))
+ assert len(list(RefLog.iter_entries(rlp_master)))
+
+ # raise on invalid reflog
+ # TODO: Try multiple corrupted ones !
+ pp = 'reflog_invalid_'
+ for suffix in ('oldsha', 'newsha', 'email', 'date', 'sep'):
+ self.failUnlessRaises(ValueError, RefLog.from_file, fixture_path(pp+suffix))
+ #END for each invalid file
+
+ # cannot write an uninitialized reflog
+ self.failUnlessRaises(ValueError, RefLog().write)
+
+ # test serialize and deserialize - results must match exactly
+ binsha = chr(255)*20
+ msg = "my reflog message"
+ cr = self.rorepo.config_reader()
+ for rlp in (rlp_head, rlp_master):
+ reflog = RefLog.from_file(rlp)
+ tfile = os.path.join(tdir, os.path.basename(rlp))
+ reflog.to_file(tfile)
+ assert reflog.write() is reflog
+
+ # parsed result must match ...
+ treflog = RefLog.from_file(tfile)
+ assert treflog == reflog
+
+ # ... as well as each byte of the written stream
+ assert open(tfile).read() == open(rlp).read()
+
+ # append an entry
+ entry = RefLog.append_entry(cr, tfile, IndexObject.NULL_BIN_SHA, binsha, msg)
+ assert entry.oldhexsha == IndexObject.NULL_HEX_SHA
+ assert entry.newhexsha == 'f'*40
+ assert entry.message == msg
+ assert RefLog.from_file(tfile)[-1] == entry
+
+ # index entry
+ # raises on invalid index
+ self.failUnlessRaises(IndexError, RefLog.entry_at, rlp, 10000)
+
+ # indices can be positive ...
+ assert isinstance(RefLog.entry_at(rlp, 0), RefLogEntry)
+ RefLog.entry_at(rlp, 23)
+
+ # ... and negative
+ for idx in (-1, -24):
+ RefLog.entry_at(rlp, idx)
+ #END for each index to read
+ # END for each reflog
+
+
+ # finally remove our temporary data
+ shutil.rmtree(tdir)
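+
+ # Condensed sketch ( not part of the test ) of the RefLog round-trip
+ # verified above:
+ #
+ # reflog = RefLog.from_file(path) # parse entries
+ # reflog.to_file(other_path) # serialize byte-identically
+ # RefLog.append_entry(config_reader, path, old_binsha, new_binsha, msg)
+ # RefLog.entry_at(path, -1) # random access, negative indices work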
diff --git a/git/test/test_refs.py b/git/test/test_refs.py
new file mode 100644
index 00000000..2338b4e4
--- /dev/null
+++ b/git/test/test_refs.py
@@ -0,0 +1,521 @@
+# test_refs.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from mock import *
+from git.test.lib import *
+from git import *
+import git.refs as refs
+from git.util import Actor
+from git.objects.tag import TagObject
+from itertools import chain
+import os
+
+class TestRefs(TestBase):
+
+ def test_from_path(self):
+ # should be able to create any reference directly
+ for ref_type in ( Reference, Head, TagReference, RemoteReference ):
+ for name in ('rela_name', 'path/rela_name'):
+ full_path = ref_type.to_full_path(name)
+ instance = ref_type.from_path(self.rorepo, full_path)
+ assert isinstance(instance, ref_type)
+ # END for each name
+ # END for each type
+
+ def test_tag_base(self):
+ tag_object_refs = list()
+ for tag in self.rorepo.tags:
+ assert "refs/tags" in tag.path
+ assert tag.name
+ assert isinstance( tag.commit, Commit )
+ if tag.tag is not None:
+ tag_object_refs.append( tag )
+ tagobj = tag.tag
+ # have no dict
+ self.failUnlessRaises(AttributeError, setattr, tagobj, 'someattr', 1)
+ assert isinstance( tagobj, TagObject )
+ assert tagobj.tag == tag.name
+ assert isinstance( tagobj.tagger, Actor )
+ assert isinstance( tagobj.tagged_date, int )
+ assert isinstance( tagobj.tagger_tz_offset, int )
+ assert tagobj.message
+ assert tag.object == tagobj
+ # can't assign the object
+ self.failUnlessRaises(AttributeError, setattr, tag, 'object', tagobj)
+ # END if we have a tag object
+ # END for tag in repo-tags
+ assert tag_object_refs
+ assert isinstance(self.rorepo.tags['0.1.5'], TagReference)
+
+ def test_tags(self):
+ # tag refs can point to tag objects or to commits
+ s = set()
+ ref_count = 0
+ for ref in chain(self.rorepo.tags, self.rorepo.heads):
+ ref_count += 1
+ assert isinstance(ref, refs.Reference)
+ assert str(ref) == ref.name
+ assert repr(ref)
+ assert ref == ref
+ assert not ref != ref
+ s.add(ref)
+ # END for each ref
+ assert len(s) == ref_count
+ assert len(s|s) == ref_count
+
+ @with_rw_repo('HEAD', bare=False)
+ def test_heads(self, rwrepo):
+ for head in rwrepo.heads:
+ assert head.name
+ assert head.path
+ assert "refs/heads" in head.path
+ prev_object = head.object
+ cur_object = head.object
+ assert prev_object == cur_object # represent the same git object
+ assert prev_object is not cur_object # but are different instances
+
+ writer = head.config_writer()
+ tv = "testopt"
+ writer.set_value(tv, 1)
+ assert writer.get_value(tv) == 1
+ del(writer)
+ assert head.config_reader().get_value(tv) == 1
+ head.config_writer().remove_option(tv)
+
+ # after the clone, we might still have a tracking branch setup
+ head.set_tracking_branch(None)
+ assert head.tracking_branch() is None
+ remote_ref = rwrepo.remotes[0].refs[0]
+ assert head.set_tracking_branch(remote_ref) is head
+ assert head.tracking_branch() == remote_ref
+ head.set_tracking_branch(None)
+ assert head.tracking_branch() is None
+ # END for each head
+
+ # verify REFLOG gets altered
+ head = rwrepo.head
+ cur_head = head.ref
+ cur_commit = cur_head.commit
+ pcommit = cur_head.commit.parents[0].parents[0]
+ hlog_len = len(head.log())
+ blog_len = len(cur_head.log())
+ assert head.set_reference(pcommit, 'detached head') is head
+ # one new log-entry
+ thlog = head.log()
+ assert len(thlog) == hlog_len + 1
+ assert thlog[-1].oldhexsha == cur_commit.hexsha
+ assert thlog[-1].newhexsha == pcommit.hexsha
+
+ # the ref didn't change though
+ assert len(cur_head.log()) == blog_len
+
+ # head changes once again, cur_head doesn't change
+ head.set_reference(cur_head, 'reattach head')
+ assert len(head.log()) == hlog_len+2
+ assert len(cur_head.log()) == blog_len
+
+ # adjusting the head-ref also adjusts the head, so both reflogs are
+ # altered
+ cur_head.set_commit(pcommit, 'changing commit')
+ assert len(cur_head.log()) == blog_len+1
+ assert len(head.log()) == hlog_len+3
+
+
+ # with automatic dereferencing
+ assert head.set_commit(cur_commit, 'change commit once again') is head
+ assert len(head.log()) == hlog_len+4
+ assert len(cur_head.log()) == blog_len+2
+
+ # a new branch has just a single entry
+ other_head = Head.create(rwrepo, 'mynewhead', pcommit, logmsg='new head created')
+ log = other_head.log()
+ assert len(log) == 1
+ assert log[0].oldhexsha == pcommit.NULL_HEX_SHA
+ assert log[0].newhexsha == pcommit.hexsha
+
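+ # Sketch ( illustrative ) of which operations above append reflog entries:
+ #
+ # head.set_reference(commit, msg) # HEAD log only
+ # head.set_commit(commit, msg) # HEAD log and branch log
+ # Head.create(repo, name, commit, logmsg=msg) # single initial entry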
+
+ def test_refs(self):
+ types_found = set()
+ for ref in self.rorepo.refs:
+ types_found.add(type(ref))
+ assert len(types_found) >= 3
+
+ def test_is_valid(self):
+ assert not Reference(self.rorepo, 'refs/doesnt/exist').is_valid()
+ assert self.rorepo.head.is_valid()
+ assert self.rorepo.head.reference.is_valid()
+ assert not SymbolicReference(self.rorepo, 'hellothere').is_valid()
+
+ def test_orig_head(self):
+ assert type(self.rorepo.head.orig_head()) == SymbolicReference
+
+ @with_rw_repo('0.1.6')
+ def test_head_reset(self, rw_repo):
+ cur_head = rw_repo.head
+ old_head_commit = cur_head.commit
+ new_head_commit = cur_head.ref.commit.parents[0]
+ cur_head.reset(new_head_commit, index=True) # index only
+ assert cur_head.reference.commit == new_head_commit
+
+ self.failUnlessRaises(ValueError, cur_head.reset, new_head_commit, index=False, working_tree=True)
+ new_head_commit = new_head_commit.parents[0]
+ cur_head.reset(new_head_commit, index=True, working_tree=True) # index + wt
+ assert cur_head.reference.commit == new_head_commit
+
+ # paths - make sure we have something to do
+ rw_repo.index.reset(old_head_commit.parents[0])
+ cur_head.reset(cur_head, paths = "test")
+ cur_head.reset(new_head_commit, paths = "lib")
+ # hard resets with paths don't work, it's all or nothing
+ self.failUnlessRaises(GitCommandError, cur_head.reset, new_head_commit, working_tree=True, paths = "lib")
+
+ # we can do a mixed reset, and then checkout from the index though
+ cur_head.reset(new_head_commit)
+ rw_repo.index.checkout(["lib"], force=True)
+
+
+ # now that we have a writable repo, change the HEAD reference - it's
+ # like git-reset --soft
+ heads = rw_repo.heads
+ assert heads
+ for head in heads:
+ cur_head.reference = head
+ assert cur_head.reference == head
+ assert isinstance(cur_head.reference, Head)
+ assert cur_head.commit == head.commit
+ assert not cur_head.is_detached
+ # END for each head
+
+ # detach
+ active_head = heads[0]
+ curhead_commit = active_head.commit
+ cur_head.reference = curhead_commit
+ assert cur_head.commit == curhead_commit
+ assert cur_head.is_detached
+ self.failUnlessRaises(TypeError, getattr, cur_head, "reference")
+
+ # tags are references, hence we can point to them
+ some_tag = rw_repo.tags[0]
+ cur_head.reference = some_tag
+ assert not cur_head.is_detached
+ assert cur_head.commit == some_tag.commit
+ assert isinstance(cur_head.reference, TagReference)
+
+ # put HEAD back to a real head, otherwise everything else fails
+ cur_head.reference = active_head
+
+ # type check
+ self.failUnlessRaises(ValueError, setattr, cur_head, "reference", "that")
+
+ # head handling
+ commit = 'HEAD'
+ prev_head_commit = cur_head.commit
+ for count, new_name in enumerate(("my_new_head", "feature/feature1")):
+ actual_commit = commit+"^"*count
+ new_head = Head.create(rw_repo, new_name, actual_commit)
+ assert new_head.is_detached
+ assert cur_head.commit == prev_head_commit
+ assert isinstance(new_head, Head)
+ # already exists, but has the same value, so it's fine
+ Head.create(rw_repo, new_name, new_head.commit)
+
+ # it's not fine with a different value
+ self.failUnlessRaises(OSError, Head.create, rw_repo, new_name, new_head.commit.parents[0])
+
+ # force it
+ new_head = Head.create(rw_repo, new_name, actual_commit, force=True)
+ old_path = new_head.path
+ old_name = new_head.name
+
+ assert new_head.rename("hello").name == "hello"
+ assert new_head.rename("hello/world").name == "hello/world"
+ assert new_head.rename(old_name).name == old_name and new_head.path == old_path
+
+ # rename with force
+ tmp_head = Head.create(rw_repo, "tmphead")
+ self.failUnlessRaises(GitCommandError, tmp_head.rename, new_head)
+ tmp_head.rename(new_head, force=True)
+ assert tmp_head == new_head and tmp_head.object == new_head.object
+
+ logfile = RefLog.path(tmp_head)
+ assert os.path.isfile(logfile)
+ Head.delete(rw_repo, tmp_head)
+ # deletion removes the log as well
+ assert not os.path.isfile(logfile)
+ heads = rw_repo.heads
+ assert tmp_head not in heads and new_head not in heads
+ # force on deletion testing would be missing here, code looks okay though ;)
+ # END for each new head name
+ self.failUnlessRaises(TypeError, RemoteReference.create, rw_repo, "some_name")
+
+ # tag ref
+ tag_name = "1.0.2"
+ light_tag = TagReference.create(rw_repo, tag_name)
+ self.failUnlessRaises(GitCommandError, TagReference.create, rw_repo, tag_name)
+ light_tag = TagReference.create(rw_repo, tag_name, "HEAD~1", force = True)
+ assert isinstance(light_tag, TagReference)
+ assert light_tag.name == tag_name
+ assert light_tag.commit == cur_head.commit.parents[0]
+ assert light_tag.tag is None
+
+ # tag with tag object
+ other_tag_name = "releases/1.0.2RC"
+ msg = "my mighty tag\nsecond line"
+ obj_tag = TagReference.create(rw_repo, other_tag_name, message=msg)
+ assert isinstance(obj_tag, TagReference)
+ assert obj_tag.name == other_tag_name
+ assert obj_tag.commit == cur_head.commit
+ assert obj_tag.tag is not None
+
+ TagReference.delete(rw_repo, light_tag, obj_tag)
+ tags = rw_repo.tags
+ assert light_tag not in tags and obj_tag not in tags
+
+ # remote deletion
+ remote_refs_so_far = 0
+ remotes = rw_repo.remotes
+ assert remotes
+ for remote in remotes:
+ refs = remote.refs
+ RemoteReference.delete(rw_repo, *refs)
+ remote_refs_so_far += len(refs)
+ for ref in refs:
+ assert ref.remote_name == remote.name
+ # END for each ref to delete
+ assert remote_refs_so_far
+
+ for remote in remotes:
+ # remotes without references throw
+ self.failUnlessRaises(AssertionError, getattr, remote, 'refs')
+ # END for each remote
+
+ # change where the active head points to
+ if cur_head.is_detached:
+ cur_head.reference = rw_repo.heads[0]
+
+ head = cur_head.reference
+ old_commit = head.commit
+ head.commit = old_commit.parents[0]
+ assert head.commit == old_commit.parents[0]
+ assert head.commit == cur_head.commit
+ head.commit = old_commit
+
+ # setting a non-commit as commit fails, but succeeds as object
+ head_tree = head.commit.tree
+ self.failUnlessRaises(ValueError, setattr, head, 'commit', head_tree)
+ assert head.commit == old_commit # and the ref did not change
+ # we allow heads to point to any object
+ head.object = head_tree
+ assert head.object == head_tree
+ # cannot query tree as commit
+ self.failUnlessRaises(TypeError, getattr, head, 'commit')
+
+ # set the commit directly using the head. This would never detach the head
+ assert not cur_head.is_detached
+ head.object = old_commit
+ cur_head.reference = head.commit
+ assert cur_head.is_detached
+ parent_commit = head.commit.parents[0]
+ assert cur_head.is_detached
+ cur_head.commit = parent_commit
+ assert cur_head.is_detached and cur_head.commit == parent_commit
+
+ cur_head.reference = head
+ assert not cur_head.is_detached
+ cur_head.commit = parent_commit
+ assert not cur_head.is_detached
+ assert head.commit == parent_commit
+
+ # test checkout
+ active_branch = rw_repo.active_branch
+ for head in rw_repo.heads:
+ checked_out_head = head.checkout()
+ assert checked_out_head == head
+ # END for each head to checkout
+
+ # checkout with branch creation
+ new_head = active_branch.checkout(b="new_head")
+ assert active_branch != rw_repo.active_branch
+ assert new_head == rw_repo.active_branch
+
+ # checkout with force as we have a changed a file
+ # clear file
+ open(new_head.commit.tree.blobs[-1].abspath,'w').close()
+ assert len(new_head.commit.diff(None))
+
+ # create a new branch that is likely to touch the file we changed
+ far_away_head = rw_repo.create_head("far_head",'HEAD~100')
+ self.failUnlessRaises(GitCommandError, far_away_head.checkout)
+ assert active_branch == active_branch.checkout(force=True)
+ assert rw_repo.head.reference != far_away_head
+
+ # test reference creation
+ partial_ref = 'sub/ref'
+ full_ref = 'refs/%s' % partial_ref
+ ref = Reference.create(rw_repo, partial_ref)
+ assert ref.path == full_ref
+ assert ref.object == rw_repo.head.commit
+
+ self.failUnlessRaises(OSError, Reference.create, rw_repo, full_ref, 'HEAD~20')
+ # it works if it is at the same spot though and points to the same reference
+ assert Reference.create(rw_repo, full_ref, 'HEAD').path == full_ref
+ Reference.delete(rw_repo, full_ref)
+
+ # recreate the reference using a full_ref
+ ref = Reference.create(rw_repo, full_ref)
+ assert ref.path == full_ref
+ assert ref.object == rw_repo.head.commit
+
+ # recreate using force
+ ref = Reference.create(rw_repo, partial_ref, 'HEAD~1', force=True)
+ assert ref.path == full_ref
+ assert ref.object == rw_repo.head.commit.parents[0]
+
+ # rename it
+ orig_obj = ref.object
+ for name in ('refs/absname', 'rela_name', 'feature/rela_name'):
+ ref_new_name = ref.rename(name)
+ assert isinstance(ref_new_name, Reference)
+ assert name in ref_new_name.path
+ assert ref_new_name.object == orig_obj
+ assert ref_new_name == ref
+ # END for each name type
+
+ # References that don't exist trigger an error if we want to access them
+ self.failUnlessRaises(ValueError, getattr, Reference(rw_repo, "refs/doesntexist"), 'commit')
+
+ # exists, fail unless we force
+ ex_ref_path = far_away_head.path
+ self.failUnlessRaises(OSError, ref.rename, ex_ref_path)
+ # if it points to the same commit it works
+ far_away_head.commit = ref.commit
+ ref.rename(ex_ref_path)
+ assert ref.path == ex_ref_path and ref.object == orig_obj
+ assert ref.rename(ref.path).path == ex_ref_path # rename to same name
+
+ # create symbolic refs
+ symref_path = "symrefs/sym"
+ symref = SymbolicReference.create(rw_repo, symref_path, cur_head.reference)
+ assert symref.path == symref_path
+ assert symref.reference == cur_head.reference
+
+ self.failUnlessRaises(OSError, SymbolicReference.create, rw_repo, symref_path, cur_head.reference.commit)
+ # it works if the new ref points to the same reference
+ assert SymbolicReference.create(rw_repo, symref.path, symref.reference).path == symref.path
+ SymbolicReference.delete(rw_repo, symref)
+ # would raise if the symref hadn't been deleted
+ symref = SymbolicReference.create(rw_repo, symref_path, cur_head.reference)
+
+ # test symbolic references which are not at default locations like HEAD
+ # or FETCH_HEAD - they may also be at spots in refs of course
+ symbol_ref_path = "refs/symbol_ref"
+ symref = SymbolicReference(rw_repo, symbol_ref_path)
+ assert symref.path == symbol_ref_path
+ symbol_ref_abspath = os.path.join(rw_repo.git_dir, symref.path)
+
+ # set it
+ symref.reference = new_head
+ assert symref.reference == new_head
+ assert os.path.isfile(symbol_ref_abspath)
+ assert symref.commit == new_head.commit
+
+ for name in ('absname','folder/rela_name'):
+ symref_new_name = symref.rename(name)
+ assert isinstance(symref_new_name, SymbolicReference)
+ assert name in symref_new_name.path
+ assert symref_new_name.reference == new_head
+ assert symref_new_name == symref
+ assert not symref.is_detached
+ # END for each ref
+
+ # create a new non-head ref just to be sure we handle it even if packed
+ Reference.create(rw_repo, full_ref)
+
+ # test ref listing - assure we have packed refs
+ rw_repo.git.pack_refs(all=True, prune=True)
+ heads = rw_repo.heads
+ assert heads
+ assert new_head in heads
+ assert active_branch in heads
+ assert rw_repo.tags
+
+ # we should be able to iterate all symbolic refs as well - in that case
+ # we should expect only symbolic references to be returned
+ for symref in SymbolicReference.iter_items(rw_repo):
+ assert not symref.is_detached
+
+ # when iterating references, we can get references and symrefs
+ # when deleting all refs, I'd expect them to be gone ! Even from
+ # the packed ones
+ # For this to work, we must not be on any branch
+ rw_repo.head.reference = rw_repo.head.commit
+ deleted_refs = set()
+ for ref in Reference.iter_items(rw_repo):
+ if ref.is_detached:
+ ref.delete(rw_repo, ref)
+ deleted_refs.add(ref)
+ # END delete ref
+ # END for each ref to iterate and to delete
+ assert deleted_refs
+
+ for ref in Reference.iter_items(rw_repo):
+ if ref.is_detached:
+ assert ref not in deleted_refs
+ # END for each ref
+
+ # reattach head - head will not be returned if it is not a symbolic
+ # ref
+ rw_repo.head.reference = Head.create(rw_repo, "master")
+
+ # At least the head should still exist
+ assert os.path.isfile(os.path.join(rw_repo.git_dir, 'HEAD'))
+ refs = list(SymbolicReference.iter_items(rw_repo))
+ assert len(refs) == 1
+
+
+ # test creation of new refs from scratch
+ for path in ("basename", "dir/somename", "dir2/subdir/basename"):
+ # REFERENCES
+ ############
+ fpath = Reference.to_full_path(path)
+ ref_fp = Reference.from_path(rw_repo, fpath)
+ assert not ref_fp.is_valid()
+ ref = Reference(rw_repo, fpath)
+ assert ref == ref_fp
+
+ # can be created by assigning a commit
+ ref.commit = rw_repo.head.commit
+ assert ref.is_valid()
+
+ # if the assignment raises, the ref doesn't exist
+ Reference.delete(ref.repo, ref.path)
+ assert not ref.is_valid()
+ self.failUnlessRaises(ValueError, setattr, ref, 'commit', "nonsense")
+ assert not ref.is_valid()
+
+ # I am sure I had my reason to make it a class method at first, but
+ # now it doesn't make so much sense anymore, want an instance method as well
+ # See http://byronimo.lighthouseapp.com/projects/51787-gitpython/tickets/27
+ Reference.delete(ref.repo, ref.path)
+ assert not ref.is_valid()
+
+ ref.object = rw_repo.head.commit
+ assert ref.is_valid()
+
+ Reference.delete(ref.repo, ref.path)
+ assert not ref.is_valid()
+ self.failUnlessRaises(ValueError, setattr, ref, 'object', "nonsense")
+ assert not ref.is_valid()
+
+ # END for each path
+
+ def test_dereference_recursive(self):
+ # for now, just test the HEAD
+ assert SymbolicReference.dereference_recursive(self.rorepo, 'HEAD')
+
+ def test_reflog(self):
+ assert isinstance(self.rorepo.heads.master.log(), RefLog)
+
diff --git a/git/test/test_remote.py b/git/test/test_remote.py
new file mode 100644
index 00000000..af6915a3
--- /dev/null
+++ b/git/test/test_remote.py
@@ -0,0 +1,445 @@
+# test_remote.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+from git.util import IterableList
+import tempfile
+import shutil
+import os
+import random
+
+# assure we have repeatable results
+random.seed(0)
+
+class TestRemoteProgress(RemoteProgress):
+ __slots__ = ( "_seen_lines", "_stages_per_op", '_num_progress_messages' )
+ def __init__(self):
+ super(TestRemoteProgress, self).__init__()
+ self._seen_lines = list()
+ self._stages_per_op = dict()
+ self._num_progress_messages = 0
+
+ def _parse_progress_line(self, line):
+ # we may remove the line later if it is dropped
+ # Keep it for debugging
+ self._seen_lines.append(line)
+ rval = super(TestRemoteProgress, self)._parse_progress_line(line)
+ assert len(line) > 1, "line %r too short" % line
+ return rval
+
+ def line_dropped(self, line):
+ try:
+ self._seen_lines.remove(line)
+ except ValueError:
+ pass
+
+ def update(self, op_code, cur_count, max_count=None, message=''):
+ # check each stage only comes once
+ op_id = op_code & self.OP_MASK
+ assert op_id in (self.COUNTING, self.COMPRESSING, self.WRITING)
+
+ self._stages_per_op.setdefault(op_id, 0)
+ self._stages_per_op[ op_id ] = self._stages_per_op[ op_id ] | (op_code & self.STAGE_MASK)
+
+ if op_code & (self.WRITING|self.END) == (self.WRITING|self.END):
+ assert message
+ # END check we get message
+
+ self._num_progress_messages += 1
+
+
+ def make_assertion(self):
+ # we don't always receive messages
+ if not self._seen_lines:
+ return
+
+ # sometimes objects are not compressed, which is okay
+ assert len(self._seen_ops) in (2,3)
+ assert self._stages_per_op
+
+ # must have seen all stages
+ for op, stages in self._stages_per_op.items():
+ assert stages & self.STAGE_MASK == self.STAGE_MASK
+ # END for each op/stage
+
+ def assert_received_message(self):
+ assert self._num_progress_messages
+
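+ # How the op_code bits are consumed above ( a sketch, using only the
+ # masks and constants referenced in this class ):
+ #
+ # op_id = op_code & self.OP_MASK # COUNTING / COMPRESSING / WRITING
+ # stage = op_code & self.STAGE_MASK # stage bits such as END
+ # done = op_code & (self.WRITING | self.END) == (self.WRITING | self.END)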
+
+class TestRemote(TestBase):
+
+ def _print_fetchhead(self, repo):
+ fp = open(os.path.join(repo.git_dir, "FETCH_HEAD"))
+ fp.close()
+
+
+ def _do_test_fetch_result(self, results, remote):
+ # self._print_fetchhead(remote.repo)
+ assert len(results) > 0 and isinstance(results[0], FetchInfo)
+ for info in results:
+ assert isinstance(info.note, basestring)
+ if isinstance(info.ref, Reference):
+ assert info.flags != 0
+ # END reference type flags handling
+ assert isinstance(info.ref, (SymbolicReference, Reference))
+ if info.flags & (info.FORCED_UPDATE|info.FAST_FORWARD):
+ assert isinstance(info.old_commit, Commit)
+ else:
+ assert info.old_commit is None
+ # END forced update checking
+ # END for each info
+
+ def _do_test_push_result(self, results, remote):
+ assert len(results) > 0 and isinstance(results[0], PushInfo)
+ for info in results:
+ assert info.flags
+ assert isinstance(info.summary, basestring)
+ if info.old_commit is not None:
+ assert isinstance(info.old_commit, Commit)
+ if info.flags & info.ERROR:
+ has_one = False
+ for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE):
+ has_one |= bool(info.flags & bitflag)
+ # END for each bitflag
+ assert has_one
+ else:
+ # there must be a remote commit
+ if info.flags & info.DELETED == 0:
+ assert isinstance(info.local_ref, Reference)
+ else:
+ assert info.local_ref is None
+ assert type(info.remote_ref) in (TagReference, RemoteReference)
+ # END error checking
+ # END for each info
+
+
+ def _do_test_fetch_info(self, repo):
+ self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "nonsense", '')
+ self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "? [up to date] 0.1.7RC -> origin/0.1.7RC", '')
+
+ def _commit_random_file(self, repo):
+ # Create a file with a random name and random data and commit it to repo.
+ # Return the committed absolute file path
+ index = repo.index
+ new_file = self._make_file(os.path.basename(tempfile.mktemp()),str(random.random()), repo)
+ index.add([new_file])
+ index.commit("Committing %s" % new_file)
+ return new_file
+
+ def _do_test_fetch(self,remote, rw_repo, remote_repo):
+ # specialized fetch testing to de-clutter the main test
+ self._do_test_fetch_info(rw_repo)
+
+ def fetch_and_test(remote, **kwargs):
+ progress = TestRemoteProgress()
+ kwargs['progress'] = progress
+ res = remote.fetch(**kwargs)
+ progress.make_assertion()
+ self._do_test_fetch_result(res, remote)
+ return res
+ # END fetch and check
+
+ def get_info(res, remote, name):
+ return res["%s/%s"%(remote,name)]
+
+ # put remote head to master as it is guaranteed to exist
+ remote_repo.head.reference = remote_repo.heads.master
+
+ res = fetch_and_test(remote)
+ # all up-to-date
+ for info in res:
+ assert info.flags & info.HEAD_UPTODATE
+
+ # rewind remote head to trigger rejection
+ # index must be false as remote is a bare repo
+ rhead = remote_repo.head
+ remote_commit = rhead.commit
+ rhead.reset("HEAD~2", index=False)
+ res = fetch_and_test(remote)
+ mkey = "%s/%s"%(remote,'master')
+ master_info = res[mkey]
+ assert master_info.flags & FetchInfo.FORCED_UPDATE and master_info.note is not None
+
+ # normal fast forward - set head back to previous one
+ rhead.commit = remote_commit
+ res = fetch_and_test(remote)
+ assert res[mkey].flags & FetchInfo.FAST_FORWARD
+
+ # new remote branch
+ new_remote_branch = Head.create(remote_repo, "new_branch")
+ res = fetch_and_test(remote)
+ new_branch_info = get_info(res, remote, new_remote_branch)
+ assert new_branch_info.flags & FetchInfo.NEW_HEAD
+
+ # remote branch rename ( causes creation of a new one locally )
+ new_remote_branch.rename("other_branch_name")
+ res = fetch_and_test(remote)
+ other_branch_info = get_info(res, remote, new_remote_branch)
+ assert other_branch_info.ref.commit == new_branch_info.ref.commit
+
+ # remove new branch
+ Head.delete(new_remote_branch.repo, new_remote_branch)
+ res = fetch_and_test(remote)
+ # deleted remote will not be fetched
+ self.failUnlessRaises(IndexError, get_info, res, remote, new_remote_branch)
+
+ # prune stale tracking branches
+ stale_refs = remote.stale_refs
+ assert len(stale_refs) == 2 and isinstance(stale_refs[0], RemoteReference)
+ RemoteReference.delete(rw_repo, *stale_refs)
+
+ # test single branch fetch with refspec including target remote
+ res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master"%remote)
+ assert len(res) == 1 and get_info(res, remote, 'master')
+
+ # ... with refspec and no target
+ res = fetch_and_test(remote, refspec='master')
+ assert len(res) == 1
+
+ # add new tag reference
+ rtag = TagReference.create(remote_repo, "1.0-RV_hello.there")
+ res = fetch_and_test(remote, tags=True)
+ tinfo = res[str(rtag)]
+ assert isinstance(tinfo.ref, TagReference) and tinfo.ref.commit == rtag.commit
+ assert tinfo.flags & tinfo.NEW_TAG
+
+ # adjust tag commit
+ Reference.set_object(rtag, rhead.commit.parents[0].parents[0])
+ res = fetch_and_test(remote, tags=True)
+ tinfo = res[str(rtag)]
+ assert tinfo.commit == rtag.commit
+ assert tinfo.flags & tinfo.TAG_UPDATE
+
+ # delete remote tag - local one will stay
+ TagReference.delete(remote_repo, rtag)
+ res = fetch_and_test(remote, tags=True)
+ self.failUnlessRaises(IndexError, get_info, res, remote, str(rtag))
+
+ # provoke receiving actual objects, to see what kind of output we have to
+ # expect. For that we need a remote transport protocol
+ # Create a new UN-shared repo and fetch into it after we pushed a change
+ # to the shared repo
+ other_repo_dir = tempfile.mktemp("other_repo")
+ # must clone with a local path for the repo implementation not to freak out
+ # as it wants local paths only ( which I can understand )
+ other_repo = remote_repo.clone(other_repo_dir, shared=False)
+ remote_repo_url = "git://localhost%s"%remote_repo.git_dir
+
+ # put origin to git-url
+ other_origin = other_repo.remotes.origin
+ other_origin.config_writer.set("url", remote_repo_url)
+ # it automatically creates alternates as remote_repo is shared as well.
+ # It will use the transport though and ignore alternates when fetching
+ # assert not other_repo.alternates # this would fail
+
+ # assure we are in the right state
+ rw_repo.head.reset(remote.refs.master, working_tree=True)
+ try:
+ self._commit_random_file(rw_repo)
+ remote.push(rw_repo.head.reference)
+
+ # here I would expect to see remote-information about packing
+ # objects and so on. Unfortunately, this does not happen
+ # if we are redirecting the output - git explicitly checks for this
+ # and only provides progress information to ttys
+ res = fetch_and_test(other_origin)
+ finally:
+ shutil.rmtree(other_repo_dir)
+ # END test and cleanup
+
+ def _test_push_and_pull(self,remote, rw_repo, remote_repo):
+ # push our changes
+ lhead = rw_repo.head
+ lindex = rw_repo.index
+ # assure we are on master and it is checked out where the remote is
+ try:
+ lhead.reference = rw_repo.heads.master
+ except AttributeError:
+ # if the author is on a non-master branch, the clones might not have
+ # a local master yet. We simply create it
+ lhead.reference = rw_repo.create_head('master')
+ # END master handling
+ lhead.reset(remote.refs.master, working_tree=True)
+
+ # push without spec should fail ( without further configuration )
+ # well, works nicely
+ # self.failUnlessRaises(GitCommandError, remote.push)
+
+ # simple file push
+ self._commit_random_file(rw_repo)
+ progress = TestRemoteProgress()
+ res = remote.push(lhead.reference, progress)
+ assert isinstance(res, IterableList)
+ self._do_test_push_result(res, remote)
+ progress.make_assertion()
+
+ # rejected - undo last commit
+ lhead.reset("HEAD~1")
+ res = remote.push(lhead.reference)
+ assert res[0].flags & PushInfo.ERROR
+ assert res[0].flags & PushInfo.REJECTED
+ self._do_test_push_result(res, remote)
+
+ # force push over the rejected update
+ res = remote.push('+%s' % lhead.reference)
+ assert res[0].flags & PushInfo.ERROR == 0
+ assert res[0].flags & PushInfo.FORCED_UPDATE
+ self._do_test_push_result(res, remote)
+
+ # invalid refspec
+ res = remote.push("hellothere")
+ assert len(res) == 0
+
+ # push new tags
+ progress = TestRemoteProgress()
+ to_be_updated = "my_tag.1.0RV"
+ new_tag = TagReference.create(rw_repo, to_be_updated)
+ other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", message="my message")
+ res = remote.push(progress=progress, tags=True)
+ assert res[-1].flags & PushInfo.NEW_TAG
+ progress.make_assertion()
+ self._do_test_push_result(res, remote)
+
+ # push an updated tag
+ # Rejection is default
+ new_tag = TagReference.create(rw_repo, to_be_updated, ref='HEAD~1', force=True)
+ res = remote.push(tags=True)
+ self._do_test_push_result(res, remote)
+ assert res[-1].flags & PushInfo.REJECTED and res[-1].flags & PushInfo.ERROR
+
+ # push force this tag
+ res = remote.push("+%s" % new_tag.path)
+ assert res[-1].flags & PushInfo.ERROR == 0 and res[-1].flags & PushInfo.FORCED_UPDATE
+
+ # delete tag - have to do it using refspec
+ res = remote.push(":%s" % new_tag.path)
+ self._do_test_push_result(res, remote)
+ assert res[0].flags & PushInfo.DELETED
+ # Currently progress is not properly transferred, especially not using
+ # the git daemon
+ # progress.assert_received_message()
+
+ # push new branch
+ new_head = Head.create(rw_repo, "my_new_branch")
+ progress = TestRemoteProgress()
+ res = remote.push(new_head, progress)
+ assert res[0].flags & PushInfo.NEW_HEAD
+ progress.make_assertion()
+ self._do_test_push_result(res, remote)
+
+ # delete new branch on the remote end and locally
+ res = remote.push(":%s" % new_head.path)
+ self._do_test_push_result(res, remote)
+ Head.delete(rw_repo, new_head)
+ assert res[-1].flags & PushInfo.DELETED
+
+ # --all
+ res = remote.push(all=True)
+ self._do_test_push_result(res, remote)
+
+ remote.pull('master')
+
+ # cleanup - delete created tags and branches, as we are in an inner loop on
+ # the same repository
+ TagReference.delete(rw_repo, new_tag, other_tag)
+ remote.push(":%s" % other_tag.path)
+
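+ # Reference sketch ( illustrative ) of the refspec forms used above:
+ #
+ # remote.push(head) # plain update
+ # remote.push('+%s' % ref) # forced update
+ # remote.push(':%s' % ref.path) # delete on the remote end
+ # remote.push(tags=True) / remote.push(all=True)
+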
+ @with_rw_and_rw_remote_repo('0.1.6')
+ def test_base(self, rw_repo, remote_repo):
+ num_remotes = 0
+ remote_set = set()
+ ran_fetch_test = False
+
+ for remote in rw_repo.remotes:
+ num_remotes += 1
+ assert remote == remote
+ assert str(remote) != repr(remote)
+ remote_set.add(remote)
+ remote_set.add(remote) # should already exist
+
+ # REFS
+ refs = remote.refs
+ assert refs
+ for ref in refs:
+ assert ref.remote_name == remote.name
+ assert ref.remote_head
+ # END for each ref
+
+ # OPTIONS
+ # cannot use 'fetch' key anymore as it is now a method
+ for opt in ("url", ):
+ val = getattr(remote, opt)
+ reader = remote.config_reader
+ assert reader.get(opt) == val
+ assert reader.get_value(opt, None) == val
+
+ # unable to write with a reader
+ self.failUnlessRaises(IOError, reader.set, opt, "test")
+
+ # change value
+ writer = remote.config_writer
+ new_val = "myval"
+ writer.set(opt, new_val)
+ assert writer.get(opt) == new_val
+ writer.set(opt, val)
+ assert writer.get(opt) == val
+ del(writer)
+ assert getattr(remote, opt) == val
+ # END for each default option key
+
+ # RENAME
+ other_name = "totally_other_name"
+ prev_name = remote.name
+ assert remote.rename(other_name) == remote
+ assert prev_name != remote.name
+ # multiple times
+ for time in range(2):
+ assert remote.rename(prev_name).name == prev_name
+ # END for each rename ( back to prev_name )
+
+ # PUSH/PULL TESTING
+ self._test_push_and_pull(remote, rw_repo, remote_repo)
+
+ # FETCH TESTING
+ # Only for remotes - local cases are the same or less complicated
+ # as additional progress information will never be emitted
+ if remote.name == "daemon_origin":
+ self._do_test_fetch(remote, rw_repo, remote_repo)
+ ran_fetch_test = True
+ # END fetch test
+
+ remote.update()
+ # END for each remote
+
+ assert ran_fetch_test
+ assert num_remotes
+ assert num_remotes == len(remote_set)
+
+ origin = rw_repo.remote('origin')
+ assert origin == rw_repo.remotes.origin
+
+ @with_rw_repo('HEAD', bare=True)
+ def test_creation_and_removal(self, bare_rw_repo):
+ new_name = "test_new_one"
+ arg_list = (new_name, "git@server:hello.git")
+ remote = Remote.create(bare_rw_repo, *arg_list )
+ assert remote.name == "test_new_one"
+ assert remote in bare_rw_repo.remotes
+
+ # create same one again
+ self.failUnlessRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list)
+
+ Remote.remove(bare_rw_repo, new_name)
+
+ for remote in bare_rw_repo.remotes:
+ if remote.name == new_name:
+ raise AssertionError("Remote removal failed")
+ # END if deleted remote matches existing remote's name
+ # END for each remote
+
+
+
diff --git a/git/test/test_repo.py b/git/test/test_repo.py
new file mode 100644
index 00000000..f517b9f1
--- /dev/null
+++ b/git/test/test_repo.py
@@ -0,0 +1,604 @@
+# test_repo.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.lib import *
+from git import *
+from git.util import join_path_native
+from git.exc import BadObject
+from gitdb.util import hex_to_bin, bin_to_hex
+
+import os, sys
+import tempfile
+import shutil
+from cStringIO import StringIO
+
+
+class TestRepo(TestBase):
+
+ @raises(InvalidGitRepositoryError)
+ def test_new_should_raise_on_invalid_repo_location(self):
+ Repo(tempfile.gettempdir())
+
+ @raises(NoSuchPathError)
+ def test_new_should_raise_on_non_existant_path(self):
+ Repo("repos/foobar")
+
+ def test_repo_creation_from_different_paths(self):
+ r_from_gitdir = Repo(self.rorepo.git_dir)
+ assert r_from_gitdir.git_dir == self.rorepo.git_dir
+ assert r_from_gitdir.git_dir.endswith('.git')
+ assert not self.rorepo.git.working_dir.endswith('.git')
+ assert r_from_gitdir.git.working_dir == self.rorepo.git.working_dir
+
+ def test_description(self):
+ txt = "Test repository"
+ self.rorepo.description = txt
+ assert_equal(self.rorepo.description, txt)
+
+ def test_heads_should_return_array_of_head_objects(self):
+ for head in self.rorepo.heads:
+ assert_equal(Head, head.__class__)
+
+ def test_heads_should_populate_head_data(self):
+ for head in self.rorepo.heads:
+ assert head.name
+ assert isinstance(head.commit,Commit)
+ # END for each head
+
+ assert isinstance(self.rorepo.heads.master, Head)
+ assert isinstance(self.rorepo.heads['master'], Head)
+
+ def test_tree_from_revision(self):
+ tree = self.rorepo.tree('0.1.6')
+ assert len(tree.hexsha) == 40
+ assert tree.type == "tree"
+ assert self.rorepo.tree(tree) == tree
+
+ # try from invalid revision that does not exist
+ self.failUnlessRaises(BadObject, self.rorepo.tree, 'hello world')
+
+ def test_commit_from_revision(self):
+ commit = self.rorepo.commit('0.1.4')
+ assert commit.type == 'commit'
+ assert self.rorepo.commit(commit) == commit
+
+ def test_commits(self):
+ mc = 10
+ commits = list(self.rorepo.iter_commits('0.1.6', max_count=mc))
+ assert len(commits) == mc
+
+ c = commits[0]
+ assert_equal('9a4b1d4d11eee3c5362a4152216376e634bd14cf', c.hexsha)
+ assert_equal(["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents])
+ assert_equal("ce41fc29549042f1aa09cc03174896cf23f112e3", c.tree.hexsha)
+ assert_equal("Michael Trier", c.author.name)
+ assert_equal("mtrier@gmail.com", c.author.email)
+ assert_equal(1232829715, c.authored_date)
+ assert_equal(5*3600, c.author_tz_offset)
+ assert_equal("Michael Trier", c.committer.name)
+ assert_equal("mtrier@gmail.com", c.committer.email)
+ assert_equal(1232829715, c.committed_date)
+ assert_equal(5*3600, c.committer_tz_offset)
+ assert_equal("Bumped version 0.1.6\n", c.message)
+
+ c = commits[1]
+ assert isinstance(c.parents, tuple)
+
+ def test_trees(self):
+ mc = 30
+ num_trees = 0
+ for tree in self.rorepo.iter_trees('0.1.5', max_count=mc):
+ num_trees += 1
+ assert isinstance(tree, Tree)
+ # END for each tree
+ assert num_trees == mc
+
+
+ def _assert_empty_repo(self, repo):
+        # test all kinds of things with an empty, freshly initialized repo.
+        # It should raise meaningful errors
+
+ # entries should be empty
+ assert len(repo.index.entries) == 0
+
+ # head is accessible
+ assert repo.head
+ assert repo.head.ref
+ assert not repo.head.is_valid()
+
+ # we can change the head to some other ref
+ head_ref = Head.from_path(repo, Head.to_full_path('some_head'))
+ assert not head_ref.is_valid()
+ repo.head.ref = head_ref
+
+ # is_dirty can handle all kwargs
+ for args in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
+ assert not repo.is_dirty(*args)
+ # END for each arg
+
+ # we can add a file to the index ( if we are not bare )
+ if not repo.bare:
+ pass
+ # END test repos with working tree
+
+
+ def test_init(self):
+ prev_cwd = os.getcwd()
+ os.chdir(tempfile.gettempdir())
+ git_dir_rela = "repos/foo/bar.git"
+ del_dir_abs = os.path.abspath("repos")
+ git_dir_abs = os.path.abspath(git_dir_rela)
+ try:
+ # with specific path
+ for path in (git_dir_rela, git_dir_abs):
+ r = Repo.init(path=path, bare=True)
+ assert isinstance(r, Repo)
+ assert r.bare == True
+ assert os.path.isdir(r.git_dir)
+
+ self._assert_empty_repo(r)
+
+ # test clone
+ clone_path = path + "_clone"
+ rc = r.clone(clone_path)
+ self._assert_empty_repo(rc)
+
+
+ try:
+ shutil.rmtree(clone_path)
+ except OSError:
+ # when relative paths are used, the clone may actually be inside
+ # of the parent directory
+ pass
+ # END exception handling
+
+ # try again, this time with the absolute version
+ rc = Repo.clone_from(r.git_dir, clone_path)
+ self._assert_empty_repo(rc)
+
+ shutil.rmtree(git_dir_abs)
+ try:
+ shutil.rmtree(clone_path)
+ except OSError:
+ # when relative paths are used, the clone may actually be inside
+ # of the parent directory
+ pass
+ # END exception handling
+
+ # END for each path
+
+ os.makedirs(git_dir_rela)
+ os.chdir(git_dir_rela)
+ r = Repo.init(bare=False)
+            assert r.bare == False
+
+ self._assert_empty_repo(r)
+ finally:
+ try:
+ shutil.rmtree(del_dir_abs)
+ except OSError:
+ pass
+ os.chdir(prev_cwd)
+ # END restore previous state
+
+ def test_bare_property(self):
+ self.rorepo.bare
+
+ def test_daemon_export(self):
+ orig_val = self.rorepo.daemon_export
+ self.rorepo.daemon_export = not orig_val
+ assert self.rorepo.daemon_export == ( not orig_val )
+ self.rorepo.daemon_export = orig_val
+ assert self.rorepo.daemon_export == orig_val
+
+ def test_alternates(self):
+ cur_alternates = self.rorepo.alternates
+ # empty alternates
+ self.rorepo.alternates = []
+ assert self.rorepo.alternates == []
+ alts = [ "other/location", "this/location" ]
+ self.rorepo.alternates = alts
+ assert alts == self.rorepo.alternates
+ self.rorepo.alternates = cur_alternates
+
+ def test_repr(self):
+ path = os.path.join(os.path.abspath(GIT_REPO), '.git')
+ assert_equal('<git.Repo "%s">' % path, repr(self.rorepo))
+
+ def test_is_dirty_with_bare_repository(self):
+ orig_value = self.rorepo._bare
+ self.rorepo._bare = True
+ assert_false(self.rorepo.is_dirty())
+ self.rorepo._bare = orig_value
+
+ def test_is_dirty(self):
+ self.rorepo._bare = False
+ for index in (0,1):
+ for working_tree in (0,1):
+ for untracked_files in (0,1):
+ assert self.rorepo.is_dirty(index, working_tree, untracked_files) in (True, False)
+ # END untracked files
+ # END working tree
+ # END index
+ orig_val = self.rorepo._bare
+ self.rorepo._bare = True
+ assert self.rorepo.is_dirty() == False
+ self.rorepo._bare = orig_val
+
+ def test_head(self):
+ assert self.rorepo.head.reference.object == self.rorepo.active_branch.object
+
+ def test_index(self):
+ index = self.rorepo.index
+ assert isinstance(index, IndexFile)
+
+ def test_tag(self):
+ assert self.rorepo.tag('refs/tags/0.1.5').commit
+
+ def test_archive(self):
+ tmpfile = os.tmpfile()
+ self.rorepo.archive(tmpfile, '0.1.5')
+ assert tmpfile.tell()
+
+ @patch_object(Git, '_call_process')
+ def test_should_display_blame_information(self, git):
+ git.return_value = fixture('blame')
+ b = self.rorepo.blame( 'master', 'lib/git.py')
+ assert_equal(13, len(b))
+ assert_equal( 2, len(b[0]) )
+ # assert_equal(25, reduce(lambda acc, x: acc + len(x[-1]), b))
+ assert_equal(hash(b[0][0]), hash(b[9][0]))
+ c = b[0][0]
+ assert_true(git.called)
+ assert_equal(git.call_args, (('blame', 'master', '--', 'lib/git.py'), {'p': True}))
+
+ assert_equal('634396b2f541a9f2d58b00be1a07f0c358b999b3', c.hexsha)
+ assert_equal('Tom Preston-Werner', c.author.name)
+ assert_equal('tom@mojombo.com', c.author.email)
+ assert_equal(1191997100, c.authored_date)
+ assert_equal('Tom Preston-Werner', c.committer.name)
+ assert_equal('tom@mojombo.com', c.committer.email)
+ assert_equal(1191997100, c.committed_date)
+ assert_equal('initial grit setup', c.message)
+
+ # test the 'lines per commit' entries
+ tlist = b[0][1]
+ assert_true( tlist )
+ assert_true( isinstance( tlist[0], basestring ) )
+ assert_true( len( tlist ) < sum( len(t) for t in tlist ) ) # test for single-char bug
+
+ def test_untracked_files(self):
+ base = self.rorepo.working_tree_dir
+ files = ( join_path_native(base, "__test_myfile"),
+ join_path_native(base, "__test_other_file") )
+ num_recently_untracked = 0
+ try:
+ for fpath in files:
+ fd = open(fpath,"wb")
+ fd.close()
+ # END for each filename
+ untracked_files = self.rorepo.untracked_files
+ num_recently_untracked = len(untracked_files)
+
+ # assure we have all names - they are relative to the git-dir
+ num_test_untracked = 0
+ for utfile in untracked_files:
+ num_test_untracked += join_path_native(base, utfile) in files
+ assert len(files) == num_test_untracked
+ finally:
+ for fpath in files:
+ if os.path.isfile(fpath):
+ os.remove(fpath)
+ # END handle files
+
+ assert len(self.rorepo.untracked_files) == (num_recently_untracked - len(files))
+
+ def test_config_reader(self):
+ reader = self.rorepo.config_reader() # all config files
+ assert reader.read_only
+ reader = self.rorepo.config_reader("repository") # single config file
+ assert reader.read_only
+
+ def test_config_writer(self):
+ for config_level in self.rorepo.config_level:
+ try:
+ writer = self.rorepo.config_writer(config_level)
+ assert not writer.read_only
+ except IOError:
+                # it's okay not to get a writer for some configuration files if we
+                # have no permissions
+ pass
+ # END for each config level
+
+ def test_creation_deletion(self):
+ # just a very quick test to assure it generally works. There are
+ # specialized cases in the test_refs module
+ head = self.rorepo.create_head("new_head", "HEAD~1")
+ self.rorepo.delete_head(head)
+
+ tag = self.rorepo.create_tag("new_tag", "HEAD~2")
+ self.rorepo.delete_tag(tag)
+ self.rorepo.config_writer()
+ remote = self.rorepo.create_remote("new_remote", "git@server:repo.git")
+ self.rorepo.delete_remote(remote)
+
+ def test_comparison_and_hash(self):
+ # this is only a preliminary test, more testing done in test_index
+ assert self.rorepo == self.rorepo and not (self.rorepo != self.rorepo)
+ assert len(set((self.rorepo, self.rorepo))) == 1
+
+ def test_git_cmd(self):
+ # test CatFileContentStream, just to be very sure we have no fencepost errors
+ # last \n is the terminating newline that it expects
+ l1 = "0123456789\n"
+ l2 = "abcdefghijklmnopqrstxy\n"
+ l3 = "z\n"
+ d = "%s%s%s\n" % (l1, l2, l3)
+
+ l1p = l1[:5]
+
+ # full size
+ # size is without terminating newline
+ def mkfull():
+ return Git.CatFileContentStream(len(d)-1, StringIO(d))
+
+ ts = 5
+ def mktiny():
+ return Git.CatFileContentStream(ts, StringIO(d))
+
+ # readlines no limit
+ s = mkfull()
+ lines = s.readlines()
+ assert len(lines) == 3 and lines[-1].endswith('\n')
+        assert s._stream.tell() == len(d) # must have read to the end
+
+        # readlines line limit
+ s = mkfull()
+ lines = s.readlines(5)
+ assert len(lines) == 1
+
+ # readlines on tiny sections
+ s = mktiny()
+ lines = s.readlines()
+ assert len(lines) == 1 and lines[0] == l1p
+ assert s._stream.tell() == ts+1
+
+ # readline no limit
+ s = mkfull()
+ assert s.readline() == l1
+ assert s.readline() == l2
+ assert s.readline() == l3
+ assert s.readline() == ''
+ assert s._stream.tell() == len(d)
+
+ # readline limit
+ s = mkfull()
+ assert s.readline(5) == l1p
+ assert s.readline() == l1[5:]
+
+ # readline on tiny section
+ s = mktiny()
+ assert s.readline() == l1p
+ assert s.readline() == ''
+ assert s._stream.tell() == ts+1
+
+ # read no limit
+ s = mkfull()
+ assert s.read() == d[:-1]
+ assert s.read() == ''
+ assert s._stream.tell() == len(d)
+
+ # read limit
+ s = mkfull()
+ assert s.read(5) == l1p
+ assert s.read(6) == l1[5:]
+        assert s._stream.tell() == 5 + 6 # it's not yet done
+
+ # read tiny
+ s = mktiny()
+ assert s.read(2) == l1[:2]
+ assert s._stream.tell() == 2
+ assert s.read() == l1[2:ts]
+ assert s._stream.tell() == ts+1
+
+ def _assert_rev_parse_types(self, name, rev_obj):
+ rev_parse = self.rorepo.rev_parse
+
+ if rev_obj.type == 'tag':
+ rev_obj = rev_obj.object
+
+ # tree and blob type
+ obj = rev_parse(name + '^{tree}')
+ assert obj == rev_obj.tree
+
+ obj = rev_parse(name + ':CHANGES')
+ assert obj.type == 'blob' and obj.path == 'CHANGES'
+ assert rev_obj.tree['CHANGES'] == obj
+
+
+ def _assert_rev_parse(self, name):
+ """tries multiple different rev-parse syntaxes with the given name
+ :return: parsed object"""
+ rev_parse = self.rorepo.rev_parse
+ orig_obj = rev_parse(name)
+ if orig_obj.type == 'tag':
+ obj = orig_obj.object
+ else:
+ obj = orig_obj
+ # END deref tags by default
+
+ # try history
+ rev = name + "~"
+ obj2 = rev_parse(rev)
+ assert obj2 == obj.parents[0]
+ self._assert_rev_parse_types(rev, obj2)
+
+ # history with number
+ ni = 11
+ history = [obj.parents[0]]
+ for pn in range(ni):
+ history.append(history[-1].parents[0])
+ # END get given amount of commits
+
+ for pn in range(11):
+ rev = name + "~%i" % (pn+1)
+ obj2 = rev_parse(rev)
+ assert obj2 == history[pn]
+ self._assert_rev_parse_types(rev, obj2)
+ # END history check
+
+ # parent ( default )
+ rev = name + "^"
+ obj2 = rev_parse(rev)
+ assert obj2 == obj.parents[0]
+ self._assert_rev_parse_types(rev, obj2)
+
+ # parent with number
+ for pn, parent in enumerate(obj.parents):
+ rev = name + "^%i" % (pn+1)
+ assert rev_parse(rev) == parent
+ self._assert_rev_parse_types(rev, parent)
+ # END for each parent
+
+ return orig_obj
+
+ def test_rev_parse(self):
+ rev_parse = self.rorepo.rev_parse
+
+        # try special case: This one failed at some point, make sure it's fixed
+ assert rev_parse("33ebe").hexsha == "33ebe7acec14b25c5f84f35a664803fcab2f7781"
+
+ # start from reference
+ num_resolved = 0
+
+ for ref in Reference.iter_items(self.rorepo):
+ path_tokens = ref.path.split("/")
+ for pt in range(len(path_tokens)):
+ path_section = '/'.join(path_tokens[-(pt+1):])
+ try:
+ obj = self._assert_rev_parse(path_section)
+ assert obj.type == ref.object.type
+ num_resolved += 1
+ except BadObject:
+ print "failed on %s" % path_section
+                    # this is fine - e.g. something like '112' may belong to remotes/rname/merge-requests/112
+ pass
+ # END exception handling
+ # END for each token
+ # END for each reference
+ assert num_resolved
+
+ # it works with tags !
+ tag = self._assert_rev_parse('0.1.4')
+ assert tag.type == 'tag'
+
+ # try full sha directly ( including type conversion )
+ assert tag.object == rev_parse(tag.object.hexsha)
+ self._assert_rev_parse_types(tag.object.hexsha, tag.object)
+
+
+ # multiple tree types result in the same tree: HEAD^{tree}^{tree}:CHANGES
+ rev = '0.1.4^{tree}^{tree}'
+ assert rev_parse(rev) == tag.object.tree
+ assert rev_parse(rev+':CHANGES') == tag.object.tree['CHANGES']
+
+
+ # try to get parents from first revision - it should fail as no such revision
+ # exists
+ first_rev = "33ebe7acec14b25c5f84f35a664803fcab2f7781"
+ commit = rev_parse(first_rev)
+ assert len(commit.parents) == 0
+ assert commit.hexsha == first_rev
+ self.failUnlessRaises(BadObject, rev_parse, first_rev+"~")
+ self.failUnlessRaises(BadObject, rev_parse, first_rev+"^")
+
+ # short SHA1
+ commit2 = rev_parse(first_rev[:20])
+ assert commit2 == commit
+ commit2 = rev_parse(first_rev[:5])
+ assert commit2 == commit
+
+
+ # todo: dereference tag into a blob 0.1.7^{blob} - quite a special one
+ # needs a tag which points to a blob
+
+
+ # ref^0 returns commit being pointed to, same with ref~0, and ^{}
+ tag = rev_parse('0.1.4')
+        for token in ('~0', '^0', '^{}'):
+ assert tag.object == rev_parse('0.1.4%s' % token)
+ # END handle multiple tokens
+
+ # try partial parsing
+ max_items = 40
+ for i, binsha in enumerate(self.rorepo.odb.sha_iter()):
+ assert rev_parse(bin_to_hex(binsha)[:8-(i%2)]).binsha == binsha
+ if i > max_items:
+                # this is rather slow currently, as rev_parse returns an object
+                # that requires accessing packs, which adds some overhead
+ break
+ # END for each binsha in repo
+
+ # missing closing brace commit^{tree
+ self.failUnlessRaises(ValueError, rev_parse, '0.1.4^{tree')
+
+ # missing starting brace
+ self.failUnlessRaises(ValueError, rev_parse, '0.1.4^tree}')
+
+ # REVLOG
+ #######
+ head = self.rorepo.head
+
+ # need to specify a ref when using the @ syntax
+ self.failUnlessRaises(BadObject, rev_parse, "%s@{0}" % head.commit.hexsha)
+
+ # uses HEAD.ref by default
+ assert rev_parse('@{0}') == head.commit
+ if not head.is_detached:
+ refspec = '%s@{0}' % head.ref.name
+ assert rev_parse(refspec) == head.ref.commit
+ # all additional specs work as well
+ assert rev_parse(refspec+"^{tree}") == head.commit.tree
+ assert rev_parse(refspec+":CHANGES").type == 'blob'
+ #END operate on non-detached head
+
+ # the last position
+ assert rev_parse('@{1}') != head.commit
+
+ # position doesn't exist
+ self.failUnlessRaises(IndexError, rev_parse, '@{10000}')
+
+ # currently, nothing more is supported
+ self.failUnlessRaises(NotImplementedError, rev_parse, "@{1 week ago}")
+
+
+ def test_repo_odbtype(self):
+ target_type = GitDB
+ if sys.version_info[1] < 5:
+ target_type = GitCmdObjectDB
+ assert isinstance(self.rorepo.odb, target_type)
+
+ def test_submodules(self):
+ assert len(self.rorepo.submodules) == 1 # non-recursive
+ assert len(list(self.rorepo.iter_submodules())) == 2
+
+ assert isinstance(self.rorepo.submodule("gitdb"), Submodule)
+ self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist")
+
+ @with_rw_repo('HEAD', bare=False)
+ def test_submodule_update(self, rwrepo):
+ # fails in bare mode
+ rwrepo._bare = True
+ self.failUnlessRaises(InvalidGitRepositoryError, rwrepo.submodule_update)
+ rwrepo._bare = False
+
+ # test create submodule
+ sm = rwrepo.submodules[0]
+ sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path))
+ assert isinstance(sm, Submodule)
+
+ # note: the rest of this functionality is tested in test_submodule
+
+
diff --git a/git/test/test_stats.py b/git/test/test_stats.py
new file mode 100644
index 00000000..2bdb0a89
--- /dev/null
+++ b/git/test/test_stats.py
@@ -0,0 +1,25 @@
+# test_stats.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git import *
+
+class TestStats(TestBase):
+
+ def test__list_from_string(self):
+ output = fixture('diff_numstat')
+ stats = Stats._list_from_string(self.rorepo, output)
+
+ assert_equal(2, stats.total['files'])
+ assert_equal(52, stats.total['lines'])
+ assert_equal(29, stats.total['insertions'])
+ assert_equal(23, stats.total['deletions'])
+
+ assert_equal(29, stats.files["a.txt"]['insertions'])
+ assert_equal(18, stats.files["a.txt"]['deletions'])
+
+ assert_equal(0, stats.files["b.txt"]['insertions'])
+ assert_equal(5, stats.files["b.txt"]['deletions'])
diff --git a/git/test/test_submodule.py b/git/test/test_submodule.py
new file mode 100644
index 00000000..b8a25e02
--- /dev/null
+++ b/git/test/test_submodule.py
@@ -0,0 +1,546 @@
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+from git.test.lib import *
+from git.exc import *
+from git.objects.submodule.base import Submodule
+from git.objects.submodule.root import RootModule, RootUpdateProgress
+from git.util import to_native_path_linux, join_path_native
+import shutil
+import git
+import os
+
+class TestRootProgress(RootUpdateProgress):
+ """Just prints messages, for now without checking the correctness of the states"""
+
+ def update(self, op, index, max_count, message=''):
+ print message
+
+prog = TestRootProgress()
+
+class TestSubmodule(TestBase):
+
+ k_subm_current = "83a9e4a0dad595188ff3fb35bc3dfc4d931eff6d"
+ k_subm_changed = "394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3"
+ k_no_subm_tag = "0.1.6"
+
+
+ def _do_base_tests(self, rwrepo):
+ """Perform all tests in the given repository, it may be bare or nonbare"""
+ # manual instantiation
+ smm = Submodule(rwrepo, "\0"*20)
+ # name needs to be set in advance
+ self.failUnlessRaises(AttributeError, getattr, smm, 'name')
+
+ # iterate - 1 submodule
+ sms = Submodule.list_items(rwrepo, self.k_subm_current)
+ assert len(sms) == 1
+ sm = sms[0]
+
+        # at a different time, there are none
+ assert len(Submodule.list_items(rwrepo, self.k_no_subm_tag)) == 0
+
+ assert sm.path == 'ext/gitdb'
+ assert sm.path != sm.name # in our case, we have ids there, which don't equal the path
+ assert sm.url == 'git://gitorious.org/git-python/gitdb.git'
+ assert sm.branch_path == 'refs/heads/master' # the default ...
+ assert sm.branch_name == 'master'
+ assert sm.parent_commit == rwrepo.head.commit
+ # size is always 0
+ assert sm.size == 0
+ # the module is not checked-out yet
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
+
+ # which is why we can't get the branch either - it points into the module() repository
+ self.failUnlessRaises(InvalidGitRepositoryError, getattr, sm, 'branch')
+
+        # branch_path works, as it's just a string
+ assert isinstance(sm.branch_path, basestring)
+
+ # some commits earlier we still have a submodule, but its at a different commit
+ smold = Submodule.iter_items(rwrepo, self.k_subm_changed).next()
+ assert smold.binsha != sm.binsha
+ assert smold != sm # the name changed
+
+ # force it to reread its information
+ del(smold._url)
+        assert smold.url == sm.url
+
+ # test config_reader/writer methods
+ sm.config_reader()
+ new_smclone_path = None # keep custom paths for later
+        new_csmclone_path = None
+ if rwrepo.bare:
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.config_writer)
+ else:
+ writer = sm.config_writer()
+ # for faster checkout, set the url to the local path
+ new_smclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path))
+ writer.set_value('url', new_smclone_path)
+ del(writer)
+ assert sm.config_reader().get_value('url') == new_smclone_path
+ assert sm.url == new_smclone_path
+ # END handle bare repo
+ smold.config_reader()
+
+ # cannot get a writer on historical submodules
+ if not rwrepo.bare:
+ self.failUnlessRaises(ValueError, smold.config_writer)
+ # END handle bare repo
+
+ # make the old into a new - this doesn't work as the name changed
+ prev_parent_commit = smold.parent_commit
+ self.failUnlessRaises(ValueError, smold.set_parent_commit, self.k_subm_current)
+ # the sha is properly updated
+ smold.set_parent_commit(self.k_subm_changed+"~1")
+ assert smold.binsha != sm.binsha
+
+ # raises if the sm didn't exist in new parent - it keeps its
+ # parent_commit unchanged
+ self.failUnlessRaises(ValueError, smold.set_parent_commit, self.k_no_subm_tag)
+
+ # TEST TODO: if a path in the gitmodules file, but not in the index, it raises
+
+ # TEST UPDATE
+ ##############
+ # module retrieval is not always possible
+ if rwrepo.bare:
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.add, rwrepo, 'here', 'there')
+ else:
+            # it's not checked out in our case
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
+ assert not sm.module_exists()
+
+ # currently there is only one submodule
+ assert len(list(rwrepo.iter_submodules())) == 1
+ assert sm.binsha != "\0"*20
+
+ # TEST ADD
+ ###########
+ # preliminary tests
+ # adding existing returns exactly the existing
+ sma = Submodule.add(rwrepo, sm.name, sm.path)
+ assert sma.path == sm.path
+
+ # no url and no module at path fails
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", "pathtorepo", url=None)
+
+ # CONTINUE UPDATE
+ #################
+
+            # let's update it - it's a recursive one too
+ newdir = os.path.join(sm.abspath, 'dir')
+ os.makedirs(newdir)
+
+ # update fails if the path already exists non-empty
+ self.failUnlessRaises(OSError, sm.update)
+ os.rmdir(newdir)
+
+ # dry-run does nothing
+ sm.update(dry_run=True, progress=prog)
+ assert not sm.module_exists()
+
+ assert sm.update() is sm
+ sm_repopath = sm.path # cache for later
+ assert sm.module_exists()
+ assert isinstance(sm.module(), git.Repo)
+ assert sm.module().working_tree_dir == sm.abspath
+
+ # INTERLEAVE ADD TEST
+ #####################
+ # url must match the one in the existing repository ( if submodule name suggests a new one )
+ # or we raise
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", sm.path, "git://someurl/repo.git")
+
+
+ # CONTINUE UPDATE
+ #################
+ # we should have setup a tracking branch, which is also active
+ assert sm.module().head.ref.tracking_branch() is not None
+
+ # delete the whole directory and re-initialize
+ shutil.rmtree(sm.abspath)
+ assert len(sm.children()) == 0
+ # dry-run does nothing
+ sm.update(dry_run=True, recursive=False, progress=prog)
+ assert len(sm.children()) == 0
+
+ sm.update(recursive=False)
+ assert len(list(rwrepo.iter_submodules())) == 2
+            assert len(sm.children()) == 1 # it's not checked out yet
+ csm = sm.children()[0]
+ assert not csm.module_exists()
+ csm_repopath = csm.path
+
+ # adjust the path of the submodules module to point to the local destination
+ new_csmclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path, csm.path))
+ csm.config_writer().set_value('url', new_csmclone_path)
+ assert csm.url == new_csmclone_path
+
+ # dry-run does nothing
+ assert not csm.module_exists()
+ sm.update(recursive=True, dry_run=True, progress=prog)
+ assert not csm.module_exists()
+
+            # update recursively again
+ sm.update(recursive=True)
+ assert csm.module_exists()
+
+ # tracking branch once again
+            assert csm.module().head.ref.tracking_branch() is not None
+
+            # this pulled in a sub-submodule
+ assert len(list(rwrepo.iter_submodules())) == 2
+
+
+ # reset both heads to the previous version, verify that to_latest_revision works
+ smods = (sm.module(), csm.module())
+ for repo in smods:
+ repo.head.reset('HEAD~2', working_tree=1)
+ # END for each repo to reset
+
+ # dry run does nothing
+ sm.update(recursive=True, dry_run=True, progress=prog)
+ for repo in smods:
+ assert repo.head.commit != repo.head.ref.tracking_branch().commit
+ # END for each repo to check
+
+ sm.update(recursive=True, to_latest_revision=True)
+ for repo in smods:
+ assert repo.head.commit == repo.head.ref.tracking_branch().commit
+ # END for each repo to check
+ del(smods)
+
+ # if the head is detached, it still works ( but warns )
+ smref = sm.module().head.ref
+ sm.module().head.ref = 'HEAD~1'
+ # if there is no tracking branch, we get a warning as well
+ csm_tracking_branch = csm.module().head.ref.tracking_branch()
+ csm.module().head.ref.set_tracking_branch(None)
+ sm.update(recursive=True, to_latest_revision=True)
+
+ # to_latest_revision changes the child submodule's commit, it needs an
+ # update now
+ csm.set_parent_commit(csm.repo.head.commit)
+
+ # undo the changes
+ sm.module().head.ref = smref
+ csm.module().head.ref.set_tracking_branch(csm_tracking_branch)
+
+            # REMOVAL OF REPOSITORY
+ ########################
+ # must delete something
+ self.failUnlessRaises(ValueError, csm.remove, module=False, configuration=False)
+ # We have modified the configuration, hence the index is dirty, and the
+ # deletion will fail
+ # NOTE: As we did a few updates in the meanwhile, the indices were reset
+ # Hence we create some changes
+ sm.config_writer().set_value("somekey", "somevalue")
+ csm.config_writer().set_value("okey", "ovalue")
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+ # if we remove the dirty index, it would work
+ sm.module().index.reset()
+ # still, we have the file modified
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove, dry_run=True)
+ sm.module().index.reset(working_tree=True)
+
+ # this would work
+ assert sm.remove(dry_run=True) is sm
+ assert sm.module_exists()
+ sm.remove(force=True, dry_run=True)
+ assert sm.module_exists()
+
+ # but ... we have untracked files in the child submodule
+ fn = join_path_native(csm.module().working_tree_dir, "newfile")
+ open(fn, 'w').write("hi")
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
+
+ # forcibly delete the child repository
+ assert csm.remove(force=True) is csm
+ assert not csm.exists()
+ assert not csm.module_exists()
+ assert len(sm.children()) == 0
+ # now we have a changed index, as configuration was altered.
+ # fix this
+ sm.module().index.reset(working_tree=True)
+
+ # now delete only the module of the main submodule
+ assert sm.module_exists()
+ sm.remove(configuration=False)
+ assert sm.exists()
+ assert not sm.module_exists()
+ assert sm.config_reader().get_value('url')
+
+ # delete the rest
+ sm.remove()
+ assert not sm.exists()
+ assert not sm.module_exists()
+
+ assert len(rwrepo.submodules) == 0
+
+ # ADD NEW SUBMODULE
+ ###################
+ # add a simple remote repo - trailing slashes are no problem
+ smid = "newsub"
+ osmid = "othersub"
+ nsm = Submodule.add(rwrepo, smid, sm_repopath, new_smclone_path+"/", None, no_checkout=True)
+ assert nsm.name == smid
+ assert nsm.module_exists()
+ assert nsm.exists()
+            # it's not checked out
+ assert not os.path.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file))
+ assert len(rwrepo.submodules) == 1
+
+ # add another submodule, but into the root, not as submodule
+ osm = Submodule.add(rwrepo, osmid, csm_repopath, new_csmclone_path, Submodule.k_head_default)
+ assert osm != nsm
+ assert osm.module_exists()
+ assert osm.exists()
+ assert os.path.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py'))
+
+ assert len(rwrepo.submodules) == 2
+
+ # commit the changes, just to finalize the operation
+ rwrepo.index.commit("my submod commit")
+ assert len(rwrepo.submodules) == 2
+
+            # needs update as the head changed; otherwise it thinks it's in the
+            # history of the repo
+ nsm.set_parent_commit(rwrepo.head.commit)
+ osm.set_parent_commit(rwrepo.head.commit)
+
+ # MOVE MODULE
+ #############
+            # invalid input
+ self.failUnlessRaises(ValueError, nsm.move, 'doesntmatter', module=False, configuration=False)
+
+ # renaming to the same path does nothing
+ assert nsm.move(sm.path) is nsm
+
+ # rename a module
+ nmp = join_path_native("new", "module", "dir") + "/" # new module path
+ pmp = nsm.path
+ abspmp = nsm.abspath
+ assert nsm.move(nmp) is nsm
+ nmp = nmp[:-1] # cut last /
+ assert nsm.path == nmp
+ assert rwrepo.submodules[0].path == nmp
+
+ mpath = 'newsubmodule'
+ absmpath = join_path_native(rwrepo.working_tree_dir, mpath)
+ open(absmpath, 'w').write('')
+ self.failUnlessRaises(ValueError, nsm.move, mpath)
+ os.remove(absmpath)
+
+ # now it works, as we just move it back
+ nsm.move(pmp)
+ assert nsm.path == pmp
+ assert rwrepo.submodules[0].path == pmp
+
+ # TODO lowprio: test remaining exceptions ... for now its okay, the code looks right
+
+ # REMOVE 'EM ALL
+ ################
+ # if a submodule's repo has no remotes, it can't be added without an explicit url
+ osmod = osm.module()
+
+ osm.remove(module=False)
+ for remote in osmod.remotes:
+ remote.remove(osmod, remote.name)
+ assert not osm.exists()
+ self.failUnlessRaises(ValueError, Submodule.add, rwrepo, osmid, csm_repopath, url=None)
+ # END handle bare mode
+
+ # Error if there is no submodule file here
+ self.failUnlessRaises(IOError, Submodule._config_parser, rwrepo, rwrepo.commit(self.k_no_subm_tag), True)
+
+ @with_rw_repo(k_subm_current)
+ def test_base_rw(self, rwrepo):
+ self._do_base_tests(rwrepo)
+
+ @with_rw_repo(k_subm_current, bare=True)
+ def test_base_bare(self, rwrepo):
+ self._do_base_tests(rwrepo)
+
+ @with_rw_repo(k_subm_current, bare=False)
+ def test_root_module(self, rwrepo):
+ # Can query everything without problems
+ rm = RootModule(self.rorepo)
+ assert rm.module() is self.rorepo
+
+ # try attributes
+ rm.binsha
+ rm.mode
+ rm.path
+ assert rm.name == rm.k_root_name
+ assert rm.parent_commit == self.rorepo.head.commit
+ rm.url
+ rm.branch
+
+ assert len(rm.list_items(rm.module())) == 1
+ rm.config_reader()
+ rm.config_writer()
+
+ # deep traversal gitdb / async
+ rsmsp = [sm.path for sm in rm.traverse()]
+ assert len(rsmsp) == 2 # gitdb and async, async being a child of gitdb
+
+ # cannot set the parent commit as root module's path didn't exist
+ self.failUnlessRaises(ValueError, rm.set_parent_commit, 'HEAD')
+
+ # TEST UPDATE
+ #############
+        # set up a commit which removes existing, adds new and modifies existing submodules
+ rm = RootModule(rwrepo)
+ assert len(rm.children()) == 1
+
+ # modify path without modifying the index entry
+ # ( which is what the move method would do properly )
+ #==================================================
+ sm = rm.children()[0]
+ pp = "path/prefix"
+ fp = join_path_native(pp, sm.path)
+ prep = sm.path
+ assert not sm.module_exists() # was never updated after rwrepo's clone
+
+ # assure we clone from a local source
+ sm.config_writer().set_value('url', to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path)))
+
+ # dry-run does nothing
+ sm.update(recursive=False, dry_run=True, progress=prog)
+ assert not sm.module_exists()
+
+ sm.update(recursive=False)
+ assert sm.module_exists()
+ sm.config_writer().set_value('path', fp) # change path to something with prefix AFTER url change
+
+        # update fails as list_items cannot work in such a situation, as it cannot
+        # find the entry at the changed path
+ self.failUnlessRaises(InvalidGitRepositoryError, rm.update, recursive=False)
+
+        # move it properly - doesn't work as its path currently points to an
+        # index entry which doesn't exist ( move it to some path, it doesn't matter here )
+ self.failUnlessRaises(InvalidGitRepositoryError, sm.move, pp)
+ # reset the path(cache) to where it was, now it works
+ sm.path = prep
+ sm.move(fp, module=False) # leave it at the old location
+
+ assert not sm.module_exists()
+ cpathchange = rwrepo.index.commit("changed sm path") # finally we can commit
+
+ # update puts the module into place
+ rm.update(recursive=False, progress=prog)
+ sm.set_parent_commit(cpathchange)
+ assert sm.module_exists()
+
+ # add submodule
+ #================
+ nsmn = "newsubmodule"
+ nsmp = "submrepo"
+ async_url = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1]))
+ nsm = Submodule.add(rwrepo, nsmn, nsmp, url=async_url)
+ csmadded = rwrepo.index.commit("Added submodule").hexsha # make sure we don't keep the repo reference
+ nsm.set_parent_commit(csmadded)
+ assert nsm.module_exists()
+ # in our case, the module should not exist, which happens if we update a parent
+        # repo and a new submodule comes to life
+ nsm.remove(configuration=False, module=True)
+ assert not nsm.module_exists() and nsm.exists()
+
+
+ # dry-run does nothing
+ rm.update(recursive=False, dry_run=True, progress=prog)
+
+ # otherwise it will work
+ rm.update(recursive=False, progress=prog)
+ assert nsm.module_exists()
+
+
+
+ # remove submodule - the previous one
+ #====================================
+ sm.set_parent_commit(csmadded)
+ smp = sm.abspath
+ assert not sm.remove(module=False).exists()
+ assert os.path.isdir(smp) # module still exists
+ csmremoved = rwrepo.index.commit("Removed submodule")
+
+ # an update will remove the module
+ # not in dry_run
+ rm.update(recursive=False, dry_run=True)
+ assert os.path.isdir(smp)
+
+ rm.update(recursive=False)
+ assert not os.path.isdir(smp)
+
+
+ # change url
+ #=============
+ # to the first repository, this way we have a fast checkout, and a completely different
+ # repository at the different url
+ nsm.set_parent_commit(csmremoved)
+ nsmurl = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0]))
+ nsm.config_writer().set_value('url', nsmurl)
+ csmpathchange = rwrepo.index.commit("changed url")
+ nsm.set_parent_commit(csmpathchange)
+
+ prev_commit = nsm.module().head.commit
+ # dry-run does nothing
+ rm.update(recursive=False, dry_run=True, progress=prog)
+ assert nsm.module().remotes.origin.url != nsmurl
+
+ rm.update(recursive=False, progress=prog)
+ assert nsm.module().remotes.origin.url == nsmurl
+ # head changed, as the remote url and its commit changed
+ assert prev_commit != nsm.module().head.commit
+
+ # add the submodule's changed commit to the index, which is what the
+ # user would do
+ # beforehand, update our instance's binsha with the new one
+ nsm.binsha = nsm.module().head.commit.binsha
+ rwrepo.index.add([nsm])
+
+ # change branch
+ #=================
+ # we only have one branch, so we switch to a virtual one, and back
+ # to the current one to trigger the difference
+ cur_branch = nsm.branch
+ nsmm = nsm.module()
+ prev_commit = nsmm.head.commit
+ for branch in ("some_virtual_branch", cur_branch.name):
+ nsm.config_writer().set_value(Submodule.k_head_option, git.Head.to_full_path(branch))
+ csmbranchchange = rwrepo.index.commit("changed branch to %s" % branch)
+ nsm.set_parent_commit(csmbranchchange)
+ # END for each branch to change
+
+ # Lets remove our tracking branch to simulate some changes
+ nsmmh = nsmm.head
+ assert nsmmh.ref.tracking_branch() is None # never set it up until now
+ assert not nsmmh.is_detached
+
+        # dry run does nothing
+ rm.update(recursive=False, dry_run=True, progress=prog)
+ assert nsmmh.ref.tracking_branch() is None
+
+ # the real thing does
+ rm.update(recursive=False, progress=prog)
+
+ assert nsmmh.ref.tracking_branch() is not None
+ assert not nsmmh.is_detached
+
+ # recursive update
+ # =================
+ # finally we recursively update a module, just to run the code at least once
+ # remove the module so that it has more work
+ assert len(nsm.children()) == 1
+ assert nsm.exists() and nsm.module_exists() and len(nsm.children()) == 1
+ # assure we pull locally only
+ nsmc = nsm.children()[0]
+ nsmc.config_writer().set_value('url', async_url)
+ rm.update(recursive=True, progress=prog, dry_run=True) # just to run the code
+ rm.update(recursive=True, progress=prog)
+
+ assert len(nsm.children()) == 1 and nsmc.module_exists()
+
diff --git a/git/test/test_tree.py b/git/test/test_tree.py
new file mode 100644
index 00000000..ec10e962
--- /dev/null
+++ b/git/test/test_tree.py
@@ -0,0 +1,144 @@
+# test_tree.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+from git.test.lib import *
+from git import *
+from git.objects.fun import (
+ traverse_tree_recursive,
+ traverse_trees_recursive
+ )
+from cStringIO import StringIO
+
+class TestTree(TestBase):
+
+ def test_serializable(self):
+ # tree at the given commit contains a submodule as well
+ roottree = self.rorepo.tree('6c1faef799095f3990e9970bc2cb10aa0221cf9c')
+ for item in roottree.traverse(ignore_self=False):
+ if item.type != Tree.type:
+ continue
+ # END skip non-trees
+ tree = item
+ # trees have no dict
+ self.failUnlessRaises(AttributeError, setattr, tree, 'someattr', 1)
+
+ orig_data = tree.data_stream.read()
+ orig_cache = tree._cache
+
+ stream = StringIO()
+ tree._serialize(stream)
+ assert stream.getvalue() == orig_data
+
+ stream.seek(0)
+ testtree = Tree(self.rorepo, Tree.NULL_BIN_SHA, 0, '')
+ testtree._deserialize(stream)
+ assert testtree._cache == orig_cache
+
+
+ # TEST CACHE MUTATOR
+ mod = testtree.cache
+ self.failUnlessRaises(ValueError, mod.add, "invalid sha", 0, "name")
+ self.failUnlessRaises(ValueError, mod.add, Tree.NULL_HEX_SHA, 0, "invalid mode")
+ self.failUnlessRaises(ValueError, mod.add, Tree.NULL_HEX_SHA, tree.mode, "invalid/name")
+
+ # add new item
+ name = "fake_dir"
+ mod.add(testtree.NULL_HEX_SHA, tree.mode, name)
+ assert name in testtree
+
+            # it's available in the tree immediately
+ assert isinstance(testtree[name], Tree)
+
+            # adding it again will not cause multiple of them to be present
+ cur_count = len(testtree)
+ mod.add(testtree.NULL_HEX_SHA, tree.mode, name)
+ assert len(testtree) == cur_count
+
+ # fails with a different sha - name exists
+ hexsha = "1"*40
+ self.failUnlessRaises(ValueError, mod.add, hexsha, tree.mode, name)
+
+ # force it - replace existing one
+ mod.add(hexsha, tree.mode, name, force=True)
+ assert testtree[name].hexsha == hexsha
+ assert len(testtree) == cur_count
+
+ # unchecked addition always works, even with invalid items
+ invalid_name = "hi/there"
+ mod.add_unchecked(hexsha, 0, invalid_name)
+ assert len(testtree) == cur_count + 1
+
+ del(mod[invalid_name])
+ assert len(testtree) == cur_count
+            # del again, it's fine
+ del(mod[invalid_name])
+
+ # have added one item, we are done
+ mod.set_done()
+ mod.set_done() # multiple times are okay
+
+ # serialize, its different now
+ stream = StringIO()
+ testtree._serialize(stream)
+ stream.seek(0)
+ assert stream.getvalue() != orig_data
+
+ # replaces cache, but we make sure of it
+ del(testtree._cache)
+ testtree._deserialize(stream)
+ assert name in testtree
+ assert invalid_name not in testtree
+ # END for each item in tree
+
+ def test_traverse(self):
+ root = self.rorepo.tree('0.1.6')
+ num_recursive = 0
+ all_items = list()
+ for obj in root.traverse():
+ if "/" in obj.path:
+ num_recursive += 1
+
+ assert isinstance(obj, (Blob, Tree))
+ all_items.append(obj)
+ # END for each object
+ assert all_items == root.list_traverse()
+
+ # limit recursion level to 0 - should be same as default iteration
+ assert all_items
+ assert 'CHANGES' in root
+ assert len(list(root)) == len(list(root.traverse(depth=1)))
+
+ # only choose trees
+ trees_only = lambda i,d: i.type == "tree"
+        trees = list(root.traverse(predicate=trees_only))
+ assert len(trees) == len(list( i for i in root.traverse() if trees_only(i,0) ))
+
+ # test prune
+ lib_folder = lambda t,d: t.path == "lib"
+        pruned_trees = list(root.traverse(predicate=trees_only, prune=lib_folder))
+ assert len(pruned_trees) < len(trees)
+
+ # trees and blobs
+ assert len(set(trees)|set(root.trees)) == len(trees)
+ assert len(set(b for b in root if isinstance(b, Blob)) | set(root.blobs)) == len( root.blobs )
+ subitem = trees[0][0]
+ assert "/" in subitem.path
+ assert subitem.name == os.path.basename(subitem.path)
+
+ # assure that at some point the traversed paths have a slash in them
+ found_slash = False
+ for item in root.traverse():
+ assert os.path.isabs(item.abspath)
+ if '/' in item.path:
+ found_slash = True
+ # END check for slash
+
+ # slashes in paths are supported as well
+ assert root[item.path] == item == root/item.path
+ # END for each item
+ assert found_slash
+
diff --git a/git/test/test_util.py b/git/test/test_util.py
new file mode 100644
index 00000000..e55a6d15
--- /dev/null
+++ b/git/test/test_util.py
@@ -0,0 +1,109 @@
+# test_util.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+import tempfile
+
+from git.test.lib import *
+from git.util import *
+from git.objects.util import *
+from git import *
+from git.cmd import dashify
+
+import time
+
+
+class TestUtils(TestBase):
+ def setup(self):
+ self.testdict = {
+ "string": "42",
+ "int": 42,
+ "array": [ 42 ],
+ }
+
+ def test_it_should_dashify(self):
+ assert_equal('this-is-my-argument', dashify('this_is_my_argument'))
+ assert_equal('foo', dashify('foo'))
+
+
+ def test_lock_file(self):
+ my_file = tempfile.mktemp()
+ lock_file = LockFile(my_file)
+ assert not lock_file._has_lock()
+ # release lock we don't have - fine
+ lock_file._release_lock()
+
+ # get lock
+ lock_file._obtain_lock_or_raise()
+ assert lock_file._has_lock()
+
+ # concurrent access
+ other_lock_file = LockFile(my_file)
+ assert not other_lock_file._has_lock()
+ self.failUnlessRaises(IOError, other_lock_file._obtain_lock_or_raise)
+
+ lock_file._release_lock()
+ assert not lock_file._has_lock()
+
+ other_lock_file._obtain_lock_or_raise()
+ self.failUnlessRaises(IOError, lock_file._obtain_lock_or_raise)
+
+ # auto-release on destruction
+ del(other_lock_file)
+ lock_file._obtain_lock_or_raise()
+ lock_file._release_lock()
+
+ def test_blocking_lock_file(self):
+ my_file = tempfile.mktemp()
+ lock_file = BlockingLockFile(my_file)
+ lock_file._obtain_lock()
+
+ # next one waits for the lock
+ start = time.time()
+ wait_time = 0.1
+ wait_lock = BlockingLockFile(my_file, 0.05, wait_time)
+ self.failUnlessRaises(IOError, wait_lock._obtain_lock)
+ elapsed = time.time() - start
+ assert elapsed <= wait_time + 0.02 # some extra time it may cost
+
+ def test_user_id(self):
+ assert '@' in get_user_id()
+
+ def test_parse_date(self):
+ # test all supported formats
+ def assert_rval(rval, veri_time, offset=0):
+ assert len(rval) == 2
+ assert isinstance(rval[0], int) and isinstance(rval[1], int)
+ assert rval[0] == veri_time
+ assert rval[1] == offset
+
+ # now that we are here, test our conversion functions as well
+ utctz = altz_to_utctz_str(offset)
+ assert isinstance(utctz, basestring)
+ assert utctz_to_altz(verify_utctz(utctz)) == offset
+ # END assert rval utility
+
+ rfc = ("Thu, 07 Apr 2005 22:13:11 +0000", 0)
+ iso = ("2005-04-07T22:13:11 -0200", 7200)
+ iso2 = ("2005-04-07 22:13:11 +0400", -14400)
+ iso3 = ("2005.04.07 22:13:11 -0000", 0)
+ alt = ("04/07/2005 22:13:11", 0)
+ alt2 = ("07.04.2005 22:13:11", 0)
+ veri_time = 1112904791 # the time this represents
+ for date, offset in (rfc, iso, iso2, iso3, alt, alt2):
+ assert_rval(parse_date(date), veri_time, offset)
+ # END for each date type
+
+ # and failure
+ self.failUnlessRaises(ValueError, parse_date, 'invalid format')
+ self.failUnlessRaises(ValueError, parse_date, '123456789 -02000')
+ self.failUnlessRaises(ValueError, parse_date, ' 123456789 -0200')
+
+ def test_actor(self):
+ for cr in (None, self.rorepo.config_reader()):
+ assert isinstance(Actor.committer(cr), Actor)
+ assert isinstance(Actor.author(cr), Actor)
+ #END assure config reader is handled
diff --git a/git/util.py b/git/util.py
new file mode 100644
index 00000000..8c0b6697
--- /dev/null
+++ b/git/util.py
@@ -0,0 +1,602 @@
+# util.py
+# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
+#
+# This module is part of GitPython and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import os
+import re
+import sys
+import time
+import platform
+import tempfile
+
+from gitdb.util import (
+ make_sha,
+ LockedFD,
+ file_contents_ro,
+ LazyMixin,
+ to_hex_sha,
+ to_bin_sha
+ )
+
+__all__ = ( "stream_copy", "join_path", "to_native_path_windows", "to_native_path_linux",
+ "join_path_native", "Stats", "IndexFileSHA1Writer", "Iterable", "IterableList",
+ "BlockingLockFile", "LockFile", 'Actor', 'get_user_id', 'assure_directory_exists',
+ 'RemoteProgress')
+
+#{ Utility Methods
+
+def stream_copy(source, destination, chunk_size=512*1024):
+ """Copy all data from the source stream into the destination stream in chunks
+ of size chunk_size
+
+ :return: amount of bytes written"""
+ br = 0
+ while True:
+ chunk = source.read(chunk_size)
+ destination.write(chunk)
+ br += len(chunk)
+ if len(chunk) < chunk_size:
+ break
+ # END reading output stream
+ return br
+
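+# A usage sketch (names are illustrative): copy a git object's data into a
+# file on disk; the return value is the number of bytes written:
+#
+#   bytes_written = stream_copy(blob.data_stream, open('/tmp/blob_copy', 'wb'))
+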
+def join_path(a, *p):
+ """Join path tokens together similar to os.path.join, but always use
+ '/' instead of possibly '\' on windows."""
+ path = a
+ for b in p:
+ if b.startswith('/'):
+ path += b[1:]
+ elif path == '' or path.endswith('/'):
+ path += b
+ else:
+ path += '/' + b
+ return path
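+
+# For example: join_path('refs', 'heads', 'master') returns
+# 'refs/heads/master', using '/' regardless of the platform.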
+
+def to_native_path_windows(path):
+ return path.replace('/','\\')
+
+def to_native_path_linux(path):
+ return path.replace('\\','/')
+
+if sys.platform.startswith('win'):
+ to_native_path = to_native_path_windows
+else:
+ # no need for any work on linux
+ def to_native_path_linux(path):
+ return path
+ to_native_path = to_native_path_linux
+
+def join_path_native(a, *p):
+ """
+    As join_path, but makes sure an OS native path is returned. This is only
+    needed on windows, to assure paths that use the native '\' separator"""
+ return to_native_path(join_path(a, *p))
+
+def assure_directory_exists(path, is_file=False):
+ """Assure that the directory pointed to by path exists.
+
+ :param is_file: If True, path is assumed to be a file and handled correctly.
+ Otherwise it must be a directory
+ :return: True if the directory was created, False if it already existed"""
+ if is_file:
+ path = os.path.dirname(path)
+ #END handle file
+ if not os.path.isdir(path):
+ os.makedirs(path)
+ return True
+ return False
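+
+# Sketch: with is_file=True, only the containing directory is created, e.g.
+#   assure_directory_exists('/tmp/a/b/file', is_file=True)  # creates /tmp/a/b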
+
+
+def get_user_id():
+ """:return: string identifying the currently active system user as name@node
+    :note: the username is looked up in the 'USER' and 'USERNAME' environment
+        variables, falling back to os.getlogin() where available"""
+ ukn = 'UNKNOWN'
+ username = os.environ.get('USER', os.environ.get('USERNAME', ukn))
+ if username == ukn and hasattr(os, 'getlogin'):
+ username = os.getlogin()
+ # END get username from login
+ return "%s@%s" % (username, platform.node())
+
+#} END utilities
+
+#{ Classes
+
+class RemoteProgress(object):
+ """
+ Handler providing an interface to parse progress information emitted by git-push
+ and git-fetch and to dispatch callbacks allowing subclasses to react to the progress.
+ """
+ _num_op_codes = 5
+ BEGIN, END, COUNTING, COMPRESSING, WRITING = [1 << x for x in range(_num_op_codes)]
+ STAGE_MASK = BEGIN|END
+ OP_MASK = ~STAGE_MASK
+
+ __slots__ = ("_cur_line", "_seen_ops")
+    re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
+    re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")
+
+ def __init__(self):
+ self._seen_ops = list()
+
+ def _parse_progress_line(self, line):
+ """Parse progress information from the given line as retrieved by git-push
+ or git-fetch
+
+ :return: list(line, ...) list of lines that could not be processed"""
+ # handle
+ # Counting objects: 4, done.
+ # Compressing objects: 50% (1/2) \rCompressing objects: 100% (2/2) \rCompressing objects: 100% (2/2), done.
+ self._cur_line = line
+ sub_lines = line.split('\r')
+ failed_lines = list()
+ for sline in sub_lines:
+            # find escape characters and cut them away - the regex will not work
+            # with them as they are control characters. As git might expect a tty,
+            # it will send them
+ last_valid_index = None
+ for i,c in enumerate(reversed(sline)):
+ if ord(c) < 32:
+                    # remember the position as a negative slice index
+ last_valid_index = -i-1
+ # END character was non-ascii
+ # END for each character in sline
+ if last_valid_index is not None:
+ sline = sline[:last_valid_index]
+ # END cut away invalid part
+ sline = sline.rstrip()
+
+ cur_count, max_count = None, None
+ match = self.re_op_relative.match(sline)
+ if match is None:
+ match = self.re_op_absolute.match(sline)
+
+ if not match:
+ self.line_dropped(sline)
+ failed_lines.append(sline)
+ continue
+ # END could not get match
+
+ op_code = 0
+ remote, op_name, percent, cur_count, max_count, message = match.groups()
+
+ # get operation id
+ if op_name == "Counting objects":
+ op_code |= self.COUNTING
+ elif op_name == "Compressing objects":
+ op_code |= self.COMPRESSING
+ elif op_name == "Writing objects":
+ op_code |= self.WRITING
+ else:
+ raise ValueError("Operation name %r unknown" % op_name)
+
+ # figure out stage
+ if op_code not in self._seen_ops:
+ self._seen_ops.append(op_code)
+ op_code |= self.BEGIN
+ # END begin opcode
+
+ if message is None:
+ message = ''
+ # END message handling
+
+ message = message.strip()
+ done_token = ', done.'
+ if message.endswith(done_token):
+ op_code |= self.END
+ message = message[:-len(done_token)]
+ # END end message handling
+
+ self.update(op_code, cur_count, max_count, message)
+ # END for each sub line
+ return failed_lines
+
+ def line_dropped(self, line):
+ """Called whenever a line could not be understood and was therefore dropped."""
+ pass
+
+ def update(self, op_code, cur_count, max_count=None, message=''):
+ """Called whenever the progress changes
+
+ :param op_code:
+            Integer that can be compared against Operation IDs and stage IDs.
+
+            Stage IDs are BEGIN and END. BEGIN and END will each be set only
+            once per Operation ID. Both may be set at once if only one progress
+            message was emitted due to the speed of the operation.
+            Between BEGIN and END, none of these flags will be set
+
+ Operation IDs are all held within the OP_MASK. Only one Operation ID will
+ be active per call.
+ :param cur_count: Current absolute count of items
+
+ :param max_count:
+ The maximum count of items we expect. It may be None in case there is
+ no maximum number of items or if it is (yet) unknown.
+
+ :param message:
+ In case of the 'WRITING' operation, it contains the amount of bytes
+ transferred. It may possibly be used for other purposes as well.
+
+ You may read the contents of the current line in self._cur_line"""
+ pass
+
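+# A minimal subclass sketch - illustrative only, not part of this module's
+# API; the class name is hypothetical. It shows how the op_code bit flags
+# documented in update() are meant to be decoded using the masks above.
+class _PrintProgress(RemoteProgress):
+    """Prints one line per progress update, for demonstration purposes"""
+
+    def update(self, op_code, cur_count, max_count=None, message=''):
+        op_names = {self.COUNTING: "Counting", self.COMPRESSING: "Compressing",
+                    self.WRITING: "Writing"}
+        stage = ''
+        if op_code & self.BEGIN:
+            stage += ' [begin]'
+        if op_code & self.END:
+            stage += ' [end]'
+        # OP_MASK isolates the operation bit, STAGE_MASK the begin/end bits
+        print "%s: %s/%s%s %s" % (op_names.get(op_code & self.OP_MASK, '?'),
+                                  cur_count, max_count, stage, message)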
+
+class Actor(object):
+ """Actors hold information about a person acting on the repository. They
+ can be committers and authors or anything with a name and an email as
+ mentioned in the git log entries."""
+ # PRECOMPILED REGEX
+ name_only_regex = re.compile( r'<(.+)>' )
+ name_email_regex = re.compile( r'(.*) <(.+?)>' )
+
+ # ENVIRONMENT VARIABLES
+ # read when creating new commits
+ env_author_name = "GIT_AUTHOR_NAME"
+ env_author_email = "GIT_AUTHOR_EMAIL"
+ env_committer_name = "GIT_COMMITTER_NAME"
+ env_committer_email = "GIT_COMMITTER_EMAIL"
+
+ # CONFIGURATION KEYS
+ conf_name = 'name'
+ conf_email = 'email'
+
+ __slots__ = ('name', 'email')
+
+ def __init__(self, name, email):
+ self.name = name
+ self.email = email
+
+ def __eq__(self, other):
+ return self.name == other.name and self.email == other.email
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash((self.name, self.email))
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return '<git.Actor "%s <%s>">' % (self.name, self.email)
+
+ @classmethod
+ def _from_string(cls, string):
+ """Create an Actor from a string.
+        :param string: the string, expected to be in the regular git format
+
+ John Doe <jdoe@example.com>
+
+ :return: Actor """
+ m = cls.name_email_regex.search(string)
+ if m:
+ name, email = m.groups()
+ return Actor(name, email)
+ else:
+ m = cls.name_only_regex.search(string)
+ if m:
+ return Actor(m.group(1), None)
+ else:
+ # assume best and use the whole string as name
+ return Actor(string, None)
+ # END special case name
+ # END handle name/email matching
+
+ @classmethod
+ def _main_actor(cls, env_name, env_email, config_reader=None):
+ actor = Actor('', '')
+ default_email = get_user_id()
+ default_name = default_email.split('@')[0]
+
+ for attr, evar, cvar, default in (('name', env_name, cls.conf_name, default_name),
+ ('email', env_email, cls.conf_email, default_email)):
+ try:
+ setattr(actor, attr, os.environ[evar])
+ except KeyError:
+ if config_reader is not None:
+ setattr(actor, attr, config_reader.get_value('user', cvar, default))
+ #END config-reader handling
+ if not getattr(actor, attr):
+ setattr(actor, attr, default)
+ #END handle name
+ #END for each item to retrieve
+ return actor
+
+
+ @classmethod
+ def committer(cls, config_reader=None):
+ """
+ :return: Actor instance corresponding to the configured committer. It behaves
+            similarly to the git implementation, such that the environment will override
+ configuration values of config_reader. If no value is set at all, it will be
+ generated
+ :param config_reader: ConfigReader to use to retrieve the values from in case
+ they are not set in the environment"""
+ return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)
+
+ @classmethod
+ def author(cls, config_reader=None):
+ """Same as committer(), but defines the main author. It may be specified in the environment,
+ but defaults to the committer"""
+ return cls._main_actor(cls.env_author_name, cls.env_author_email, config_reader)
+
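+# Usage sketch: resolve the effective committer, with environment variables
+# taking precedence over repository configuration (repo is illustrative):
+#
+#   committer = Actor.committer(repo.config_reader())
+#   print "%s <%s>" % (committer.name, committer.email)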
+
+class Stats(object):
+ """
+ Represents stat information as presented by git at the end of a merge. It is
+ created from the output of a diff operation.
+
+ ``Example``::
+
+ c = Commit( sha1 )
+ s = c.stats
+ s.total # full-stat-dict
+ s.files # dict( filepath : stat-dict )
+
+ ``stat-dict``
+
+ A dictionary with the following keys and values::
+
+ deletions = number of deleted lines as int
+ insertions = number of inserted lines as int
+ lines = total number of lines changed as int, or deletions + insertions
+
+ ``full-stat-dict``
+
+ In addition to the items in the stat-dict, it features additional information::
+
+ files = number of changed files as int"""
+ __slots__ = ("total", "files")
+
+ def __init__(self, total, files):
+ self.total = total
+ self.files = files
+
+ @classmethod
+ def _list_from_string(cls, repo, text):
+        """Create a Stats object from output retrieved by git-diff --numstat.
+
+        :return: git.Stats"""
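+        # Input lines are `git diff --numstat` output: tab separated counts,
+        # with '-' in place of the numbers for binary files, e.g.:
+        #
+        #   29      18      a.txt
+        #   0       5       b.txt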
+ hsh = {'total': {'insertions': 0, 'deletions': 0, 'lines': 0, 'files': 0}, 'files': dict()}
+ for line in text.splitlines():
+ (raw_insertions, raw_deletions, filename) = line.split("\t")
+ insertions = raw_insertions != '-' and int(raw_insertions) or 0
+ deletions = raw_deletions != '-' and int(raw_deletions) or 0
+ hsh['total']['insertions'] += insertions
+ hsh['total']['deletions'] += deletions
+ hsh['total']['lines'] += insertions + deletions
+ hsh['total']['files'] += 1
+ hsh['files'][filename.strip()] = {'insertions': insertions,
+ 'deletions': deletions,
+ 'lines': insertions + deletions}
+ return Stats(hsh['total'], hsh['files'])
+
+
+class IndexFileSHA1Writer(object):
+ """Wrapper around a file-like object that remembers the SHA1 of
+    the data written to it. It will write the sha when the stream is closed
+    or when asked for explicitly using write_sha.
+
+    Only useful for the index file
+
+ :note: Based on the dulwich project"""
+ __slots__ = ("f", "sha1")
+
+ def __init__(self, f):
+ self.f = f
+ self.sha1 = make_sha("")
+
+ def write(self, data):
+ self.sha1.update(data)
+ return self.f.write(data)
+
+ def write_sha(self):
+ sha = self.sha1.digest()
+ self.f.write(sha)
+ return sha
+
+ def close(self):
+ sha = self.write_sha()
+ self.f.close()
+ return sha
+
+ def tell(self):
+ return self.f.tell()
+
+
+class LockFile(object):
+ """Provides methods to obtain, check for, and release a file based lock which
+ should be used to handle concurrent access to the same file.
+
+ As we are a utility class to be derived from, we only use protected methods.
+
+ Locks will automatically be released on destruction"""
+ __slots__ = ("_file_path", "_owns_lock")
+
+ def __init__(self, file_path):
+ self._file_path = file_path
+ self._owns_lock = False
+
+ def __del__(self):
+ self._release_lock()
+
+ def _lock_file_path(self):
+ """:return: Path to lockfile"""
+ return "%s.lock" % (self._file_path)
+
+ def _has_lock(self):
+        """:return: True if we own the lock"""
+ if not self._owns_lock:
+ return False
+
+ return True
+
+ def _obtain_lock_or_raise(self):
+ """Create a lock file as flag for other instances, mark our instance as lock-holder
+
+ :raise IOError: if a lock was already present or a lock file could not be written"""
+ if self._has_lock():
+ return
+ lock_file = self._lock_file_path()
+ if os.path.isfile(lock_file):
+ raise IOError("Lock for file %r did already exist, delete %r in case the lock is illegal" % (self._file_path, lock_file))
+
+ try:
+ fd = os.open(lock_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0)
+ os.close(fd)
+ except OSError, e:
+ raise IOError(str(e))
+
+ self._owns_lock = True
+
+ def _obtain_lock(self):
+ """The default implementation will raise if a lock cannot be obtained.
+ Subclasses may override this method to provide a different implementation"""
+ return self._obtain_lock_or_raise()
+
+ def _release_lock(self):
+ """Release our lock if we have one"""
+ if not self._has_lock():
+ return
+
+ # if someone removed our file beforehand, let's just flag this issue
+ # instead of failing, to make it more usable.
+ lfp = self._lock_file_path()
+ try:
+ # on windows, the file needs write permissions to be removable
+ if os.name == 'nt':
+ os.chmod(lfp, 0777)
+ # END handle win32
+ os.remove(lfp)
+ except OSError:
+ pass
+ self._owns_lock = False
+
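+# Usage sketch (illustrative only): the lock is represented by an exclusively
+# created `<file_path>.lock` file; releasing the lock removes it. The path is
+# hypothetical.
+#
+#   lock = LockFile("/tmp/config_file")
+#   lock._obtain_lock()     # raises IOError if /tmp/config_file.lock exists
+#   try:
+#       pass                # ... safely modify /tmp/config_file ...
+#   finally:
+#       lock._release_lock()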
+
+class BlockingLockFile(LockFile):
+ """The lock file will block until a lock could be obtained, or fail after
+ a specified timeout.
+
+ :note: If the directory containing the lock was removed, an exception will
+ be raised during the blocking period, preventing hangs as the lock
+ can never be obtained."""
+ __slots__ = ("_check_interval", "_max_block_time")
+ def __init__(self, file_path, check_interval_s=0.3, max_block_time_s=sys.maxint):
+ """Configure the instance
+
+ :param check_interval_s:
+ Period of time to sleep between subsequent checks for the lock
+
+ :param max_block_time_s:
+ Maximum number of seconds we may block waiting for the lock.
+ By default, it waits a nearly unlimited amount of time"""
+ super(BlockingLockFile, self).__init__(file_path)
+ self._check_interval = check_interval_s
+ self._max_block_time = max_block_time_s
+
+ def _obtain_lock(self):
+ """This method blocks until it obtained the lock, or raises IOError if
+ it ran out of time or if the parent directory was not available anymore.
+ If this method returns, you are guranteed to own the lock"""
+ starttime = time.time()
+ maxtime = starttime + float(self._max_block_time)
+ while True:
+ try:
+ super(BlockingLockFile, self)._obtain_lock()
+ except IOError:
+ # sanity check: if the directory leading to the lockfile is not
+ # readable anymore, raise an exception
+ curtime = time.time()
+ if not os.path.isdir(os.path.dirname(self._lock_file_path())):
+ msg = "Directory containing the lockfile %r was not readable anymore after waiting %g seconds" % (self._lock_file_path(), curtime - starttime)
+ raise IOError(msg)
+ # END handle missing directory
+
+ if curtime >= maxtime:
+ msg = "Waited %g seconds for lock at %r" % ( maxtime - starttime, self._lock_file_path())
+ raise IOError(msg)
+ # END abort if we wait too long
+ time.sleep(self._check_interval)
+ else:
+ break
+ # END endless loop
+
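+# Usage sketch (illustrative only): instead of raising immediately when the
+# lock is taken, this variant polls every check_interval_s seconds, here for
+# at most two seconds before giving up. The path is hypothetical.
+#
+#   lock = BlockingLockFile("/tmp/config_file", check_interval_s=0.05, max_block_time_s=2)
+#   lock._obtain_lock()     # raises IOError only once the timeout has expired
+#   lock._release_lock()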
+
+class IterableList(list):
+ """
+ List of iterable objects allowing an object to be queried by id or by named index::
+
+ heads = repo.heads
+ heads.master
+ heads['master']
+ heads[0]
+
+ It requires an id_attribute name to be set which will be queried from its
+ contained items to provide a means for comparison.
+
+ A prefix can be specified in case the ids returned by the items always
+ contain a prefix that does not matter to the user, so that it can be
+ left out when querying."""
+ __slots__ = ('_id_attr', '_prefix')
+
+ def __new__(cls, id_attr, prefix=''):
+ return super(IterableList, cls).__new__(cls)
+
+ def __init__(self, id_attr, prefix=''):
+ if not isinstance(id_attr, basestring):
+ raise ValueError("First parameter must be a string identifying the name-property. Extend the list after initialization")
+ # END help debugging !
+ self._id_attr = id_attr
+ self._prefix = prefix
+
+ def __getattr__(self, attr):
+ name = self._prefix + attr
+ for item in self:
+ if getattr(item, self._id_attr) == name:
+ return item
+ # END for each item
+ # fall back to the default lookup using the original attribute name
+ return list.__getattribute__(self, attr)
+
+ def __getitem__(self, index):
+ if isinstance(index, int):
+ return list.__getitem__(self, index)
+
+ try:
+ return getattr(self, index)
+ except AttributeError:
+ raise IndexError( "No item found with id %r" % (self._prefix + index) )
+
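+# Usage sketch (illustrative only): items are matched through their id attribute,
+# with an optional common prefix stripped from user queries. Ref is hypothetical.
+#
+#   class Ref(object):
+#       def __init__(self, path):
+#           self.path = path
+#   heads = IterableList('path', prefix='refs/heads/')
+#   heads.append(Ref('refs/heads/master'))
+#   heads.master is heads['master'] is heads[0]     # all three access styles work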
+
+class Iterable(object):
+ """Defines an interface for iterable items which is to assure a uniform
+ way to retrieve and iterate items within the git repository"""
+ __slots__ = tuple()
+ _id_attribute_ = "attribute that most suitably identifies your instance"
+
+ @classmethod
+ def list_items(cls, repo, *args, **kwargs):
+ """
+ Find all items of this type - subclasses can specify args and kwargs differently.
+ If no args are given, subclasses are obliged to return all items if no additional
+ arguments arg given.
+
+ :note: Favor the iter_items method as it will
+
+ :return:list(Item,...) list of item instances"""
+ out_list = IterableList( cls._id_attribute_ )
+ out_list.extend(cls.iter_items(repo, *args, **kwargs))
+ return out_list
+
+ @classmethod
+ def iter_items(cls, repo, *args, **kwargs):
+ """For more information about the arguments, see list_items
+ :return: iterator yielding Items"""
+ raise NotImplementedError("To be implemented by Subclass")
+
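+# Implementation sketch (illustrative only): a minimal subclass only needs to
+# define _id_attribute_ and iter_items; list_items then works automatically.
+# The Release class and its data below are hypothetical.
+#
+#   class Release(Iterable):
+#       _id_attribute_ = 'name'
+#       def __init__(self, name):
+#           self.name = name
+#       @classmethod
+#       def iter_items(cls, repo, *args, **kwargs):
+#           return iter([cls('v0.1'), cls('v0.2')])
+#   Release.list_items(None)['v0.1'].name   # -> 'v0.1'
+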
+#} END classes