summaryrefslogtreecommitdiff
path: root/git/test/db/py
diff options
context:
space:
mode:
authorSebastian Thiel <byronimo@gmail.com>2014-02-09 21:23:51 +0100
committerSebastian Thiel <byronimo@gmail.com>2014-02-09 21:23:51 +0100
commit15dd52cd578691930cea194e003fa80dd02f40eb (patch)
treeef4c9c5f705dd1ca743b7ceefe5b91b11ad15010 /git/test/db/py
parent660bdca125aa9dcca7a7730535bec433edb8ba02 (diff)
downloadgitpython-15dd52cd578691930cea194e003fa80dd02f40eb.tar.gz
tabs to 4 spaces - overall state of this branch is desolate, but fixable. Needs plenty of work
Diffstat (limited to 'git/test/db/py')
-rw-r--r--git/test/db/py/test_base.py12
-rw-r--r--git/test/db/py/test_git.py74
-rw-r--r--git/test/db/py/test_loose.py54
-rw-r--r--git/test/db/py/test_mem.py42
-rw-r--r--git/test/db/py/test_pack.py122
-rw-r--r--git/test/db/py/test_ref.py102
6 files changed, 203 insertions, 203 deletions
diff --git a/git/test/db/py/test_base.py b/git/test/db/py/test_base.py
index 6b06bbe9..5d076bb2 100644
--- a/git/test/db/py/test_base.py
+++ b/git/test/db/py/test_base.py
@@ -8,9 +8,9 @@ from git.test.db.base import RepoBase
from git.db.complex import PureCompatibilityGitDB
class TestPyDBBase(RepoBase):
-
- RepoCls = PureCompatibilityGitDB
-
- def test_basics(self):
- pass
-
+
+ RepoCls = PureCompatibilityGitDB
+
+ def test_basics(self):
+ pass
+
diff --git a/git/test/db/py/test_git.py b/git/test/db/py/test_git.py
index ecaa5c8f..4f5b5fb5 100644
--- a/git/test/db/py/test_git.py
+++ b/git/test/db/py/test_git.py
@@ -12,40 +12,40 @@ from git.util import hex_to_bin, bin_to_hex
import os
class TestGitDB(TestDBBase):
- needs_ro_repo = False
-
- def test_reading(self):
- gdb = PureGitODB(os.path.join(rorepo_dir(), 'objects'))
-
- # we have packs and loose objects, alternates doesn't necessarily exist
- assert 1 < len(gdb.databases()) < 4
-
- # access should be possible
- git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
- assert isinstance(gdb.info(git_sha), OInfo)
- assert isinstance(gdb.stream(git_sha), OStream)
- assert gdb.size() > 200
- sha_list = list(gdb.sha_iter())
- assert len(sha_list) == gdb.size()
-
-
- # This is actually a test for compound functionality, but it doesn't
- # have a separate test module
- # test partial shas
- # this one is uneven and quite short
- assert gdb.partial_to_complete_sha_hex('5aebcd') == hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
-
- # mix even/uneven hexshas
- for i, binsha in enumerate(sha_list[:50]):
- assert gdb.partial_to_complete_sha_hex(bin_to_hex(binsha)[:8-(i%2)]) == binsha
- # END for each sha
-
- self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, "0000")
-
- @with_rw_directory
- def test_writing(self, path):
- gdb = PureGitODB(path)
-
- # it's possible to write objects
- self._assert_object_writing(gdb)
- self._assert_object_writing_async(gdb)
+ needs_ro_repo = False
+
+ def test_reading(self):
+ gdb = PureGitODB(os.path.join(rorepo_dir(), 'objects'))
+
+ # we have packs and loose objects, alternates doesn't necessarily exist
+ assert 1 < len(gdb.databases()) < 4
+
+ # access should be possible
+ git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+ assert isinstance(gdb.info(git_sha), OInfo)
+ assert isinstance(gdb.stream(git_sha), OStream)
+ assert gdb.size() > 200
+ sha_list = list(gdb.sha_iter())
+ assert len(sha_list) == gdb.size()
+
+
+ # This is actually a test for compound functionality, but it doesn't
+ # have a separate test module
+ # test partial shas
+ # this one is uneven and quite short
+ assert gdb.partial_to_complete_sha_hex('5aebcd') == hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+
+ # mix even/uneven hexshas
+ for i, binsha in enumerate(sha_list[:50]):
+ assert gdb.partial_to_complete_sha_hex(bin_to_hex(binsha)[:8-(i%2)]) == binsha
+ # END for each sha
+
+ self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, "0000")
+
+ @with_rw_directory
+ def test_writing(self, path):
+ gdb = PureGitODB(path)
+
+ # it's possible to write objects
+ self._assert_object_writing(gdb)
+ self._assert_object_writing_async(gdb)
diff --git a/git/test/db/py/test_loose.py b/git/test/db/py/test_loose.py
index 0c9b4831..cfb0ca3a 100644
--- a/git/test/db/py/test_loose.py
+++ b/git/test/db/py/test_loose.py
@@ -6,31 +6,31 @@ from git.test.db.lib import TestDBBase, with_rw_directory
from git.db.py.loose import PureLooseObjectODB
from git.exc import BadObject
from git.util import bin_to_hex
-
+
class TestLooseDB(TestDBBase):
-
- needs_ro_repo = False
-
- @with_rw_directory
- def test_basics(self, path):
- ldb = PureLooseObjectODB(path)
-
- # write data
- self._assert_object_writing(ldb)
- self._assert_object_writing_async(ldb)
-
- # verify sha iteration and size
- shas = list(ldb.sha_iter())
- assert shas and len(shas[0]) == 20
-
- assert len(shas) == ldb.size()
-
- # verify find short object
- long_sha = bin_to_hex(shas[-1])
- for short_sha in (long_sha[:20], long_sha[:5]):
- assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
- # END for each sha
-
- self.failUnlessRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
- # raises if no object could be found
-
+
+ needs_ro_repo = False
+
+ @with_rw_directory
+ def test_basics(self, path):
+ ldb = PureLooseObjectODB(path)
+
+ # write data
+ self._assert_object_writing(ldb)
+ self._assert_object_writing_async(ldb)
+
+ # verify sha iteration and size
+ shas = list(ldb.sha_iter())
+ assert shas and len(shas[0]) == 20
+
+ assert len(shas) == ldb.size()
+
+ # verify find short object
+ long_sha = bin_to_hex(shas[-1])
+ for short_sha in (long_sha[:20], long_sha[:5]):
+ assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
+ # END for each sha
+
+ self.failUnlessRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
+ # raises if no object could be found
+
diff --git a/git/test/db/py/test_mem.py b/git/test/db/py/test_mem.py
index bc98dc56..bb879554 100644
--- a/git/test/db/py/test_mem.py
+++ b/git/test/db/py/test_mem.py
@@ -5,26 +5,26 @@
from git.test.db.lib import TestDBBase, with_rw_directory
from git.db.py.mem import PureMemoryDB
from git.db.py.loose import PureLooseObjectODB
-
+
class TestPureMemoryDB(TestDBBase):
-
- needs_ro_repo = False
+
+ needs_ro_repo = False
- @with_rw_directory
- def test_writing(self, path):
- mdb = PureMemoryDB()
-
- # write data
- self._assert_object_writing_simple(mdb)
-
- # test stream copy
- ldb = PureLooseObjectODB(path)
- assert ldb.size() == 0
- num_streams_copied = mdb.stream_copy(mdb.sha_iter(), ldb)
- assert num_streams_copied == mdb.size()
-
- assert ldb.size() == mdb.size()
- for sha in mdb.sha_iter():
- assert ldb.has_object(sha)
- assert ldb.stream(sha).read() == mdb.stream(sha).read()
- # END verify objects were copied and are equal
+ @with_rw_directory
+ def test_writing(self, path):
+ mdb = PureMemoryDB()
+
+ # write data
+ self._assert_object_writing_simple(mdb)
+
+ # test stream copy
+ ldb = PureLooseObjectODB(path)
+ assert ldb.size() == 0
+ num_streams_copied = mdb.stream_copy(mdb.sha_iter(), ldb)
+ assert num_streams_copied == mdb.size()
+
+ assert ldb.size() == mdb.size()
+ for sha in mdb.sha_iter():
+ assert ldb.has_object(sha)
+ assert ldb.stream(sha).read() == mdb.stream(sha).read()
+ # END verify objects were copied and are equal
diff --git a/git/test/db/py/test_pack.py b/git/test/db/py/test_pack.py
index 5043f446..54dc2e2c 100644
--- a/git/test/db/py/test_pack.py
+++ b/git/test/db/py/test_pack.py
@@ -13,64 +13,64 @@ import os
import random
class TestPackDB(TestDBBase):
-
- needs_ro_repo = False
-
- @with_packs_rw
- def test_writing(self, path):
- pdb = PurePackedODB(path)
-
- # on demand, we init our pack cache
- num_packs = len(pdb.entities())
- assert num_packs
- assert pdb._st_mtime != 0
-
- # test pack directory changed:
- # packs removed - rename a file, should affect the glob
- pack_path = pdb.entities()[0].pack().path()
- new_pack_path = pack_path + "renamed"
- os.rename(pack_path, new_pack_path)
-
- pdb.update_cache(force=True)
- assert len(pdb.entities()) == num_packs - 1
-
- # packs added
- os.rename(new_pack_path, pack_path)
- pdb.update_cache(force=True)
- assert len(pdb.entities()) == num_packs
-
- # bang on the cache
- # access the Entities directly, as there is no iteration interface
- # yet ( or required for now )
- sha_list = list(pdb.sha_iter())
- assert len(sha_list) == pdb.size()
-
- # hit all packs in random order
- random.shuffle(sha_list)
-
- for sha in sha_list:
- info = pdb.info(sha)
- stream = pdb.stream(sha)
- # END for each sha to query
-
-
- # test short finding - be a bit more brutal here
- max_bytes = 19
- min_bytes = 2
- num_ambiguous = 0
- for i, sha in enumerate(sha_list):
- short_sha = sha[:max((i % max_bytes), min_bytes)]
- try:
- assert pdb.partial_to_complete_sha(short_sha, len(short_sha)*2) == sha
- except AmbiguousObjectName:
- num_ambiguous += 1
- pass # valid, we can have short objects
- # END exception handling
- # END for each sha to find
-
- # we should have at least one ambiguous, considering the small sizes
- # but in our pack, there is no ambiguous ...
- # assert num_ambiguous
-
- # non-existing
- self.failUnlessRaises(BadObject, pdb.partial_to_complete_sha, "\0\0", 4)
+
+ needs_ro_repo = False
+
+ @with_packs_rw
+ def test_writing(self, path):
+ pdb = PurePackedODB(path)
+
+ # on demand, we init our pack cache
+ num_packs = len(pdb.entities())
+ assert num_packs
+ assert pdb._st_mtime != 0
+
+ # test pack directory changed:
+ # packs removed - rename a file, should affect the glob
+ pack_path = pdb.entities()[0].pack().path()
+ new_pack_path = pack_path + "renamed"
+ os.rename(pack_path, new_pack_path)
+
+ pdb.update_cache(force=True)
+ assert len(pdb.entities()) == num_packs - 1
+
+ # packs added
+ os.rename(new_pack_path, pack_path)
+ pdb.update_cache(force=True)
+ assert len(pdb.entities()) == num_packs
+
+ # bang on the cache
+ # access the Entities directly, as there is no iteration interface
+ # yet ( or required for now )
+ sha_list = list(pdb.sha_iter())
+ assert len(sha_list) == pdb.size()
+
+ # hit all packs in random order
+ random.shuffle(sha_list)
+
+ for sha in sha_list:
+ info = pdb.info(sha)
+ stream = pdb.stream(sha)
+ # END for each sha to query
+
+
+ # test short finding - be a bit more brutal here
+ max_bytes = 19
+ min_bytes = 2
+ num_ambiguous = 0
+ for i, sha in enumerate(sha_list):
+ short_sha = sha[:max((i % max_bytes), min_bytes)]
+ try:
+ assert pdb.partial_to_complete_sha(short_sha, len(short_sha)*2) == sha
+ except AmbiguousObjectName:
+ num_ambiguous += 1
+ pass # valid, we can have short objects
+ # END exception handling
+ # END for each sha to find
+
+ # we should have at least one ambiguous, considering the small sizes
+ # but in our pack, there is no ambiguous ...
+ # assert num_ambiguous
+
+ # non-existing
+ self.failUnlessRaises(BadObject, pdb.partial_to_complete_sha, "\0\0", 4)
diff --git a/git/test/db/py/test_ref.py b/git/test/db/py/test_ref.py
index c5374dc9..dfaf9644 100644
--- a/git/test/db/py/test_ref.py
+++ b/git/test/db/py/test_ref.py
@@ -6,57 +6,57 @@ from git.test.db.lib import *
from git.db.py.ref import PureReferenceDB
from git.util import (
- NULL_BIN_SHA,
- hex_to_bin
- )
+ NULL_BIN_SHA,
+ hex_to_bin
+ )
import os
-
+
class TestPureReferenceDB(TestDBBase):
-
- needs_ro_repo = False
-
- def make_alt_file(self, alt_path, alt_list):
- """Create an alternates file which contains the given alternates.
- The list can be empty"""
- alt_file = open(alt_path, "wb")
- for alt in alt_list:
- alt_file.write(alt + "\n")
- alt_file.close()
-
- @with_rw_directory
- def test_writing(self, path):
- NULL_BIN_SHA = '\0' * 20
-
- alt_path = os.path.join(path, 'alternates')
- rdb = PureReferenceDB(alt_path)
- assert len(rdb.databases()) == 0
- assert rdb.size() == 0
- assert len(list(rdb.sha_iter())) == 0
-
- # try empty, non-existing
- assert not rdb.has_object(NULL_BIN_SHA)
-
-
- # setup alternate file
- # add two, one is invalid
- own_repo_path = fixture_path('../../../.git/objects') # use own repo
- self.make_alt_file(alt_path, [own_repo_path, "invalid/path"])
- rdb.update_cache()
- assert len(rdb.databases()) == 1
-
- # we should now find a default revision of ours
- git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
- assert rdb.has_object(git_sha)
-
- # remove valid
- self.make_alt_file(alt_path, ["just/one/invalid/path"])
- rdb.update_cache()
- assert len(rdb.databases()) == 0
-
- # add valid
- self.make_alt_file(alt_path, [own_repo_path])
- rdb.update_cache()
- assert len(rdb.databases()) == 1
-
-
+
+ needs_ro_repo = False
+
+ def make_alt_file(self, alt_path, alt_list):
+ """Create an alternates file which contains the given alternates.
+ The list can be empty"""
+ alt_file = open(alt_path, "wb")
+ for alt in alt_list:
+ alt_file.write(alt + "\n")
+ alt_file.close()
+
+ @with_rw_directory
+ def test_writing(self, path):
+ NULL_BIN_SHA = '\0' * 20
+
+ alt_path = os.path.join(path, 'alternates')
+ rdb = PureReferenceDB(alt_path)
+ assert len(rdb.databases()) == 0
+ assert rdb.size() == 0
+ assert len(list(rdb.sha_iter())) == 0
+
+ # try empty, non-existing
+ assert not rdb.has_object(NULL_BIN_SHA)
+
+
+ # setup alternate file
+ # add two, one is invalid
+ own_repo_path = fixture_path('../../../.git/objects') # use own repo
+ self.make_alt_file(alt_path, [own_repo_path, "invalid/path"])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 1
+
+ # we should now find a default revision of ours
+ git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+ assert rdb.has_object(git_sha)
+
+ # remove valid
+ self.make_alt_file(alt_path, ["just/one/invalid/path"])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 0
+
+ # add valid
+ self.make_alt_file(alt_path, [own_repo_path])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 1
+
+