1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
|
"""Contains implementations of database retrieveing objects"""
import os
from git.errors import InvalidDBRoot
from git.utils import IndexFileSHA1Writer
from utils import (
to_hex_sha,
exists,
hex_to_bin,
FDCompressedSha1Writer,
isdir,
mkdir,
rename,
dirname,
join
)
import tempfile
class iObjectDBR(object):
    """Defines an interface for object database lookup.
    Objects are identified either by hex-sha (40 bytes) or
    by sha (20 bytes)"""
    __slots__ = tuple()

    def __contains__(self, sha):
        # Delegate to has_object so ``sha in db`` works. The previous
        # implementation returned ``self.has_obj``, an attribute that does
        # not exist (the method is named has_object) and was never called,
        # so any containment test raised AttributeError.
        return self.has_object(sha)

    #{ Query Interface
    def has_object(self, sha):
        """
        :return: True if the object identified by the given 40 byte hexsha or 20 bytes
            binary sha is contained in the database
        :param sha: 40 bytes hexsha or 20 bytes binary sha"""
        raise NotImplementedError("To be implemented in subclass")

    def object(self, sha):
        """
        :return: tuple(type_string, size_in_bytes, stream) a tuple with object
            information including its type, its size as well as a stream from which its
            contents can be read
        :param sha: 40 bytes hexsha or 20 bytes binary sha """
        raise NotImplementedError("To be implemented in subclass")

    def object_info(self, sha):
        """
        :return: tuple(type_string, size_in_bytes) tuple with the object's type
            string as well as its size in bytes
        :param sha: 40 bytes hexsha or 20 bytes binary sha"""
        raise NotImplementedError("To be implemented in subclass")
    #} END query interface
class iObjectDBW(object):
    """Defines an interface to create objects in the database"""
    __slots__ = tuple()

    #{ Edit Interface
    def to_object(self, type, size, stream, dry_run=False, sha_as_hex=True):
        """Create a new object in the database
        :return: the sha identifying the object in the database
        :param type: type string identifying the object
        :param size: size of the data to read from stream
        :param stream: stream providing the data
        :param dry_run: if True, the object database will not actually be changed
        :param sha_as_hex: if True, the returned sha identifying the object will be
            hex encoded, not binary"""
        raise NotImplementedError("To be implemented in subclass")

    def to_objects(self, iter_info, dry_run=False, sha_as_hex=True, max_threads=0):
        """Create multiple new objects in the database
        :return: sequence of shas identifying the created objects in the order in which
            they were given.
        :param iter_info: iterable yielding tuples containing the type_string,
            size_in_bytes and the stream with the content data.
        :param dry_run: see ``to_object``
        :param sha_as_hex: see ``to_object``
        :param max_threads: if < 1, any number of threads may be started while processing
            the request, otherwise the given number of threads will be started."""
        # Trivial serial implementation delegating each item to ``to_object``.
        # TODO: add configuration to the class to determine whether we may
        # actually use multiple threads, default False of course.
        shas = list()
        for args in iter_info:
            shas.append(self.to_object(*args, dry_run=dry_run, sha_as_hex=sha_as_hex))
        return shas
    #} END edit interface
class FileDBBase(object):
    """Base class offering access to files below a fixed root directory,
    the foundation for databases that map hexshas onto on-disk objects."""
    __slots__ = ('_root_path', )

    def __init__(self, root_path):
        """Set up this instance to operate relative to ``root_path``.
        :param root_path: directory under which all database files live
        :raise InvalidDBRoot: if ``root_path`` is not an existing directory
        :note: only basic accessibility is checked here; subclasses must
            verify that the directory actually holds the structure they need"""
        if os.path.isdir(root_path):
            self._root_path = root_path
        else:
            raise InvalidDBRoot(root_path)

    #{ Interface
    def root_path(self):
        """:return: path at which this db operates"""
        return self._root_path

    def db_path(self, rela_path):
        """
        :return: ``rela_path`` resolved against the database root, allowing
            potential access to data files"""
        absolute = join(self._root_path, rela_path)
        return absolute
    #} END interface

    #{ Utiltities
    #} END utilities
class LooseObjectDB(FileDBBase, iObjectDBR, iObjectDBW):
    """A database which operates on loose object files"""
    __slots__ = ('_hexsha_to_file', )

    # CONFIGURATION
    # chunks in which data will be copied between streams
    stream_chunk_size = 1000*1000

    def __init__(self, root_path):
        """Initialize the database at ``root_path`` with an empty
        hexsha -> object-file-path cache."""
        super(LooseObjectDB, self).__init__(root_path)
        # maps hexsha strings to the absolute path of their object file;
        # filled lazily by has_object
        self._hexsha_to_file = dict()

    #{ Interface
    def hexsha_to_object_path(self, hexsha):
        """
        :return: path at which the object with the given hexsha would be stored,
        relative to the database root"""
        # fan-out layout: first two hex characters name the directory,
        # the remaining characters name the file
        return join(hexsha[:2], hexsha[2:])
    #} END interface

    def has_object(self, sha):
        """:return: True if an object file for ``sha`` (hex or binary) exists
        in the cache or on the filesystem; caches positive hits."""
        # normalize so cache keys and paths are always keyed by hexsha
        sha = to_hex_sha(sha)
        # try cache
        if sha in self._hexsha_to_file:
            return True
        # try filesystem
        path = self.db_path(self.hexsha_to_object_path(sha))
        if exists(path):
            self._hexsha_to_file[sha] = path
            return True
        # END handle cache
        return False

    def to_object(self, type, size, stream, dry_run=False, sha_as_hex=True):
        """Write a new loose object composed of a ``"type size\\0"`` header and
        the stream contents; return its sha (hex or binary per ``sha_as_hex``).
        In dry-run mode the written file is removed again instead of being
        renamed into place."""
        # open a tmp file to write the data to
        # (created inside the db root, presumably so the final rename stays
        # on one filesystem -- TODO confirm)
        fd, tmp_path = tempfile.mkstemp(prefix='obj', dir=self._root_path)
        writer = FDCompressedSha1Writer(fd)
        # WRITE HEADER: type SP size NULL
        writer.write("%s %i%s" % (type, size, chr(0)))
        # WRITE ALL DATA
        chunksize = self.stream_chunk_size
        try:
            try:
                while True:
                    # assumes writer.write returns the number of bytes
                    # consumed -- a short count signals stream exhaustion
                    data_len = writer.write(stream.read(chunksize))
                    if data_len < chunksize:
                        # WRITE FOOTER
                        # NOTE(review): a trailing newline is appended after
                        # the content; confirm readers expect this footer
                        writer.write('\n')
                        break
                    # END check for stream end
                # END duplicate data
            finally:
                writer.close()
            # END assure file was closed
        except:
            # drop the partially written temp file before re-raising
            os.remove(tmp_path)
            raise
        # END assure tmpfile removal on error
        # in dry-run mode, we delete the file afterwards
        sha = writer.sha(as_hex=True)
        if dry_run:
            os.remove(tmp_path)
        else:
            # rename the file into place
            obj_path = self.db_path(self.hexsha_to_object_path(sha))
            obj_dir = dirname(obj_path)
            if not isdir(obj_dir):
                mkdir(obj_dir)
            # END handle destination directory
            rename(tmp_path, obj_path)
        # END handle dry_run
        if not sha_as_hex:
            sha = hex_to_bin(sha)
        # END handle sha format
        return sha
class PackedDB(FileDBBase, iObjectDBR):
    """A database operating on a set of object packs (stub: no methods implemented here yet)"""
class CompoundDB(iObjectDBR):
    """A database which delegates calls to sub-databases (stub: no methods implemented here yet)"""
class ReferenceDB(CompoundDB):
    """A database consisting of databases referred to in a file (stub: no methods implemented here yet)"""
class GitObjectDB(CompoundDB, iObjectDBW):
    """A database representing the default git object store, which includes loose
    objects, pack files and an alternates file

    It will create objects only in the loose object database.
    (stub: no methods implemented here yet)"""
|