except ImportError:
from misc import defaultdict
-from itertools import chain, imap, izip
+import difflib
+from itertools import (
+ chain,
+ imap,
+ izip,
+ )
import mmap
import os
import struct
from dulwich.misc import unpack_from
import sys
import zlib
-import difflib
from dulwich.errors import (
ApplyDeltaError,
ChecksumMismatch,
)
+from dulwich.file import GitFile
from dulwich.lru_cache import (
LRUSizeCache,
)
hex_to_sha,
sha_to_hex,
)
-from dulwich.misc import make_sha
+from dulwich.misc import (
+ make_sha,
+ )
supports_mmap_offset = (sys.version_info[0] >= 3 or
(sys.version_info[0] == 2 and sys.version_info[1] >= 6))
-def take_msb_bytes(map, offset):
+def take_msb_bytes(read):
+ """Read bytes marked with most significant bit.
+
+ :param read: Read function
+ """
ret = []
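+    # The most significant bit of each byte flags whether another byte follows.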
while len(ret) == 0 or ret[-1] & 0x80:
- ret.append(ord(map[offset]))
- offset += 1
+ ret.append(ord(read(1)))
return ret
-def read_zlib(data, offset, dec_size):
+def read_zlib_chunks(read, buffer_size=4096):
+ """Read chunks of zlib data from a buffer.
+
+ :param read: Read function
+ :return: Tuple with list of chunks, length of
+ compressed data length and unused read data
+ """
obj = zlib.decompressobj()
ret = []
fed = 0
while obj.unused_data == "":
- base = offset+fed
- add = data[base:base+1024]
- if len(add) < 1024:
+ add = read(buffer_size)
+ if len(add) < buffer_size:
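+            # A short read means end of input; append a sentinel byte so the
+            # decompressor reports unused_data once the zlib stream ends,
+            # guaranteeing the loop terminates. comp_len below subtracts the
+            # unused tail again, sentinel included.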
add += "Z"
fed += len(add)
ret.append(obj.decompress(add))
+ comp_len = fed-len(obj.unused_data)
+ return ret, comp_len, obj.unused_data
+
+
+def read_zlib(read, dec_size):
+ """Read zlib-compressed data from a buffer.
+
+ :param read: Read function
+ :param dec_size: Size of the decompressed buffer
+ :return: Uncompressed buffer, compressed buffer length and unused read
+ data.
+ """
+ ret, comp_len, unused = read_zlib_chunks(read)
x = "".join(ret)
assert len(x) == dec_size
- comp_len = fed-len(obj.unused_data)
- return x, comp_len
+ return x, comp_len, unused
+
def iter_sha1(iter):
return sha1.hexdigest()
-def simple_mmap(f, offset, size, access=mmap.ACCESS_READ):
- """Simple wrapper for mmap() which always supports the offset parameter.
+def load_pack_index(path):
+ """Load an index file by path.
- :param f: File object.
- :param offset: Offset in the file, from the beginning of the file.
- :param size: Size of the mmap'ed area
- :param access: Access mechanism.
- :return: MMAP'd area.
+    :param path: Path to the index file
"""
- mem = mmap.mmap(f.fileno(), size+offset, access=access)
- return mem, offset
+ f = GitFile(path, 'rb')
+ return load_pack_index_file(path, f)
+
+def load_pack_index_file(path, f):
+ """Load an index file from a file-like object.
-def load_pack_index(filename):
- f = open(filename, 'r')
+ :param path: Path for the index file
+ :param f: File-like object
+ """
if f.read(4) == '\377tOc':
version = struct.unpack(">L", f.read(4))[0]
if version == 2:
f.seek(0)
- return PackIndex2(filename, file=f)
+ return PackIndex2(path, file=f)
else:
raise KeyError("Unknown pack index format %d" % version)
else:
f.seek(0)
- return PackIndex1(filename, file=f)
+ return PackIndex1(path, file=f)
def bisect_find_sha(start, end, sha, unpack_name):
+ """Find a SHA in a data blob with sorted SHAs.
+
+ :param start: Start index of range to search
+ :param end: End index of range to search
+    :param sha: SHA to find
+ :param unpack_name: Callback to retrieve SHA by index
+ :return: Index of the SHA, or None if it wasn't found
+ """
assert start <= end
while start <= end:
i = (start + end)/2
the start and end offset and then bisect in to find if the value is present.
"""
- def __init__(self, filename, file=None):
+ def __init__(self, filename, file=None, size=None):
"""Create a pack index object.
Provide it with the name of the index file to consider, and it will map
self._filename = filename
# Take the size now, so it can be checked each time we map the file to
# ensure that it hasn't changed.
- self._size = os.path.getsize(filename)
if file is None:
- self._file = open(filename, 'r')
+ self._file = GitFile(filename, 'rb')
else:
self._file = file
- self._contents, map_offset = simple_mmap(self._file, 0, self._size)
- assert map_offset == 0
+ fileno = getattr(self._file, 'fileno', None)
+ if fileno is not None:
+ fd = self._file.fileno()
+ if size is None:
+ self._size = os.fstat(fd).st_size
+ else:
+ self._size = size
+ self._contents = mmap.mmap(fd, self._size,
+ access=mmap.ACCESS_READ)
+ else:
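+            # File-like objects without a file descriptor (e.g. StringIO)
+            # cannot be mmapped; read the whole index into memory instead.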
+ self._file.seek(0)
+ self._contents = self._file.read()
+ self._size = len(self._contents)
def __eq__(self, other):
if not isinstance(other, PackIndex):
if name1 != name2:
return False
return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
def close(self):
self._file.close()
raise NotImplementedError(self._unpack_crc32_checksum)
def __iter__(self):
+ """Iterate over the SHAs in this pack."""
return imap(sha_to_hex, self._itersha())
def _itersha(self):
def check(self):
"""Check that the stored checksum matches the actual checksum."""
+ # TODO: Check pack contents, too
return self.calculate_checksum() == self.get_stored_checksum()
def calculate_checksum(self):
class PackIndex1(PackIndex):
"""Version 1 Pack Index."""
- def __init__(self, filename, file=None):
- PackIndex.__init__(self, filename, file)
+ def __init__(self, filename, file=None, size=None):
+ PackIndex.__init__(self, filename, file, size)
self.version = 1
self._fan_out_table = self._read_fan_out_table(0)
class PackIndex2(PackIndex):
"""Version 2 Pack Index."""
- def __init__(self, filename, file=None):
- PackIndex.__init__(self, filename, file)
+ def __init__(self, filename, file=None, size=None):
+ PackIndex.__init__(self, filename, file, size)
assert self._contents[:4] == '\377tOc', "Not a v2 pack index file"
(self.version, ) = unpack_from(">L", self._contents, 4)
assert self.version == 2, "Version was %d" % self.version
return (version, num_objects)
-def read_pack_tail(f):
- return (f.read(20),)
-
-
-def unpack_object(map, offset=0):
+def unpack_object(read):
"""Unpack a Git object.
- :return: tuple with type, uncompressed data and compressed size
+ :return: tuple with type, uncompressed data, compressed size and
+ tail data
"""
- bytes = take_msb_bytes(map, offset)
+ bytes = take_msb_bytes(read)
type = (bytes[0] >> 4) & 0x07
size = bytes[0] & 0x0f
for i, byte in enumerate(bytes[1:]):
size += (byte & 0x7f) << ((i * 7) + 4)
raw_base = len(bytes)
if type == 6: # offset delta
- bytes = take_msb_bytes(map, raw_base + offset)
+ bytes = take_msb_bytes(read)
+ raw_base += len(bytes)
assert not (bytes[-1] & 0x80)
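+        # Decode git's offset-delta base offset: big-endian base-128, with
+        # one added per continuation byte so every value has a unique encoding.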
delta_base_offset = bytes[0] & 0x7f
for byte in bytes[1:]:
delta_base_offset += 1
delta_base_offset <<= 7
delta_base_offset += (byte & 0x7f)
- raw_base+=len(bytes)
- uncomp, comp_len = read_zlib(map, offset + raw_base, size)
+ uncomp, comp_len, unused = read_zlib(read, size)
assert size == len(uncomp)
- return type, (delta_base_offset, uncomp), comp_len+raw_base
+ return type, (delta_base_offset, uncomp), comp_len+raw_base, unused
elif type == 7: # ref delta
- basename = map[offset+raw_base:offset+raw_base+20]
- uncomp, comp_len = read_zlib(map, offset+raw_base+20, size)
+ basename = read(20)
+ raw_base += 20
+ uncomp, comp_len, unused = read_zlib(read, size)
assert size == len(uncomp)
- return type, (basename, uncomp), comp_len+raw_base+20
+ return type, (basename, uncomp), comp_len+raw_base, unused
else:
- uncomp, comp_len = read_zlib(map, offset+raw_base, size)
+ uncomp, comp_len, unused = read_zlib(read, size)
assert len(uncomp) == size
- return type, uncomp, comp_len+raw_base
+ return type, uncomp, comp_len+raw_base, unused
-def compute_object_size((num, obj)):
+def _compute_object_size((num, obj)):
"""Compute the size of a unresolved object for use with LRUSizeCache.
"""
if num in (6, 7):
It will all just throw a zlib or KeyError.
"""
- def __init__(self, filename):
+ def __init__(self, filename, file=None, size=None):
"""Create a PackData object that represents the pack in the given filename.
The file must exist and stay readable until the object is disposed of. It
mmap implementation is flawed.
"""
self._filename = filename
- assert os.path.exists(filename), "%s is not a packfile" % filename
- self._size = os.path.getsize(filename)
+ self._size = size
self._header_size = 12
- assert self._size >= self._header_size, "%s is too small for a packfile (%d < %d)" % (filename, self._size, self._header_size)
- self._file = open(self._filename, 'rb')
- self._read_header()
+ if file is None:
+ self._file = GitFile(self._filename, 'rb')
+ else:
+ self._file = file
+ (version, self._num_objects) = read_pack_header(self._file)
self._offset_cache = LRUSizeCache(1024*1024*20,
- compute_size=compute_object_size)
+ compute_size=_compute_object_size)
+
+ @classmethod
+ def from_file(cls, file, size):
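+        # A bare file object has no path; use its string representation as
+        # a placeholder name.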
+ return cls(str(file), file=file, size=size)
+
+ @classmethod
+ def from_path(cls, path):
+ return cls(filename=path)
def close(self):
self._file.close()
-
- def _read_header(self):
- (version, self._num_objects) = read_pack_header(self._file)
- self._file.seek(self._size-20)
- (self._stored_checksum,) = read_pack_tail(self._file)
+
+ def _get_size(self):
+ if self._size is not None:
+ return self._size
+ self._size = os.path.getsize(self._filename)
+        assert self._size >= self._header_size, (
+            "%s is too small for a packfile (%d < %d)" %
+            (self._filename, self._size, self._header_size))
+ return self._size
def __len__(self):
"""Returns the number of objects in this pack."""
:return: 20-byte binary SHA1 digest
"""
- map, map_offset = simple_mmap(self._file, 0, self._size - 20)
- try:
- return make_sha(map[map_offset:self._size-20]).digest()
- finally:
- map.close()
+ s = make_sha()
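+        # Hash everything except the trailing 20-byte checksum, reading the
+        # pack in 64 kB chunks rather than mapping it whole.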
+ self._file.seek(0)
+ todo = self._get_size() - 20
+ while todo > 0:
+ x = self._file.read(min(todo, 1<<16))
+ s.update(x)
+ todo -= len(x)
+ return s.digest()
def resolve_object(self, offset, type, obj, get_ref, get_offset=None):
"""Resolve an object, possibly resolving deltas when necessary.
self.i = 0
self.offset = pack._header_size
self.num = len(pack)
- self.map, _ = simple_mmap(pack._file, 0, pack._size)
-
- def __del__(self):
- self.map.close()
+ self.map = pack._file
def __iter__(self):
return self
def next(self):
if self.i == self.num:
raise StopIteration
- (type, obj, total_size) = unpack_object(self.map, self.offset)
- crc32 = zlib.crc32(self.map[self.offset:self.offset+total_size]) & 0xffffffff
+ self.map.seek(self.offset)
+ (type, obj, total_size, unused) = unpack_object(self.map.read)
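+        # Re-read the raw object bytes to compute the CRC32 that version 2
+        # pack indexes store for each object.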
+ self.map.seek(self.offset)
+ crc32 = zlib.crc32(self.map.read(total_size)) & 0xffffffff
ret = (self.offset, type, obj, crc32)
self.offset += total_size
if progress:
return ObjectIterator(self)
def iterentries(self, ext_resolve_ref=None, progress=None):
+ """Yield entries summarizing the contents of this pack.
+
+ :param ext_resolve_ref: Optional function to resolve base
+ objects (in case this is a thin pack)
+ :param progress: Progress function, called with current and
+ total object count.
+
+ This will yield tuples with (sha, offset, crc32)
+ """
found = {}
postponed = defaultdict(list)
class Postpone(Exception):
raise KeyError([sha_to_hex(h) for h in postponed.keys()])
def sorted_entries(self, resolve_ext_ref=None, progress=None):
+ """Return entries in this pack, sorted by SHA.
+
+        :param resolve_ext_ref: Optional function to resolve base
+ objects (in case this is a thin pack)
+ :param progress: Progress function, called with current and
+ total object count.
+ :return: List of tuples with (sha, offset, crc32)
+ """
ret = list(self.iterentries(resolve_ext_ref, progress=progress))
ret.sort()
return ret
entries = self.sorted_entries(resolve_ext_ref, progress=progress)
write_pack_index_v2(filename, entries, self.calculate_checksum())
- def create_index(self, filename, resolve_ext_ref=None, progress=None, version=2):
+ def create_index(self, filename, resolve_ext_ref=None, progress=None,
+ version=2):
"""Create an index file for this data file.
:param filename: Index filename.
raise ValueError("unknown index format %d" % version)
def get_stored_checksum(self):
- return self._stored_checksum
+ """Return the expected checksum stored in this pack."""
+ self._file.seek(self._get_size()-20)
+ return self._file.read(20)
def check(self):
+ """Check the consistency of this pack."""
return (self.calculate_checksum() == self.get_stored_checksum())
def get_object_at(self, offset):
assert isinstance(offset, long) or isinstance(offset, int),\
"offset was %r" % offset
assert offset >= self._header_size
- map, map_offset = simple_mmap(self._file, offset, self._size-offset)
- try:
- ret = unpack_object(map, map_offset)[:2]
- return ret
- finally:
- map.close()
+ self._file.seek(offset)
+ return unpack_object(self._file.read)[:2]
+
+
+class SHA1Reader(object):
+ """Wrapper around a file-like object that remembers the SHA1 of
+ the data read from it."""
+
+ def __init__(self, f):
+ self.f = f
+ self.sha1 = make_sha("")
+
+ def read(self, num=None):
+ data = self.f.read(num)
+ self.sha1.update(data)
+ return data
+
+ def check_sha(self):
+ stored = self.f.read(20)
+ if stored != self.sha1.digest():
+ raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored))
+
+ def close(self):
+ return self.f.close()
+
+ def tell(self):
+ return self.f.tell()
class SHA1Writer(object):
:param o: Object to write
:return: Tuple with offset at which the object was written, and crc32
"""
- ret = f.tell()
+ offset = f.tell()
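+    # Remember where this object starts; the offset of the start, not the
+    # end, is what gets recorded in the pack index.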
packed_data_hdr = ""
- if type == 6: # ref delta
+ if type == 6: # offset delta
(delta_base_offset, object) = object
- elif type == 7: # offset delta
+ elif type == 7: # ref delta
(basename, object) = object
size = len(object)
c = (type << 4) | (size & 15)
packed_data_hdr += basename
packed_data = packed_data_hdr + zlib.compress(object)
f.write(packed_data)
- return (f.tell(), (zlib.crc32(packed_data) & 0xffffffff))
+ return (offset, (zlib.crc32(packed_data) & 0xffffffff))
def write_pack(filename, objects, num_objects):
:param objects: Iterable over (object, path) tuples to write
:param num_objects: Number of objects to write
"""
- f = open(filename + ".pack", 'w')
+ f = GitFile(filename + ".pack", 'wb')
try:
entries, data_sum = write_pack_data(f, objects, num_objects)
finally:
# This helps us find good objects to diff against us
magic = []
for obj, path in recency:
- magic.append( (obj.type, path, 1, -len(obj.as_raw_string()), obj) )
+        magic.append((obj.type, path, 1, -obj.raw_length(), obj))
magic.sort()
    # Build a map of objects and their index in magic - so we can find preceding objects
# to diff against
crc32_checksum.
:param pack_checksum: Checksum of the pack file.
"""
- f = open(filename, 'w')
+ f = GitFile(filename, 'wb')
f = SHA1Writer(f)
fan_out_table = defaultdict(lambda: 0)
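+    # Count entries by the first byte of their SHA; the index stores these
+    # counts as cumulative totals in its fan-out table.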
for (name, offset, entry_checksum) in entries:
def create_delta(base_buf, target_buf):
"""Use python difflib to work out how to transform base_buf to target_buf.
-
+
:param base_buf: Base buffer
:param target_buf: Target buffer
"""
o = i1
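+            # Emit only the non-zero little-endian bytes of the offset,
+            # setting the matching bit in the copy opcode for each one.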
for i in range(4):
if o & 0xff << i*8:
- scratch += chr(o >> i)
+ scratch += chr((o >> i*8) & 0xff)
op |= 1 << i
s = i2 - i1
for i in range(2):
if s & 0xff << i*8:
- scratch += chr(s >> i)
+ scratch += chr((s >> i*8) & 0xff)
op |= 1 << (4+i)
out_buf += chr(op)
out_buf += scratch
crc32_checksum.
:param pack_checksum: Checksum of the pack file.
"""
- f = open(filename, 'w')
+ f = GitFile(filename, 'wb')
f = SHA1Writer(f)
f.write('\377tOc') # Magic!
f.write(struct.pack(">L", 2))
*self.data.resolve_object(offset, type, obj, get_raw))
-def load_packs(path):
- if not os.path.exists(path):
- return
- for name in os.listdir(path):
- if name.startswith("pack-") and name.endswith(".pack"):
- yield Pack(os.path.join(path, name[:-len(".pack")]))
-
-
try:
from dulwich._pack import apply_delta, bisect_find_sha
except ImportError: