import zlib
from dulwich.errors import (
+ ChecksumMismatch,
NotBlobError,
NotCommitError,
NotTagError,
return binascii.unhexlify(hex)
+def hex_to_filename(path, hex):
+ """Takes a hex sha and returns its filename relative to the given path."""
+ dir = hex[:2]
+ file = hex[2:]
+ # The first two hex characters name the fan-out directory, the rest the file
+ return os.path.join(path, dir, file)
+
+
+def filename_to_hex(filename):
+ """Takes an object filename and returns its corresponding hex sha."""
+ # grab the last (up to) two path components
+ names = filename.rsplit(os.path.sep, 2)[-2:]
+ errmsg = "Invalid object filename: %s" % filename
+ assert len(names) == 2, errmsg
+ base, rest = names
+ assert len(base) == 2 and len(rest) == 38, errmsg
+ hex = base + rest
+ hex_to_sha(hex)
+ return hex
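+
+# Example (illustrative sha): hex_to_filename("objects", "ab" + "c" * 38)
+# returns os.path.join("objects", "ab", "c" * 38), and filename_to_hex()
+# inverts it, validating the hex on the way.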
+
+
+def object_header(num_type, length):
+ """Return an object header for the given numeric type and text length."""
+ return "%s %d\0" % (object_class(num_type).type_name, length)
+
+
def serializable_property(name, docstring=None):
def set(obj, value):
obj._ensure_parsed()
"""Get the object class corresponding to the given type.
:param type: Either a type name string or a numeric type.
- :return: The ShaFile subclass corresponding to the given type.
+ :return: The ShaFile subclass corresponding to the given type, or None if
+ type is not a valid type name/number.
"""
- return _TYPE_MAP[type]
+ return _TYPE_MAP.get(type, None)
def check_hexsha(hex, error_msg):
raise ObjectFormatException(error_msg)
+class FixedSha(object):
+ """SHA object that behaves like hashlib's but is given a fixed value."""
+
+ def __init__(self, hexsha):
+ self._hexsha = hexsha
+ self._sha = hex_to_sha(hexsha)
+
+ def digest(self):
+ return self._sha
+
+ def hexdigest(self):
+ return self._hexsha
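+
+# FixedSha mirrors only the hashlib methods used here; e.g. (illustrative)
+# FixedSha("a" * 40).hexdigest() == "a" * 40, while digest() returns the
+# corresponding 20 raw bytes.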
+
+
class ShaFile(object):
"""A git SHA file."""
- @classmethod
- def _parse_legacy_object(cls, map):
- """Parse a legacy object, creating it and setting object._text"""
- text = _decompress(map)
- object = None
- for cls in OBJECT_CLASSES:
- if text.startswith(cls.type_name):
- object = cls()
- text = text[len(cls.type_name):]
- break
- assert object is not None, "%s is not a known object type" % text[:9]
- assert text[0] == ' ', "%s is not a space" % text[0]
- text = text[1:]
- size = 0
- i = 0
- while text[0] >= '0' and text[0] <= '9':
- if i > 0 and size == 0:
- raise AssertionError("Size is not in canonical format")
- size = (size * 10) + int(text[0])
- text = text[1:]
- i += 1
- object._size = size
- assert text[0] == "\0", "Size not followed by null"
- text = text[1:]
- object.set_raw_string(text)
- return object
+ @staticmethod
+ def _parse_legacy_object_header(magic, f):
+ """Parse a legacy object, creating it but not reading the file."""
+ bufsize = 1024
+ decomp = zlib.decompressobj()
+ header = decomp.decompress(magic)
+ start = 0
+ end = -1
+ while end < 0:
+ header += decomp.decompress(f.read(bufsize))
+ end = header.find("\0", start)
+ start = len(header)
+ header = header[:end]
+ type_name, size = header.split(" ", 1)
+ size = int(size) # sanity check
+ obj_class = object_class(type_name)
+ if not obj_class:
+ raise ObjectFormatException("Not a known type: %s" % type_name)
+ obj = obj_class()
+ obj._filename = f.name
+ return obj
+
+ def _parse_legacy_object(self, f):
+ """Parse a legacy object, setting the raw string."""
+ size = os.path.getsize(f.name)
+ map = mmap.mmap(f.fileno(), size, access=mmap.ACCESS_READ)
+ try:
+ text = _decompress(map)
+ finally:
+ map.close()
+ header_end = text.find('\0')
+ if header_end < 0:
+ raise ObjectFormatException("Invalid object header")
+ self.set_raw_string(text[header_end+1:])
def as_legacy_object_chunks(self):
compobj = zlib.compressobj()
return "".join(self.as_legacy_object_chunks())
def as_raw_chunks(self):
- if self._needs_serialization:
+ if self._needs_parsing:
+ self._ensure_parsed()
+ elif self._needs_serialization:
self._chunked_text = self._serialize()
- self._needs_serialization = False
return self._chunked_text
def as_raw_string(self):
def _ensure_parsed(self):
if self._needs_parsing:
+ if not self._chunked_text:
+ assert self._filename, "ShaFile needs either text or filename"
+ self._parse_file()
self._deserialize(self._chunked_text)
self._needs_parsing = False
def set_raw_chunks(self, chunks):
self._chunked_text = chunks
+ self._deserialize(chunks)
self._sha = None
- self._needs_parsing = True
+ self._needs_parsing = False
self._needs_serialization = False
- @classmethod
- def _parse_object(cls, map):
- """Parse a new style object , creating it and setting object._text"""
- used = 0
- byte = ord(map[used])
- used += 1
- type_num = (byte >> 4) & 7
+ @staticmethod
+ def _parse_object_header(magic, f):
+ """Parse a new style object, creating it but not reading the file."""
+ num_type = (ord(magic[0]) >> 4) & 7
+ obj_class = object_class(num_type)
+ if not obj_class:
+ raise ObjectFormatException("Not a known type: %d" % num_type)
+ obj = obj_class()
+ obj._filename = f.name
+ return obj
+
+ def _parse_object(self, f):
+ """Parse a new style object, setting self._text."""
+ size = os.path.getsize(f.name)
+ map = mmap.mmap(f.fileno(), size, access=mmap.ACCESS_READ)
try:
- object = object_class(type_num)()
- except KeyError:
- raise AssertionError("Not a known type: %d" % type_num)
- while (byte & 0x80) != 0:
- byte = ord(map[used])
- used += 1
- raw = map[used:]
- object.set_raw_string(_decompress(raw))
- return object
+ # skip type and size; type must have already been determined, and we
+ # trust zlib to fail if it's otherwise corrupted
+ byte = ord(map[0])
+ used = 1
+ while (byte & 0x80) != 0:
+ byte = ord(map[used])
+ used += 1
+ raw = map[used:]
+ self.set_raw_string(_decompress(raw))
+ finally:
+ map.close()
+
+ @classmethod
+ def _is_legacy_object(cls, magic):
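+ # A legacy (zlib-deflated) loose object starts with a standard zlib
+ # header: first byte 0x78 and the two-byte big-endian value divisible by
+ # 31, e.g. "\x78\x9c" or "\x78\x01".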
+ b0, b1 = map(ord, magic)
+ word = (b0 << 8) + b1
+ return b0 == 0x78 and (word % 31) == 0
@classmethod
- def _parse_file(cls, map):
- word = (ord(map[0]) << 8) + ord(map[1])
- if ord(map[0]) == 0x78 and (word % 31) == 0:
- return cls._parse_legacy_object(map)
+ def _parse_file_header(cls, f):
+ magic = f.read(2)
+ if cls._is_legacy_object(magic):
+ return cls._parse_legacy_object_header(magic, f)
else:
- return cls._parse_object(map)
+ return cls._parse_object_header(magic, f)
def __init__(self):
"""Don't call this directly"""
self._sha = None
+ self._filename = None
+ self._chunked_text = []
+ self._needs_parsing = False
+ self._needs_serialization = True
def _deserialize(self, chunks):
raise NotImplementedError(self._deserialize)
def _serialize(self):
raise NotImplementedError(self._serialize)
+ def _parse_file(self):
+ f = GitFile(self._filename, 'rb')
+ try:
+ magic = f.read(2)
+ if self._is_legacy_object(magic):
+ self._parse_legacy_object(f)
+ else:
+ self._parse_object(f)
+ finally:
+ f.close()
+
@classmethod
- def from_file(cls, filename):
- """Get the contents of a SHA file on disk"""
- size = os.path.getsize(filename)
- f = GitFile(filename, 'rb')
+ def from_path(cls, path):
+ """Open a SHA file from disk, using the path to set its SHA."""
+ f = GitFile(path, 'rb')
try:
- map = mmap.mmap(f.fileno(), size, access=mmap.ACCESS_READ)
- shafile = cls._parse_file(map)
- return shafile
+ obj = cls.from_file(f)
+ obj._sha = FixedSha(filename_to_hex(path))
+ return obj
finally:
f.close()
+ @classmethod
+ def from_file(cls, f):
+ """Get the contents of a SHA file on disk."""
+ try:
+ obj = cls._parse_file_header(f)
+ obj._sha = None
+ obj._needs_parsing = True
+ obj._needs_serialization = True
+ return obj
+ except (IndexError, ValueError), e:
+ raise ObjectFormatException("invalid object header")
+
@staticmethod
def from_raw_string(type_num, string):
"""Creates an object of the indicated type from the raw string given.
@classmethod
def from_string(cls, string):
- """Create a blob from a string."""
+ """Create a ShaFile from a string."""
obj = cls()
obj.set_raw_string(string)
return obj
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
+ :raise ChecksumMismatch: if the object was created with a SHA that does
+ not match its contents
"""
# TODO: if we find that error-checking during object parsing is a
# performance bottleneck, those checks should be moved to the class's
# check() method during optimization so we can still check the object
# when necessary.
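+ # Recomputing the SHA below catches objects created with a fixed SHA
+ # (e.g. one derived from the filename) that does not match the actual
+ # contents.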
+ old_sha = self.id
try:
self._deserialize(self.as_raw_chunks())
+ self._sha = None
+ new_sha = self.id
except Exception, e:
raise ObjectFormatException(e)
+ if old_sha != new_sha:
+ raise ChecksumMismatch(new_sha, old_sha)
def _header(self):
- return "%s %lu\0" % (self.type_name, self.raw_length())
+ return object_header(self.type, self.raw_length())
def raw_length(self):
"""Returns the length of the raw string of this object."""
def sha(self):
"""The SHA1 object that is the name of this object."""
- if self._needs_serialization or self._sha is None:
- self._sha = self._make_sha()
+ if self._sha is None:
+ # this is a local because as_raw_chunks() overwrites self._sha
+ new_sha = make_sha()
+ new_sha.update(self._header())
+ for chunk in self.as_raw_chunks():
+ new_sha.update(chunk)
+ self._sha = new_sha
return self._sha
@property
self.set_raw_string(data)
data = property(_get_data, _set_data,
- "The text contained within the blob object.")
+ "The text contained within the blob object.")
def _get_chunked(self):
+ self._ensure_parsed()
return self._chunked_text
def _set_chunked(self, chunks):
self._chunked_text = chunks
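+ # For a Blob the raw chunks are the content itself, so _serialize() and
+ # _deserialize() below are essentially pass-throughs.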
+ def _serialize(self):
+ if not self._chunked_text:
+ self._ensure_parsed()
+ self._needs_serialization = False
+ return self._chunked_text
+
+ def _deserialize(self, chunks):
+ self._chunked_text = chunks
+
chunked = property(_get_chunked, _set_chunked,
"The text within the blob object, as chunks (not necessarily lines).")
@classmethod
- def from_file(cls, filename):
- blob = ShaFile.from_file(filename)
+ def from_path(cls, path):
+ blob = ShaFile.from_path(path)
if not isinstance(blob, cls):
- raise NotBlobError(filename)
+ raise NotBlobError(path)
return blob
def check(self):
:raise ObjectFormatException: if the object is malformed in some way
"""
- pass # it's impossible for raw data to be malformed
+ super(Blob, self).check()
+
+
+def _parse_tag_or_commit(text):
+ """Parse tag or commit text.
+
+ :param text: the raw text of the tag or commit object.
+ :yield: tuples of (field, value), one per header line, in the order read
+ from the text, possibly including duplicates. Includes a field named
+ None for the freeform tag/commit text.
+ """
+ f = StringIO(text)
+ for l in f:
+ l = l.rstrip("\n")
+ if l == "":
+ # Empty line indicates end of headers
+ break
+ yield l.split(" ", 1)
+ yield (None, f.read())
+ f.close()
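+
+# For example (illustrative), given tag text
+#   "object <sha>\ntype commit\ntag v1.0\n\n<message>"
+# parse_tag() below yields ("object", "<sha>"), ("type", "commit"),
+# ("tag", "v1.0") and finally (None, "<message>").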
+
+
+def parse_tag(text):
+ """Parse a tag text and yield its (field, value) pairs."""
+ return _parse_tag_or_commit(text)
class Tag(ShaFile):
def __init__(self):
super(Tag, self).__init__()
- self._needs_parsing = False
- self._needs_serialization = True
+ self._tag_timezone_neg_utc = False
@classmethod
- def from_file(cls, filename):
- tag = ShaFile.from_file(filename)
+ def from_path(cls, filename):
+ tag = ShaFile.from_path(filename)
if not isinstance(tag, cls):
raise NotTagError(filename)
return tag
- @classmethod
- def from_string(cls, string):
- """Create a blob from a string."""
- shafile = cls()
- shafile.set_raw_string(string)
- return shafile
-
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Tag, self).check()
- # TODO(dborowitz): check header order
self._check_has_member("_object_sha", "missing object sha")
self._check_has_member("_object_class", "missing object type")
self._check_has_member("_name", "missing tag name")
if getattr(self, "_tagger", None):
check_identity(self._tagger, "invalid tagger")
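+ # Headers must appear in canonical order: object, type, tag, then an
+ # optional tagger.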
+ last = None
+ for field, _ in parse_tag("".join(self._chunked_text)):
+ if field == _OBJECT_HEADER and last is not None:
+ raise ObjectFormatException("unexpected object")
+ elif field == _TYPE_HEADER and last != _OBJECT_HEADER:
+ raise ObjectFormatException("unexpected type")
+ elif field == _TAG_HEADER and last != _TYPE_HEADER:
+ raise ObjectFormatException("unexpected tag name")
+ elif field == _TAGGER_HEADER and last != _TAG_HEADER:
+ raise ObjectFormatException("unexpected tagger")
+ last = field
+
def _serialize(self):
chunks = []
chunks.append("%s %s\n" % (_OBJECT_HEADER, self._object_sha))
else:
chunks.append("%s %s %d %s\n" % (
_TAGGER_HEADER, self._tagger, self._tag_time,
- format_timezone(self._tag_timezone)))
+ format_timezone(self._tag_timezone,
+ self._tag_timezone_neg_utc)))
chunks.append("\n") # To close headers
chunks.append(self._message)
return chunks
def _deserialize(self, chunks):
"""Grab the metadata attached to the tag"""
self._tagger = None
- f = StringIO("".join(chunks))
- for l in f:
- l = l.rstrip("\n")
- if l == "":
- break # empty line indicates end of headers
- (field, value) = l.split(" ", 1)
+ for field, value in parse_tag("".join(chunks)):
if field == _OBJECT_HEADER:
self._object_sha = value
elif field == _TYPE_HEADER:
- self._object_class = object_class(value)
+ obj_class = object_class(value)
+ if not obj_class:
+ raise ObjectFormatException("Not a known type: %s" % value)
+ self._object_class = obj_class
elif field == _TAG_HEADER:
self._name = value
elif field == _TAGGER_HEADER:
self._tagger = value
self._tag_time = None
self._tag_timezone = None
+ self._tag_timezone_neg_utc = False
else:
self._tagger = value[0:sep+1]
- (timetext, timezonetext) = value[sep+2:].rsplit(" ", 1)
- self._tag_time = int(timetext)
- self._tag_timezone = parse_timezone(timezonetext)
+ try:
+ (timetext, timezonetext) = value[sep+2:].rsplit(" ", 1)
+ self._tag_time = int(timetext)
+ self._tag_timezone, self._tag_timezone_neg_utc = \
+ parse_timezone(timezonetext)
+ except ValueError, e:
+ raise ObjectFormatException(e)
+ elif field is None:
+ self._message = value
else:
- raise AssertionError("Unknown field %s" % field)
- self._message = f.read()
+ raise ObjectFormatException("Unknown field %s" % field)
def _get_object(self):
"""Get the object pointed to by this tag.
def __init__(self):
super(Tree, self).__init__()
self._entries = {}
- self._needs_parsing = False
- self._needs_serialization = True
@classmethod
- def from_file(cls, filename):
- tree = ShaFile.from_file(filename)
+ def from_path(cls, filename):
+ tree = ShaFile.from_path(filename)
if not isinstance(tree, cls):
raise NotTreeError(filename)
return tree
def _deserialize(self, chunks):
"""Grab the entries in the tree"""
- parsed_entries = parse_tree("".join(chunks))
+ try:
+ parsed_entries = parse_tree("".join(chunks))
+ except ValueError, e:
+ raise ObjectFormatException(e)
# TODO: list comprehension is for efficiency in the common (small) case;
# if memory efficiency in the large case is a concern, use a genexp.
self._entries = dict([(n, (m, s)) for n, m, s in parsed_entries])
- self._needs_parsing = False
def check(self):
"""Check this object for internal consistency.
def parse_timezone(text):
offset = int(text)
+ negative_utc = (offset == 0 and text[0] == '-')
signum = (offset < 0) and -1 or 1
offset = abs(offset)
hours = int(offset / 100)
minutes = (offset % 100)
- return signum * (hours * 3600 + minutes * 60)
+ return signum * (hours * 3600 + minutes * 60), negative_utc
-def format_timezone(offset):
+def format_timezone(offset, negative_utc=False):
if offset % 60 != 0:
raise ValueError("Unable to handle non-minute offset.")
- sign = (offset < 0) and '-' or '+'
+ if offset < 0 or (offset == 0 and negative_utc):
+ sign = '-'
+ else:
+ sign = '+'
offset = abs(offset)
return '%c%02d%02d' % (sign, offset / 3600, (offset / 60) % 60)
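+
+# Round-trip sketch (illustrative): parse_timezone("-0500") == (-18000, False)
+# and parse_timezone("-0000") == (0, True); format_timezone(-18000) == "-0500"
+# and format_timezone(0, True) == "-0000", preserving the "-0000" quirk.
+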
+def parse_commit(text):
+ """Parse commit text and yield its (field, value) pairs."""
+ return _parse_tag_or_commit(text)
+
+
class Commit(ShaFile):
"""A git commit object"""
super(Commit, self).__init__()
self._parents = []
self._encoding = None
- self._needs_parsing = False
- self._needs_serialization = True
self._extra = {}
+ self._author_timezone_neg_utc = False
+ self._commit_timezone_neg_utc = False
@classmethod
- def from_file(cls, filename):
- commit = ShaFile.from_file(filename)
+ def from_path(cls, path):
+ commit = ShaFile.from_path(path)
if not isinstance(commit, cls):
- raise NotCommitError(filename)
+ raise NotCommitError(path)
return commit
def _deserialize(self, chunks):
self._parents = []
self._extra = []
self._author = None
- f = StringIO("".join(chunks))
- for l in f:
- l = l.rstrip("\n")
- if l == "":
- # Empty line indicates end of headers
- break
- (field, value) = l.split(" ", 1)
+ for field, value in parse_commit("".join(chunks)):
if field == _TREE_HEADER:
self._tree = value
elif field == _PARENT_HEADER:
elif field == _AUTHOR_HEADER:
self._author, timetext, timezonetext = value.rsplit(" ", 2)
self._author_time = int(timetext)
- self._author_timezone = parse_timezone(timezonetext)
+ self._author_timezone, self._author_timezone_neg_utc =\
+ parse_timezone(timezonetext)
elif field == _COMMITTER_HEADER:
self._committer, timetext, timezonetext = value.rsplit(" ", 2)
self._commit_time = int(timetext)
- self._commit_timezone = parse_timezone(timezonetext)
+ self._commit_timezone, self._commit_timezone_neg_utc =\
+ parse_timezone(timezonetext)
elif field == _ENCODING_HEADER:
self._encoding = value
+ elif field is None:
+ self._message = value
else:
self._extra.append((field, value))
- self._message = f.read()
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Commit, self).check()
- # TODO(dborowitz): check header order
- # TODO(dborowitz): check for duplicate headers
self._check_has_member("_tree", "missing tree")
self._check_has_member("_author", "missing author")
self._check_has_member("_committer", "missing committer")
check_identity(self._author, "invalid author")
check_identity(self._committer, "invalid committer")
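+ # Headers must appear in order: tree, zero or more parents, author,
+ # committer, then an optional encoding.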
+ last = None
+ for field, _ in parse_commit("".join(self._chunked_text)):
+ if field == _TREE_HEADER and last is not None:
+ raise ObjectFormatException("unexpected tree")
+ elif field == _PARENT_HEADER and last not in (_PARENT_HEADER,
+ _TREE_HEADER):
+ raise ObjectFormatException("unexpected parent")
+ elif field == _AUTHOR_HEADER and last not in (_TREE_HEADER,
+ _PARENT_HEADER):
+ raise ObjectFormatException("unexpected author")
+ elif field == _COMMITTER_HEADER and last != _AUTHOR_HEADER:
+ raise ObjectFormatException("unexpected committer")
+ elif field == _ENCODING_HEADER and last != _COMMITTER_HEADER:
+ raise ObjectFormatException("unexpected encoding")
+ last = field
+
+ # TODO: optionally check for duplicate parents
+
def _serialize(self):
chunks = []
chunks.append("%s %s\n" % (_TREE_HEADER, self._tree))
chunks.append("%s %s\n" % (_PARENT_HEADER, p))
chunks.append("%s %s %s %s\n" % (
_AUTHOR_HEADER, self._author, str(self._author_time),
- format_timezone(self._author_timezone)))
+ format_timezone(self._author_timezone,
+ self._author_timezone_neg_utc)))
chunks.append("%s %s %s %s\n" % (
_COMMITTER_HEADER, self._committer, str(self._commit_time),
- format_timezone(self._commit_timezone)))
+ format_timezone(self._commit_timezone,
+ self._commit_timezone_neg_utc)))
if self.encoding:
chunks.append("%s %s\n" % (_ENCODING_HEADER, self.encoding))
for k, v in self.extra: