1 # object_store.py -- Object store for git objects
2 # Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@samba.org>
4 # This program is free software; you can redistribute it and/or
5 # modify it under the terms of the GNU General Public License
6 # as published by the Free Software Foundation; either version 2
7 # or (at your option) a later version of the License.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software
16 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
20 """Git object store interfaces and implementation."""
29 from dulwich.diff_tree import (
33 from dulwich.errors import (
36 from dulwich.file import GitFile
37 from dulwich.objects import (
49 from dulwich.pack import (
class BaseObjectStore(object):
    """Object store interface."""

    # NOTE(review): this source appears to be an excerpt with many original
    # lines missing (there are gaps in the embedded line numbering); the
    # comments below flag the gaps rather than guessing at the lost code.

    def determine_wants_all(self, refs):
        # Want every SHA referenced by ``refs`` that is not already present
        # in this store, skipping peeled ref entries (names ending "^{}").
        return [sha for (ref, sha) in refs.iteritems()
                if not sha in self and not ref.endswith("^{}") and
        # NOTE(review): the tail of this comprehension (original line 72) is
        # missing -- presumably one more condition plus the closing bracket.

    def iter_shas(self, shas):
        """Iterate over the objects for the specified shas.

        :param shas: Iterable object with SHAs
        :return: Object iterator
        """
        return ObjectStoreIterator(self, shas)

    def contains_loose(self, sha):
        """Check if a particular object is present by SHA1 and is loose."""
        raise NotImplementedError(self.contains_loose)

    def contains_packed(self, sha):
        """Check if a particular object is present by SHA1 and is packed."""
        raise NotImplementedError(self.contains_packed)

    def __contains__(self, sha):
        """Check if a particular object is present by SHA1.

        This method makes no distinction between loose and packed objects.
        """
        return self.contains_packed(sha) or self.contains_loose(sha)

    # NOTE(review): the declaration for the following member (original lines
    # 97-98) is missing -- judging by PackBasedObjectStore below it is
    # probably "@property" / "def packs(self):".
        """Iterable of pack objects."""
        raise NotImplementedError

    def get_raw(self, name):
        """Obtain the raw text for an object.

        :param name: sha for the object.
        :return: tuple with numeric type and object contents.
        """
        raise NotImplementedError(self.get_raw)

    def __getitem__(self, sha):
        """Obtain an object by SHA1."""
        # Delegate to get_raw() and rehydrate into a ShaFile instance.
        type_num, uncomp = self.get_raw(sha)
        return ShaFile.from_raw_string(type_num, uncomp)

    # NOTE(review): the "def __iter__(self):" header (original line 115) is
    # missing here.
        """Iterate over the SHAs that are present in this store."""
        raise NotImplementedError(self.__iter__)

    def add_object(self, obj):
        """Add a single object to this object store."""
        # NOTE(review): original lines 121-122 (the remainder of the
        # docstring) are missing.
        raise NotImplementedError(self.add_object)

    def add_objects(self, objects):
        """Add a set of objects to this object store.

        :param objects: Iterable over a list of objects.
        """
        raise NotImplementedError(self.add_objects)

    def tree_changes(self, source, target, want_unchanged=False):
        """Find the differences between the contents of two trees

        :param object_store: Object store to use for retrieving tree contents
        :param tree: SHA1 of the root tree
        :param want_unchanged: Whether unchanged files should be reported
        :return: Iterator over tuples with
            (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
        """
        # Flattens dulwich.diff_tree.tree_changes() TreeChange objects into
        # the legacy triple-of-pairs form.
        for change in tree_changes(self, source, target,
                                   want_unchanged=want_unchanged):
            yield ((change.old.path, change.new.path),
                   (change.old.mode, change.new.mode),
                   (change.old.sha, change.new.sha))

    def iter_tree_contents(self, tree_id, include_trees=False):
        """Iterate the contents of a tree and all subtrees.

        Iteration is depth-first pre-order, as in e.g. os.walk.

        :param tree_id: SHA1 of the tree.
        :param include_trees: If True, include tree objects in the iteration.
        :return: Iterator over TreeEntry namedtuples for all the objects in a
            tree.
        """
        for entry, _ in walk_trees(self, tree_id, None):
            if not stat.S_ISDIR(entry.mode) or include_trees:
                # NOTE(review): the loop body (original lines 159-160,
                # presumably "yield entry") is missing.

    def find_missing_objects(self, haves, wants, progress=None,
        # NOTE(review): the rest of this signature (original line 162,
        # presumably "get_tagged=None):") is missing.
        """Find the missing objects required for a set of revisions.

        :param haves: Iterable over SHAs already in common.
        :param wants: Iterable over SHAs of objects to fetch.
        :param progress: Simple progress function that will be called with
            updated progress strings.
        :param get_tagged: Function that returns a dict of pointed-to sha -> tag
            sha for including tags.
        :return: Iterator over (sha, path) pairs.
        """
        finder = MissingObjectFinder(self, haves, wants, progress, get_tagged)
        # iter(callable, sentinel): call finder.next() until it returns None.
        return iter(finder.next, None)

    def find_common_revisions(self, graphwalker):
        """Find which revisions this store has in common using graphwalker.

        :param graphwalker: A graphwalker object.
        :return: List of SHAs that are in common
        """
        sha = graphwalker.next()
        # NOTE(review): original lines 181-190 are only partially present;
        # the accumulation/ack loop and the return statement are missing.
        sha = graphwalker.next()

    def get_graph_walker(self, heads):
        """Obtain a graph walker for this object store.

        :param heads: Local heads to start search with
        :return: GraphWalker object
        """
        return ObjectStoreGraphWalker(heads, lambda sha: self[sha].parents)

    def generate_pack_contents(self, have, want, progress=None):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        :param progress: Optional progress reporting method
        """
        return self.iter_shas(self.find_missing_objects(have, want, progress))

    def peel_sha(self, sha):
        """Peel all tags from a SHA.

        :param sha: The object SHA to peel.
        :return: The fully-peeled SHA1 of a tag object, after peeling all
            intermediate tags; if the original ref does not point to a tag, this
            will equal the original SHA1.
        """
        # NOTE(review): original lines 215-216 (presumably "obj = self[sha]")
        # are missing before the first use of ``obj`` below.
        obj_class = object_class(obj.type_name)
        while obj_class is Tag:
            obj_class, sha = obj.object
            # NOTE(review): the remainder of this method (original lines
            # 220-222) is missing -- likely refetching ``obj`` and returning
            # the peeled result.
class PackBasedObjectStore(BaseObjectStore):
    # NOTE(review): the class docstring and the "def __init__(self):" header
    # (original lines 225-226) are missing from this excerpt.
        self._pack_cache = None

    # NOTE(review): probably decorated "@property" (original line 229,
    # missing); the body (original lines 231-232) is also missing.
    def alternates(self):

    def contains_packed(self, sha):
        """Check if a particular object is present by SHA1 and is packed."""
        for pack in self.packs:
            # NOTE(review): the loop body and fallthrough return (original
            # lines 236-238) are missing.

    def _load_packs(self):
        raise NotImplementedError(self._load_packs)

    def _pack_cache_stale(self):
        """Check whether the pack cache is stale."""
        raise NotImplementedError(self._pack_cache_stale)

    def _add_known_pack(self, pack):
        """Add a newly appeared pack to the cache by path.
        """
        # Only track the pack if a cache already exists; an absent cache
        # will pick it up on the next reload anyway.
        if self._pack_cache is not None:
            self._pack_cache.append(pack)

    # NOTE(review): the "@property" / "def packs(self):" header (original
    # lines 254-255) is missing.
        """List with pack objects."""
        if self._pack_cache is None or self._pack_cache_stale():
            self._pack_cache = self._load_packs()
        return self._pack_cache

    def _iter_loose_objects(self):
        """Iterate over the SHAs of all loose objects."""
        raise NotImplementedError(self._iter_loose_objects)

    def _get_loose_object(self, sha):
        raise NotImplementedError(self._get_loose_object)

    def _remove_loose_object(self, sha):
        raise NotImplementedError(self._remove_loose_object)

    def pack_loose_objects(self):
        """Pack loose objects.

        :return: Number of objects packed
        """
        # NOTE(review): the initialization of ``objects`` (original line
        # 276, presumably "objects = set()") is missing.
        for sha in self._iter_loose_objects():
            objects.add((self._get_loose_object(sha), None))
        self.add_objects(list(objects))
        # Only remove the loose files once they are safely in a pack.
        for obj, path in objects:
            self._remove_loose_object(obj.id)
        # NOTE(review): the return statement (original line 282) is missing.

    # NOTE(review): the "def __iter__(self):" header (original line 284) is
    # missing.
        """Iterate over the SHAs that are present in this store."""
        iterables = self.packs + [self._iter_loose_objects()]
        return itertools.chain(*iterables)

    def contains_loose(self, sha):
        """Check if a particular object is present by SHA1 and is loose."""
        return self._get_loose_object(sha) is not None

    def get_raw(self, name):
        """Obtain the raw text for an object.

        :param name: sha for the object.
        :return: tuple with numeric type and object contents.
        """
        # NOTE(review): the branch testing for a 40-byte hex name (original
        # lines 298-299) is missing before this line.
            sha = hex_to_sha(name)
        elif len(name) == 20:
            # NOTE(review): this branch's body and the trailing "else:"
            # (original lines 303-305) are missing.
            raise AssertionError("Invalid object name %r" % name)
        for pack in self.packs:
            # NOTE(review): the "try:" line (original line 308) is missing.
                return pack.get_raw(sha)
            # NOTE(review): the KeyError handler and the hexsha guard
            # (original lines 310-312) are missing.
            hexsha = sha_to_hex(name)
        ret = self._get_loose_object(hexsha)
        # NOTE(review): the None check (original line 315) is missing.
            return ret.type_num, ret.as_raw_string()
        for alternate in self.alternates:
            # NOTE(review): the "try:" line (original line 318) is missing.
                return alternate.get_raw(hexsha)
            # NOTE(review): the KeyError handler (original lines 320-321) is
            # missing.
        raise KeyError(hexsha)

    def add_objects(self, objects):
        """Add a set of objects to this object store.

        :param objects: Iterable over objects, should support __len__.
        :return: Pack object of the objects written.
        """
        if len(objects) == 0:
            # Don't bother writing an empty pack file
            # NOTE(review): the early return (original line 332) is missing.
        f, commit = self.add_pack()
        write_pack_objects(f, objects)
        # NOTE(review): the trailing commit/return (original line 335) is
        # missing.
class DiskObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, path):
        """Open an object store.

        :param path: Path of the object store.
        """
        super(DiskObjectStore, self).__init__()
        # NOTE(review): original line 347 is missing -- presumably
        # "self.path = path", which the next line depends on.
        self.pack_dir = os.path.join(self.path, PACKDIR)
        self._pack_cache_time = 0
        self._alternates = None

    # NOTE(review): probably "@property" (original line 352) is missing.
    def alternates(self):
        # Lazily read and cache the alternate object stores listed in
        # info/alternates.
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            self._alternates.append(DiskObjectStore(path))
        return self._alternates

    def _read_alternate_paths(self):
        # NOTE(review): a "try:" (original line 362) is missing.
            f = GitFile(os.path.join(self.path, "info", "alternates"),
            # NOTE(review): the rest of this call (original line 364,
            # presumably "'rb')") is missing.
        except (OSError, IOError), e:
            if e.errno == errno.ENOENT:
                # NOTE(review): the handler body and loop setup (original
                # lines 367-370) are missing.
            for l in f.readlines():
                # NOTE(review): per-line cleanup (original lines 372-374) is
                # missing.
                if not os.path.isabs(l):
                    # NOTE(review): the remainder of this method (original
                    # lines 376-381) is missing.

    def add_alternate_path(self, path):
        """Add an alternate path to this object store.
        """
        # NOTE(review): a "try:" (original lines 384-385) is missing before
        # the mkdir, matching the EEXIST handler fragment that follows.
            os.mkdir(os.path.join(self.path, "info"))
        # NOTE(review): the "except OSError, e:" line (original line 387) is
        # missing.
            if e.errno != errno.EEXIST:
                # NOTE(review): "raise" (original line 389) is missing.
        alternates_path = os.path.join(self.path, "info/alternates")
        f = GitFile(alternates_path, 'wb')
        # NOTE(review): "try:" lines (original lines 392-393) are missing.
            orig_f = open(alternates_path, 'rb')
        except (OSError, IOError), e:
            if e.errno != errno.ENOENT:
                # NOTE(review): original lines 397-399 are missing.
            # Preserve any existing alternates before appending the new one.
            f.write(orig_f.read())
            # NOTE(review): original lines 401-402 (presumably closing
            # ``orig_f``) are missing.
            f.write("%s\n" % path)
        # NOTE(review): original lines 404-405 (presumably closing ``f``)
        # are missing.
        self.alternates.append(DiskObjectStore(path))

    def _load_packs(self):
        # NOTE(review): initialization (original lines 409-410, presumably
        # "pack_files = []" and "try:") is missing.
            self._pack_cache_time = os.stat(self.pack_dir).st_mtime
            pack_dir_contents = os.listdir(self.pack_dir)
            for name in pack_dir_contents:
                # TODO: verify that idx exists first
                if name.startswith("pack-") and name.endswith(".pack"):
                    filename = os.path.join(self.pack_dir, name)
                    # Keyed by mtime so the reverse sort below puts the
                    # newest packs first.
                    pack_files.append((os.stat(filename).st_mtime, filename))
        # NOTE(review): "except OSError, e:" (original line 418) is missing.
            if e.errno == errno.ENOENT:
                # NOTE(review): the handler body (original lines 420-421) is
                # missing.
        pack_files.sort(reverse=True)
        suffix_len = len(".pack")
        return [Pack(f[:-suffix_len]) for _, f in pack_files]

    def _pack_cache_stale(self):
        # NOTE(review): a "try:" (original line 427) is missing.
        return os.stat(self.pack_dir).st_mtime > self._pack_cache_time
        # NOTE(review): "except OSError, e:" (original line 429) is missing.
        if e.errno == errno.ENOENT:
            # NOTE(review): the handler body (original lines 431-432) is
            # missing.

    def _get_shafile_path(self, sha):
        # Check from object dir
        return hex_to_filename(self.path, sha)

    def _iter_loose_objects(self):
        for base in os.listdir(self.path):
            # NOTE(review): the filter on two-character fan-out directories
            # (original lines 440-441) is missing.
            for rest in os.listdir(os.path.join(self.path, base)):
                # NOTE(review): the yield (original line 443) is missing.

    def _get_loose_object(self, sha):
        path = self._get_shafile_path(sha)
        # NOTE(review): a "try:" (original line 447) is missing.
            return ShaFile.from_path(path)
        except (OSError, IOError), e:
            if e.errno == errno.ENOENT:
                # NOTE(review): the handler body (original lines 451-452,
                # presumably "return None" / "raise") is missing.

    def _remove_loose_object(self, sha):
        os.remove(self._get_shafile_path(sha))

    def _complete_thin_pack(self, f, path, copier, indexer):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the pack.
        :param path: Path to the pack file.
        :param copier: A PackStreamCopier to use for writing pack data.
        :param indexer: A PackIndexer for indexing the pack.
        """
        entries = list(indexer)

        # Update the header with the new number of objects.
        # NOTE(review): a seek back to the file start (original line 471) is
        # missing before rewriting the header.
        write_pack_header(f, len(entries) + len(indexer.ext_refs()))

        # Rescan the rest of the pack, computing the SHA with the new header.
        new_sha = compute_file_sha(f, end_ofs=-20)

        # NOTE(review): original lines 476-477 are missing. This loop
        # appends externally-referenced base objects to the pack.
        for ext_sha in indexer.ext_refs():
            type_num, data = self.get_raw(ext_sha)
            # NOTE(review): the capture of the write offset (original line
            # 480) is missing; ``offset`` is used two lines below.
            crc32 = write_pack_object(f, type_num, data, sha=new_sha)
            entries.append((ext_sha, offset, crc32))
        pack_sha = new_sha.digest()
        # NOTE(review): original lines 484-488 are missing -- presumably
        # writing the trailing pack SHA and closing ``f``.

        pack_base_name = os.path.join(
            self.pack_dir, 'pack-' + iter_sha1(e[0] for e in entries))
        os.rename(path, pack_base_name + '.pack')

        # NOTE(review): original lines 492-493 are missing.
        index_file = GitFile(pack_base_name + '.idx', 'wb')
        # NOTE(review): a "try:" (original line 495) is missing.
            write_pack_index_v2(index_file, entries, pack_sha)
            # NOTE(review): original lines 497-500 are missing -- presumably
            # closing the index file.

        # Add the pack to the store and return it.
        final_pack = Pack(pack_base_name)
        final_pack.check_length_and_checksum()
        self._add_known_pack(final_pack)
        # NOTE(review): "return final_pack" (original line 505) is missing.

    def add_thin_pack(self, read_all, read_some):
        """Add a new thin pack to this object store.

        Thin packs are packs that contain deltas with parents that exist outside
        the pack. They should never be placed in the object store directly, and
        always indexed and completed as they are copied.

        :param read_all: Read function that blocks until the number of requested
            bytes are read.
        :param read_some: Read function that returns at least one byte, but may
            not return the number of bytes requested.
        :return: A Pack object pointing at the now-completed thin pack in the
            objects/pack directory.
        """
        fd, path = tempfile.mkstemp(dir=self.path, prefix='tmp_pack_')
        f = os.fdopen(fd, 'w+b')
        # NOTE(review): original lines 523-524 are missing (presumably a
        # "try:" paired with cleanup).
        indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
        copier = PackStreamCopier(read_all, read_some, f,
        # NOTE(review): the rest of this call and the copy invocation
        # (original lines 527-528) are missing.
        return self._complete_thin_pack(f, path, copier, indexer)
        # NOTE(review): original lines 530-532 are missing.

    def move_in_pack(self, path):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param path: Path to the pack file.
        """
        # NOTE(review): the creation of ``p`` (original line 541, presumably
        # "p = PackData(path)") is missing.
        entries = p.sorted_entries()
        basename = os.path.join(self.pack_dir,
            "pack-%s" % iter_sha1(entry[0] for entry in entries))
        f = GitFile(basename+".idx", "wb")
        # NOTE(review): a "try:" (original line 546) is missing.
            write_pack_index_v2(f, entries, p.get_stored_checksum())
            # NOTE(review): original lines 548-550 are missing -- presumably
            # closing ``f`` and ``p``.
        os.rename(path, basename + ".pack")
        final_pack = Pack(basename)
        self._add_known_pack(final_pack)
        # NOTE(review): original lines 554-556 are missing, including the
        # "def add_pack(self):" header for the method documented below.
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
        f = os.fdopen(fd, 'wb')
        # NOTE(review): the nested "commit" closure header (original lines
        # 564-566) is missing; the following lines belong inside it.
        if os.path.getsize(path) > 0:
            return self.move_in_pack(path)
        # NOTE(review): original lines 569-573 are missing -- presumably the
        # empty-pack cleanup and the "return f, commit" statement.

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        # Loose objects are fanned out into directories named after the
        # first two hex characters of the SHA.
        dir = os.path.join(self.path, obj.id[:2])
        # NOTE(review): original lines 580-582 are missing -- presumably
        # "try: os.mkdir(dir)" with an OSError handler.
        if e.errno != errno.EEXIST:
            # NOTE(review): "raise" (original line 584) is missing.
        path = os.path.join(dir, obj.id[2:])
        if os.path.exists(path):
            return # Already there, no need to write again
        f = GitFile(path, 'wb')
        # NOTE(review): a "try:" (original line 589) is missing.
            f.write(obj.as_legacy_object())
        # NOTE(review): original lines 591-598 are missing -- including the
        # header of the classmethod the following lines belong to (probably
        # an "init(cls, path)" constructor).
        if e.errno != errno.EEXIST:
            # NOTE(review): "raise" (original line 600) is missing.
        os.mkdir(os.path.join(path, "info"))
        os.mkdir(os.path.join(path, PACKDIR))
        # NOTE(review): original lines 603-605 are missing -- presumably
        # "return cls(path)".
class MemoryObjectStore(BaseObjectStore):
    """Object store that keeps all objects in memory."""

    def __init__(self):
        super(MemoryObjectStore, self).__init__()
        # Map of hex SHA -> ShaFile object; a memory store has no packs.
        self._data = {}

    def contains_loose(self, sha):
        """Check if a particular object is present by SHA1 and is loose."""
        return sha in self._data

    def contains_packed(self, sha):
        """Check if a particular object is present by SHA1 and is packed."""
        # Nothing is ever packed in a memory store.
        return False

    def __iter__(self):
        """Iterate over the SHAs that are present in this store."""
        return self._data.iterkeys()

    @property
    def packs(self):
        """List with pack objects."""
        return []

    def get_raw(self, name):
        """Obtain the raw text for an object.

        :param name: sha for the object.
        :return: tuple with numeric type and object contents.
        """
        obj = self._data[name]
        return obj.type_num, obj.as_raw_string()

    def __getitem__(self, name):
        """Obtain an object by SHA1."""
        return self._data[name]

    def __delitem__(self, name):
        """Delete an object from this store, for testing only."""
        del self._data[name]

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        self._data[obj.id] = obj

    def add_objects(self, objects):
        """Add a set of objects to this object store.

        :param objects: Iterable over a list of (object, path) pairs.
        """
        for obj, path in objects:
            self._data[obj.id] = obj
class ObjectImporter(object):
    """Interface for importing objects."""

    def __init__(self, count):
        """Create a new ObjectImporter.

        :param count: Number of objects that's going to be imported.
        """
        self.count = count

    def add_object(self, object):
        """Add an object; subclasses must override."""
        raise NotImplementedError(self.add_object)

    def finish(self, object):
        """Finish the import and write objects to disk."""
        raise NotImplementedError(self.finish)
class ObjectIterator(object):
    """Abstract base class for things that iterate over git objects."""

    def iterobjects(self):
        """Return an iterator over objects; subclasses must override."""
        raise NotImplementedError(self.iterobjects)
class ObjectStoreIterator(ObjectIterator):
    """ObjectIterator that works on top of an ObjectStore."""

    def __init__(self, store, sha_iter):
        """Create a new ObjectIterator.

        :param store: Object store to retrieve from
        :param sha_iter: Iterator over (sha, path) tuples
        """
        self.store = store
        self.sha_iter = sha_iter
        # Cache of SHAs already pulled from sha_iter, so itershas() can be
        # traversed more than once.
        self._shas = []

    def __iter__(self):
        """Yield tuple with next object and path."""
        for sha, path in self.itershas():
            yield self.store[sha], path

    def iterobjects(self):
        """Iterate over just the objects."""
        for o, path in self:
            yield o

    def itershas(self):
        """Iterate over the SHAs."""
        # Serve cached SHAs first, then keep consuming sha_iter, caching as
        # we go.
        for sha in self._shas:
            yield sha
        for sha in self.sha_iter:
            self._shas.append(sha)
            yield sha

    def __contains__(self, needle):
        """Check if an object is present.

        :note: This checks if the object is present in
            the underlying object store, not if it would
            be yielded by the iterator.

        :param needle: SHA1 of the object to check for
        """
        return needle in self.store

    def __getitem__(self, key):
        """Find an object by SHA1.

        :note: This retrieves the object from the underlying
            object store. It will also succeed if the object would
            not be returned by the iterator.
        """
        return self.store[key]

    def __len__(self):
        """Return the number of objects."""
        return len(list(self.itershas()))
def tree_lookup_path(lookup_obj, root_sha, path):
    """Look up an object in a Git tree.

    :param lookup_obj: Callback for retrieving object by SHA1
    :param root_sha: SHA1 of the root tree
    :param path: Path to lookup
    :return: A tuple of (mode, SHA) of the resulting path.
    :raises NotTreeError: If the object at ``root_sha`` is not a tree.
    """
    tree = lookup_obj(root_sha)
    if not isinstance(tree, Tree):
        raise NotTreeError(root_sha)
    # The tree object performs the per-component traversal itself.
    return tree.lookup_path(lookup_obj, path)
class MissingObjectFinder(object):
    """Find the objects missing from another object store.

    :param object_store: Object store containing at least all objects to be
        sent
    :param haves: SHA1s of commits not to send (already present in target)
    :param wants: SHA1s of commits to send
    :param progress: Optional function to report progress to.
    :param get_tagged: Function that returns a dict of pointed-to sha -> tag
        sha for including tags.
    :param tagged: dict of pointed-to sha -> tag sha for including tags
    """

    def __init__(self, object_store, haves, wants, progress=None,
        # NOTE(review): the remainder of this signature (original lines
        # 771-772, presumably "get_tagged=None):") is missing.
        self.sha_done = haves
        self.objects_to_send = set([(w, None, False) for w in wants
        # NOTE(review): the tail of this comprehension (original line 775)
        # is missing -- presumably filtering out already-present SHAs.
        self.object_store = object_store
        # NOTE(review): the "if progress is None:" guard (original line 777)
        # is missing.
            self.progress = lambda x: None
        # NOTE(review): the "else:" (original line 779) is missing.
            self.progress = progress
        # and-or idiom: empty dict when get_tagged was not supplied.
        self._tagged = get_tagged and get_tagged() or {}

    def add_todo(self, entries):
        # Queue only entries whose SHA has not already been processed.
        self.objects_to_send.update([e for e in entries
            if not e[0] in self.sha_done])

    def parse_tree(self, tree):
        # Queue each tree entry; non-directory entries are marked as leaves.
        # Submodule (gitlink) entries are skipped -- their objects live in a
        # different repository.
        self.add_todo([(sha, name, not stat.S_ISDIR(mode))
            for name, mode, sha in tree.iteritems()
            if not S_ISGITLINK(mode)])

    def parse_commit(self, commit):
        # Queue the root tree and all parent commits.
        self.add_todo([(commit.tree, "", False)])
        self.add_todo([(p, None, False) for p in commit.parents])

    def parse_tag(self, tag):
        # Queue the object the tag points at.
        self.add_todo([(tag.object[1], None, False)])

    # NOTE(review): the "def next(self):" header (original lines 799-800)
    # is missing.
        if not self.objects_to_send:
            # NOTE(review): "return None" (original line 802) is missing.
        (sha, name, leaf) = self.objects_to_send.pop()
        if sha not in self.sha_done:
            # NOTE(review): original lines 805-806 are missing here.
            o = self.object_store[sha]
            if isinstance(o, Commit):
                # NOTE(review): the branch bodies (original lines 809, 811
                # and 813) are missing -- presumably the parse_* dispatch
                # calls.
            elif isinstance(o, Tree):
            elif isinstance(o, Tag):
            if sha in self._tagged:
                # Also send the tag object pointing at this SHA.
                self.add_todo([(self._tagged[sha], None, True)])
            self.sha_done.add(sha)
            self.progress("counting objects: %d\r" % len(self.sha_done))
        # NOTE(review): the final return (original line 818, presumably
        # "return (sha, name)") is missing.
class ObjectStoreGraphWalker(object):
    """Graph walker that finds what commits are missing from an object store.

    :ivar heads: Revisions without descendants in the local repo
    :ivar get_parents: Function to retrieve parents in the local repo
    """

    def __init__(self, local_heads, get_parents):
        """Create a new instance.

        :param local_heads: Heads to start search with
        :param get_parents: Function for finding the parents of a SHA1.
        """
        self.heads = set(local_heads)
        self.get_parents = get_parents
        # NOTE(review): original line 836 is missing -- ``self.parents`` is
        # used below, so presumably "self.parents = {}".

    # NOTE(review): the "def ack(self, sha):" header (original line 838) is
    # missing.
        """Ack that a revision and its ancestors are present in the source."""
        ancestors = set([sha])

        # NOTE(review): original lines 841 and 843-847 (the surrounding
        # loop over ancestors/heads) are missing.
        # stop if we run out of heads to remove

            # collect all ancestors
            new_ancestors = set()
            # NOTE(review): the "for" header (original line 850) is missing.
                ps = self.parents.get(a)
                # NOTE(review): the guard on ``ps`` (original line 852) is
                # missing.
                    new_ancestors.update(ps)
                # Mark this SHA as acked; its parent list is not needed
                # any more.
                self.parents[a] = None

            # no more ancestors; stop
            if not new_ancestors:
                # NOTE(review): the loop exit (original lines 858-859) is
                # missing.
            ancestors = new_ancestors

    # NOTE(review): the "def next(self):" header (original line 862) is
    # missing.
        """Iterate over ancestors of heads in the target."""
        # NOTE(review): the "if self.heads:" guard (original line 864) is
        # missing.
        ret = self.heads.pop()
        ps = self.get_parents(ret)
        self.parents[ret] = ps
        # Parents become new heads unless they were already visited.
        self.heads.update([p for p in ps if not p in self.parents])
        # NOTE(review): the "return ret" (original line 869) is missing.