1 # object_store.py -- Object store for git objects
2 # Copyright (C) 2008-2013 Jelmer Vernooij <jelmer@samba.org>
5 # This program is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU General Public License
7 # as published by the Free Software Foundation; either version 2
8 # or (at your option) a later version of the License.
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
21 """Git object store interfaces and implementation."""
24 from io import BytesIO
31 from dulwich.diff_tree import (
35 from dulwich.errors import (
38 from dulwich.file import GitFile
39 from dulwich.objects import (
51 from dulwich.pack import (
69 class BaseObjectStore(object):
70 """Object store interface."""
72 def determine_wants_all(self, refs):
73 return [sha for (ref, sha) in refs.iteritems()
74 if not sha in self and not ref.endswith("^{}") and
def iter_shas(self, shas):
    """Iterate over the objects for the specified shas.

    :param shas: Iterable object with SHAs
    :return: Object iterator
    """
    return ObjectStoreIterator(self, shas)
def contains_loose(self, sha):
    """Check if a particular object is present by SHA1 and is loose.

    Abstract; concrete object stores must override this.
    """
    raise NotImplementedError(self.contains_loose)
def contains_packed(self, sha):
    """Check if a particular object is present by SHA1 and is packed.

    Abstract; concrete object stores must override this.
    """
    raise NotImplementedError(self.contains_packed)
def __contains__(self, sha):
    """Check if a particular object is present by SHA1.

    This method makes no distinction between loose and packed objects.
    """
    return self.contains_packed(sha) or self.contains_loose(sha)
102 """Iterable of pack objects."""
103 raise NotImplementedError
def get_raw(self, name):
    """Obtain the raw text for an object.

    :param name: sha for the object.
    :return: tuple with numeric type and object contents.
    """
    raise NotImplementedError(self.get_raw)
def __getitem__(self, sha):
    """Obtain an object by SHA1."""
    # Fetch the raw representation, then deserialize it into a ShaFile.
    raw = self.get_raw(sha)
    return ShaFile.from_raw_string(raw[0], raw[1], sha=sha)
119 """Iterate over the SHAs that are present in this store."""
120 raise NotImplementedError(self.__iter__)
def add_object(self, obj):
    """Add a single object to this object store.

    :param obj: Object to add
    """
    raise NotImplementedError(self.add_object)
def add_objects(self, objects):
    """Add a set of objects to this object store.

    :param objects: Iterable over a list of objects.
    """
    raise NotImplementedError(self.add_objects)
def tree_changes(self, source, target, want_unchanged=False):
    """Find the differences between the contents of two trees.

    :param source: SHA1 of the source tree
    :param target: SHA1 of the target tree
    :param want_unchanged: Whether unchanged files should be reported
    :return: Iterator over tuples with
        (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
    """
    for change in tree_changes(self, source, target,
                               want_unchanged=want_unchanged):
        yield ((change.old.path, change.new.path),
               (change.old.mode, change.new.mode),
               (change.old.sha, change.new.sha))
150 def iter_tree_contents(self, tree_id, include_trees=False):
151 """Iterate the contents of a tree and all subtrees.
153 Iteration is depth-first pre-order, as in e.g. os.walk.
155 :param tree_id: SHA1 of the tree.
156 :param include_trees: If True, include tree objects in the iteration.
157 :return: Iterator over TreeEntry namedtuples for all the objects in a
160 for entry, _ in walk_trees(self, tree_id, None):
161 if not stat.S_ISDIR(entry.mode) or include_trees:
164 def find_missing_objects(self, haves, wants, progress=None,
166 get_parents=lambda commit: commit.parents):
167 """Find the missing objects required for a set of revisions.
169 :param haves: Iterable over SHAs already in common.
170 :param wants: Iterable over SHAs of objects to fetch.
171 :param progress: Simple progress function that will be called with
172 updated progress strings.
173 :param get_tagged: Function that returns a dict of pointed-to sha -> tag
174 sha for including tags.
175 :param get_parents: Optional function for getting the parents of a commit.
176 :return: Iterator over (sha, path) pairs.
178 finder = MissingObjectFinder(self, haves, wants, progress, get_tagged, get_parents=get_parents)
179 return iter(finder.next, None)
181 def find_common_revisions(self, graphwalker):
182 """Find which revisions this store has in common using graphwalker.
184 :param graphwalker: A graphwalker object.
185 :return: List of SHAs that are in common
188 sha = next(graphwalker)
193 sha = next(graphwalker)
def generate_pack_contents(self, have, want, progress=None):
    """Iterate over the contents of a pack file.

    :param have: List of SHA1s of objects that should not be sent
    :param want: List of SHA1s of objects that should be sent
    :param progress: Optional progress reporting method
    """
    return self.iter_shas(self.find_missing_objects(have, want, progress))
205 def peel_sha(self, sha):
206 """Peel all tags from a SHA.
208 :param sha: The object SHA to peel.
209 :return: The fully-peeled SHA1 of a tag object, after peeling all
210 intermediate tags; if the original ref does not point to a tag, this
211 will equal the original SHA1.
214 obj_class = object_class(obj.type_name)
215 while obj_class is Tag:
216 obj_class, sha = obj.object
220 def _collect_ancestors(self, heads, common=set(),
221 get_parents=lambda commit: commit.parents):
222 """Collect all ancestors of heads up to (excluding) those in common.
224 :param heads: commits to start from
225 :param common: commits to end at, or empty set to walk repository
227 :param get_parents: Optional function for getting the parents of a commit.
228 :return: a tuple (A, B) where A - all commits reachable
229 from heads but not present in common, B - common (shared) elements
230 that are directly reachable from heads
240 elif e not in commits:
243 queue.extend(get_parents(cmt))
244 return (commits, bases)
247 """Close any files opened by this object store."""
248 # Default implementation is a NO-OP
251 class PackBasedObjectStore(BaseObjectStore):
254 self._pack_cache = None
257 def alternates(self):
260 def contains_packed(self, sha):
261 """Check if a particular object is present by SHA1 and is packed.
263 This does not check alternates.
265 for pack in self.packs:
270 def __contains__(self, sha):
271 """Check if a particular object is present by SHA1.
273 This method makes no distinction between loose and packed objects.
275 if self.contains_packed(sha) or self.contains_loose(sha):
277 for alternate in self.alternates:
def _load_packs(self):
    """Load the available packs; abstract, must be overridden."""
    raise NotImplementedError(self._load_packs)
def _pack_cache_stale(self):
    """Check whether the pack cache is stale.

    Abstract; subclasses decide when cached packs must be reloaded.
    """
    raise NotImplementedError(self._pack_cache_stale)
def _add_known_pack(self, pack):
    """Add a newly appeared pack to the cache by path.

    :param pack: Pack object to register with the cache
    """
    # Only record the pack if a cache has already been populated;
    # otherwise the next cache load will pick it up anyway.
    if self._pack_cache is not None:
        self._pack_cache.append(pack)
297 pack_cache = self._pack_cache
298 self._pack_cache = None
300 pack = pack_cache.pop()
305 """List with pack objects."""
306 if self._pack_cache is None or self._pack_cache_stale():
307 self._pack_cache = self._load_packs()
308 return self._pack_cache
def _iter_alternate_objects(self):
    """Iterate over the SHAs of all the objects in alternate stores."""
    for store in self.alternates:
        for sha in store:
            yield sha
def _iter_loose_objects(self):
    """Iterate over the SHAs of all loose objects.

    Abstract; concrete stores must override this.
    """
    raise NotImplementedError(self._iter_loose_objects)
def _get_loose_object(self, sha):
    """Return the loose object for *sha*; abstract, must be overridden."""
    raise NotImplementedError(self._get_loose_object)
def _remove_loose_object(self, sha):
    """Remove the loose object for *sha*; abstract, must be overridden."""
    raise NotImplementedError(self._remove_loose_object)
326 def pack_loose_objects(self):
327 """Pack loose objects.
329 :return: Number of objects packed
332 for sha in self._iter_loose_objects():
333 objects.add((self._get_loose_object(sha), None))
334 self.add_objects(list(objects))
335 for obj, path in objects:
336 self._remove_loose_object(obj.id)
340 """Iterate over the SHAs that are present in this store."""
341 iterables = self.packs + [self._iter_loose_objects()] + [self._iter_alternate_objects()]
342 return itertools.chain(*iterables)
def contains_loose(self, sha):
    """Check if a particular object is present by SHA1 and is loose.

    This does not check alternates.
    """
    return self._get_loose_object(sha) is not None
351 def get_raw(self, name):
352 """Obtain the raw text for an object.
354 :param name: sha for the object.
355 :return: tuple with numeric type and object contents.
358 sha = hex_to_sha(name)
360 elif len(name) == 20:
364 raise AssertionError("Invalid object name %r" % name)
365 for pack in self.packs:
367 return pack.get_raw(sha)
371 hexsha = sha_to_hex(name)
372 ret = self._get_loose_object(hexsha)
374 return ret.type_num, ret.as_raw_string()
375 for alternate in self.alternates:
377 return alternate.get_raw(hexsha)
380 raise KeyError(hexsha)
382 def add_objects(self, objects):
383 """Add a set of objects to this object store.
385 :param objects: Iterable over objects, should support __len__.
386 :return: Pack object of the objects written.
388 if len(objects) == 0:
389 # Don't bother writing an empty pack file
391 f, commit, abort = self.add_pack()
393 write_pack_objects(f, objects)
401 class DiskObjectStore(PackBasedObjectStore):
402 """Git-style object store that exists on disk."""
404 def __init__(self, path):
405 """Open an object store.
407 :param path: Path of the object store.
409 super(DiskObjectStore, self).__init__()
411 self.pack_dir = os.path.join(self.path, PACKDIR)
412 self._pack_cache_time = 0
413 self._alternates = None
416 return "<%s(%r)>" % (self.__class__.__name__, self.path)
def alternates(self):
    """List of alternate object stores for this store.

    Built lazily on first access from the alternate paths on disk;
    subsequent accesses return the cached list.
    """
    if self._alternates is not None:
        return self._alternates
    self._alternates = []
    for path in self._read_alternate_paths():
        self._alternates.append(DiskObjectStore(path))
    return self._alternates
427 def _read_alternate_paths(self):
429 f = GitFile(os.path.join(self.path, "info", "alternates"),
431 except (OSError, IOError) as e:
432 if e.errno == errno.ENOENT:
437 for l in f.readlines():
444 ret.append(os.path.join(self.path, l))
449 def add_alternate_path(self, path):
450 """Add an alternate path to this object store.
453 os.mkdir(os.path.join(self.path, "info"))
455 if e.errno != errno.EEXIST:
457 alternates_path = os.path.join(self.path, "info/alternates")
458 f = GitFile(alternates_path, 'wb')
461 orig_f = open(alternates_path, 'rb')
462 except (OSError, IOError) as e:
463 if e.errno != errno.ENOENT:
467 f.write(orig_f.read())
470 f.write("%s\n" % path)
474 if not os.path.isabs(path):
475 path = os.path.join(self.path, path)
476 self.alternates.append(DiskObjectStore(path))
478 def _load_packs(self):
481 self._pack_cache_time = os.stat(self.pack_dir).st_mtime
482 pack_dir_contents = os.listdir(self.pack_dir)
483 for name in pack_dir_contents:
484 # TODO: verify that idx exists first
485 if name.startswith("pack-") and name.endswith(".pack"):
486 filename = os.path.join(self.pack_dir, name)
487 pack_files.append((os.stat(filename).st_mtime, filename))
489 if e.errno == errno.ENOENT:
492 pack_files.sort(reverse=True)
493 suffix_len = len(".pack")
496 for _, f in pack_files:
497 result.append(Pack(f[:-suffix_len]))
504 def _pack_cache_stale(self):
506 return os.stat(self.pack_dir).st_mtime > self._pack_cache_time
508 if e.errno == errno.ENOENT:
def _get_shafile_path(self, sha):
    """Return the on-disk path for the loose object with hex SHA *sha*."""
    # Check from object dir
    return hex_to_filename(self.path, sha)
516 def _iter_loose_objects(self):
517 for base in os.listdir(self.path):
520 for rest in os.listdir(os.path.join(self.path, base)):
523 def _get_loose_object(self, sha):
524 path = self._get_shafile_path(sha)
526 return ShaFile.from_path(path)
527 except (OSError, IOError) as e:
528 if e.errno == errno.ENOENT:
def _remove_loose_object(self, sha):
    """Delete the loose object file for *sha* from disk."""
    path = self._get_shafile_path(sha)
    os.remove(path)
535 def _complete_thin_pack(self, f, path, copier, indexer):
536 """Move a specific file containing a pack into the pack directory.
538 :note: The file should be on the same file system as the
541 :param f: Open file object for the pack.
542 :param path: Path to the pack file.
543 :param copier: A PackStreamCopier to use for writing pack data.
544 :param indexer: A PackIndexer for indexing the pack.
546 entries = list(indexer)
548 # Update the header with the new number of objects.
550 write_pack_header(f, len(entries) + len(indexer.ext_refs()))
552 # Must flush before reading (http://bugs.python.org/issue3207)
555 # Rescan the rest of the pack, computing the SHA with the new header.
556 new_sha = compute_file_sha(f, end_ofs=-20)
558 # Must reposition before writing (http://bugs.python.org/issue3207)
559 f.seek(0, os.SEEK_CUR)
562 for ext_sha in indexer.ext_refs():
563 assert len(ext_sha) == 20
564 type_num, data = self.get_raw(ext_sha)
566 crc32 = write_pack_object(f, type_num, data, sha=new_sha)
567 entries.append((ext_sha, offset, crc32))
568 pack_sha = new_sha.digest()
574 pack_base_name = os.path.join(
575 self.pack_dir, 'pack-' + iter_sha1(e[0] for e in entries))
576 os.rename(path, pack_base_name + '.pack')
579 index_file = GitFile(pack_base_name + '.idx', 'wb')
581 write_pack_index_v2(index_file, entries, pack_sha)
586 # Add the pack to the store and return it.
587 final_pack = Pack(pack_base_name)
588 final_pack.check_length_and_checksum()
589 self._add_known_pack(final_pack)
592 def add_thin_pack(self, read_all, read_some):
593 """Add a new thin pack to this object store.
595 Thin packs are packs that contain deltas with parents that exist outside
596 the pack. They should never be placed in the object store directly, and
597 always indexed and completed as they are copied.
599 :param read_all: Read function that blocks until the number of requested
601 :param read_some: Read function that returns at least one byte, but may
602 not return the number of bytes requested.
603 :return: A Pack object pointing at the now-completed thin pack in the
604 objects/pack directory.
606 fd, path = tempfile.mkstemp(dir=self.path, prefix='tmp_pack_')
607 f = os.fdopen(fd, 'w+b')
610 indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
611 copier = PackStreamCopier(read_all, read_some, f,
614 return self._complete_thin_pack(f, path, copier, indexer)
618 def move_in_pack(self, path):
619 """Move a specific file containing a pack into the pack directory.
621 :note: The file should be on the same file system as the
624 :param path: Path to the pack file.
628 entries = p.sorted_entries()
629 basename = os.path.join(self.pack_dir,
630 "pack-%s" % iter_sha1(entry[0] for entry in entries))
631 f = GitFile(basename+".idx", "wb")
633 write_pack_index_v2(f, entries, p.get_stored_checksum())
638 os.rename(path, basename + ".pack")
639 final_pack = Pack(basename)
640 self._add_known_pack(final_pack)
644 """Add a new pack to this object store.
646 :return: Fileobject to write to, a commit function to
647 call when the pack is finished and an abort
650 fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
651 f = os.fdopen(fd, 'wb')
655 if os.path.getsize(path) > 0:
656 return self.move_in_pack(path)
663 return f, commit, abort
665 def add_object(self, obj):
666 """Add a single object to this object store.
668 :param obj: Object to add
670 dir = os.path.join(self.path, obj.id[:2])
674 if e.errno != errno.EEXIST:
676 path = os.path.join(dir, obj.id[2:])
677 if os.path.exists(path):
678 return # Already there, no need to write again
679 f = GitFile(path, 'wb')
681 f.write(obj.as_legacy_object())
690 if e.errno != errno.EEXIST:
692 os.mkdir(os.path.join(path, "info"))
693 os.mkdir(os.path.join(path, PACKDIR))
697 class MemoryObjectStore(BaseObjectStore):
698 """Object store that keeps all objects in memory."""
701 super(MemoryObjectStore, self).__init__()
704 def _to_hexsha(self, sha):
708 return sha_to_hex(sha)
710 raise ValueError("Invalid sha %r" % (sha,))
def contains_loose(self, sha):
    """Check if a particular object is present by SHA1 and is loose."""
    hexsha = self._to_hexsha(sha)
    return hexsha in self._data
716 def contains_packed(self, sha):
717 """Check if a particular object is present by SHA1 and is packed."""
721 """Iterate over the SHAs that are present in this store."""
722 return self._data.iterkeys()
726 """List with pack objects."""
def get_raw(self, name):
    """Obtain the raw text for an object.

    :param name: sha for the object.
    :return: tuple with numeric type and object contents.
    """
    obj = self[self._to_hexsha(name)]
    return obj.type_num, obj.as_raw_string()
def __getitem__(self, name):
    """Retrieve the stored object identified by *name*."""
    key = self._to_hexsha(name)
    return self._data[key]
def __delitem__(self, name):
    """Delete an object from this store, for testing only."""
    key = self._to_hexsha(name)
    del self._data[key]
def add_object(self, obj):
    """Add a single object to this object store.

    :param obj: Object to add
    """
    self._data[obj.id] = obj
def add_objects(self, objects):
    """Add a set of objects to this object store.

    :param objects: Iterable over a list of objects.
    """
    # Paths accompanying the objects are irrelevant to an in-memory store.
    for obj, path in objects:
        self._data[obj.id] = obj
760 """Add a new pack to this object store.
762 Because this object store doesn't support packs, we extract and add the
765 :return: Fileobject to write to and a commit function to
766 call when the pack is finished.
770 p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
772 for obj in PackInflater.for_pack_data(p):
773 self._data[obj.id] = obj
776 return f, commit, abort
778 def _complete_thin_pack(self, f, indexer):
779 """Complete a thin pack by adding external references.
781 :param f: Open file object for the pack.
782 :param indexer: A PackIndexer for indexing the pack.
784 entries = list(indexer)
786 # Update the header with the new number of objects.
788 write_pack_header(f, len(entries) + len(indexer.ext_refs()))
790 # Rescan the rest of the pack, computing the SHA with the new header.
791 new_sha = compute_file_sha(f, end_ofs=-20)
794 for ext_sha in indexer.ext_refs():
795 assert len(ext_sha) == 20
796 type_num, data = self.get_raw(ext_sha)
797 write_pack_object(f, type_num, data, sha=new_sha)
798 pack_sha = new_sha.digest()
801 def add_thin_pack(self, read_all, read_some):
802 """Add a new thin pack to this object store.
804 Thin packs are packs that contain deltas with parents that exist outside
805 the pack. Because this object store doesn't support packs, we extract
806 and add the individual objects.
808 :param read_all: Read function that blocks until the number of requested
810 :param read_some: Read function that returns at least one byte, but may
811 not return the number of bytes requested.
813 f, commit, abort = self.add_pack()
815 indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
816 copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer)
818 self._complete_thin_pack(f, indexer)
826 class ObjectImporter(object):
827 """Interface for importing objects."""
829 def __init__(self, count):
830 """Create a new ObjectImporter.
832 :param count: Number of objects that's going to be imported.
def add_object(self, object):
    """Add an object; abstract, must be overridden."""
    raise NotImplementedError(self.add_object)
def finish(self, object):
    """Finish the import and write objects to disk."""
    raise NotImplementedError(self.finish)
class ObjectIterator(object):
    """Interface for iterating over objects."""

    def iterobjects(self):
        """Iterate over the objects; abstract, must be overridden."""
        raise NotImplementedError(self.iterobjects)
852 class ObjectStoreIterator(ObjectIterator):
853 """ObjectIterator that works on top of an ObjectStore."""
855 def __init__(self, store, sha_iter):
856 """Create a new ObjectIterator.
858 :param store: Object store to retrieve from
859 :param sha_iter: Iterator over (sha, path) tuples
862 self.sha_iter = sha_iter
866 """Yield tuple with next object and path."""
867 for sha, path in self.itershas():
868 yield self.store[sha], path
870 def iterobjects(self):
871 """Iterate over just the objects."""
876 """Iterate over the SHAs."""
877 for sha in self._shas:
879 for sha in self.sha_iter:
880 self._shas.append(sha)
def __contains__(self, needle):
    """Check if an object is present.

    :note: This checks if the object is present in
        the underlying object store, not if it would
        be yielded by the iterator.

    :param needle: SHA1 of the object to check for
    """
    return needle in self.store
def __getitem__(self, key):
    """Find an object by SHA1.

    :note: This retrieves the object from the underlying
        object store. It will also succeed if the object would
        not be returned by the iterator.
    """
    return self.store[key]
904 """Return the number of objects."""
905 return len(list(self.itershas()))
def tree_lookup_path(lookup_obj, root_sha, path):
    """Look up an object in a Git tree.

    :param lookup_obj: Callback for retrieving object by SHA1
    :param root_sha: SHA1 of the root tree
    :param path: Path to lookup
    :return: A tuple of (mode, SHA) of the resulting path.
    :raises NotTreeError: If the root object is not a tree.
    """
    tree = lookup_obj(root_sha)
    if not isinstance(tree, Tree):
        raise NotTreeError(root_sha)
    return tree.lookup_path(lookup_obj, path)
922 def _collect_filetree_revs(obj_store, tree_sha, kset):
923 """Collect SHA1s of files and directories for specified tree.
925 :param obj_store: Object store to get objects by SHA from
926 :param tree_sha: tree reference to walk
927 :param kset: set to fill with references to files and directories
929 filetree = obj_store[tree_sha]
930 for name, mode, sha in filetree.iteritems():
931 if not S_ISGITLINK(mode) and sha not in kset:
933 if stat.S_ISDIR(mode):
934 _collect_filetree_revs(obj_store, sha, kset)
937 def _split_commits_and_tags(obj_store, lst, ignore_unknown=False):
938 """Split object id list into two list with commit SHA1s and tag SHA1s.
940 Commits referenced by tags are included into commits
941 list as well. Only SHA1s known in this repository will get
942 through, and unless ignore_unknown argument is True, KeyError
943 is thrown for SHA1 missing in the repository
945 :param obj_store: Object store to get objects by SHA1 from
946 :param lst: Collection of commit and tag SHAs
947 :param ignore_unknown: True to skip SHA1 missing in the repository
949 :return: A tuple of (commits, tags) SHA1s
957 if not ignore_unknown:
960 if isinstance(o, Commit):
962 elif isinstance(o, Tag):
964 commits.add(o.object[1])
966 raise KeyError('Not a commit or a tag: %s' % e)
967 return (commits, tags)
970 class MissingObjectFinder(object):
971 """Find the objects missing from another object store.
973 :param object_store: Object store containing at least all objects to be
975 :param haves: SHA1s of commits not to send (already present in target)
976 :param wants: SHA1s of commits to send
977 :param progress: Optional function to report progress to.
978 :param get_tagged: Function that returns a dict of pointed-to sha -> tag
979 sha for including tags.
980 :param get_parents: Optional function for getting the parents of a commit.
981 :param tagged: dict of pointed-to sha -> tag sha for including tags
984 def __init__(self, object_store, haves, wants, progress=None,
985 get_tagged=None, get_parents=lambda commit: commit.parents):
986 self.object_store = object_store
987 self._get_parents = get_parents
988 # process Commits and Tags differently
989 # Note, while haves may list commits/tags not available locally,
990 # and such SHAs would get filtered out by _split_commits_and_tags,
991 # wants shall list only known SHAs, and otherwise
992 # _split_commits_and_tags fails with KeyError
993 have_commits, have_tags = \
994 _split_commits_and_tags(object_store, haves, True)
995 want_commits, want_tags = \
996 _split_commits_and_tags(object_store, wants, False)
997 # all_ancestors is a set of commits that shall not be sent
998 # (complete repository up to 'haves')
999 all_ancestors = object_store._collect_ancestors(
1001 get_parents=self._get_parents)[0]
1002 # all_missing - complete set of commits between haves and wants
1003 # common - commits from all_ancestors we hit into while
1004 # traversing parent hierarchy of wants
1005 missing_commits, common_commits = object_store._collect_ancestors(
1008 get_parents=self._get_parents);
1009 self.sha_done = set()
1010 # Now, fill sha_done with commits and revisions of
1011 # files and directories known to be both locally
1012 # and on target. Thus these commits and files
1013 # won't get selected for fetch
1014 for h in common_commits:
1015 self.sha_done.add(h)
1016 cmt = object_store[h]
1017 _collect_filetree_revs(object_store, cmt.tree, self.sha_done)
1018 # record tags we have as visited, too
1020 self.sha_done.add(t)
1022 missing_tags = want_tags.difference(have_tags)
1023 # in fact, what we 'want' is commits and tags
1024 # we've found missing
1025 wants = missing_commits.union(missing_tags)
1027 self.objects_to_send = set([(w, None, False) for w in wants])
1029 if progress is None:
1030 self.progress = lambda x: None
1032 self.progress = progress
1033 self._tagged = get_tagged and get_tagged() or {}
def add_todo(self, entries):
    """Queue entries whose SHA has not been processed yet."""
    pending = (entry for entry in entries
               if entry[0] not in self.sha_done)
    self.objects_to_send.update(pending)
1041 if not self.objects_to_send:
1043 (sha, name, leaf) = self.objects_to_send.pop()
1044 if sha not in self.sha_done:
1047 o = self.object_store[sha]
1048 if isinstance(o, Commit):
1049 self.add_todo([(o.tree, "", False)])
1050 elif isinstance(o, Tree):
1051 self.add_todo([(s, n, not stat.S_ISDIR(m))
1052 for n, m, s in o.iteritems()
1053 if not S_ISGITLINK(m)])
1054 elif isinstance(o, Tag):
1055 self.add_todo([(o.object[1], None, False)])
1056 if sha in self._tagged:
1057 self.add_todo([(self._tagged[sha], None, True)])
1058 self.sha_done.add(sha)
1059 self.progress("counting objects: %d\r" % len(self.sha_done))
1063 class ObjectStoreGraphWalker(object):
1064 """Graph walker that finds what commits are missing from an object store.
1066 :ivar heads: Revisions without descendants in the local repo
1067 :ivar get_parents: Function to retrieve parents in the local repo
1070 def __init__(self, local_heads, get_parents):
1071 """Create a new instance.
1073 :param local_heads: Heads to start search with
1074 :param get_parents: Function for finding the parents of a SHA1.
1076 self.heads = set(local_heads)
1077 self.get_parents = get_parents
1081 """Ack that a revision and its ancestors are present in the source."""
1082 ancestors = set([sha])
1084 # stop if we run out of heads to remove
1088 self.heads.remove(a)
1090 # collect all ancestors
1091 new_ancestors = set()
1093 ps = self.parents.get(a)
1095 new_ancestors.update(ps)
1096 self.parents[a] = None
1098 # no more ancestors; stop
1099 if not new_ancestors:
1102 ancestors = new_ancestors
1105 """Iterate over ancestors of heads in the target."""
1107 ret = self.heads.pop()
1108 ps = self.get_parents(ret)
1109 self.parents[ret] = ps
1110 self.heads.update([p for p in ps if not p in self.parents])