fc91a59c3b077aff790d8dfae7a57cce188f9b8b
[jelmer/subvertpy.git] / fetch.py
1 # Copyright (C) 2005-2007 Jelmer Vernooij <jelmer@samba.org>
2
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 3 of the License, or
6 # (at your option) any later version.
7
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 # GNU General Public License for more details.
12
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software
15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16 """Fetching revisions from Subversion repositories in batches."""
17
18 import bzrlib
19 from bzrlib import osutils, ui, urlutils
20 from bzrlib.inventory import Inventory
21 from bzrlib.revision import Revision, NULL_REVISION
22 from bzrlib.repository import InterRepository
23 from bzrlib.trace import mutter
24
25 from cStringIO import StringIO
26 import md5
27
28 from svn.core import Pool
29 import svn.core
30
31 from bzrlib.plugins.svn import properties
32 from bzrlib.plugins.svn.errors import InvalidFileName
33 from bzrlib.plugins.svn.logwalker import lazy_dict
34 from bzrlib.plugins.svn.mapping import (SVN_PROP_BZR_MERGE, 
35                      SVN_PROP_BZR_PREFIX, SVN_PROP_BZR_REVISION_INFO, 
36                      SVN_PROP_BZR_REVISION_ID,
37                      SVN_PROP_BZR_FILEIDS, SVN_REVPROP_BZR_SIGNATURE,
38                      parse_merge_property,
39                      parse_revision_metadata)
40 from bzrlib.plugins.svn.repository import SvnRepository, SvnRepositoryFormat
41 from bzrlib.plugins.svn.svk import SVN_PROP_SVK_MERGE
42 from bzrlib.plugins.svn.tree import (apply_txdelta_handler, parse_externals_description, 
43                   inventory_add_external)
44
45
46 def _escape_commit_message(message):
47     """Replace xml-incompatible control characters."""
48     if message is None:
49         return None
50     import re
51     # FIXME: RBC 20060419 this should be done by the revision
52     # serialiser not by commit. Then we can also add an unescaper
53     # in the deserializer and start roundtripping revision messages
54     # precisely. See repository_implementations/test_repository.py
55     
56     # Python strings can include characters that can't be
57     # represented in well-formed XML; escape characters that
58     # aren't listed in the XML specification
59     # (http://www.w3.org/TR/REC-xml/#NT-Char).
60     message, _ = re.subn(
61         u'[^\x09\x0A\x0D\u0020-\uD7FF\uE000-\uFFFD]+',
62         lambda match: match.group(0).encode('unicode_escape'),
63         message)
64     return message
65
66
def md5_strings(strings):
    """Return the MD5sum of the concatenation of strings.

    :param strings: Strings to find the MD5sum of.
    :return: MD5sum as a hexadecimal string.
    """
    # hashlib replaces the deprecated md5 module; an explicit loop is
    # used rather than map() for its side effect, which would silently
    # do nothing under a lazy map implementation.
    import hashlib
    s = hashlib.md5()
    for string in strings:
        s.update(string)
    return s.hexdigest()
76
77
def check_filename(path):
    """Check that a path does not contain invalid characters.

    :param path: Path to check, as a unicode string.
    :raises InvalidFileName: if the path contains a backslash.
    """
    assert isinstance(path, unicode)
    # Backslashes are not valid in Bazaar file names.
    if path.find(u"\\") != -1:
        raise InvalidFileName(path)
87
88
89 class RevisionBuildEditor(svn.delta.Editor):
90     """Implementation of the Subversion commit editor interface that builds a 
91     Bazaar revision.
92     """
93     def __init__(self, source, target):
94         self.target = target
95         self.source = source
96         self.transact = target.get_transaction()
97
98     def start_revision(self, revid, prev_inventory, revmeta):
99         self.revid = revid
100         (self.branch_path, self.revnum, self.mapping) = self.source.lookup_revision_id(revid)
101         self.revmeta = revmeta
102         self._id_map = None
103         self.dir_baserev = {}
104         self._revinfo = None
105         self._premature_deletes = set()
106         self.pool = Pool()
107         self.old_inventory = prev_inventory
108         self.inventory = prev_inventory.copy()
109         self._start_revision()
110
111     def _get_id_map(self):
112         if self._id_map is not None:
113             return self._id_map
114
115         renames = self.mapping.import_fileid_map(self.revmeta.revprops, self.revmeta.fileprops)
116         self._id_map = self.source.transform_fileid_map(self.source.uuid, 
117                               self.revnum, self.branch_path, self.revmeta.paths, renames, 
118                               self.mapping)
119
120         return self._id_map
121
122     def _get_revision(self, revid):
123         """Creates the revision object.
124
125         :param revid: Revision id of the revision to create.
126         """
127
128         # Commit SVN revision properties to a Revision object
129         rev = Revision(revision_id=revid, parent_ids=self.revmeta.get_parent_ids(self.mapping))
130
131         self.mapping.import_revision(self.revmeta.revprops, self.revmeta.fileprops, rev)
132
133         signature = self.revmeta.revprops.get(SVN_REVPROP_BZR_SIGNATURE)
134
135         return (rev, signature)
136
137     def open_root(self, base_revnum, baton):
138         if self.old_inventory.root is None:
139             # First time the root is set
140             old_file_id = None
141             file_id = self.mapping.generate_file_id(self.source.uuid, self.revnum, self.branch_path, u"")
142             self.dir_baserev[file_id] = []
143         else:
144             assert self.old_inventory.root.revision is not None
145             old_file_id = self.old_inventory.root.file_id
146             file_id = self._get_id_map().get("", old_file_id)
147             self.dir_baserev[file_id] = [self.old_inventory.root.revision]
148
149         if self.inventory.root is not None and \
150                 file_id == self.inventory.root.file_id:
151             ie = self.inventory.root
152         else:
153             ie = self.inventory.add_path("", 'directory', file_id)
154         ie.revision = self.revid
155         return (old_file_id, file_id)
156
157     def _get_existing_id(self, old_parent_id, new_parent_id, path):
158         assert isinstance(path, unicode)
159         assert isinstance(old_parent_id, str)
160         assert isinstance(new_parent_id, str)
161         ret = self._get_id_map().get(path)
162         if ret is not None:
163             return ret
164         return self.old_inventory[old_parent_id].children[urlutils.basename(path)].file_id
165
166     def _get_old_id(self, parent_id, old_path):
167         assert isinstance(old_path, unicode)
168         assert isinstance(parent_id, str)
169         return self.old_inventory[parent_id].children[urlutils.basename(old_path)].file_id
170
171     def _get_new_id(self, parent_id, new_path):
172         assert isinstance(new_path, unicode)
173         assert isinstance(parent_id, str)
174         ret = self._get_id_map().get(new_path)
175         if ret is not None:
176             return ret
177         return self.mapping.generate_file_id(self.source.uuid, self.revnum, 
178                                              self.branch_path, new_path)
179
180     def _rename(self, file_id, parent_id, path):
181         assert isinstance(path, unicode)
182         assert isinstance(parent_id, str)
183         # Only rename if not right yet
184         if (self.inventory[file_id].parent_id == parent_id and 
185             self.inventory[file_id].name == urlutils.basename(path)):
186             return
187         self.inventory.rename(file_id, parent_id, urlutils.basename(path))
188
189     def delete_entry(self, path, revnum, (old_parent_id, new_parent_id), pool):
190         assert isinstance(path, str)
191         path = path.decode("utf-8")
192         if path in self._premature_deletes:
193             # Delete recursively
194             self._premature_deletes.remove(path)
195             for p in self._premature_deletes.copy():
196                 if p.startswith("%s/" % path):
197                     self._premature_deletes.remove(p)
198         else:
199             self.inventory.remove_recursive_id(self._get_old_id(old_parent_id, path))
200
201     def close_directory(self, (old_id, new_id)):
202         self.inventory[new_id].revision = self.revid
203
204         # Only record root if the target repository supports it
205         self._store_directory(new_id, self.dir_baserev[new_id])
206
207     def add_directory(self, path, (old_parent_id, new_parent_id), copyfrom_path, copyfrom_revnum, 
208                       pool):
209         assert isinstance(path, str)
210         path = path.decode("utf-8")
211         check_filename(path)
212         file_id = self._get_new_id(new_parent_id, path)
213
214         self.dir_baserev[file_id] = []
215         if file_id in self.inventory:
216             # This directory was moved here from somewhere else, but the 
217             # other location hasn't been removed yet. 
218             if copyfrom_path is None:
219                 # This should ideally never happen!
220                 copyfrom_path = self.old_inventory.id2path(file_id)
221                 mutter('no copyfrom path set, assuming %r' % copyfrom_path)
222             assert copyfrom_path == self.old_inventory.id2path(file_id)
223             assert copyfrom_path not in self._premature_deletes
224             self._premature_deletes.add(copyfrom_path)
225             self._rename(file_id, new_parent_id, path)
226             ie = self.inventory[file_id]
227             old_file_id = file_id
228         else:
229             old_file_id = None
230             ie = self.inventory.add_path(path, 'directory', file_id)
231         ie.revision = self.revid
232
233         return (old_file_id, file_id)
234
235     def open_directory(self, path, (old_parent_id, new_parent_id), base_revnum, pool):
236         assert isinstance(path, str)
237         path = path.decode("utf-8")
238         assert base_revnum >= 0
239         base_file_id = self._get_old_id(old_parent_id, path)
240         base_revid = self.old_inventory[base_file_id].revision
241         file_id = self._get_existing_id(old_parent_id, new_parent_id, path)
242         if file_id == base_file_id:
243             self.dir_baserev[file_id] = [base_revid]
244             ie = self.inventory[file_id]
245         else:
246             # Replace if original was inside this branch
247             # change id of base_file_id to file_id
248             ie = self.inventory[base_file_id]
249             for name in ie.children:
250                 ie.children[name].parent_id = file_id
251             # FIXME: Don't touch inventory internals
252             del self.inventory._byid[base_file_id]
253             self.inventory._byid[file_id] = ie
254             ie.file_id = file_id
255             self.dir_baserev[file_id] = []
256         ie.revision = self.revid
257         return (base_file_id, file_id)
258
259     def change_dir_prop(self, (old_id, new_id), name, value, pool):
260         if new_id == self.inventory.root.file_id:
261             # Replay lazy_dict, since it may be more expensive
262             if type(self.revmeta.fileprops) != dict:
263                 self.revmeta.fileprops = {}
264             self.revmeta.fileprops[name] = value
265
266         if name in (properties.PROP_ENTRY_COMMITTED_DATE,
267                     properties.PROP_ENTRY_COMMITTED_REV,
268                     properties.PROP_ENTRY_LAST_AUTHOR,
269                     properties.PROP_ENTRY_LOCK_TOKEN,
270                     properties.PROP_ENTRY_UUID,
271                     properties.PROP_EXECUTABLE):
272             pass
273         elif (name.startswith(properties.PROP_WC_PREFIX)):
274             pass
275         elif name.startswith(properties.PROP_PREFIX):
276             mutter('unsupported dir property %r' % name)
277
278     def change_file_prop(self, id, name, value, pool):
279         if name == properties.PROP_EXECUTABLE: 
280             # You'd expect executable to match 
281             # properties.PROP_EXECUTABLE_VALUE, but that's not 
282             # how SVN behaves. It appears to consider the presence 
283             # of the property sufficient to mark it executable.
284             self.is_executable = (value != None)
285         elif (name == properties.PROP_SPECIAL):
286             self.is_symlink = (value != None)
287         elif name == properties.PROP_ENTRY_COMMITTED_REV:
288             self.last_file_rev = int(value)
289         elif name == properties.PROP_EXTERNALS:
290             mutter('svn:externals property on file!')
291         elif name in (properties.PROP_ENTRY_COMMITTED_DATE,
292                       properties.PROP_ENTRY_LAST_AUTHOR,
293                       properties.PROP_ENTRY_LOCK_TOKEN,
294                       properties.PROP_ENTRY_UUID,
295                       properties.PROP_MIME_TYPE):
296             pass
297         elif name.startswith(properties.PROP_WC_PREFIX):
298             pass
299         elif (name.startswith(properties.PROP_PREFIX) or
300               name.startswith(SVN_PROP_BZR_PREFIX)):
301             mutter('unsupported file property %r' % name)
302
303     def add_file(self, path, (old_parent_id, new_parent_id), copyfrom_path, copyfrom_revnum, baton):
304         assert isinstance(path, str)
305         path = path.decode("utf-8")
306         check_filename(path)
307         self.is_symlink = False
308         self.is_executable = None
309         self.file_data = ""
310         self.file_parents = []
311         self.file_stream = None
312         self.file_id = self._get_new_id(new_parent_id, path)
313         if self.file_id in self.inventory:
314             # This file was moved here from somewhere else, but the 
315             # other location hasn't been removed yet. 
316             if copyfrom_path is None:
317                 # This should ideally never happen
318                 copyfrom_path = self.old_inventory.id2path(self.file_id)
319                 mutter('no copyfrom path set, assuming %r' % copyfrom_path)
320             assert copyfrom_path == self.old_inventory.id2path(self.file_id)
321             assert copyfrom_path not in self._premature_deletes
322             self._premature_deletes.add(copyfrom_path)
323             # No need to rename if it's already in the right spot
324             self._rename(self.file_id, new_parent_id, path)
325         return path
326
327     def open_file(self, path, (old_parent_id, new_parent_id), base_revnum, pool):
328         assert isinstance(path, str)
329         path = path.decode("utf-8")
330         base_file_id = self._get_old_id(old_parent_id, path)
331         base_revid = self.old_inventory[base_file_id].revision
332         self.file_id = self._get_existing_id(old_parent_id, new_parent_id, path)
333         self.is_executable = None
334         self.is_symlink = (self.inventory[base_file_id].kind == 'symlink')
335         self.file_data = self._get_file_data(base_file_id, base_revid)
336         self.file_stream = None
337         if self.file_id == base_file_id:
338             self.file_parents = [base_revid]
339         else:
340             # Replace
341             del self.inventory[base_file_id]
342             self.file_parents = []
343         return path
344
345     def close_file(self, path, checksum):
346         assert isinstance(path, unicode)
347         if self.file_stream is not None:
348             self.file_stream.seek(0)
349             lines = osutils.split_lines(self.file_stream.read())
350         else:
351             # Data didn't change or file is new
352             lines = osutils.split_lines(self.file_data)
353
354         actual_checksum = md5_strings(lines)
355         assert checksum is None or checksum == actual_checksum
356
357         self._store_file(self.file_id, lines, self.file_parents)
358
359         assert self.is_symlink in (True, False)
360
361         if self.file_id in self.inventory:
362             del self.inventory[self.file_id]
363
364         if self.is_symlink:
365             ie = self.inventory.add_path(path, 'symlink', self.file_id)
366             ie.symlink_target = lines[0][len("link "):]
367             ie.text_sha1 = None
368             ie.text_size = None
369             ie.executable = False
370             ie.revision = self.revid
371         else:
372             ie = self.inventory.add_path(path, 'file', self.file_id)
373             ie.revision = self.revid
374             ie.kind = 'file'
375             ie.symlink_target = None
376             ie.text_sha1 = osutils.sha_strings(lines)
377             ie.text_size = sum(map(len, lines))
378             assert ie.text_size is not None
379             if self.is_executable is not None:
380                 ie.executable = self.is_executable
381
382
383         self.file_stream = None
384
385     def close_edit(self):
386         assert len(self._premature_deletes) == 0
387         self._finish_commit()
388         self.pool.destroy()
389
390     def apply_textdelta(self, file_id, base_checksum):
391         actual_checksum = md5.new(self.file_data).hexdigest(),
392         assert (base_checksum is None or base_checksum == actual_checksum,
393             "base checksum mismatch: %r != %r" % (base_checksum, 
394                                                   actual_checksum))
395         self.file_stream = StringIO()
396         return apply_txdelta_handler(StringIO(self.file_data), 
397                                      self.file_stream, self.pool)
398
399     def _store_file(self, file_id, lines, parents):
400         raise NotImplementedError(self._store_file)
401
402     def _store_directory(self, file_id, parents):
403         raise NotImplementedError(self._store_directory)
404
405     def _get_file_data(self, file_id, revid):
406         raise NotImplementedError(self._get_file_data)
407
408     def _finish_commit(self):
409         raise NotImplementedError(self._finish_commit)
410
411     def abort_edit(self):
412         pass
413
414     def _start_revision(self):
415         pass
416
417
class WeaveRevisionBuildEditor(RevisionBuildEditor):
    """Subversion commit editor that can write to a weave-based repository.
    """
    def __init__(self, source, target):
        RevisionBuildEditor.__init__(self, source, target)
        self.weave_store = target.weave_store
        # Initialize here so abort_edit() is safe even when it is called
        # before _start_revision() ever ran (previously this raised
        # AttributeError).
        self._write_group_active = False

    def _start_revision(self):
        self.target.start_write_group()
        # Only mark the group active once it has actually been opened, so
        # abort_edit() won't try to abort a group that was never started.
        self._write_group_active = True

    def _store_directory(self, file_id, parents):
        file_weave = self.weave_store.get_weave_or_empty(file_id, self.transact)
        if not file_weave.has_version(self.revid):
            file_weave.add_lines(self.revid, parents, [])

    def _get_file_data(self, file_id, revid):
        file_weave = self.weave_store.get_weave_or_empty(file_id, self.transact)
        return file_weave.get_text(revid)

    def _store_file(self, file_id, lines, parents):
        file_weave = self.weave_store.get_weave_or_empty(file_id, self.transact)
        if not file_weave.has_version(self.revid):
            file_weave.add_lines(self.revid, parents, lines)

    def _finish_commit(self):
        (rev, signature) = self._get_revision(self.revid)
        self.inventory.revision_id = self.revid
        # Escaping the commit message is really the task of the serialiser
        rev.message = _escape_commit_message(rev.message)
        rev.inventory_sha1 = None
        self.target.add_revision(self.revid, rev, self.inventory)
        if signature is not None:
            self.target.add_signature_text(self.revid, signature)
        self.target.commit_write_group()
        self._write_group_active = False

    def abort_edit(self):
        if self._write_group_active:
            self.target.abort_write_group()
            self._write_group_active = False
459
460
class PackRevisionBuildEditor(WeaveRevisionBuildEditor):
    """Revision Build Editor for Subversion that is specific for the packs API.
    """
    def __init__(self, source, target):
        WeaveRevisionBuildEditor.__init__(self, source, target)

    def _add_text_to_weave(self, file_id, lines, parents):
        # Delegate text storage directly to the pack collection.
        return self.target._packs._add_text_to_weave(
            file_id, self.revid, lines, parents,
            nostore_sha=None, random_revid=False)

    def _store_directory(self, file_id, parents):
        # Directories are stored as empty texts.
        self._add_text_to_weave(file_id, [], parents)

    def _store_file(self, file_id, lines, parents):
        self._add_text_to_weave(file_id, lines, parents)
477
478
class CommitBuilderRevisionBuildEditor(RevisionBuildEditor):
    """Revision Build Editor for Subversion that uses the CommitBuilder API.
    """
    def __init__(self, source, target):
        RevisionBuildEditor.__init__(self, source, target)
        # Not implemented yet; constructing one always fails.
        raise NotImplementedError(self)
485
486
def get_revision_build_editor(repository):
    """Obtain a RevisionBuildEditor for a particular target repository.

    :param repository: Repository to obtain the buildeditor for.
    :return: Class object of class descending from RevisionBuildEditor
    """
    # Pack-based repositories are recognized by their _packs attribute;
    # anything else is assumed to be weave-based.
    if not hasattr(repository, '_packs'):
        return WeaveRevisionBuildEditor
    return PackRevisionBuildEditor
496
497
class InterFromSvnRepository(InterRepository):
    """Svn to any repository actions."""

    _matching_repo_format = SvnRepositoryFormat()

    _supports_branches = True

    @staticmethod
    def _get_repo_format_to_test():
        return None

    def _find_all(self, mapping, pb=None):
        """Find all revisions from the source repository that are not
        yet in the target repository.

        :param mapping: Mapping used to determine revision ids.
        :param pb: Optional progress bar.
        :return: List of (revid, lhs_parent_revid, revmeta) tuples in
            topological order.
        """
        meta_map = {}
        graph = self.source.get_graph()
        available_revs = set()
        for revmeta in self.source.iter_all_changes(pb=pb):
            revid = revmeta.get_revision_id(mapping)
            available_revs.add(revid)
            meta_map[revid] = revmeta
        missing = available_revs.difference(self.target.has_revisions(available_revs))
        needed = list(graph.iter_topo_order(missing))
        parents = graph.get_parent_map(needed)
        return [(revid, parents[revid][0], meta_map[revid]) for revid in needed]

    def _find_branches(self, branches, find_ghosts=False, fetch_rhs_ancestry=False, pb=None):
        """Find the revisions missing for a set of branch tips.

        :param branches: List of branch tip revision ids.
        :param find_ghosts: Find ghosts.
        :param fetch_rhs_ancestry: Fetch right hand side ancestors.
        :param pb: Optional progress bar.
        :return: List of unique (revid, lhs_parent_revid, revmeta) tuples.
        """
        set_needed = set()
        ret_needed = list()
        for i, revid in enumerate(branches):
            if pb:
                pb.update("determining revisions to fetch", i, len(branches))
            # Create the nested progress bar *before* entering the try
            # block: if creation fails, the finally clause would otherwise
            # reference an unbound name.
            nestedpb = ui.ui_factory.nested_progress_bar()
            try:
                # Pass fetch_rhs_ancestry through; it used to be hardcoded
                # to False here, silently ignoring the argument.
                for rev in self._find_until(revid, find_ghosts=find_ghosts,
                                            fetch_rhs_ancestry=fetch_rhs_ancestry,
                                            pb=nestedpb):
                    if rev[0] not in set_needed:
                        ret_needed.append(rev)
                        set_needed.add(rev[0])
            finally:
                nestedpb.finished()
        return ret_needed

    def _find_until(self, revision_id, find_ghosts=False, fetch_rhs_ancestry=False, pb=None):
        """Find all missing revisions until revision_id

        :param revision_id: Stop revision
        :param find_ghosts: Find ghosts
        :param fetch_rhs_ancestry: Fetch right hand side ancestors
        :param pb: Optional progress bar.
        :return: List of (revid, lhs_parent_revid, revmeta) tuples,
            oldest first.
        """
        extra = set()
        needed = []
        revs = []
        meta_map = {}
        # Maps a revision id to its left-hand-side parent revision id.
        lhs_parent = {}
        def check_revid(revision_id):
            prev = None
            (branch_path, revnum, mapping) = self.source.lookup_revision_id(revision_id)
            for revmeta in self.source.iter_reverse_branch_changes(branch_path, revnum, mapping):
                if pb:
                    pb.update("determining revisions to fetch", revnum-revmeta.revnum, revnum)
                revid = revmeta.get_revision_id(mapping)
                # Walking newest-to-oldest: revid is the lhs parent of the
                # previously seen revision.
                lhs_parent[prev] = revid
                meta_map[revid] = revmeta
                if fetch_rhs_ancestry:
                    extra.update(revmeta.get_rhs_parents(mapping))
                if not self.target.has_revision(revid):
                    revs.append(revid)
                elif not find_ghosts:
                    # Everything below this point is already present.
                    prev = None
                    break
                prev = revid
            lhs_parent[prev] = NULL_REVISION

        check_revid(revision_id)

        # Also walk any right-hand-side parents that were discovered.
        for revid in extra:
            if revid not in revs:
                check_revid(revid)

        needed = [(revid, lhs_parent[revid], meta_map[revid]) for revid in reversed(revs)]

        return needed

    def copy_content(self, revision_id=None, pb=None):
        """See InterRepository.copy_content."""
        self.fetch(revision_id, pb, find_ghosts=False)

    def _fetch_replay(self, revids, pb=None):
        """Copy a set of related revisions using svn.ra.replay.

        :param revids: Revision ids to copy.
        :param pb: Optional progress bar
        """
        raise NotImplementedError(self._copy_revisions_replay)

    def _fetch_switch(self, repos_root, revids, pb=None):
        """Copy a set of related revisions using svn.ra.switch.

        :param repos_root: Root URL of the Subversion repository.
        :param revids: List of (revid, parent_revid, revmeta) tuples of
                       revisions to copy, newest first.
        :param pb: Optional progress bar.
        """
        prev_revid = None
        if pb is None:
            pb = ui.ui_factory.nested_progress_bar()
            nested_pb = pb
        else:
            nested_pb = None
        num = 0
        prev_inv = None

        self.target.lock_write()
        revbuildklass = get_revision_build_editor(self.target)
        editor = revbuildklass(self.source, self.target)

        try:
            for (revid, parent_revid, revmeta) in revids:
                pb.update('copying revision', num, len(revids))

                assert parent_revid is not None

                # Reuse the previously built inventory when the parent is
                # the revision fetched in the previous iteration.
                if parent_revid == NULL_REVISION:
                    parent_inv = Inventory(root_id=None)
                elif prev_revid != parent_revid:
                    parent_inv = self.target.get_inventory(parent_revid)
                else:
                    parent_inv = prev_inv

                editor.start_revision(revid, parent_inv, revmeta)

                try:
                    conn = None
                    try:
                        if parent_revid == NULL_REVISION:
                            # No parent: do a full checkout of the branch.
                            branch_url = urlutils.join(repos_root,
                                                       editor.branch_path)

                            conn = self.source.transport.connections.get(branch_url)
                            reporter = conn.do_update(editor.revnum, True,
                                                           editor)

                            try:
                                # Report status of existing paths
                                reporter.set_path("", editor.revnum, True, None)
                            except:
                                reporter.abort_report()
                                raise
                        else:
                            (parent_branch, parent_revnum, mapping) = \
                                    self.source.lookup_revision_id(parent_revid)
                            conn = self.source.transport.connections.get(urlutils.join(repos_root, parent_branch))

                            # Switch if the parent lives on another branch
                            # path, otherwise a plain update suffices.
                            if parent_branch != editor.branch_path:
                                reporter = conn.do_switch(editor.revnum, True,
                                    urlutils.join(repos_root, editor.branch_path),
                                    editor)
                            else:
                                reporter = conn.do_update(editor.revnum, True, editor)

                            try:
                                # Report status of existing paths
                                reporter.set_path("", parent_revnum, False, None)
                            except:
                                reporter.abort_report()
                                raise

                        reporter.finish_report()
                    finally:
                        if conn is not None:
                            self.source.transport.add_connection(conn)
                except:
                    editor.abort_edit()
                    raise

                prev_inv = editor.inventory
                prev_revid = revid
                num += 1
        finally:
            self.target.unlock()
            if nested_pb is not None:
                nested_pb.finished()

    def fetch(self, revision_id=None, pb=None, find_ghosts=False,
              branches=None, fetch_rhs_ancestry=False):
        """Fetch revisions.

        :param revision_id: Optional revision id to fetch up to.
        :param pb: Optional progress bar.
        :param find_ghosts: Find ghosts.
        :param branches: Optional list of branch tips to fetch.
        :param fetch_rhs_ancestry: Fetch right hand side ancestors.
        """
        if revision_id == NULL_REVISION:
            return
        # Dictionary with paths as keys, revnums as values

        if pb:
            pb.update("determining revisions to fetch", 0, 2)

        # Loop over all the revnums until revision_id
        # (or youngest_revnum) and call self.target.add_revision()
        # or self.target.add_inventory() each time
        self.target.lock_read()
        try:
            if branches is not None:
                needed = self._find_branches(branches, find_ghosts, fetch_rhs_ancestry, pb=pb)
            elif revision_id is None:
                needed = self._find_all(self.source.get_mapping(), pb=pb)
            else:
                needed = self._find_until(revision_id, find_ghosts, fetch_rhs_ancestry, pb=pb)
        finally:
            self.target.unlock()

        if len(needed) == 0:
            # Nothing to fetch
            return

        self._fetch_switch(self.source.transport.get_svn_repos_root(), needed, pb)

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with SvnRepository."""
        # FIXME: Also check target uses VersionedFile
        return isinstance(source, SvnRepository) and target.supports_rich_root()
720