# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
+# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
from cStringIO import StringIO
import md5
-from svn.core import Pool
-import svn.core
+import constants
from bzrlib.plugins.svn.errors import InvalidFileName
-from mapping import (SVN_PROP_BZR_ANCESTRY, SVN_PROP_BZR_MERGE,
+from logwalker import lazy_dict
+from bzrlib.plugins.svn.mapping import (SVN_PROP_BZR_MERGE,
SVN_PROP_BZR_PREFIX, SVN_PROP_BZR_REVISION_INFO,
- SVN_PROP_BZR_BRANCHING_SCHEME, SVN_PROP_BZR_REVISION_ID,
+ SVN_PROP_BZR_REVISION_ID,
SVN_PROP_BZR_FILEIDS, SVN_REVPROP_BZR_SIGNATURE,
parse_merge_property,
parse_revision_metadata)
-from repository import (SvnRepository, SvnRepositoryFormat)
+from repository import SvnRepository, SvnRepositoryFormat
from svk import SVN_PROP_SVK_MERGE
-from tree import (apply_txdelta_handler, parse_externals_description,
- inventory_add_external)
+from delta import apply_txdelta_handler
+from tree import (parse_externals_description, inventory_add_external)
def _escape_commit_message(message):
raise InvalidFileName(path)
-class RevisionBuildEditor(svn.delta.Editor):
+class RevisionBuildEditor:
"""Implementation of the Subversion commit editor interface that builds a
Bazaar revision.
"""
self.source = source
self.transact = target.get_transaction()
- def start_revision(self, revid, prev_inventory):
+ def set_target_revision(self, target_revision):
+ pass
+
+ def start_revision(self, revid, prev_inventory, revmeta):
self.revid = revid
(self.branch_path, self.revnum, self.mapping) = self.source.lookup_revision_id(revid)
- self.svn_revprops = self.source._log._get_transport().revprop_list(self.revnum)
- changes = self.source._log.get_revision_paths(self.revnum, self.branch_path)
- renames = self.source.revision_fileid_renames(self.branch_path, self.revnum, self.mapping,
- revprops=self.svn_revprops)
- self.id_map = self.source.transform_fileid_map(self.source.uuid,
- self.revnum, self.branch_path, changes, renames,
- self.mapping)
+ self.revmeta = revmeta
+ self._id_map = None
self.dir_baserev = {}
self._revinfo = None
- self._bzr_merges = ()
- self._svk_merges = []
self._premature_deletes = set()
- self.pool = Pool()
self.old_inventory = prev_inventory
self.inventory = prev_inventory.copy()
- self._branch_fileprops = {}
self._start_revision()
- def _get_parent_ids(self):
- return self.source.revision_parents(self.revid, self._branch_fileprops)
+ def _get_id_map(self):
+ if self._id_map is not None:
+ return self._id_map
+
+ renames = self.mapping.import_fileid_map(self.revmeta.revprops, self.revmeta.fileprops)
+ self._id_map = self.source.transform_fileid_map(self.source.uuid,
+ self.revnum, self.branch_path, self.revmeta.paths, renames,
+ self.mapping)
+
+ return self._id_map
def _get_revision(self, revid):
"""Creates the revision object.
"""
# Commit SVN revision properties to a Revision object
- rev = Revision(revision_id=revid, parent_ids=self._get_parent_ids())
+ rev = Revision(revision_id=revid, parent_ids=self.revmeta.get_parent_ids(self.mapping))
- self.mapping.import_revision(self.svn_revprops, self._branch_fileprops, rev)
+ self.mapping.import_revision(self.revmeta.revprops, self.revmeta.fileprops, rev)
- signature = self.svn_revprops.get(SVN_REVPROP_BZR_SIGNATURE)
+ signature = self.revmeta.revprops.get(SVN_REVPROP_BZR_SIGNATURE)
return (rev, signature)
- def open_root(self, base_revnum, baton):
+ def open_root(self, base_revnum):
if self.old_inventory.root is None:
# First time the root is set
old_file_id = None
file_id = self.mapping.generate_file_id(self.source.uuid, self.revnum, self.branch_path, u"")
- self.dir_baserev[file_id] = []
+ file_parents = []
else:
assert self.old_inventory.root.revision is not None
old_file_id = self.old_inventory.root.file_id
- if self.id_map.has_key(""):
- file_id = self.id_map[""]
- else:
- file_id = old_file_id
+        file_id = self._get_id_map().get("", old_file_id)
        self.dir_baserev[file_id] = [self.old_inventory.root.revision]
+        file_parents = [self.old_inventory.root.revision]
if self.inventory.root is not None and \
else:
ie = self.inventory.add_path("", 'directory', file_id)
ie.revision = self.revid
- return (old_file_id, file_id)
+ return DirectoryBuildEditor(self, old_file_id, file_id, file_parents)
+
+ def close(self):
+ pass
+
+ def _store_directory(self, file_id, parents):
+ raise NotImplementedError(self._store_directory)
+
+ def _get_file_data(self, file_id, revid):
+ raise NotImplementedError(self._get_file_data)
+
+ def _finish_commit(self):
+ raise NotImplementedError(self._finish_commit)
+
+ def abort(self):
+ pass
+
+ def _start_revision(self):
+ pass
+
+ def _store_file(self, file_id, lines, parents):
+ raise NotImplementedError(self._store_file)
def _get_existing_id(self, old_parent_id, new_parent_id, path):
assert isinstance(path, unicode)
assert isinstance(old_parent_id, str)
assert isinstance(new_parent_id, str)
- if self.id_map.has_key(path):
- return self.id_map[path]
+ ret = self._get_id_map().get(path)
+ if ret is not None:
+ return ret
return self.old_inventory[old_parent_id].children[urlutils.basename(path)].file_id
def _get_old_id(self, parent_id, old_path):
def _get_new_id(self, parent_id, new_path):
assert isinstance(new_path, unicode)
assert isinstance(parent_id, str)
- if self.id_map.has_key(new_path):
- return self.id_map[new_path]
+ ret = self._get_id_map().get(new_path)
+ if ret is not None:
+ return ret
return self.mapping.generate_file_id(self.source.uuid, self.revnum, self.branch_path, new_path)
def _rename(self, file_id, parent_id, path):
return
self.inventory.rename(file_id, parent_id, urlutils.basename(path))
- def delete_entry(self, path, revnum, (old_parent_id, new_parent_id), pool):
- assert isinstance(path, str)
- path = path.decode("utf-8")
- if path in self._premature_deletes:
- # Delete recursively
- self._premature_deletes.remove(path)
- for p in self._premature_deletes.copy():
- if p.startswith("%s/" % path):
- self._premature_deletes.remove(p)
- else:
- self.inventory.remove_recursive_id(self._get_old_id(new_parent_id, path))
+class DirectoryBuildEditor:
+ def __init__(self, editor, old_id, new_id, parent_revids=[]):
+ self.editor = editor
+ self.old_id = old_id
+ self.new_id = new_id
+ self.parent_revids = parent_revids
- def close_directory(self, (old_id, new_id)):
- self.inventory[new_id].revision = self.revid
+ def close(self):
+ self.editor.inventory[self.new_id].revision = self.editor.revid
+ self.editor._store_directory(self.new_id, self.parent_revids)
- # Only record root if the target repository supports it
- self._store_directory(new_id, self.dir_baserev[new_id])
+ if self.new_id == self.editor.inventory.root.file_id:
+ assert len(self.editor._premature_deletes) == 0
+ self.editor._finish_commit()
- def add_directory(self, path, (old_parent_id, new_parent_id), copyfrom_path, copyfrom_revnum,
- pool):
+ def add_directory(self, path, copyfrom_path=None, copyfrom_revnum=-1):
assert isinstance(path, str)
path = path.decode("utf-8")
check_filename(path)
- file_id = self._get_new_id(new_parent_id, path)
+ file_id = self.editor._get_new_id(self.new_id, path)
- self.dir_baserev[file_id] = []
- if file_id in self.inventory:
+ if file_id in self.editor.inventory:
# This directory was moved here from somewhere else, but the
# other location hasn't been removed yet.
if copyfrom_path is None:
# This should ideally never happen!
- copyfrom_path = self.old_inventory.id2path(file_id)
+ copyfrom_path = self.editor.old_inventory.id2path(file_id)
mutter('no copyfrom path set, assuming %r' % copyfrom_path)
- assert copyfrom_path == self.old_inventory.id2path(file_id)
- assert copyfrom_path not in self._premature_deletes
- self._premature_deletes.add(copyfrom_path)
- self._rename(file_id, new_parent_id, path)
- ie = self.inventory[file_id]
+ assert copyfrom_path == self.editor.old_inventory.id2path(file_id)
+ assert copyfrom_path not in self.editor._premature_deletes
+ self.editor._premature_deletes.add(copyfrom_path)
+ self.editor._rename(file_id, self.new_id, path)
+ ie = self.editor.inventory[file_id]
old_file_id = file_id
else:
old_file_id = None
- ie = self.inventory.add_path(path, 'directory', file_id)
- ie.revision = self.revid
+ ie = self.editor.inventory.add_path(path, 'directory', file_id)
+ ie.revision = self.editor.revid
- return (old_file_id, file_id)
+ return DirectoryBuildEditor(self.editor, old_file_id, file_id)
- def open_directory(self, path, (old_parent_id, new_parent_id), base_revnum, pool):
+ def open_directory(self, path, base_revnum):
assert isinstance(path, str)
path = path.decode("utf-8")
- assert base_revnum >= 0
- base_file_id = self._get_old_id(old_parent_id, path)
- base_revid = self.old_inventory[base_file_id].revision
- file_id = self._get_existing_id(old_parent_id, new_parent_id, path)
+ assert isinstance(base_revnum, int)
+ base_file_id = self.editor._get_old_id(self.old_id, path)
+ base_revid = self.editor.old_inventory[base_file_id].revision
+ file_id = self.editor._get_existing_id(self.old_id, self.new_id, path)
if file_id == base_file_id:
- self.dir_baserev[file_id] = [base_revid]
- ie = self.inventory[file_id]
+ file_parents = [base_revid]
+ ie = self.editor.inventory[file_id]
else:
# Replace if original was inside this branch
# change id of base_file_id to file_id
- ie = self.inventory[base_file_id]
+ ie = self.editor.inventory[base_file_id]
for name in ie.children:
ie.children[name].parent_id = file_id
# FIXME: Don't touch inventory internals
- del self.inventory._byid[base_file_id]
- self.inventory._byid[file_id] = ie
+ del self.editor.inventory._byid[base_file_id]
+ self.editor.inventory._byid[file_id] = ie
ie.file_id = file_id
- self.dir_baserev[file_id] = []
- ie.revision = self.revid
- return (base_file_id, file_id)
-
- def change_dir_prop(self, (old_id, new_id), name, value, pool):
- if new_id == self.inventory.root.file_id:
- self._branch_fileprops[name] = value
-
- if name == SVN_PROP_BZR_BRANCHING_SCHEME:
- if new_id != self.inventory.root.file_id:
- mutter('rogue %r on non-root directory' % name)
- return
- elif name == SVN_PROP_BZR_ANCESTRY+str(self.mapping.scheme):
- if new_id != self.inventory.root.file_id:
- mutter('rogue %r on non-root directory' % name)
- return
-
- self._bzr_merges = parse_merge_property(value.splitlines()[-1])
- elif name == SVN_PROP_SVK_MERGE:
- self._svk_merges = None # Force Repository.revision_parents() to look it up
- elif name == SVN_PROP_BZR_REVISION_INFO:
- if new_id != self.inventory.root.file_id:
- mutter('rogue %r on non-root directory' % SVN_PROP_BZR_REVISION_INFO)
- return
-
- elif name in (svn.core.SVN_PROP_ENTRY_COMMITTED_DATE,
- svn.core.SVN_PROP_ENTRY_COMMITTED_REV,
- svn.core.SVN_PROP_ENTRY_LAST_AUTHOR,
- svn.core.SVN_PROP_ENTRY_LOCK_TOKEN,
- svn.core.SVN_PROP_ENTRY_UUID,
- svn.core.SVN_PROP_EXECUTABLE,
- SVN_PROP_BZR_MERGE, SVN_PROP_BZR_FILEIDS):
+ file_parents = []
+ ie.revision = self.editor.revid
+ return DirectoryBuildEditor(self.editor, base_file_id, file_id,
+ file_parents)
+
+ def change_prop(self, name, value):
+ if self.new_id == self.editor.inventory.root.file_id:
+            # Unlazy revmeta.fileprops: re-evaluating a lazy_dict may be expensive
+            if type(self.editor.revmeta.fileprops) != dict:
+                self.editor.revmeta.fileprops = {}
+            self.editor.revmeta.fileprops[name] = value
+
+ if name in (constants.PROP_ENTRY_COMMITTED_DATE,
+ constants.PROP_ENTRY_COMMITTED_REV,
+ constants.PROP_ENTRY_LAST_AUTHOR,
+ constants.PROP_ENTRY_LOCK_TOKEN,
+ constants.PROP_ENTRY_UUID,
+ constants.PROP_EXECUTABLE):
pass
- elif (name.startswith(SVN_PROP_BZR_ANCESTRY) or
- name.startswith(SVN_PROP_BZR_REVISION_ID) or
- name.startswith(svn.core.SVN_PROP_WC_PREFIX)):
+ elif (name.startswith(constants.PROP_WC_PREFIX)):
pass
- elif (name.startswith(svn.core.SVN_PROP_PREFIX) or
- name.startswith(SVN_PROP_BZR_PREFIX)):
+ elif name.startswith(constants.PROP_PREFIX):
mutter('unsupported dir property %r' % name)
- def change_file_prop(self, id, name, value, pool):
- if name == svn.core.SVN_PROP_EXECUTABLE:
- # You'd expect executable to match
- # svn.core.SVN_PROP_EXECUTABLE_VALUE, but that's not
- # how SVN behaves. It appears to consider the presence
- # of the property sufficient to mark it executable.
- self.is_executable = (value != None)
- elif (name == svn.core.SVN_PROP_SPECIAL):
- self.is_symlink = (value != None)
- elif name == svn.core.SVN_PROP_ENTRY_COMMITTED_REV:
- self.last_file_rev = int(value)
- elif name == svn.core.SVN_PROP_EXTERNALS:
- mutter('svn:externals property on file!')
- elif name in (svn.core.SVN_PROP_ENTRY_COMMITTED_DATE,
- svn.core.SVN_PROP_ENTRY_LAST_AUTHOR,
- svn.core.SVN_PROP_ENTRY_LOCK_TOKEN,
- svn.core.SVN_PROP_ENTRY_UUID,
- svn.core.SVN_PROP_MIME_TYPE):
- pass
- elif name.startswith(svn.core.SVN_PROP_WC_PREFIX):
- pass
- elif (name.startswith(svn.core.SVN_PROP_PREFIX) or
- name.startswith(SVN_PROP_BZR_PREFIX)):
- mutter('unsupported file property %r' % name)
-
- def add_file(self, path, (old_parent_id, new_parent_id), copyfrom_path, copyfrom_revnum, baton):
+ def add_file(self, path, copyfrom_path=None, copyfrom_revnum=-1):
assert isinstance(path, str)
path = path.decode("utf-8")
check_filename(path)
- self.is_symlink = False
- self.is_executable = None
- self.file_data = ""
- self.file_parents = []
- self.file_stream = None
- self.file_id = self._get_new_id(new_parent_id, path)
- if self.file_id in self.inventory:
+ file_id = self.editor._get_new_id(self.new_id, path)
+ if file_id in self.editor.inventory:
# This file was moved here from somewhere else, but the
# other location hasn't been removed yet.
if copyfrom_path is None:
# This should ideally never happen
- copyfrom_path = self.old_inventory.id2path(self.file_id)
+ copyfrom_path = self.editor.old_inventory.id2path(file_id)
mutter('no copyfrom path set, assuming %r' % copyfrom_path)
- assert copyfrom_path == self.old_inventory.id2path(self.file_id)
- assert copyfrom_path not in self._premature_deletes
- self._premature_deletes.add(copyfrom_path)
+ assert copyfrom_path == self.editor.old_inventory.id2path(file_id)
+ assert copyfrom_path not in self.editor._premature_deletes
+ self.editor._premature_deletes.add(copyfrom_path)
# No need to rename if it's already in the right spot
- self._rename(self.file_id, new_parent_id, path)
- return path
+ self.editor._rename(file_id, self.new_id, path)
+ return FileBuildEditor(self.editor, path, file_id)
- def open_file(self, path, (old_parent_id, new_parent_id), base_revnum, pool):
+ def open_file(self, path, base_revnum):
assert isinstance(path, str)
path = path.decode("utf-8")
- base_file_id = self._get_old_id(old_parent_id, path)
- base_revid = self.old_inventory[base_file_id].revision
- self.file_id = self._get_existing_id(old_parent_id, new_parent_id, path)
- self.is_executable = None
- self.is_symlink = (self.inventory[base_file_id].kind == 'symlink')
- self.file_data = self._get_file_data(base_file_id, base_revid)
- self.file_stream = None
- if self.file_id == base_file_id:
- self.file_parents = [base_revid]
+ base_file_id = self.editor._get_old_id(self.old_id, path)
+ base_revid = self.editor.old_inventory[base_file_id].revision
+ file_id = self.editor._get_existing_id(self.old_id, self.new_id, path)
+ is_symlink = (self.editor.inventory[base_file_id].kind == 'symlink')
+ file_data = self.editor._get_file_data(base_file_id, base_revid)
+ if file_id == base_file_id:
+ file_parents = [base_revid]
else:
# Replace
- del self.inventory[base_file_id]
- self.file_parents = []
- return path
+ del self.editor.inventory[base_file_id]
+ file_parents = []
+ return FileBuildEditor(self.editor, path, file_id,
+ file_parents, file_data, is_symlink=is_symlink)
- def close_file(self, path, checksum):
- assert isinstance(path, unicode)
+ def delete_entry(self, path, revnum):
+ assert isinstance(path, str)
+ path = path.decode("utf-8")
+ if path in self.editor._premature_deletes:
+ # Delete recursively
+ self.editor._premature_deletes.remove(path)
+ for p in self.editor._premature_deletes.copy():
+ if p.startswith("%s/" % path):
+ self.editor._premature_deletes.remove(p)
+ else:
+ self.editor.inventory.remove_recursive_id(self.editor._get_old_id(self.new_id, path))
+
+class FileBuildEditor:
+ def __init__(self, editor, path, file_id, file_parents=[], data="",
+ is_symlink=False):
+ self.path = path
+ self.editor = editor
+ self.file_id = file_id
+ self.file_data = data
+ self.is_symlink = is_symlink
+ self.file_parents = file_parents
+ self.is_executable = None
+ self.file_stream = None
+
+ def apply_textdelta(self, base_checksum=None):
+ actual_checksum = md5.new(self.file_data).hexdigest()
+        assert base_checksum is None or base_checksum == actual_checksum, \
+            "base checksum mismatch: %r != %r" % (base_checksum,
+                actual_checksum)
+ self.file_stream = StringIO()
+ return apply_txdelta_handler(self.file_data, self.file_stream)
+
+ def change_prop(self, name, value):
+ if name == constants.PROP_EXECUTABLE:
+ # You'd expect executable to match
+ # constants.PROP_EXECUTABLE_VALUE, but that's not
+ # how SVN behaves. It appears to consider the presence
+ # of the property sufficient to mark it executable.
+ self.is_executable = (value != None)
+ elif (name == constants.PROP_SPECIAL):
+ self.is_symlink = (value != None)
+ elif name == constants.PROP_ENTRY_COMMITTED_REV:
+ self.last_file_rev = int(value)
+ elif name in (constants.PROP_ENTRY_COMMITTED_DATE,
+ constants.PROP_ENTRY_LAST_AUTHOR,
+ constants.PROP_ENTRY_LOCK_TOKEN,
+ constants.PROP_ENTRY_UUID,
+ constants.PROP_MIME_TYPE):
+ pass
+ elif name.startswith(constants.PROP_WC_PREFIX):
+ pass
+ elif name == constants.PROP_EXTERNALS:
+ mutter('svn:externals property on file!')
+ elif (name.startswith(constants.PROP_PREFIX) or
+ name.startswith(SVN_PROP_BZR_PREFIX)):
+ mutter('unsupported file property %r' % name)
+
+ def close(self, checksum=None):
+ assert isinstance(self.path, unicode)
if self.file_stream is not None:
self.file_stream.seek(0)
lines = osutils.split_lines(self.file_stream.read())
actual_checksum = md5_strings(lines)
assert checksum is None or checksum == actual_checksum
- self._store_file(self.file_id, lines, self.file_parents)
+ self.editor._store_file(self.file_id, lines, self.file_parents)
- if self.file_id in self.inventory:
- ie = self.inventory[self.file_id]
+ if self.file_id in self.editor.inventory:
+ ie = self.editor.inventory[self.file_id]
elif self.is_symlink:
- ie = self.inventory.add_path(path, 'symlink', self.file_id)
+ ie = self.editor.inventory.add_path(self.path, 'symlink', self.file_id)
else:
- ie = self.inventory.add_path(path, 'file', self.file_id)
- ie.revision = self.revid
+ ie = self.editor.inventory.add_path(self.path, 'file', self.file_id)
+ ie.revision = self.editor.revid
if self.is_symlink:
ie.kind = 'symlink'
if self.is_executable is not None:
ie.executable = self.is_executable
- self.file_stream = None
-
- def close_edit(self):
- assert len(self._premature_deletes) == 0
- self._finish_commit()
- self.pool.destroy()
-
- def apply_textdelta(self, file_id, base_checksum):
- actual_checksum = md5.new(self.file_data).hexdigest(),
- assert (base_checksum is None or base_checksum == actual_checksum,
- "base checksum mismatch: %r != %r" % (base_checksum,
- actual_checksum))
- self.file_stream = StringIO()
- return apply_txdelta_handler(StringIO(self.file_data),
- self.file_stream, self.pool)
-
- def _store_file(self, file_id, lines, parents):
- raise NotImplementedError(self._store_file)
-
- def _store_directory(self, file_id, parents):
- raise NotImplementedError(self._store_directory)
-
- def _get_file_data(self, file_id, revid):
- raise NotImplementedError(self._get_file_data)
-
- def _finish_commit(self):
- raise NotImplementedError(self._finish_commit)
-
- def abort_edit(self):
- pass
-
- def _start_revision(self):
- pass
-
class WeaveRevisionBuildEditor(RevisionBuildEditor):
"""Subversion commit editor that can write to a weave-based repository.
self.inventory.revision_id = self.revid
# Escaping the commit message is really the task of the serialiser
rev.message = _escape_commit_message(rev.message)
- rev.inventory_sha1 = osutils.sha_string(
- self.target.serialise_inventory(self.inventory))
+ rev.inventory_sha1 = None
self.target.add_revision(self.revid, rev, self.inventory)
if signature is not None:
self.target.add_signature_text(self.revid, signature)
self.target.commit_write_group()
self._write_group_active = False
- def abort_edit(self):
+ def abort(self):
if self._write_group_active:
self.target.abort_write_group()
self._write_group_active = False
def _get_repo_format_to_test():
return None
- def _find_all(self):
+ def _find_all(self, mapping, pb=None):
"""Find all revisions from the source repository that are not
yet in the target repository.
"""
parents = {}
- needed = filter(lambda x: not self.target.has_revision(x),
- self.source.all_revision_ids())
- for revid in needed:
- (branch, revnum, mapping) = self.source.lookup_revision_id(revid)
- parents[revid] = self.source.lhs_revision_parent(branch, revnum, mapping)
- needed.reverse()
- return (needed, parents)
-
- def _find_branches(self, branches, find_ghosts=False):
+ meta_map = {}
+ graph = self.source.get_graph()
+ available_revs = set()
+ for revmeta in self.source.iter_all_changes(pb=pb):
+ revid = revmeta.get_revision_id(mapping)
+ available_revs.add(revid)
+ meta_map[revid] = revmeta
+ missing = available_revs.difference(self.target.has_revisions(available_revs))
+ needed = list(graph.iter_topo_order(missing))
+ parents = graph.get_parent_map(needed)
+ return [(revid, parents[revid][0], meta_map[revid]) for revid in needed]
+
+ def _find_branches(self, branches, find_ghosts=False, fetch_rhs_ancestry=False, pb=None):
set_needed = set()
ret_needed = list()
- ret_parents = dict()
for revid in branches:
- (needed, parents) = self._find_until(revid, find_ghosts=find_ghosts)
- for rev in needed:
- if not rev in set_needed:
- ret_needed.append(rev)
- set_needed.add(rev)
- ret_parents.update(parents)
- return ret_needed, ret_parents
-
- def _find_until(self, revision_id, find_ghosts=False):
+ if pb:
+ pb.update("determining revisions to fetch", branches.index(revid), len(branches))
+            nestedpb = ui.ui_factory.nested_progress_bar()
+            try:
+                for rev in self._find_until(revid, find_ghosts=find_ghosts,
+                        fetch_rhs_ancestry=fetch_rhs_ancestry, pb=nestedpb):
+ if rev[0] not in set_needed:
+ ret_needed.append(rev)
+ set_needed.add(rev[0])
+ finally:
+ nestedpb.finished()
+ return ret_needed
+
+ def _find_until(self, revision_id, find_ghosts=False, fetch_rhs_ancestry=False, pb=None):
"""Find all missing revisions until revision_id
:param revision_id: Stop revision
:param find_ghosts: Find ghosts
+ :param fetch_rhs_ancestry: Fetch right hand side ancestors
:return: Tuple with revisions missing and a dictionary with
parents for those revision.
"""
+ extra = set()
needed = []
- parents = {}
+ revs = []
+ meta_map = {}
+ lhs_parent = {}
+ def check_revid(revision_id):
+ prev = None
+ (branch_path, revnum, mapping) = self.source.lookup_revision_id(revision_id)
+ for revmeta in self.source.iter_reverse_branch_changes(branch_path, revnum, mapping):
+ if pb:
+ pb.update("determining revisions to fetch", revnum-revmeta.revnum, revnum)
+ revid = revmeta.get_revision_id(mapping)
+ lhs_parent[prev] = revid
+ meta_map[revid] = revmeta
+ if fetch_rhs_ancestry:
+ extra.update(revmeta.get_rhs_parents(mapping))
+ if not self.target.has_revision(revid):
+ revs.append(revid)
+ elif not find_ghosts:
+ prev = None
+ break
+ prev = revid
+ lhs_parent[prev] = NULL_REVISION
+
+ check_revid(revision_id)
+
+ for revid in extra:
+ if revid not in revs:
+ check_revid(revid)
+
+ needed = [(revid, lhs_parent[revid], meta_map[revid]) for revid in reversed(revs)]
+
+ return needed
- prev_revid = None
- for revid in self.source.get_graph().iter_ancestry(revision_id):
-
- if prev_revid is not None:
- parents[prev_revid] = revid
+ def copy_content(self, revision_id=None, pb=None):
+ """See InterRepository.copy_content."""
+ self.fetch(revision_id, pb, find_ghosts=False)
- prev_revid = revid
+ def _fetch_revision(self, editor, transport, repos_root, parent_revid):
+ if self._supports_replay:
+ try:
+ self._fetch_revision_replay(editor, transport, repos_root, parent_revid)
+ return
+ except NotImplementedError:
+ self._supports_replay = False
+ self._fetch_revision_update(editor, transport, repos_root, parent_revid)
- if not self.target.has_revision(revid):
- needed.append(revid)
- elif not find_ghosts:
- break
+ def _fetch_revision_replay(self, editor, transport, repos_root, parent_revid):
+ if parent_revid is not None:
+ parent_revnum = self.source.lookup_revision_id(parent_revid)[1]
+ else:
+ parent_revnum = editor.revnum-1
+ branch_url = urlutils.join(repos_root, editor.branch_path)
+ transport.reparent(branch_url)
+ lock = transport.lock_read(".")
+ try:
+ transport.replay(editor.revnum, parent_revnum, editor, True)
+ finally:
+ lock.unlock()
- parents[prev_revid] = None
- needed.reverse()
- return (needed, parents)
+    def _fetch_revision_update(self, editor, conn, repos_root, parent_revid):
+ if parent_revid == NULL_REVISION:
+ branch_url = urlutils.join(repos_root,
+ editor.branch_path)
+ conn.reparent(branch_url)
+ assert conn.url == branch_url, \
+ "Expected %r, got %r" % (conn.url, branch_url)
+ reporter = conn.do_update(editor.revnum, True, editor)
- def copy_content(self, revision_id=None, pb=None):
- """See InterRepository.copy_content."""
- self.fetch(revision_id, pb, find_ghosts=False)
+ try:
+ # Report status of existing paths
+ reporter.set_path("", editor.revnum, True, None)
+ except:
+ reporter.abort()
+ raise
+ else:
+ (parent_branch, parent_revnum, mapping) = \
+ self.source.lookup_revision_id(parent_revid)
+ conn.reparent(urlutils.join(repos_root, parent_branch))
+
+ if parent_branch != editor.branch_path:
+ reporter = conn.do_switch(editor.revnum, True,
+ urlutils.join(repos_root, editor.branch_path),
+ editor)
+ else:
+ reporter = conn.do_update(editor.revnum, True, editor)
- def _fetch_replay(self, revids, pb=None):
- """Copy a set of related revisions using svn.ra.replay.
+ try:
+ # Report status of existing paths
+ reporter.set_path("", parent_revnum, False)
+ except:
+ reporter.abort()
+ raise
- :param revids: Revision ids to copy.
- :param pb: Optional progress bar
- """
- raise NotImplementedError(self._copy_revisions_replay)
+ reporter.finish()
- def _fetch_switch(self, revids, pb=None, lhs_parent=None):
+ def _fetch_switch(self, conn, revids, pb=None):
"""Copy a set of related revisions using svn.ra.switch.
:param revids: List of revision ids of revisions to copy,
newest first.
:param pb: Optional progress bar.
"""
- repos_root = self.source.transport.get_svn_repos_root()
+ repos_root = conn.get_repos_root()
prev_revid = None
- transport = self.source.transport
if pb is None:
pb = ui.ui_factory.nested_progress_bar()
nested_pb = pb
editor = revbuildklass(self.source, self.target)
try:
- for revid in revids:
+ for (revid, parent_revid, revmeta) in revids:
pb.update('copying revision', num, len(revids))
- parent_revid = lhs_parent[revid]
+ assert parent_revid is not None
- if parent_revid is None:
+ if parent_revid == NULL_REVISION:
parent_inv = Inventory(root_id=None)
elif prev_revid != parent_revid:
parent_inv = self.target.get_inventory(parent_revid)
else:
parent_inv = prev_inv
- editor.start_revision(revid, parent_inv)
+ editor.start_revision(revid, parent_inv, revmeta)
try:
- pool = Pool()
-
- if parent_revid is None:
- branch_url = urlutils.join(repos_root,
- editor.branch_path)
- transport.reparent(branch_url)
- assert transport.svn_url == branch_url.rstrip("/"), \
- "Expected %r, got %r" % (transport.svn_url, branch_url)
- reporter = transport.do_update(editor.revnum, True,
- editor, pool)
-
- # Report status of existing paths
- reporter.set_path("", editor.revnum, True, None, pool)
- else:
- (parent_branch, parent_revnum, mapping) = \
- self.source.lookup_revision_id(parent_revid)
- transport.reparent(urlutils.join(repos_root, parent_branch))
-
- if parent_branch != editor.branch_path:
- reporter = transport.do_switch(editor.revnum, True,
- urlutils.join(repos_root, editor.branch_path),
- editor, pool)
- else:
- reporter = transport.do_update(editor.revnum, True, editor)
-
- # Report status of existing paths
- reporter.set_path("", parent_revnum, False, None, pool)
-
- lock = transport.lock_read(".")
- reporter.finish_report(pool)
- lock.unlock()
+                    self._fetch_revision(editor, conn, repos_root, parent_revid)
except:
- editor.abort_edit()
+ editor.abort()
raise
prev_inv = editor.inventory
prev_revid = revid
- pool.destroy()
num += 1
finally:
self.target.unlock()
if nested_pb is not None:
nested_pb.finished()
- self.source.transport.reparent_root()
def fetch(self, revision_id=None, pb=None, find_ghosts=False,
- branches=None):
+ branches=None, fetch_rhs_ancestry=False):
"""Fetch revisions. """
if revision_id == NULL_REVISION:
return
+
+ self._supports_replay = True # assume replay supported by default
# Dictionary with paths as keys, revnums as values
+ if pb:
+ pb.update("determining revisions to fetch", 0, 2)
+
# Loop over all the revnums until revision_id
# (or youngest_revnum) and call self.target.add_revision()
# or self.target.add_inventory() each time
self.target.lock_read()
try:
if branches is not None:
- (needed, lhs_parent) = self._find_branches(branches,
- find_ghosts)
+ needed = self._find_branches(branches, find_ghosts, fetch_rhs_ancestry, pb=pb)
elif revision_id is None:
- (needed, lhs_parent) = self._find_all()
+ needed = self._find_all(self.source.get_mapping(), pb=pb)
else:
- (needed, lhs_parent) = self._find_until(revision_id,
- find_ghosts)
+ needed = self._find_until(revision_id, find_ghosts, fetch_rhs_ancestry, pb=pb)
finally:
self.target.unlock()
# Nothing to fetch
return
- self._fetch_switch(needed, pb, lhs_parent)
+ conn = self.source.transport.get_connection()
+ try:
+ self._fetch_switch(conn, needed, pb)
+ finally:
+ self.source.transport.add_connection(conn)
@staticmethod
def is_compatible(source, target):