Rev 4154: (andrew) Remove InterPackToRemotePack too. in file:///home/pqm/archives/thelove/bzr/%2Btrunk/
Canonical.com Patch Queue Manager
pqm at pqm.ubuntu.com
Tue Mar 17 02:35:55 GMT 2009
At file:///home/pqm/archives/thelove/bzr/%2Btrunk/
------------------------------------------------------------
revno: 4154
revision-id: pqm at pqm.ubuntu.com-20090317023551-f1jm1klbuy1eg4hv
parent: pqm at pqm.ubuntu.com-20090317013621-d8sbugz1b1jrmzjk
parent: andrew.bennetts at canonical.com-20090317013115-bp6jn12nhzhj3ww8
committer: Canonical.com Patch Queue Manager <pqm at pqm.ubuntu.com>
branch nick: +trunk
timestamp: Tue 2009-03-17 02:35:51 +0000
message:
(andrew) Remove InterPackToRemotePack too.
modified:
NEWS NEWS-20050323055033-4e00b5db738777ff
bzrlib/repository.py rev_storage.py-20051111201905-119e9401e46257e3
------------------------------------------------------------
revno: 4144.3.12
revision-id: andrew.bennetts at canonical.com-20090317013115-bp6jn12nhzhj3ww8
parent: andrew.bennetts at canonical.com-20090317012851-xst9bpei4j066v5q
committer: Andrew Bennetts <andrew.bennetts at canonical.com>
branch nick: simplify-interrepo-stack
timestamp: Tue 2009-03-17 12:31:15 +1100
message:
Remove target_get_graph and target_get_parent_map attributes from InterRepository; nothing overrides them anymore.
modified:
bzrlib/repository.py rev_storage.py-20051111201905-119e9401e46257e3
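The two attributes named here implemented a small hook pattern: InterRepository stored bound methods of the target on the instance so that a subclass (such as the old InterOtherToRemote) could swap in faster, RPC-backed callables from its own __init__. With nothing overriding them any more, calling the target directly is equivalent. A minimal sketch of the two forms, with toy names rather than real bzrlib code:

    class Target(object):
        """Stand-in for a repository; only get_parent_map matters here."""

        def __init__(self, parents):
            self._parents = parents  # dict: revision_id -> parent ids

        def get_parent_map(self, revision_ids):
            # Like bzrlib's API: map each *present* revision to its parents.
            return dict((r, self._parents[r])
                        for r in revision_ids if r in self._parents)


    class Inter(object):
        def __init__(self, source, target):
            self.source = source
            self.target = target
            # Hook style (removed by this revision): keep a bound method on
            # the instance so a subclass __init__ can replace it.
            self.target_get_parent_map = self.target.get_parent_map

        def missing(self, revision_ids):
            # Direct style (what the code does now): nothing overrides the
            # hook, so just ask the target itself.
            present = self.target.get_parent_map(revision_ids)
            return set(revision_ids) - set(present)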
------------------------------------------------------------
revno: 4144.3.11
revision-id: andrew.bennetts at canonical.com-20090317012851-xst9bpei4j066v5q
parent: andrew.bennetts at canonical.com-20090317004445-2ya4gfeavitj0np8
committer: Andrew Bennetts <andrew.bennetts at canonical.com>
branch nick: simplify-interrepo-stack
timestamp: Tue 2009-03-17 12:28:51 +1100
message:
Remove InterPackToRemotePack too.
modified:
NEWS NEWS-20050323055033-4e00b5db738777ff
bzrlib/repository.py rev_storage.py-20051111201905-119e9401e46257e3
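For context on why the diff below is almost purely deletions: InterRepository keeps a registry of optimised implementations, each candidate advertises itself through a static is_compatible(source, target) check, and lookup picks the first optimiser that accepts the pair, falling back to the generic class. Removing a specialisation is therefore just deleting the class and its register_optimiser() call. Roughly, as a simplified sketch (not the exact InterObject code):

    class InterRepository(object):
        _optimisers = []

        def __init__(self, source, target):
            self.source = source
            self.target = target

        @classmethod
        def register_optimiser(klass, optimiser):
            klass._optimisers.append(optimiser)

        @classmethod
        def get(klass, source, target):
            # The first registered optimiser that claims this
            # (source, target) pair wins; otherwise use the generic class.
            for provider in klass._optimisers:
                if provider.is_compatible(source, target):
                    return provider(source, target)
            return klass(source, target)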
=== modified file 'NEWS'
--- a/NEWS 2009-03-17 01:36:21 +0000
+++ b/NEWS 2009-03-17 02:35:51 +0000
@@ -96,8 +96,9 @@
       make visible data inserted into the repository by a smart server
       fetch operation. (Robert Collins, Andrew Bennetts)
 
-    * Removed ``InterRemoteToOther`` and ``InterOtherToRemote`` classes,
-      as they are now unnecessary. (Andrew Bennetts)
+    * Removed ``InterRemoteToOther``, ``InterOtherToRemote`` and
+      ``InterPackToRemotePack`` classes, as they are now unnecessary.
+      (Andrew Bennetts)
 
     * ``_walk_to_common_revisions`` will now batch up at least 50
       revisions before calling ``get_parent_map`` on the target,
=== modified file 'bzrlib/repository.py'
--- a/bzrlib/repository.py 2009-03-17 01:36:21 +0000
+++ b/bzrlib/repository.py 2009-03-17 02:35:51 +0000
@@ -34,7 +34,6 @@
lockdir,
lru_cache,
osutils,
- remote,
revision as _mod_revision,
symbol_versioning,
tsort,
@@ -2627,13 +2626,6 @@
     _optimisers = []
     """The available optimised InterRepository types."""
 
-    def __init__(self, source, target):
-        InterObject.__init__(self, source, target)
-        # These two attributes may be overridden by e.g. InterOtherToRemote to
-        # provide a faster implementation.
-        self.target_get_graph = self.target.get_graph
-        self.target_get_parent_map = self.target.get_parent_map
-
     @needs_write_lock
     def copy_content(self, revision_id=None):
         """Make a complete copy of the content in self into destination.
@@ -2676,7 +2668,7 @@
         :param revision_ids: The start point for the search.
         :return: A set of revision ids.
         """
-        target_graph = self.target_get_graph()
+        target_graph = self.target.get_graph()
         revision_ids = frozenset(revision_ids)
         # Fast path for the case where all the revisions are already in the
         # target repo.
@@ -3037,10 +3029,9 @@
             # till then:
             source_revision_ids = frozenset(self.source.all_revision_ids())
             revision_ids = source_revision_ids - \
-                frozenset(self.target_get_parent_map(source_revision_ids))
+                frozenset(self.target.get_parent_map(source_revision_ids))
             revision_keys = [(revid,) for revid in revision_ids]
-            target_pack_collection = self._get_target_pack_collection()
-            index = target_pack_collection.revision_index.combined_index
+            index = self.target._pack_collection.revision_index.combined_index
             present_revision_ids = set(item[1][0] for item in
                 index.iter_entries(revision_keys))
             revision_ids = set(revision_ids) - present_revision_ids
@@ -3066,27 +3057,20 @@
 
     def _pack(self, source, target, revision_ids):
         from bzrlib.repofmt.pack_repo import Packer
-        target_pack_collection = self._get_target_pack_collection()
         packs = source._pack_collection.all_packs()
-        pack = Packer(target_pack_collection, packs, '.fetch',
+        pack = Packer(self.target._pack_collection, packs, '.fetch',
             revision_ids).pack()
         if pack is not None:
-            target_pack_collection._save_pack_names()
+            self.target._pack_collection._save_pack_names()
             copied_revs = pack.get_revision_count()
             # Trigger an autopack. This may duplicate effort as we've just done
             # a pack creation, but for now it is simpler to think about as
             # 'upload data, then repack if needed'.
-            self._autopack()
+            self.target._pack_collection.autopack()
             return (copied_revs, [])
         else:
             return (0, [])
 
-    def _autopack(self):
-        self.target._pack_collection.autopack()
-
-    def _get_target_pack_collection(self):
-        return self.target._pack_collection
-
     @needs_read_lock
     def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
         """See InterRepository.missing_revision_ids().
@@ -3099,7 +3083,7 @@
         elif revision_id is not None:
             # Find ghosts: search for revisions pointing from one repository to
             # the other, and vice versa, anywhere in the history of revision_id.
-            graph = self.target_get_graph(other_repository=self.source)
+            graph = self.target.get_graph(other_repository=self.source)
             searcher = graph._make_breadth_first_searcher([revision_id])
             found_ids = set()
             while True:
@@ -3115,7 +3099,7 @@
             # Double query here: should be able to avoid this by changing the
             # graph api further.
             result_set = found_ids - frozenset(
-                self.target_get_parent_map(found_ids))
+                self.target.get_parent_map(found_ids))
         else:
             source_ids = self.source.all_revision_ids()
             # source_ids is the worst possible case we may need to pull.
@@ -3299,59 +3283,11 @@
         return basis_id, basis_tree
 
 
-class InterPackToRemotePack(InterPackRepo):
-    """A specialisation of InterPackRepo for a target that is a
-    RemoteRepository.
-
-    This will use the get_parent_map RPC rather than plain readvs, and also
-    uses an RPC for autopacking.
-    """
-
-    @staticmethod
-    def is_compatible(source, target):
-        from bzrlib.repofmt.pack_repo import RepositoryFormatPack
-        if isinstance(source._format, RepositoryFormatPack):
-            if isinstance(target, remote.RemoteRepository):
-                target._format._ensure_real()
-                if isinstance(target._format._custom_format,
-                              RepositoryFormatPack):
-                    if InterRepository._same_model(source, target):
-                        return True
-        return False
-
-    def _autopack(self):
-        self.target.autopack()
-
-    @needs_write_lock
-    def fetch(self, revision_id=None, pb=None, find_ghosts=False,
-            fetch_spec=None):
-        """See InterRepository.fetch()."""
-        if self.target._client._medium._is_remote_before((1, 13)):
-            # The server won't support the insert_stream RPC, so just use
-            # regular InterPackRepo logic. This avoids a bug that causes many
-            # round-trips for small append calls.
-            return InterPackRepo.fetch(self, revision_id=revision_id, pb=pb,
-                find_ghosts=find_ghosts, fetch_spec=fetch_spec)
-        # Always fetch using the generic streaming fetch code, to allow
-        # streaming fetching into remote servers.
-        from bzrlib.fetch import RepoFetcher
-        fetcher = RepoFetcher(self.target, self.source, revision_id,
-            pb, find_ghosts, fetch_spec=fetch_spec)
-
-    def _get_target_pack_collection(self):
-        return self.target._real_repository._pack_collection
-
-    @classmethod
-    def _get_repo_format_to_test(self):
-        return None
-
-
 InterRepository.register_optimiser(InterDifferingSerializer)
 InterRepository.register_optimiser(InterSameDataRepository)
 InterRepository.register_optimiser(InterWeaveRepo)
 InterRepository.register_optimiser(InterKnitRepo)
 InterRepository.register_optimiser(InterPackRepo)
-InterRepository.register_optimiser(InterPackToRemotePack)
 
 
 class CopyConverter(object):
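With the registration above gone, a fetch from a local pack repository into a RemoteRepository simply resolves to a different entry in the registry (plain InterPackRepo, or the generic streaming code), and callers are unaffected because they always go through the lookup. For example (source_repo, target_repo and rev_id are placeholders for opened repository objects and a revision id):

    inter = InterRepository.get(source_repo, target_repo)
    inter.fetch(revision_id=rev_id, find_ghosts=False)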