Rev 4366: Delete obsolete pack-tuned find_inconsistent_revisions as the generic code path permits the same optimisation. in http://people.canonical.com/~robertc/baz2.0/check
Robert Collins
robertc at robertcollins.net
Wed Aug 5 02:06:08 BST 2009
At http://people.canonical.com/~robertc/baz2.0/check
------------------------------------------------------------
revno: 4366
revision-id: robertc at robertcollins.net-20090805010558-60s314d1zxd9cvk7
parent: robertc at robertcollins.net-20090804044735-o4rlt22aki0a1bh4
committer: Robert Collins <robertc at robertcollins.net>
branch nick: check
timestamp: Wed 2009-08-05 11:05:58 +1000
message:
Delete obsolete pack-tuned find_inconsistent_revisions as the generic code path permits the same optimisation.
=== modified file 'bzrlib/remote.py'
--- a/bzrlib/remote.py 2009-08-04 04:36:34 +0000
+++ b/bzrlib/remote.py 2009-08-05 01:05:58 +0000
@@ -1619,9 +1619,10 @@
self._ensure_real()
return self._real_repository.revision_graph_can_have_wrong_parents()
- def _find_inconsistent_revision_parents(self):
+ def _find_inconsistent_revision_parents(self, revisions_iterator=None):
self._ensure_real()
- return self._real_repository._find_inconsistent_revision_parents()
+ return self._real_repository._find_inconsistent_revision_parents(
+ revisions_iterator)
def _check_for_inconsistent_revision_parents(self):
self._ensure_real()
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py 2009-08-04 04:36:34 +0000
+++ b/bzrlib/repofmt/pack_repo.py 2009-08-05 01:05:58 +0000
@@ -2215,52 +2215,6 @@
self.revisions._index._key_dependencies.refs.clear()
self._pack_collection._abort_write_group()
- def _find_inconsistent_revision_parents(self):
- """Find revisions with incorrectly cached parents.
-
- :returns: an iterator yielding tuples of (revison-id, parents-in-index,
- parents-in-revision).
- """
- if not self.is_locked():
- raise errors.ObjectNotLocked(self)
- pb = ui.ui_factory.nested_progress_bar()
- result = []
- try:
- revision_nodes = self._pack_collection.revision_index \
- .combined_index.iter_all_entries()
- index_positions = []
- # Get the cached index values for all revisions, and also the
- # location in each index of the revision text so we can perform
- # linear IO.
- for index, key, value, refs in revision_nodes:
- node = (index, key, value, refs)
- index_memo = self.revisions._index._node_to_position(node)
- if index_memo[0] != index:
- raise AssertionError('%r != %r' % (index_memo[0], index))
- index_positions.append((index_memo, key[0],
- tuple(parent[0] for parent in refs[0])))
- pb.update("Reading revision index", 0, 0)
- index_positions.sort()
- batch_size = 1000
- pb.update("Checking cached revision graph", 0,
- len(index_positions))
- for offset in xrange(0, len(index_positions), 1000):
- pb.update("Checking cached revision graph", offset)
- to_query = index_positions[offset:offset + batch_size]
- if not to_query:
- break
- rev_ids = [item[1] for item in to_query]
- revs = self.get_revisions(rev_ids)
- for revision, item in zip(revs, to_query):
- index_parents = item[2]
- rev_parents = tuple(revision.parent_ids)
- if index_parents != rev_parents:
- result.append((revision.revision_id, index_parents,
- rev_parents))
- finally:
- pb.finished()
- return result
-
def _get_source(self, to_format):
if to_format.network_name() == self._format.network_name():
return KnitPackStreamSource(self, to_format)
More information about the bazaar-commits
mailing list