Rev 5153: test in http://bzr.arbash-meinel.com/branches/bzr/jam-integration
John Arbash Meinel
john at arbash-meinel.com
Wed Apr 14 06:22:53 BST 2010
At http://bzr.arbash-meinel.com/branches/bzr/jam-integration
------------------------------------------------------------
revno: 5153 [merge]
revision-id: john at arbash-meinel.com-20100414052237-pby3iy90s7v9ogv1
parent: pqm at pqm.ubuntu.com-20100414043547-j4t4napw7duy07if
parent: john at arbash-meinel.com-20100414050653-5l0z0hn2pk2y2uf5
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: jam-integration
timestamp: Wed 2010-04-14 00:22:37 -0500
message:
test
modified:
NEWS NEWS-20050323055033-4e00b5db738777ff
bzrlib/index.py index.py-20070712131115-lolkarso50vjr64s-1
-------------- next part --------------
=== modified file 'NEWS'
--- a/NEWS 2010-04-14 04:35:47 +0000
+++ b/NEWS 2010-04-14 05:22:37 +0000
@@ -67,6 +67,10 @@
read calls. An incremental pull via plain HTTP takes half the time and
bytes for a moderately large repository. (Andrew Bennetts)
+* Index lookups only re-order the indexes when the hit files aren't
+  already first. Reduces the cost of reordering.
+  (John Arbash Meinel, #562429)
+
* Less code is loaded at startup. (Cold-cache start time is about 10-20%
less.)
(Martin Pool, #553017)
=== modified file 'bzrlib/index.py'
--- a/bzrlib/index.py 2010-04-08 07:01:10 +0000
+++ b/bzrlib/index.py 2010-04-14 05:06:53 +0000
@@ -1418,6 +1418,10 @@
_move_to_front propagates to all objects in self._sibling_indices by
calling _move_to_front_by_name.
"""
+ if self._indices[:len(hit_indices)] == hit_indices:
+ # The 'hit_indices' are already at the front (and in the same
+ # order), no need to re-order
+ return
hit_names = self._move_to_front_by_index(hit_indices)
for sibling_idx in self._sibling_indices:
sibling_idx._move_to_front_by_name(hit_names)
@@ -1431,19 +1435,27 @@
if 'index' in debug.debug_flags:
mutter('CombinedGraphIndex reordering: currently %r, promoting %r',
indices_info, hit_indices)
- hit_indices_info = []
hit_names = []
- unhit_indices_info = []
- for name, idx in indices_info:
+ unhit_names = []
+ new_hit_indices = []
+ unhit_indices = []
+
+ for offset, (name, idx) in enumerate(indices_info):
if idx in hit_indices:
- info = hit_indices_info
hit_names.append(name)
+ new_hit_indices.append(idx)
+ if len(new_hit_indices) == len(hit_indices):
+ # We've found all of the hit entries, everything else is
+ # unhit
+ unhit_names.extend(self._index_names[offset+1:])
+ unhit_indices.extend(self._indices[offset+1:])
+ break
else:
- info = unhit_indices_info
- info.append((name, idx))
- final_info = hit_indices_info + unhit_indices_info
- self._indices = [idx for (name, idx) in final_info]
- self._index_names = [name for (name, idx) in final_info]
+ unhit_names.append(name)
+ unhit_indices.append(idx)
+
+ self._indices = new_hit_indices + unhit_indices
+ self._index_names = hit_names + unhit_names
if 'index' in debug.debug_flags:
mutter('CombinedGraphIndex reordered: %r', self._indices)
return hit_names
More information about the bazaar-commits
mailing list