Rev 4516: cleanup indentation. in http://bazaar.launchpad.net/~jameinel/bzr/1.17-chk-multilevel
John Arbash Meinel
john at arbash-meinel.com
Thu Jul 2 21:02:17 BST 2009
At http://bazaar.launchpad.net/~jameinel/bzr/1.17-chk-multilevel
------------------------------------------------------------
revno: 4516
revision-id: john at arbash-meinel.com-20090702200202-nkkcfmrka2de3tba
parent: john at arbash-meinel.com-20090702195943-q271mscu1ssmy012
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: 1.17-chk-multilevel
timestamp: Thu 2009-07-02 15:02:02 -0500
message:
cleanup indentation.
-------------- next part --------------
=== modified file 'bzrlib/chk_map.py'
--- a/bzrlib/chk_map.py 2009-07-02 19:59:43 +0000
+++ b/bzrlib/chk_map.py 2009-07-02 20:02:02 +0000
@@ -1476,8 +1476,7 @@
old_chks_to_enqueue.extend(prefix_refs)
return old_chks_to_enqueue
- def _enqueue_old(self, new_prefixes,
- old_chks_to_enqueue):
+ def _enqueue_old(self, new_prefixes, old_chks_to_enqueue):
# At this point, we have read all the uninteresting and interesting
# items, so we can queue up the uninteresting stuff, knowing that we've
# handled the interesting ones
@@ -1506,8 +1505,7 @@
return
old_chks_to_enqueue = self._read_old_roots()
# filter out any root keys that are already known to be uninteresting
- new_keys = set(self._new_root_keys).difference(
- self._all_old_chks)
+ new_keys = set(self._new_root_keys).difference(self._all_old_chks)
# These are prefixes that are present in new_keys that we are
# thinking to yield
new_prefixes = set()
@@ -1530,10 +1528,10 @@
# to make the results unique. We might profile whether we
# gain anything by ensuring unique return values for items
new_items = [item for item in items
- if item not in self._all_old_items]
+ if item not in self._all_old_items]
self._new_item_queue.extend(new_items)
new_prefixes.update([self._search_key_func(item[0])
- for item in new_items])
+ for item in new_items])
processed_new_refs.update(refs)
yield record
# For new_prefixes we have the full length prefixes queued up.
@@ -1541,10 +1539,8 @@
# 'ab', then we also need to include 'a'.) So expand the
# new_prefixes to include all shorter prefixes
for prefix in list(new_prefixes):
- new_prefixes.update([prefix[:i]
- for i in xrange(1, len(prefix))])
- self._enqueue_old(new_prefixes,
- old_chks_to_enqueue)
+ new_prefixes.update([prefix[:i] for i in xrange(1, len(prefix))])
+ self._enqueue_old(new_prefixes, old_chks_to_enqueue)
def _flush_new_queue(self):
# No need to maintain the heap invariant anymore, just pull things out
@@ -1556,7 +1552,7 @@
processed_new_refs = self._processed_new_refs
all_old_items = self._all_old_items
new_items = [item for item in self._new_item_queue
- if item not in all_old_items]
+ if item not in all_old_items]
self._new_item_queue = []
if new_items:
yield None, new_items
=== modified file 'bzrlib/tests/test_chk_map.py'
--- a/bzrlib/tests/test_chk_map.py 2009-07-02 19:59:43 +0000
+++ b/bzrlib/tests/test_chk_map.py 2009-07-02 20:02:02 +0000
@@ -2369,7 +2369,7 @@
key3 = c_map._save()
key3_c = c_map._root_node._items['c'].key()
diff = self.get_difference([key2, key3], [key1],
- chk_map._search_key_plain)
+ chk_map._search_key_plain)
root_results = [record.key for record in diff._read_all_roots()]
self.assertEqual(sorted([key2, key3]), sorted(root_results))
self.assertEqual([], diff._old_queue)
More information about the bazaar-commits mailing list