Rev 3421: Ian's review feedback. in http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors
John Arbash Meinel
john at arbash-meinel.com
Mon May 5 21:09:58 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors
------------------------------------------------------------
revno: 3421
revision-id: john at arbash-meinel.com-20080505200949-8adblici2iuvu8z2
parent: john at arbash-meinel.com-20080501224934-3i8kxtuyr9r21711
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: find_unique_ancestors
timestamp: Mon 2008-05-05 15:09:49 -0500
message:
Ian's review feedback.
modified:
bzrlib/graph.py graph_walker.py-20070525030359-y852guab65d4wtn0-1
bzrlib/status.py status.py-20050505062338-431bfa63ec9b19e6
bzrlib/tests/test_graph.py test_graph_walker.py-20070525030405-enq4r60hhi9xrujc-1
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py 2008-05-01 22:49:34 +0000
+++ b/bzrlib/graph.py 2008-05-05 20:09:49 +0000
@@ -315,7 +315,6 @@
len(ancestor_all_unique), len(stopped_common))
del ancestor_all_unique, stopped_common
- # import pdb; pdb.set_trace()
# While we still have common nodes to search
while common_searcher._next_query:
newly_seen_common = set(common_searcher.step())
@@ -327,13 +326,8 @@
for searcher in unique_searchers:
unique_are_common_nodes = unique_are_common_nodes.intersection(
searcher.seen)
- # unique_are_common_nodes = unique_are_common_nodes.intersection(
- # all_unique_searcher.seen)
- diff = unique_are_common_nodes.intersection(
- all_unique_searcher.seen)
- if diff != unique_are_common_nodes:
- # import pdb; pdb.set_trace()
- unique_are_common_nodes = diff
+ unique_are_common_nodes = unique_are_common_nodes.intersection(
+ all_unique_searcher.seen)
unique_are_common_nodes.update(all_unique_searcher.step())
if newly_seen_common:
# If a 'common' node is an ancestor of all unique searchers, we
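
For readers skimming the hunk above, here is a minimal standalone sketch of the set logic that survives the cleanup; the names are illustrative, not bzrlib API. A node newly seen by the common searcher only counts as "unique are common" if every individual unique searcher and the combined all_unique searcher have seen it as well.

    def unique_are_common(newly_seen_common, unique_seen_sets, all_unique_seen):
        # Keep only the nodes that every unique searcher has already seen ...
        common = set(newly_seen_common)
        for seen in unique_seen_sets:
            common = common.intersection(seen)
        # ... and that the combined all_unique searcher has seen, which is the
        # single intersection the hunk keeps in place of the old debug compare.
        return common.intersection(all_unique_seen)

    # Only 'c' is present in every seen set, so only 'c' survives.
    print(unique_are_common({'a', 'b', 'c'},
                            [{'b', 'c'}, {'a', 'c'}],
                            {'c', 'd'}))   # => {'c'}
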
=== modified file 'bzrlib/status.py'
--- a/bzrlib/status.py 2008-05-01 20:41:00 +0000
+++ b/bzrlib/status.py 2008-05-05 20:09:49 +0000
@@ -157,7 +157,7 @@
:param revision_ids: A set of revision_ids
:param parent_map: The parent information for each node. Revisions which
are considered ghosts should not be present in the map.
- :return: An the iterator from MergeSorter.iter_topo_order()
+ :return: iterator from MergeSorter.iter_topo_order()
"""
# MergeSorter requires that all nodes be present in the graph, so get rid
# of any references pointing outside of this graph.
@@ -175,6 +175,10 @@
def show_pending_merges(new, to_file, short=False):
"""Write out a display of pending merges in a working tree."""
+ parents = new.get_parent_ids()
+ if len(parents) < 2:
+ return
+
# we need one extra space for terminals that wrap on last char
term_width = osutils.terminal_width() - 1
if short:
@@ -184,15 +188,11 @@
first_prefix = ' '
sub_prefix = ' '
- parents = new.get_parent_ids()
- if len(parents) < 2:
- return
pending = parents[1:]
branch = new.branch
last_revision = parents[0]
if not short:
to_file.write('pending merges:\n')
- ignore = set([None, last_revision, _mod_revision.NULL_REVISION])
graph = branch.repository.get_graph()
other_revisions = [last_revision]
log_formatter = log.LineLogFormatter(to_file)
@@ -239,8 +239,6 @@
raise AssertionError('Somehow we misunderstood how'
' iter_topo_order works %s != %s' % (first, merge))
for num, sub_merge, depth, eom in rev_id_iterator:
- if sub_merge in ignore:
- continue
rev = revisions[sub_merge]
if rev is None:
to_file.write(sub_prefix + '(ghost) ' + sub_merge + '\n')
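
The status.py hunks move the "fewer than two parents" bail-out ahead of the terminal-width and prefix setup, and drop the ignore-set filtering in the inner loop. A hedged, standalone sketch of that early-return shape, using a hypothetical helper rather than the real show_pending_merges:

    import sys

    def show_pending(parents, write):
        if len(parents) < 2:
            # Nothing pending: return before doing any formatting setup.
            return
        for rev_id in parents[1:]:
            write('pending merge: %s\n' % rev_id)

    show_pending(['tip-only'], sys.stdout.write)        # prints nothing
    show_pending(['tip', 'other'], sys.stdout.write)    # prints one line
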
=== modified file 'bzrlib/tests/test_graph.py'
--- a/bzrlib/tests/test_graph.py 2008-05-01 22:49:34 +0000
+++ b/bzrlib/tests/test_graph.py 2008-05-05 20:09:49 +0000
@@ -234,12 +234,10 @@
# |/|/
# t u
complex_shortcut2 = {'a':[NULL_REVISION], 'b':['a'], 'c':['b'], 'd':['c'],
- 'e':['d'], 'f':['e'],
- 'g':['f'], 'h':['d'], 'k':['h', 'i'], 'j':['h'],
- 'i':['g'], 'l':['k'], 'm':['l'],
- 'n':['m'], 't':['i', 's'], 'u':['s', 'j'],
- 'o':['n'], 'p':['o'], 'q':['p'],
- 'r':['q'], 's':['r'],
+ 'e':['d'], 'f':['e'], 'g':['f'], 'h':['d'], 'i':['g'],
+ 'j':['h'], 'k':['h', 'i'], 'l':['k'], 'm':['l'], 'n':['m'],
+ 'o':['n'], 'p':['o'], 'q':['p'], 'r':['q'], 's':['r'],
+ 't':['i', 's'], 'u':['s', 'j'],
}
# Graph where different walkers will race to find the common and uncommon
@@ -410,10 +408,6 @@
self.calls.extend(nodes)
return self._real_parents_provider.get_parent_map(nodes)
- def get_parent_map(self, nodes):
- self.calls.extend(nodes)
- return self._real_parents_provider.get_parent_map(nodes)
-
class TestGraph(TestCaseWithMemoryTransport):
@@ -1229,7 +1223,7 @@
def test_racing_shortcuts(self):
graph = self.make_graph(racing_shortcuts)
self.assertFindUniqueAncestors(graph,
- ['p', 'q', 'z'], 'z', ['j'])
+ ['p', 'q', 'z'], 'z', ['y'])
self.assertFindUniqueAncestors(graph,
['h', 'i', 'j', 'y'], 'j', ['z'])
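
As a rough, self-contained illustration of what these tests exercise, rather than the bzrlib implementation: find_unique_ancestors is essentially asking for the ancestors reachable from one tip but not from the other given tips, which a naive version could compute like this.

    def ancestors(parent_map, tip):
        # Walk the parent_map from tip, collecting every reachable revision.
        seen, pending = set(), [tip]
        while pending:
            node = pending.pop()
            if node in seen:
                continue
            seen.add(node)
            pending.extend(parent_map.get(node, []))
        return seen

    def naive_unique_ancestors(parent_map, tip, others):
        # Ancestry of tip, minus the ancestry of every other tip.
        unique = ancestors(parent_map, tip)
        for other in others:
            unique -= ancestors(parent_map, other)
        return unique

    parent_map = {'a': [], 'b': ['a'], 'c': ['a'], 'z': ['b', 'c'], 'y': ['b']}
    print(naive_unique_ancestors(parent_map, 'z', ['y']))   # => {'c', 'z'}
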