Rev 3519: Seeing if we can get the merge right without having to have all nodes. in http://bzr.arbash-meinel.com/branches/bzr/1.6-dev/merge3_per_file
John Arbash Meinel
john at arbash-meinel.com
Thu Jun 26 04:17:09 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.6-dev/merge3_per_file
------------------------------------------------------------
revno: 3519
revision-id: john at arbash-meinel.com-20080626031656-59y0gqrsq9t3wox9
parent: john at arbash-meinel.com-20080625224143-ndnv3fygtime27au
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: merge3_per_file
timestamp: Wed 2008-06-25 22:16:56 -0500
message:
Seeing if we can get the merge right without having to have all nodes.
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py	2008-06-20 20:07:26 +0000
+++ b/bzrlib/graph.py	2008-06-26 03:16:56 +0000
@@ -320,6 +320,45 @@
                 len(true_unique_nodes), len(unique_nodes))
         return true_unique_nodes
 
+    def find_merge_order(self, tip_revision_id, lca_revision_ids):
+        """Find the order that each revision was merged into tip.
+
+        This basically just walks backwards with a stack, and walks left-first
+        until it finds a node to stop.
+        """
+        if len(lca_revision_ids) == 1:
+            return list(lca_revision_ids)
+        looking_for = set(lca_revision_ids)
+        # TODO: Is there a way we could do this "faster" by batching up the
+        # get_parent_map requests?
+        # TODO: Should we also be culling the ancestry search right away? We
+        # could add looking_for to the "stop" list, and walk their
+        # ancestry in batched mode. The flip side is it might mean we walk a
+        # lot of "stop" nodes, rather than only the minimum.
+        # Then again, without it we may trace back into ancestry we could have
+        # stopped early.
+        stack = [tip_revision_id]
+        found = []
+        stop = set()
+        while stack and looking_for:
+            next = stack.pop()
+            stop.add(next)
+            parent_ids = self.get_parent_map([next]).get(next, None)
+            if not parent_ids: # Ghost, nothing to search here
+                continue
+            for parent_id in reversed(parent_ids):
+                if parent_id in looking_for:
+                    # Found it, we can stop searching this direction
+                    # we could also add ancestors of this node to stop
+                    # searching....
+                    found.append(parent_id)
+                    looking_for.remove(parent_id)
+                elif parent_id not in stop:
+                    # this will need to be searched
+                    stack.append(parent_id)
+                    stop.add(parent_id)
+        return found
+
     def _find_initial_unique_nodes(self, unique_revisions, common_revisions):
         """Steps 1-3 of find_unique_ancestors.
=== modified file 'bzrlib/merge.py'
--- a/bzrlib/merge.py	2008-06-25 22:41:43 +0000
+++ b/bzrlib/merge.py	2008-06-26 03:16:56 +0000
@@ -1543,7 +1543,7 @@
         self.a_rev = a_rev
         self.b_rev = b_rev
         self.vf = vf
-        self.graph = _mod_graph.Graph(self.vf)
+        self.graph = _mod_graph.Graph(_mod_graph.CachingParentsProvider(self.vf))
         heads = self.graph.heads((a_rev, b_rev))
         if len(heads) == 1:
             # one side dominates, so we can just return its values, yay for
@@ -1570,7 +1570,11 @@
             next_lcas = self.graph.find_lca(*cur_ancestors)
             ancestors.update(next_lcas)
             for rev_id in cur_ancestors:
-                parent_map[rev_id] = next_lcas
+                # We need to make sure to get the parent ordering correct for
+                # this node.
+                parent_ids = self.graph.find_merge_order(rev_id, next_lcas)
+                mutter('found %s => %s', rev_id, parent_ids)
+                parent_map[rev_id] = parent_ids
             if len(next_lcas) == 0:
                 ancestors.add(NULL_REVISION)
                 parent_map[NULL_REVISION] = ()
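
The Graph construction change above pairs with the new walker:
find_merge_order issues one get_parent_map call per node it pops, and
those lookups largely revisit ancestry that heads() and find_lca() have
already walked, so wrapping self.vf in CachingParentsProvider keeps the
answers around. A minimal sketch of the caching idea, assuming only the
get_parent_map contract (illustrative class; the real implementation in
bzrlib/graph.py differs in detail, e.g. it also caches misses):

class CachingParents(object):
    """Cache get_parent_map answers in front of a slower provider."""

    def __init__(self, real_provider):
        self._real = real_provider
        self._cache = {}

    def get_parent_map(self, keys):
        result = {}
        missing = []
        for key in keys:
            if key in self._cache:
                result[key] = self._cache[key]
            else:
                missing.append(key)
        if missing:
            # One batched request for everything not seen before.
            new = self._real.get_parent_map(missing)
            self._cache.update(new)
            result.update(new)
        return result

With this in front of the versionedfile, the repeated one-node requests
from the while loop in find_merge_order hit the real provider at most
once per revision.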