Rev 3413: Small updates, try to write a test for the race condition. in http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors
John Arbash Meinel
john at arbash-meinel.com
Sun Apr 27 15:28:14 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors
------------------------------------------------------------
revno: 3413
revision-id: john at arbash-meinel.com-20080427142151-24uxl2vmwbs93qoa
parent: john at arbash-meinel.com-20080425205150-zsytbcx0nol0bilw
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: find_unique_ancestors
timestamp: Sun 2008-04-27 09:21:51 -0500
message:
Small updates, try to write a test for the race condition.
So far I have been unsuccessful; it is very hard to artificially
create the race that I found in bzr.dev.
modified:
bzrlib/graph.py graph_walker.py-20070525030359-y852guab65d4wtn0-1
bzrlib/tests/test_graph.py test_graph_walker.py-20070525030405-enq4r60hhi9xrujc-1
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py 2008-04-25 20:51:50 +0000
+++ b/bzrlib/graph.py 2008-04-27 14:21:51 +0000
@@ -294,22 +294,23 @@
searcher.seen)
# Collapse all the common nodes into a single searcher
all_unique_searcher = self._make_breadth_first_searcher(ancestor_all_unique)
- all_unique_searcher.step()
-
- # Stop any search tips that are already known as ancestors of the
- # unique nodes
- common_searcher.stop_searching_any(
- common_searcher.find_seen_ancestors(ancestor_all_unique))
-
- total_stopped = 0
- for searcher in unique_searchers:
- total_stopped += len(searcher.stop_searching_any(
- searcher.find_seen_ancestors(ancestor_all_unique)))
- _mutter('For %s unique nodes, created %s + 1 unique searchers'
- ' (%s stopped search tips, %s common ancestors)',
- len(unique_nodes), len(unique_searchers), total_stopped,
- len(ancestor_all_unique))
- del ancestor_all_unique
+ if ancestor_all_unique:
+ all_unique_searcher.step()
+
+ # Stop any search tips that are already known as ancestors of the
+ # unique nodes
+ common_searcher.stop_searching_any(
+ common_searcher.find_seen_ancestors(ancestor_all_unique))
+
+ total_stopped = 0
+ for searcher in unique_searchers:
+ total_stopped += len(searcher.stop_searching_any(
+ searcher.find_seen_ancestors(ancestor_all_unique)))
+ _mutter('For %s unique nodes, created %s + 1 unique searchers'
+ ' (%s stopped search tips, %s common ancestors)',
+ len(unique_nodes), len(unique_searchers), total_stopped,
+ len(ancestor_all_unique))
+ del ancestor_all_unique
# While we still have common nodes to search
while common_searcher._next_query:
@@ -322,9 +323,6 @@
for searcher in unique_searchers:
unique_are_common_nodes = unique_are_common_nodes.intersection(
searcher.seen)
- # TODO: This needs a test case
- # It is triggered when you have lots of unique nodes, and
- # some of them converge before the others.
unique_are_common_nodes = unique_are_common_nodes.intersection(
all_unique_searcher.seen)
unique_are_common_nodes.update(all_unique_searcher.step())
=== modified file 'bzrlib/tests/test_graph.py'
--- a/bzrlib/tests/test_graph.py 2008-04-25 20:51:50 +0000
+++ b/bzrlib/tests/test_graph.py 2008-04-27 14:21:51 +0000
@@ -242,6 +242,63 @@
'r':['q'], 's':['r'],
}
+# Graph where different walkers will race to find the common and uncommon
+# nodes.
+#
+# NULL_REVISION
+# |
+# a
+# |
+# b
+# |
+# c
+# |
+# d
+# |\
+# e k
+# | |
+# f-+-p
+# | | |
+# | l |
+# | | |
+# | m |
+# | |\|
+# g n q
+# |\| |
+# h o |
+# |/| |
+# i r |
+# | | |
+# | s |
+# | | |
+# | t |
+# | | |
+# | u |
+# | | |
+# | v |
+# | | |
+# | w |
+# | | |
+# | x |
+# | |\|
+# | y z
+# |/
+# j
+#
+# y is found to be common right away, but is the start of a long series of
+# common commits.
+# o is actually common, but the i-j shortcut makes it look unique to j at
+# first; you have to traverse all of y->o to find it.
+# q,n give the walker from j a common point to stop searching, as do p,f.
+# k-n exists so that the second pass still has nodes that are worth searching,
+# rather than instantly cancelling the extra walker.
+
+racing_shortcuts = {'a':[NULL_REVISION], 'b':['a'], 'c':['b'], 'd':['c'],
+ 'e':['d'], 'f':['e'], 'g':['f'], 'h':['g'], 'i':['h', 'o'], 'j':['i', 'y'],
+ 'k':['d'], 'l':['k'], 'm':['l'], 'n':['m'], 'o':['n', 'g'], 'p':['f'],
+ 'q':['p', 'm'], 'r':['o'], 's':['r'], 't':['s'], 'u':['t'], 'v':['u'],
+ 'w':['v'], 'x':['w'], 'y':['x'], 'z':['x', 'q']}
+
# A graph with multiple nodes unique to one side.
#
@@ -1185,6 +1242,14 @@
self.assertFindUniqueAncestors(graph,
['p', 'z'], 'z', ['y'])
+ def test_racing_shortcuts(self):
+ graph = self.make_graph(racing_shortcuts)
+ self.assertFindUniqueAncestors(graph,
+ ['p', 'q', 'z'], 'z', ['y'])
+ import pdb; pdb.set_trace()
+ self.assertFindUniqueAncestors(graph,
+ ['h', 'i', 'j', 'y'], 'j', ['z'])
+
class TestCachingParentsProvider(tests.TestCase):
More information about the bazaar-commits
mailing list