Rev 3396: Start culling unique searchers once they converge, in http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_differences

John Arbash Meinel john at arbash-meinel.com
Wed Apr 23 23:29:57 BST 2008


At http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_differences

------------------------------------------------------------
revno: 3396
revision-id: john at arbash-meinel.com-20080423222403-sqa8rs4d8eqdk0xi
parent: john at arbash-meinel.com-20080423221521-zya0eumw6tmo2ii6
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: find_differences
timestamp: Wed 2008-04-23 17:24:03 -0500
message:
  Start culling unique searchers once they converge.
  
  This shows a large improvement when there are lots of searchers.
modified:
  bzrlib/graph.py                graph_walker.py-20070525030359-y852guab65d4wtn0-1
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py	2008-04-23 22:15:21 +0000
+++ b/bzrlib/graph.py	2008-04-23 22:24:03 +0000
@@ -521,8 +521,6 @@
         unique = self._remove_simple_descendants(unique,
                     self.get_parent_map(unique))
         simple_unique = len(unique)
-        trace.mutter('Starting %s unique searchers for %s unique revisions',
-                     simple_unique, total_unique)
 
         unique_searchers = []
         for revision_id in unique:
@@ -550,6 +548,21 @@
                 ancestor_all_unique = ancestor_all_unique.intersection(
                                             searcher.seen)
 
+        # Filter out searchers that don't actually search different nodes. We
+        # already have the ancestry intersection for them
+        next_unique_searchers = []
+        unique_search_sets = set()
+        for searcher in unique_searchers:
+            will_search_set = frozenset(searcher._next_query)
+            if will_search_set not in unique_search_sets:
+                # This searcher is searching a unique set of nodes, let it
+                unique_search_sets.add(will_search_set)
+                next_unique_searchers.append(searcher)
+        trace.mutter('Started %s unique searchers for %s unique revisions,'
+                     ' %s with unique tips',
+                     simple_unique, total_unique, len(next_unique_searchers))
+        unique_searchers = next_unique_searchers
+
         while True: # If we have no more nodes we have nothing to do
             newly_seen_common = set()
             for searcher in common_searchers:
@@ -609,6 +622,18 @@
                 for searcher in common_searchers:
                     searcher.stop_searching_any(new_common_unique)
                 ancestor_all_unique.update(new_common_unique)
+
+                # Filter out searchers that don't actually search different nodes. We
+                # already have the ancestry intersection for them
+                next_unique_searchers = []
+                unique_search_sets = set()
+                for searcher in unique_searchers:
+                    will_search_set = frozenset(searcher._next_query)
+                    if will_search_set not in unique_search_sets:
+                        # This searcher is searching a unique set of nodes, let it
+                        unique_search_sets.add(will_search_set)
+                        next_unique_searchers.append(searcher)
+                unique_searchers = next_unique_searchers
             for searcher in common_searchers:
                 if searcher._next_query:
                     break



More information about the bazaar-commits mailing list