Rev 3406: Revert the _find_any_seen change. in http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors

John Arbash Meinel john@arbash-meinel.com
Fri Apr 25 03:10:55 BST 2008


At http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_unique_ancestors

------------------------------------------------------------
revno: 3406
revision-id: john@arbash-meinel.com-20080425020453-4txgyv9utbd73y21
parent: john@arbash-meinel.com-20080425015422-k2xb2p6k6sgxwt5s
committer: John Arbash Meinel <john@arbash-meinel.com>
branch nick: find_unique_ancestors
timestamp: Thu 2008-04-24 21:04:53 -0500
message:
  Revert the _find_any_seen change.
  The overhead of combining the searchers swamps the benefits
  of combining the search.
modified:
  bzrlib/graph.py                graph_walker.py-20070525030359-y852guab65d4wtn0-1
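
For context, the change being reverted had replaced per-searcher calls with a
single combined walk; the diff below restores the original pattern in both
hunks. A minimal sketch of that restored pattern, assuming only the searcher
methods that actually appear in the diff (find_seen_ancestors,
stop_searching_any, start_searching); the wrapper name here is hypothetical:

    # Hypothetical wrapper showing the call pattern the diff below
    # restores: each searcher expands the candidate set within its own
    # seen set, rather than one helper walking across all searchers.
    def _absorb_common_nodes(unique_searcher, common_searcher, common_nodes):
        # Ancestors the unique searcher has already seen...
        ancestors = unique_searcher.find_seen_ancestors(common_nodes)
        # ...plus any of those that the common searcher has also seen.
        ancestors.update(common_searcher.find_seen_ancestors(ancestors))
        # These nodes are common, so the unique side can stop there while
        # the common side makes sure it walks through them.
        unique_searcher.stop_searching_any(ancestors)
        common_searcher.start_searching(ancestors)
        return ancestors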
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py	2008-04-25 01:54:22 +0000
+++ b/bzrlib/graph.py	2008-04-25 02:04:53 +0000
@@ -159,52 +159,6 @@
     def __repr__(self):
         return 'Graph(%r)' % self._parents_provider
 
-    def _find_any_seen_ancestors(self, revisions, searchers):
-        """Find any revisions that are seen by any of the searchers."""
-        # This will actually return more nodes than just aggregating
-        # find_seen_ancestors() across the searchers, because it can bridge
-        # 1-node gaps inline.
-
-        # seen contains what nodes have been returned, not what nodes
-        # have been queried. We don't want to probe for nodes that haven't
-        # been searched yet.
-        all_seen = set()
-        not_searched_yet = set()
-        for searcher in searchers:
-            all_seen.update(searcher.seen)
-
-            not_searched = set(searcher._next_query)
-            # Have these nodes been searched in any other searcher?
-            for other_searcher in searchers:
-                if other_searcher is searcher:
-                    continue
-                # This other searcher claims to have seen these nodes
-                maybe_searched = not_searched.intersection(other_searcher.seen)
-                searched = maybe_searched.difference(other_searcher._next_query)
-                not_searched.difference_update(searched)
-
-            # Now we know the real ones that haven't been searched
-            not_searched_yet.update(not_searched)
-
-        pending = set(revisions).intersection(all_seen)
-        seen_ancestors = set(pending)
-
-        pending.difference_update(not_searched_yet)
-        get_parent_map = self._parents_provider.get_parent_map
-        while pending:
-            parent_map = get_parent_map(pending)
-            all_parents = []
-            # We don't care if it is a ghost, since it can't be seen if it is
-            # a ghost
-            for parent_ids in parent_map.itervalues():
-                all_parents.extend(parent_ids)
-            next_pending = all_seen.intersection(all_parents).difference(seen_ancestors)
-            seen_ancestors.update(next_pending)
-            next_pending.difference_update(not_searched_yet)
-            pending = next_pending
-
-        return seen_ancestors
-
     def find_lca(self, *revisions):
         """Determine the lowest common ancestors of the provided revisions
 
@@ -299,8 +253,9 @@
             unique_are_common_nodes.update(
                 next_common_nodes.intersection(unique_searcher.seen))
             if unique_are_common_nodes:
-                ancestors = self._find_any_seen_ancestors(unique_are_common_nodes,
-                    [unique_searcher, common_searcher])
+                ancestors = unique_searcher.find_seen_ancestors(
+                                unique_are_common_nodes)
+                ancestors.update(common_searcher.find_seen_ancestors(ancestors))
                 unique_searcher.stop_searching_any(ancestors)
                 common_searcher.start_searching(ancestors)
 
@@ -350,10 +305,13 @@
                     ancestor_all_unique.intersection(newly_seen_common))
             if unique_are_common_nodes:
                 # We have new common-to-all-unique-searchers nodes
-                # Which also means we can check in the common_searcher
+                for searcher in unique_searchers:
+                    unique_are_common_nodes.update(
+                        searcher.find_seen_ancestors(unique_are_common_nodes))
+                # Since these are common, we can grab another set of ancestors
+                # that we have seen
                 unique_are_common_nodes.update(
-                    self._find_any_seen_ancestors(unique_are_common_nodes,
-                        [common_searcher] + unique_searchers))
+                    common_searcher.find_seen_ancestors(unique_are_common_nodes))
 
                 # We can tell all of the unique searchers to start at these
                 # nodes, and tell all of the common searchers to *stop*

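For reference, a self-contained approximation of the seen-restricted ancestor
walk that a single searcher's find_seen_ancestors performs, inferred from the
removed helper above rather than copied from bzrlib; get_parent_map here
stands in for self._parents_provider.get_parent_map in the diff:

    # Approximate model (not bzrlib's exact code): starting from
    # 'revisions', repeatedly take parents, but keep only nodes this
    # searcher has already seen. Ghost parents never appear in 'seen',
    # so the intersection filters them out.
    def seen_restricted_ancestors(revisions, seen, get_parent_map):
        pending = set(revisions) & seen
        seen_ancestors = set(pending)
        while pending:
            parent_map = get_parent_map(pending)
            all_parents = []
            for parent_ids in parent_map.values():
                all_parents.extend(parent_ids)
            # Only walk into parents that were seen but not yet collected.
            pending = (seen & set(all_parents)) - seen_ancestors
            seen_ancestors.update(pending)
        return seen_ancestors

The removed _find_any_seen_ancestors ran this same loop against the union of
every searcher's seen set (which let it bridge 1-node gaps between
searchers); the revert runs it per searcher instead.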


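To make the commit message's trade-off concrete: before the walk could even
start, the removed helper had to rebuild the union of all seen sets and work
out which frontier nodes had not been searched anywhere, paying a cost
proportional to the total number of seen nodes (times the number of searchers,
for the inner loop) on every call. A sketch of just that setup, using the
seen and _next_query attributes shown in the removed code above; the function
name is hypothetical:

    # The per-call setup cost the removed helper paid. all_seen grows
    # with every node any searcher has returned, and the nested loop
    # compares each frontier against every other searcher's state.
    def combined_setup(searchers):
        all_seen = set()
        not_searched_yet = set()
        for searcher in searchers:
            all_seen.update(searcher.seen)
            not_searched = set(searcher._next_query)
            for other in searchers:
                if other is searcher:
                    continue
                # Nodes another searcher has seen AND actually expanded.
                maybe_searched = not_searched.intersection(other.seen)
                searched = maybe_searched.difference(other._next_query)
                not_searched.difference_update(searched)
            not_searched_yet.update(not_searched)
        return all_seen, not_searched_yet

Per-searcher find_seen_ancestors skips all of this setup, which is why the
simpler form wins here even though it can return fewer nodes per call.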