Rev 3391: Take another tack on _search_for_extra in http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_differences
John Arbash Meinel
john at arbash-meinel.com
Wed Apr 23 22:48:09 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.4-dev/find_differences
------------------------------------------------------------
revno: 3391
revision-id: john at arbash-meinel.com-20080423214212-i63wv3x7s27ekjim
parent: john at arbash-meinel.com-20080423203341-4qlndx8u2zu21yz0
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: find_differences
timestamp: Wed 2008-04-23 16:42:12 -0500
message:
Take another tack on _search_for_extra
All we really care about is getting to the point where
all common searchers have passed the unique nodes.
So start all unique searchers with as much ancestry as
possible.
modified:
bzrlib/graph.py graph_walker.py-20070525030359-y852guab65d4wtn0-1
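
The interesting change is in the first hunk of the diff below: the old code
lumped all unique revisions together with symmetric_difference(), while the
new code keeps per-side difference() sets, so each unique revision can be
traced back to the searcher that saw it. A standalone sketch with plain
Python sets (the revision ids are made up for illustration, and no bzrlib
objects are involved):

# Old code: one undifferentiated pool of unique revisions.
left_seen = {'base', 'common-1', 'left-only-1', 'left-only-2'}
right_seen = {'base', 'common-1', 'right-only-1'}
unique = left_seen.symmetric_difference(right_seen)

# New code: remember which side saw each unique revision, so each
# unique searcher can be seeded from that side's ancestry.
left_unique = left_seen.difference(right_seen)
right_unique = right_seen.difference(left_seen)
assert unique == left_unique.union(right_unique)

for revision_id in sorted(unique):
    side = 'left' if revision_id in left_unique else 'right'
    print('%s was seen only by the %s searcher' % (revision_id, side))

The union is the same set either way; the split only adds the bookkeeping
needed to pick the right parent_searcher in the loop below.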
-------------- next part --------------
=== modified file 'bzrlib/graph.py'
--- a/bzrlib/graph.py 2008-04-23 20:33:41 +0000
+++ b/bzrlib/graph.py 2008-04-23 21:42:12 +0000
@@ -514,25 +514,27 @@
common_searchers = searchers
left_searcher = searchers[0]
right_searcher = searchers[1]
- unique = left_searcher.seen.symmetric_difference(right_searcher.seen)
+ left_unique = left_searcher.seen.difference(right_searcher.seen)
+ right_unique = right_searcher.seen.difference(left_searcher.seen)
+ unique = left_unique.union(right_unique)
total_unique = len(unique)
unique = self._remove_simple_descendants(unique,
self.get_parent_map(unique))
simple_unique = len(unique)
trace.mutter('Starting %s unique searchers for %s unique revisions',
simple_unique, total_unique)
- # TODO: jam 20071214 Would it be possible to seed these searchers with
- # the revisions that we have already seen on each side?
- # Maybe something like:
- # unique_searchers = []
- # for left_unique in left.difference(right):
- # l_searcher = self._make_breadth_first_searcher(left_unique)
- # l_searcher.seen.update(left.seen)
- # ... (same for right.difference(left))
- # This might also be easier to set up for the case of >2
- # searchers.
- unique_searchers = [self._make_breadth_first_searcher([r])
- for r in unique]
+
+ unique_searchers = []
+ for revision_id in unique:
+ if revision_id in left_unique:
+ parent_searcher = left_searcher
+ else:
+ parent_searcher = right_searcher
+ revs_to_search = parent_searcher.find_seen_ancestors([revision_id])
+ if not revs_to_search: # XXX: This shouldn't be possible
+ revs_to_search = [revision_id]
+ unique_searchers.append(self._make_breadth_first_searcher(revs_to_search))
+
# Aggregate all of the searchers into a single common searcher, would
# it be okay to do this?
@@ -555,9 +557,8 @@
new_common_unique = set()
for revision in newly_seen_unique:
if revision in common_ancestors_unique:
- # TODO: Do we need to add it to new_common_unique, since it
- # seems to have already been found... ?
- new_common_unique.add(revision)
+ # It is already in common_ancestors_unique, so we don't
+ # need to search it again.
continue
for searcher in unique_searchers:
if revision not in searcher.seen:
@@ -571,6 +572,11 @@
# Make sure all searchers are on the same page
for searcher in common_searchers:
newly_seen_common.update(searcher.find_seen_ancestors(newly_seen_common))
+ # We start searching the whole ancestry. It is a bit wasteful,
+ # though. We really just want to mark all of these nodes as
+ # 'seen' and then start just the tips. However, it requires a
+ # get_parent_map() call to figure out the tips anyway, and all
+ # redundant requests should be fairly fast.
for searcher in common_searchers:
searcher.start_searching(newly_seen_common)
@@ -605,13 +611,6 @@
for searcher in common_searchers:
searcher.stop_searching_any(new_common_unique)
common_ancestors_unique.update(new_common_unique)
- # for searcher in unique_searchers:
- # if searcher._next_query:
- # # We have something to look for
- # break
- # else:
- # # All unique_searchers have stopped searching
- # break
for searcher in common_searchers:
if searcher._next_query:
break
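
For anyone reading without the bzrlib source at hand: find_seen_ancestors()
returns the ancestors of the given revisions that the searcher has already
visited, which is what makes the seeding above possible. A rough standalone
approximation follows; this is a toy walk over plain dicts and sets with a
simplified signature, not the real _BreadthFirstSearcher method:

def find_seen_ancestors(tips, seen, parent_map):
    """Toy version: all ancestors of tips that are already in seen.

    parent_map maps revision_id -> tuple of parent revision_ids; seen
    is the set of revisions the searcher has already visited.
    """
    found = set()
    pending = [rev for rev in tips if rev in seen]
    while pending:
        rev = pending.pop()
        if rev in found:
            continue
        found.add(rev)
        for parent in parent_map.get(rev, ()):
            if parent in seen:
                pending.append(parent)
    return found

# Toy graph: base <- common <- left-only (made-up ids).
parent_map = {'left-only': ('common',), 'common': ('base',), 'base': ()}
seen = {'base', 'common', 'left-only'}
print(sorted(find_seen_ancestors(['left-only'], seen, parent_map)))
# ['base', 'common', 'left-only'] -- the unique searcher starts with the
# whole already-seen ancestry rather than just the single tip revision.

This also suggests why the fallback guarded by the XXX comment should be
unreachable: each unique tip is, by construction, in the seen set of the
searcher that produced it, so find_seen_ancestors() returns at least the
tip itself.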