Rev 4417: Cleanup. in file:///home/vila/src/bzr/reviews/vila-better-heads/

Vincent Ladeuil v.ladeuil+lp at free.fr
Wed Jun 17 17:32:11 BST 2009


At file:///home/vila/src/bzr/reviews/vila-better-heads/

------------------------------------------------------------
revno: 4417
revision-id: v.ladeuil+lp at free.fr-20090617163210-kruj5o9qii6hxhnn
parent: v.ladeuil+lp at free.fr-20090617162802-gcbr56os427emput
committer: Vincent Ladeuil <v.ladeuil+lp at free.fr>
branch nick: vila-better-heads
timestamp: Wed 2009-06-17 18:32:10 +0200
message:
  Cleanup.
  
  * bzrlib/_known_graph_py.py:
  Get rid of previous versions of _find_gdfo and heads.
-------------- next part --------------
=== modified file 'bzrlib/_known_graph_py.py'
--- a/bzrlib/_known_graph_py.py	2009-06-17 16:25:50 +0000
+++ b/bzrlib/_known_graph_py.py	2009-06-17 16:32:10 +0000
@@ -170,46 +170,6 @@
         while pending:
             update_childs(pending.pop())
 
-    def x_find_gdfo(self):
-        def find_tails():
-            return [node for node in self._nodes.itervalues()
-                       if not node.parent_keys]
-        tails = find_tails()
-        todo = []
-        heappush = heapq.heappush
-        heappop = heapq.heappop
-        nodes = self._nodes
-        for node in tails:
-            node.gdfo = 1
-            heappush(todo, (1, node))
-        processed = 0
-        max_gdfo = len(self._nodes) + 1
-        while todo:
-            gdfo, next = heappop(todo)
-            processed += 1
-            if next.gdfo is not None and gdfo < next.gdfo:
-                # This node was reached from a longer path, we assume it was
-                # enqued correctly with the longer gdfo, so don't continue
-                # processing now
-                assert gdfo < next.gdfo
-                continue
-            next_gdfo = gdfo + 1
-            assert next_gdfo <= max_gdfo
-            for child_key in next.child_keys:
-                child_node = nodes[child_key]
-                if child_node.gdfo is None or child_node.gdfo < next_gdfo:
-                    # Only enque children when all of their parents have been
-                    # resolved
-                    for parent_key in child_node.parent_keys:
-                        # We know that 'this' parent is counted
-                        if parent_key != next.key:
-                            parent_node = nodes[parent_key]
-                            if parent_node.gdfo is None:
-                                break
-                    else:
-                        child_node.gdfo = next_gdfo
-                        heappush(todo, (next_gdfo, child_node))
-
     def _get_dominators_to_nodes(self, candidate_nodes):
         """Get the reverse mapping from dominator_key => candidate_nodes.
 
@@ -298,136 +258,3 @@
             self._known_heads[heads_key] = heads
         return heads
 
-    def xheads(self, keys):
-        """Return the heads from amongst keys.
-
-        This is done by searching the ancestries of each key.  Any key that is
-        reachable from another key is not returned; all the others are.
-
-        This operation scales with the relative depth between any two keys. If
-        any two keys are completely disconnected all ancestry of both sides
-        will be retrieved.
-
-        :param keys: An iterable of keys.
-        :return: A set of the heads. Note that as a set there is no ordering
-            information. Callers will need to filter their input to create
-            order if they need it.
-        """
-        candidate_nodes = dict((key, self._nodes[key]) for key in keys)
-        if revision.NULL_REVISION in candidate_nodes:
-            # NULL_REVISION is only a head if it is the only entry
-            candidate_nodes.pop(revision.NULL_REVISION)
-            if not candidate_nodes:
-                return set([revision.NULL_REVISION])
-        if len(candidate_nodes) < 2:
-            return frozenset(candidate_nodes)
-        heads_key = frozenset(candidate_nodes)
-        if heads_key != frozenset(keys):
-            note('%s != %s', heads_key, frozenset(keys))
-        try:
-            heads = self._known_heads[heads_key]
-            return heads
-        except KeyError:
-            pass # compute it ourselves
-        dom_to_node = self._get_dominators_to_nodes(candidate_nodes)
-        if len(candidate_nodes) < 2:
-            # We shrunk candidate_nodes and determined a new head
-            return frozenset(candidate_nodes)
-        dom_heads_key = None
-        # Check the linear dominators of these keys, to see if we already
-        # know the heads answer
-        dom_heads_key = frozenset([node.linear_dominator
-                                   for node in candidate_nodes.itervalues()])
-        if dom_heads_key in self._known_heads:
-            # map back into the original keys
-            heads = self._known_heads[dom_heads_key]
-            heads = frozenset([dom_to_node[key].key for key in heads])
-            return heads
-        heads = self._heads_from_candidate_nodes(candidate_nodes, dom_to_node)
-        if self.do_cache:
-            self._known_heads[heads_key] = heads
-            # Cache the dominator heads
-            if dom_heads_key is not None:
-                dom_heads = frozenset([candidate_nodes[key].linear_dominator
-                                       for key in heads])
-                self._known_heads[dom_heads_key] = dom_heads
-        return heads
-
-    def _heads_from_candidate_nodes(self, candidate_nodes, dom_to_node):
-        queue = []
-        to_cleanup = []
-        to_cleanup_append = to_cleanup.append
-        for node in candidate_nodes.itervalues():
-            assert node.ancestor_of is None
-            node.ancestor_of = (node.key,)
-            queue.append((-node.gdfo, node))
-            to_cleanup_append(node)
-        heapq.heapify(queue)
-        # These are nodes that we determined are 'common' that we are no longer
-        # walking
-        # Now we walk nodes until all nodes that are being walked are 'common'
-        num_candidates = len(candidate_nodes)
-        nodes = self._nodes
-        heappop = heapq.heappop
-        heappush = heapq.heappush
-        while queue and len(candidate_nodes) > 1:
-            _, node = heappop(queue)
-            # assert node.ancestor_of is not None
-            next_ancestor_of = node.ancestor_of
-            if len(next_ancestor_of) == num_candidates:
-                # This node is now considered 'common'
-                # Make sure all parent nodes are marked as such
-                for parent_key in node.parent_keys:
-                    parent_node = nodes[parent_key]
-                    if parent_node.ancestor_of is not None:
-                        parent_node.ancestor_of = next_ancestor_of
-                if node.linear_dominator != node.key:
-                    parent_node = nodes[node.linear_dominator]
-                    if parent_node.ancestor_of is not None:
-                        parent_node.ancestor_of = next_ancestor_of
-                continue
-            if node.parent_keys is None:
-                # This is a ghost
-                continue
-            # Now project the current nodes ancestor list to the parent nodes,
-            # and queue them up to be walked
-            # Note: using linear_dominator speeds things up quite a bit
-            #       enough that we actually start to be slightly faster
-            #       than the default heads() implementation
-            if node.linear_dominator != node.key:
-                # We are at the tip of a long linear region
-                # We know that there is nothing between here and the tail
-                # that is interesting, so skip to the end
-                parent_keys = [node.linear_dominator]
-            else:
-                parent_keys = node.parent_keys
-            for parent_key in parent_keys:
-                if parent_key in candidate_nodes:
-                    candidate_nodes.pop(parent_key)
-                    if len(candidate_nodes) <= 1:
-                        break
-                elif parent_key in dom_to_node:
-                    orig_node = dom_to_node[parent_key]
-                    if orig_node is not node:
-                        if orig_node.key in candidate_nodes:
-                            candidate_nodes.pop(orig_node.key)
-                            if len(candidate_nodes) <= 1:
-                                break
-                parent_node = nodes[parent_key]
-                ancestor_of = parent_node.ancestor_of
-                if ancestor_of is None:
-                    # This node hasn't been walked yet
-                    parent_node.ancestor_of = next_ancestor_of
-                    # Enqueue this node
-                    heappush(queue, (-parent_node.gdfo, parent_node))
-                    to_cleanup_append(parent_node)
-                elif ancestor_of != next_ancestor_of:
-                    # Combine to get the full set of parents
-                    all_ancestors = set(ancestor_of)
-                    all_ancestors.update(next_ancestor_of)
-                    parent_node.ancestor_of = tuple(sorted(all_ancestors))
-        def cleanup():
-            for node in to_cleanup:
-                node.ancestor_of = None
-        cleanup()
-        return frozenset(candidate_nodes)



More information about the bazaar-commits mailing list