Rev 3888: Change the _GCIndex._get_entries code to not build a set that we don't use. in http://bzr.arbash-meinel.com/branches/bzr/brisbane/hack3
John Arbash Meinel
john at arbash-meinel.com
Mon Mar 23 18:40:11 GMT 2009
At http://bzr.arbash-meinel.com/branches/bzr/brisbane/hack3
------------------------------------------------------------
revno: 3888
revision-id: john at arbash-meinel.com-20090323184002-yn1gmjy96ezys08c
parent: john at arbash-meinel.com-20090323173537-opqyhyax7bv2w2j1
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: hack3
timestamp: Mon 2009-03-23 13:40:02 -0500
message:
Change the _GCIndex._get_entries code to not build a set that we don't use.
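The second hunk of the attached diff is the change this message describes: the old _get_entries built a found_keys set on every call, even though that set was only consulted when check_present was True; the new code only builds a tracking set on the check_present path. A rough, self-contained sketch of that pattern (not the bzrlib method itself: a plain dict stands in for the graph index, keys is assumed to be a set or list, and a generic KeyError stands in for the RevisionNotPresent the old code raised):

def get_entries(index, keys, check_present=False):
    # `index` is a hypothetical {key: node} dict standing in for
    # self._graph_index; the real code yields nodes from iter_entries(keys).
    if check_present:
        # Build the bookkeeping set only on the path that actually uses it.
        missing_keys = set(keys)
        for key in keys:
            if key in index:
                yield key, index[key]
                missing_keys.discard(key)
        if missing_keys:
            # The old code raised RevisionNotPresent(missing_keys.pop(), self).
            raise KeyError(missing_keys.pop())
    else:
        # Common path: no throwaway set is allocated at all.
        for key in keys:
            if key in index:
                yield key, index[key]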
-------------- next part --------------
=== modified file 'bzrlib/groupcompress.py'
--- a/bzrlib/groupcompress.py 2009-03-23 17:35:37 +0000
+++ b/bzrlib/groupcompress.py 2009-03-23 18:40:02 +0000
@@ -556,17 +556,16 @@
         compressor = GroupCompressor()
         tstart = time.time()
         old_length = self._block._content_length
-        cur_endpoint = 0
+        end_point = 0
         for factory in self._factories:
             bytes = factory.get_bytes_as('fulltext')
-            (found_sha1, end_point, type,
+            (found_sha1, start_point, end_point, type,
              length) = compressor.compress(factory.key, bytes, factory.sha1)
             # Now update this factory with the new offsets, etc
             factory.sha1 = found_sha1
-            factory._start = cur_endpoint
+            factory._start = start_point
             factory._end = end_point
-            cur_endpoint = end_point
-        self._last_byte = cur_endpoint
+        self._last_byte = end_point
         new_block = compressor.flush()
         # TODO: Should we check that new_block really *is* smaller than the old
         #       block? It seems hard to come up with a method that it would
@@ -1703,23 +1702,25 @@
         :param keys: An iterable of index key tuples.
         """
-        # TODO: don't do set(keys) or build up found_keys unless we actually
-        #       use them.
-        keys = set(keys)
-        found_keys = set()
-        if self._parents:
-            for node in self._graph_index.iter_entries(keys):
-                yield node
-                found_keys.add(node[1])
-        else:
-            # adapt parentless index to the rest of the code.
-            for node in self._graph_index.iter_entries(keys):
-                yield node[0], node[1], node[2], ()
-                found_keys.add(node[1])
         if check_present:
-            missing_keys = keys.difference(found_keys)
-            if missing_keys:
-                raise RevisionNotPresent(missing_keys.pop(), self)
+            missing_keys = set(keys)
+            if self._parents:
+                for node in self._graph_index.iter_entries(keys):
+                    yield node
+                    missing_keys.discard(node[1])
+            else:
+                # adapt parentless index to the rest of the code.
+                for node in self._graph_index.iter_entries(keys):
+                    yield node[0], node[1], node[2], ()
+                    missing_keys.discard(node[1])
+        else:
+            if self._parents:
+                for node in self._graph_index.iter_entries(keys):
+                    yield node
+            else:
+                # adapt parentless index to the rest of the code.
+                for node in self._graph_index.iter_entries(keys):
+                    yield node[0], node[1], node[2], ()

     def get_parent_map(self, keys):
         """Get a map of the parents of keys.
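The first hunk is a caller-side tidy-up in the same file: compressor.compress() in this branch reports a start_point along with the end_point, so the loop that rebuilds a block records each factory's (start, end) range directly instead of carrying a cur_endpoint accumulator between iterations. A rough sketch of that loop shape, with hypothetical stand-ins for the factory objects and for GroupCompressor.compress():

def rebuild(factories, compress):
    # `factories` are assumed to be objects with .key, .sha1 and a
    # get_bytes_as('fulltext') method; `compress(key, bytes, sha1)` is assumed
    # to return (sha1, start_point, end_point, type, length) as in the hunk.
    last_byte = 0
    for factory in factories:
        data = factory.get_bytes_as('fulltext')
        found_sha1, start_point, end_point, kind, length = compress(
            factory.key, data, factory.sha1)
        factory.sha1 = found_sha1
        factory._start = start_point   # previously derived from cur_endpoint
        factory._end = end_point
        last_byte = end_point
    return last_byte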