Rev 3913: Move more information down into EquivalenceTable. in http://bzr.arbash-meinel.com/branches/bzr/brisbane/vilajam
John Arbash Meinel
john at arbash-meinel.com
Wed Mar 25 21:04:07 GMT 2009
At http://bzr.arbash-meinel.com/branches/bzr/brisbane/vilajam
------------------------------------------------------------
revno: 3913
revision-id: john at arbash-meinel.com-20090325210346-f4c2x0dh17dlj4mi
parent: john at arbash-meinel.com-20090325205816-m8rellryp3b7u4f0
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: vilajam
timestamp: Wed 2009-03-25 16:03:46 -0500
message:
Move more information down into EquivalenceTable.
-------------- next part --------------
=== modified file 'bzrlib/_groupcompress_py.py'
--- a/bzrlib/_groupcompress_py.py 2009-03-25 20:58:16 +0000
+++ b/bzrlib/_groupcompress_py.py 2009-03-25 21:03:46 +0000
@@ -30,6 +30,8 @@
def __init__(self, lines):
self.lines = lines
+ self.line_offsets = []
+ self.endpoint = sum(map(len, lines))
self._right_lines = None
# For each line in 'left' give the offset to the other lines which
# match it.
@@ -158,6 +160,12 @@
"""
self._update_matching_lines(lines, index)
self.lines.extend(lines)
+ endpoint = self.endpoint
+ for line in lines:
+ endpoint += len(line)
+ self.line_offsets.append(endpoint)
+ assert len(self.line_offsets) == len(self.lines)
+ self.endpoint = endpoint
def set_right_lines(self, lines):
"""Set the lines we will be matching against."""
=== modified file 'bzrlib/groupcompress.py'
--- a/bzrlib/groupcompress.py 2009-03-25 20:58:16 +0000
+++ b/bzrlib/groupcompress.py 2009-03-25 21:03:46 +0000
@@ -840,7 +840,6 @@
:param delta: If False, do not compress records.
"""
super(PythonGroupCompressor, self).__init__()
- self.line_offsets = []
self.line_locations = EquivalenceTable([])
self.lines = self.line_locations.lines
self._present_prefixes = set()
@@ -946,8 +945,8 @@
if old_start_linenum == 0:
first_byte = 0
else:
- first_byte = self.line_offsets[old_start_linenum - 1]
- stop_byte = self.line_offsets[old_start_linenum + num_lines - 1]
+ first_byte = self.line_locations.line_offsets[old_start_linenum - 1]
+ stop_byte = self.line_locations.line_offsets[old_start_linenum + num_lines - 1]
num_bytes = stop_byte - first_byte
# The data stream allows >64kB in a copy, but to match the compiled
# code, we will also limit it to a 64kB copy
@@ -978,7 +977,6 @@
self.line_locations.extend_lines(new_lines, index_lines)
for line in new_lines:
endpoint += len(line)
- self.line_offsets.append(endpoint)
self.endpoint = endpoint
More information about the bazaar-commits mailing list