Rev 4470: Add a set_chunked_content member to GroupCompressBlock. in http://bazaar.launchpad.net/~jameinel/bzr/1.17-gc-single-mem
John Arbash Meinel
john at arbash-meinel.com
Mon Jun 22 19:10:44 BST 2009
At http://bazaar.launchpad.net/~jameinel/bzr/1.17-gc-single-mem
------------------------------------------------------------
revno: 4470
revision-id: john at arbash-meinel.com-20090622181004-0rsmfqcnhk48fq88
parent: pqm at pqm.ubuntu.com-20090622171120-fuxez9ylfqpxynqn
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: 1.17-gc-single-mem
timestamp: Mon 2009-06-22 13:10:04 -0500
message:
Add a set_chunked_content member to GroupCompressBlock.
-------------- next part --------------
=== modified file 'bzrlib/groupcompress.py'
--- a/bzrlib/groupcompress.py 2009-06-22 15:47:25 +0000
+++ b/bzrlib/groupcompress.py 2009-06-22 18:10:04 +0000
@@ -108,6 +108,7 @@
self._z_content_length = None
self._content_length = None
self._content = None
+ self._content_chunks = None
def __len__(self):
# This is the maximum number of bytes this object will reference if
@@ -137,6 +138,10 @@
% (num_bytes, self._content_length))
# Expand the content if required
if self._content is None:
+ if self._content_chunks is not None:
+ self._content = ''.join(self._content_chunks)
+ self._content_chunks = None
+ if self._content is None:
if self._z_content is None:
raise AssertionError('No content to decompress')
if self._z_content == '':
@@ -273,22 +278,54 @@
bytes = apply_delta_to_source(self._content, content_start, end)
return bytes
+ def set_chunked_content(self, content_chunks):
+ """Set the content of this block to the given chunks."""
+ self._content_length = sum(map(len, content_chunks))
+ self._content_chunks = content_chunks
+ self._z_content = None
+ self._content = None
+
def set_content(self, content):
"""Set the content of this block."""
self._content_length = len(content)
self._content = content
self._z_content = None
+ def _create_z_content_using_lzma(self):
+ if self._content_chunks is not None:
+ self._content = ''.join(self._content_chunks)
+ self._content_chunks = None
+ if self._content is None:
+ raise AssertionError('Nothing to compress')
+ self._z_content = pylzma.compress(self._content)
+ self._z_content_length = len(self._z_content)
+
+ def _create_z_content_from_chunks(self):
+ compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION)
+ compressed_chunks = []
+ for chunk in self._content_chunks:
+ z_bytes = compressor.compress(chunk)
+ if z_bytes:
+ compressed_chunks.append(z_bytes)
+ compressed_chunks.append(compressor.flush())
+ self._z_content = ''.join(compressed_chunks)
+ self._z_content_length = len(self._z_content)
+
+ def _create_z_content(self):
+ if self._z_content is not None:
+ return
+ if _USE_LZMA:
+ self._create_z_content_using_lzma()
+ return
+ if self._content_chunks is not None:
+ self._create_z_content_from_chunks()
+ return
+ self._z_content = zlib.compress(self._content)
+ self._z_content_length = len(self._z_content)
+
def to_bytes(self):
"""Encode the information into a byte stream."""
- compress = zlib.compress
- if _USE_LZMA:
- compress = pylzma.compress
- if self._z_content is None:
- if self._content is None:
- raise AssertionError('Nothing to compress')
- self._z_content = compress(self._content)
- self._z_content_length = len(self._z_content)
+ self._create_z_content()
if _USE_LZMA:
header = self.GCB_LZ_HEADER
else:
=== modified file 'bzrlib/tests/test_groupcompress.py'
--- a/bzrlib/tests/test_groupcompress.py 2009-06-10 03:56:49 +0000
+++ b/bzrlib/tests/test_groupcompress.py 2009-06-22 18:10:04 +0000
@@ -363,6 +363,14 @@
raw_bytes = zlib.decompress(remaining_bytes)
self.assertEqual(content, raw_bytes)
+ # we should get the same results if using the chunked version
+ gcb = groupcompress.GroupCompressBlock()
+ gcb.set_chunked_content(['this is some content\n'
+ 'this content will be compressed\n'])
+ old_bytes = bytes
+ bytes = gcb.to_bytes()
+ self.assertEqual(old_bytes, bytes)
+
def test_partial_decomp(self):
content_chunks = []
# We need a sufficient amount of data so that zlib.decompress has
More information about the bazaar-commits
mailing list