Rev 3899: Start using osutils.chunks_to_lines rather than osutils.split_lines. in http://bzr.arbash-meinel.com/branches/bzr/1.11/get_record_stream_chunked

John Arbash Meinel john at arbash-meinel.com
Thu Dec 11 03:19:17 GMT 2008


At http://bzr.arbash-meinel.com/branches/bzr/1.11/get_record_stream_chunked

------------------------------------------------------------
revno: 3899
revision-id: john at arbash-meinel.com-20081211031852-cmjpdf2ufno0okui
parent: john at arbash-meinel.com-20081211030803-gctunob7zsten3qg
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: get_record_stream_chunked
timestamp: Wed 2008-12-10 21:18:52 -0600
message:
  Start using osutils.chunks_to_lines rather than osutils.split_lines.
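  
  The pattern throughout this patch is to request record content as
  'chunked' (a list of byte strings) instead of 'fulltext' (one large
  string) and re-split it with osutils.chunks_to_lines. As a rough sketch
  of what such a helper has to do -- not bzrlib's actual implementation,
  and using a made-up name chunks_to_lines_sketch -- the cheap case is
  when every chunk is already a single newline-terminated line and the
  list can be returned untouched:

      def chunks_to_lines_sketch(chunks):
          """Re-split a list of string chunks into newline-terminated lines."""
          for chunk in chunks:
              # A well-formed "line" is non-empty, ends with '\n', and has no
              # embedded newline before the final one.
              if not chunk or not chunk.endswith('\n') or '\n' in chunk[:-1]:
                  break
          else:
              # Already a list of lines: skip the join/re-split entirely.
              return chunks
          # Fallback: join the chunks and split on '\n', re-attaching the
          # newline that split() strips and dropping the empty tail.
          text = ''.join(chunks)
          lines = text.split('\n')
          result = [l + '\n' for l in lines[:-1]]
          if lines[-1]:
              result.append(lines[-1])
          return result

      if __name__ == '__main__':
          print chunks_to_lines_sketch(['one\n', 'two\n'])    # returned as-is
          print chunks_to_lines_sketch(['on', 'e\ntw', 'o\n'])  # re-split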
-------------- next part --------------
=== modified file 'bzrlib/knit.py'
--- a/bzrlib/knit.py	2008-12-11 00:54:36 +0000
+++ b/bzrlib/knit.py	2008-12-11 03:18:52 +0000
@@ -196,7 +196,8 @@
             [compression_parent], 'unordered', True).next()
         if basis_entry.storage_kind == 'absent':
             raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
-        basis_lines = split_lines(basis_entry.get_bytes_as('fulltext'))
+        basis_chunks = basis_entry.get_bytes_as('chunked')
+        basis_lines = osutils.chunks_to_lines(basis_chunks)
         # Manually apply the delta because we have one annotated content and
         # one plain.
         basis_content = PlainKnitContent(basis_lines, compression_parent)
@@ -229,7 +230,8 @@
             [compression_parent], 'unordered', True).next()
         if basis_entry.storage_kind == 'absent':
             raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
-        basis_lines = split_lines(basis_entry.get_bytes_as('fulltext'))
+        basis_chunks = basis_entry.get_bytes_as('chunked')
+        basis_lines = osutils.chunks_to_lines(basis_chunks)
         basis_content = PlainKnitContent(basis_lines, compression_parent)
         # Manually apply the delta because we have one annotated content and
         # one plain.
@@ -1022,7 +1024,7 @@
                 if record.storage_kind == 'absent':
                     continue
                 missing_keys.remove(record.key)
-                lines = split_lines(record.get_bytes_as('fulltext'))
+                lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
                 text_map[record.key] = lines
                 content_map[record.key] = PlainKnitContent(lines, record.key)
                 if record.key in keys:
@@ -1444,8 +1446,10 @@
                         buffered = True
                 if not buffered:
                     self._index.add_records([index_entry])
-            elif (record.storage_kind == 'fulltext'
-                  or record.storage_kind == 'chunked'):
+            elif record.storage_kind == 'chunked':
+                self.add_lines(record.key, parents,
+                    osutils.chunks_to_lines(record.get_bytes_as('chunked')))
+            elif record.storage_kind == 'fulltext':
                 self.add_lines(record.key, parents,
                     split_lines(record.get_bytes_as('fulltext')))
             else:
@@ -2954,7 +2958,7 @@
         reannotate = annotate.reannotate
         for record in self._knit.get_record_stream(keys, 'topological', True):
             key = record.key
-            fulltext = split_lines(record.get_bytes_as('fulltext'))
+            fulltext = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
             parents = parent_map[key]
             if parents is not None:
                 parent_lines = [parent_cache[parent] for parent in parent_map[key]]

=== modified file 'bzrlib/merge.py'
--- a/bzrlib/merge.py	2008-10-10 11:55:03 +0000
+++ b/bzrlib/merge.py	2008-12-11 03:18:52 +0000
@@ -1579,7 +1579,7 @@
 
     def get_lines(self, revisions):
         """Get lines for revisions from the backing VersionedFiles.
-        
+
         :raises RevisionNotPresent: on absent texts.
         """
         keys = [(self._key_prefix + (rev,)) for rev in revisions]
@@ -1587,8 +1587,8 @@
         for record in self.vf.get_record_stream(keys, 'unordered', True):
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent(record.key, self.vf)
-            result[record.key[-1]] = osutils.split_lines(
-                record.get_bytes_as('fulltext'))
+            result[record.key[-1]] = osutils.chunks_to_lines(
+                record.get_bytes_as('chunked'))
         return result
 
     def plan_merge(self):

=== modified file 'bzrlib/transform.py'
--- a/bzrlib/transform.py	2008-10-28 10:31:32 +0000
+++ b/bzrlib/transform.py	2008-12-11 03:18:52 +0000
@@ -1177,7 +1177,7 @@
             if kind == 'file':
                 cur_file = open(self._limbo_name(trans_id), 'rb')
                 try:
-                    lines = osutils.split_lines(cur_file.read())
+                    lines = osutils.chunks_to_lines(cur_file.readlines())
                 finally:
                     cur_file.close()
                 parents = self._get_parents_lines(trans_id)

=== modified file 'bzrlib/versionedfile.py'
--- a/bzrlib/versionedfile.py	2008-12-11 00:56:16 +0000
+++ b/bzrlib/versionedfile.py	2008-12-11 03:18:52 +0000
@@ -842,12 +842,12 @@
                                   if not mpvf.has_version(p))
         # It seems likely that adding all the present parents as fulltexts can
         # easily exhaust memory.
-        split_lines = osutils.split_lines
+        chunks_to_lines = osutils.chunks_to_lines
         for record in self.get_record_stream(needed_parents, 'unordered',
             True):
             if record.storage_kind == 'absent':
                 continue
-            mpvf.add_version(split_lines(record.get_bytes_as('fulltext')),
+            mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                 record.key, [])
         for (key, parent_keys, expected_sha1, mpdiff), lines in\
             zip(records, mpvf.get_line_list(versions)):
@@ -978,9 +978,9 @@
         ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
         knit_keys.difference_update(ghosts)
         lines = {}
-        split_lines = osutils.split_lines
+        chunks_to_lines = osutils.chunks_to_lines
         for record in self.get_record_stream(knit_keys, 'topological', True):
-            lines[record.key] = split_lines(record.get_bytes_as('fulltext'))
+            lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
             # line_block_dict = {}
             # for parent, blocks in record.extract_line_blocks():
             #   line_blocks[parent] = blocks

=== modified file 'bzrlib/weave.py'
--- a/bzrlib/weave.py	2008-12-11 00:54:36 +0000
+++ b/bzrlib/weave.py	2008-12-11 03:18:52 +0000
@@ -79,6 +79,8 @@
 from bzrlib import tsort
 """)
 from bzrlib import (
+    errors,
+    osutils,
     progress,
     )
 from bzrlib.errors import (WeaveError, WeaveFormatError, WeaveParentMismatch,
@@ -88,7 +90,6 @@
         WeaveRevisionAlreadyPresent,
         WeaveRevisionNotPresent,
         )
-import bzrlib.errors as errors
 from bzrlib.osutils import dirname, sha, sha_strings, split_lines
 import bzrlib.patiencediff
 from bzrlib.revision import NULL_REVISION
@@ -362,7 +363,7 @@
             if (record.storage_kind == 'fulltext'
                 or record.storage_kind == 'chunked'):
                 self.add_lines(record.key[0], parents,
-                    split_lines(record.get_bytes_as('fulltext')))
+                    osutils.chunks_to_lines(record.get_bytes_as('chunked')))
             else:
                 adapter_key = record.storage_kind, 'fulltext'
                 try:


