Rev 4261: Fix one unicode readlink-related test failure. in file:///home/vila/src/bzr/bugs/355454-unicode-warning/

Vincent Ladeuil v.ladeuil+lp at free.fr
Thu Apr 16 22:31:35 BST 2009


At file:///home/vila/src/bzr/bugs/355454-unicode-warning/

------------------------------------------------------------
revno: 4261
revision-id: v.ladeuil+lp at free.fr-20090416213135-1ycxrk415pbsse9l
parent: v.ladeuil+lp at free.fr-20090416165728-lnobbei6iew38g2d
committer: Vincent Ladeuil <v.ladeuil+lp at free.fr>
branch nick: 355454-unicode-warning
timestamp: Thu 2009-04-16 23:31:35 +0200
message:
  Fix one unicode readlink-related test failure.
  
  * bzrlib/hashcache.py:
  (HashCache.get_sha1): Use osutils.readlink() and utf8 encode the
  result.
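
  For context, the idea behind the fix can be sketched outside of
  bzrlib as follows. This is a minimal, hypothetical sketch in plain
  Python 2: symlink_sha1 is a made-up helper standing in for the
  symlink branch of HashCache.get_sha1, and hashlib.sha1 stands in
  for osutils.sha_string; the real change goes through
  osutils.readlink instead of calling os.readlink directly.

      import os
      import sys
      from hashlib import sha1

      def symlink_sha1(abspath):
          # Hand os.readlink() a unicode path; on Python 2 it then
          # returns a unicode target (decoded with the filesystem
          # encoding) when possible.
          if not isinstance(abspath, unicode):
              abspath = abspath.decode(sys.getfilesystemencoding())
          target = os.readlink(abspath)
          if isinstance(target, str):
              # Undecodable targets come back as bytes; decode them
              # explicitly so the encode below is well defined.
              target = target.decode(sys.getfilesystemencoding())
          # SHA1 is computed over bytes, so encode the unicode target.
          return sha1(target.encode('UTF-8')).hexdigest()

  The point is that the digest is always computed over the UTF-8
  representation of the link target, independent of whether the path
  handed to get_sha1 was a utf8 bytestring or a unicode string.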
-------------- next part --------------
=== modified file 'BRANCH.TODO'
--- a/BRANCH.TODO	2009-04-16 16:57:28 +0000
+++ b/BRANCH.TODO	2009-04-16 21:31:35 +0000
@@ -5,7 +5,6 @@
 
 - use osutils.readlink in: 
 
-  - HashCache.get_sha1 (one test failing)
   - bzrlib/tests/test_transform.py
   - bzrlib/transform.py _PreviewTree.get_symlink_target
   - bzrlib/workingtree.py WorkingTree.path_content_summary and

=== modified file 'bzrlib/hashcache.py'
--- a/bzrlib/hashcache.py	2009-04-16 14:13:57 +0000
+++ b/bzrlib/hashcache.py	2009-04-16 21:31:35 +0000
@@ -29,13 +29,17 @@
 
 CACHE_HEADER = "### bzr hashcache v5\n"
 
-import os, stat, time
+import os
+import stat
+import time
 
-from bzrlib.filters import internal_size_sha_file_byname
-from bzrlib.osutils import sha_file, sha_string, pathjoin, safe_unicode
-from bzrlib.trace import mutter, warning
-from bzrlib.atomicfile import AtomicFile
-from bzrlib.errors import BzrError
+from bzrlib import (
+    atomicfile,
+    errors,
+    filters as _mod_filters,
+    osutils,
+    trace,
+    )
 
 
 FP_MTIME_COLUMN = 1
@@ -89,7 +93,7 @@
             parameters and returns a stack of ContentFilters.
             If None, no content filtering is performed.
         """
-        self.root = safe_unicode(root)
+        self.root = osutils.safe_unicode(root)
         self.root_utf8 = self.root.encode('utf8') # where is the filesystem encoding ?
         self.hit_count = 0
         self.miss_count = 0
@@ -99,7 +103,7 @@
         self.update_count = 0
         self._cache = {}
         self._mode = mode
-        self._cache_file_name = safe_unicode(cache_file_name)
+        self._cache_file_name = osutils.safe_unicode(cache_file_name)
         self._filter_provider = content_filter_stack_provider
 
     def cache_file_name(self):
@@ -125,7 +129,7 @@
         prep.sort()
 
         for inum, path, cache_entry in prep:
-            abspath = pathjoin(self.root, path)
+            abspath = osutils.pathjoin(self.root, path)
             fp = self._fingerprint(abspath)
             self.stat_count += 1
 
@@ -141,9 +145,9 @@
         """Return the sha1 of a file.
         """
         if path.__class__ is str:
-            abspath = pathjoin(self.root_utf8, path)
+            abspath = osutils.pathjoin(self.root_utf8, path)
         else:
-            abspath = pathjoin(self.root, path)
+            abspath = osutils.pathjoin(self.root, path)
         self.stat_count += 1
         file_fp = self._fingerprint(abspath, stat_value)
 
@@ -176,13 +180,13 @@
                 filters = self._filter_provider(path=path, file_id=None)
             digest = self._really_sha1_file(abspath, filters)
         elif stat.S_ISLNK(mode):
-            target = os.readlink(abspath)
+            target = osutils.readlink(osutils.safe_unicode(abspath))
             import pronto
             pronto.bzr_test('HashCache.get_sha1(%r) -> %r' % (abspath, target))
-            digest = sha_string(target)
+            digest = osutils.sha_string(target.encode('UTF-8'))
         else:
-            raise BzrError("file %r: unknown file stat mode: %o"
-                           % (abspath, mode))
+            raise errors.BzrError("file %r: unknown file stat mode: %o"
+                                  % (abspath, mode))
 
         # window of 3 seconds to allow for 2s resolution on windows,
         # unsynchronized file servers, etc.
@@ -217,11 +221,12 @@
 
     def _really_sha1_file(self, abspath, filters):
         """Calculate the SHA1 of a file by reading the full text"""
-        return internal_size_sha_file_byname(abspath, filters)[1]
+        return _mod_filters.internal_size_sha_file_byname(abspath, filters)[1]
 
     def write(self):
         """Write contents of cache to file."""
-        outf = AtomicFile(self.cache_file_name(), 'wb', new_mode=self._mode)
+        outf = atomicfile.AtomicFile(self.cache_file_name(), 'wb',
+                                     new_mode=self._mode)
         try:
             outf.write(CACHE_HEADER)
 
@@ -252,15 +257,15 @@
         try:
             inf = file(fn, 'rb', buffering=65000)
         except IOError, e:
-            mutter("failed to open %s: %s", fn, e)
+            trace.mutter("failed to open %s: %s", fn, e)
             # better write it now so it is valid
             self.needs_write = True
             return
 
         hdr = inf.readline()
         if hdr != CACHE_HEADER:
-            mutter('cache header marker not found at top of %s;'
-                   ' discarding cache', fn)
+            trace.mutter('cache header marker not found at top of %s;'
+                         ' discarding cache', fn)
             self.needs_write = True
             return
 
@@ -268,18 +273,18 @@
             pos = l.index('// ')
             path = l[:pos].decode('utf-8')
             if path in self._cache:
-                warning('duplicated path %r in cache' % path)
+                trace.warning('duplicated path %r in cache' % path)
                 continue
 
             pos += 3
             fields = l[pos:].split(' ')
             if len(fields) != 7:
-                warning("bad line in hashcache: %r" % l)
+                trace.warning("bad line in hashcache: %r" % l)
                 continue
 
             sha1 = fields[0]
             if len(sha1) != 40:
-                warning("bad sha1 in hashcache: %r" % sha1)
+                trace.warning("bad sha1 in hashcache: %r" % sha1)
                 continue
 
             fp = tuple(map(long, fields[1:]))
