Merge pull request #814 from tahoe-lafs/3419.test-encode-python-3-trying-again

Port test_encode to Python 3

Fixes ticket:3419
Itamar Turner-Trauring 2020-09-21 11:52:34 -04:00 committed by GitHub
commit 139bcb1500
7 changed files with 66 additions and 48 deletions

View File

@@ -117,7 +117,7 @@ class ValidatedExtendedURIProxy(object):
# Next: things that are optional and not redundant: crypttext_hash
-if d.has_key('crypttext_hash'):
+if 'crypttext_hash' in d:
self.crypttext_hash = d['crypttext_hash']
if len(self.crypttext_hash) != CRYPTO_VAL_SIZE:
raise BadURIExtension('crypttext_hash is required to be hashutil.CRYPTO_VAL_SIZE bytes, not %s bytes' % (len(self.crypttext_hash),))
@@ -126,11 +126,11 @@ class ValidatedExtendedURIProxy(object):
# Next: things that are optional, redundant, and required to be
# consistent: codec_name, codec_params, tail_codec_params,
# num_segments, size, needed_shares, total_shares
-if d.has_key('codec_name'):
-    if d['codec_name'] != "crs":
+if 'codec_name' in d:
+    if d['codec_name'] != b"crs":
raise UnsupportedErasureCodec(d['codec_name'])
-if d.has_key('codec_params'):
+if 'codec_params' in d:
ucpss, ucpns, ucpts = codec.parse_params(d['codec_params'])
if ucpss != self.segment_size:
raise BadURIExtension("inconsistent erasure code params: "
@@ -145,7 +145,7 @@ class ValidatedExtendedURIProxy(object):
"self._verifycap.total_shares: %s" %
(ucpts, self._verifycap.total_shares))
-if d.has_key('tail_codec_params'):
+if 'tail_codec_params' in d:
utcpss, utcpns, utcpts = codec.parse_params(d['tail_codec_params'])
if utcpss != self.tail_segment_size:
raise BadURIExtension("inconsistent erasure code params: utcpss: %s != "
@@ -162,7 +162,7 @@ class ValidatedExtendedURIProxy(object):
"self._verifycap.total_shares: %s" % (utcpts,
self._verifycap.total_shares))
-if d.has_key('num_segments'):
+if 'num_segments' in d:
if d['num_segments'] != self.num_segments:
raise BadURIExtension("inconsistent num_segments: size: %s, "
"segment_size: %s, computed_num_segments: %s, "
@@ -170,18 +170,18 @@ class ValidatedExtendedURIProxy(object):
self.segment_size,
self.num_segments, d['num_segments']))
-if d.has_key('size'):
+if 'size' in d:
if d['size'] != self._verifycap.size:
raise BadURIExtension("inconsistent size: URI size: %s, UEB size: %s" %
(self._verifycap.size, d['size']))
-if d.has_key('needed_shares'):
+if 'needed_shares' in d:
if d['needed_shares'] != self._verifycap.needed_shares:
raise BadURIExtension("inconsistent needed shares: URI needed shares: %s, UEB "
"needed shares: %s" % (self._verifycap.total_shares,
d['needed_shares']))
-if d.has_key('total_shares'):
+if 'total_shares' in d:
if d['total_shares'] != self._verifycap.total_shares:
raise BadURIExtension("inconsistent total shares: URI total shares: %s, UEB "
"total shares: %s" % (self._verifycap.total_shares,
@@ -428,7 +428,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
self.log(" sharehashes:\n" + "\n".join(lines) + "\n")
lines = []
-for i,h in blockhashes.items():
+for i,h in list(blockhashes.items()):
lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
log.msg(" blockhashes:\n" + "\n".join(lines) + "\n")
raise BadOrMissingHash(le)
@@ -695,7 +695,7 @@ class Checker(log.PrefixingLogMixin):
bucketdict, success = result
shareverds = []
-for (sharenum, bucket) in bucketdict.items():
+for (sharenum, bucket) in list(bucketdict.items()):
d = self._download_and_verify(s, sharenum, bucket)
shareverds.append(d)
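Editorial note on the two idioms above: dict.has_key() was removed outright in Python 3, and dict.items() now returns a lazy view rather than a list. A minimal sketch of the replacements (illustration only, not part of the commit; the dict contents are made up):

    d = {'codec_name': b'crs'}

    # Python 2 only:  if d.has_key('codec_name'):
    # Python 2 and 3:
    if 'codec_name' in d:
        print(d['codec_name'])

    # list() takes a snapshot of the view, so the loop is safe even if
    # the dict changes during iteration; futurized code often applies it
    # as a blanket precaution, as in this file.
    for k, v in list(d.items()):
        d.pop(k)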

View File

@@ -1,4 +1,5 @@
-from six.moves import cStringIO as StringIO
+from io import BytesIO
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.interfaces import IPushProducer
@@ -104,7 +105,7 @@ class LiteralFileNode(_ImmutableFileNodeBase):
# vfs.adapters.ftp._FileToConsumerAdapter), neither of which is
# likely to be used as the target for a Tahoe download.
-d = basic.FileSender().beginFileTransfer(StringIO(data), consumer)
+d = basic.FileSender().beginFileTransfer(BytesIO(data), consumer)
d.addCallback(lambda lastSent: consumer)
return d
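Why BytesIO: on Python 3, StringIO holds text (str) while transports and consumers expect bytes; BytesIO is the binary equivalent. A minimal sketch of the distinction (illustration only, not part of the commit):

    from io import BytesIO, StringIO

    data = b"some literal file contents"
    assert BytesIO(data).read() == data                     # bytes in, bytes out
    assert isinstance(StringIO(u"text").read(), type(u""))  # always text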

View File

@@ -1,4 +1,4 @@
-from past.builtins import long
+from past.builtins import long, unicode
import os, time, weakref, itertools
from zope.interface import implementer
@@ -1377,7 +1377,7 @@ class LiteralUploader(object):
self._progress.set_progress_total(size)
return read_this_many_bytes(uploadable, size)
d.addCallback(_got_size)
-d.addCallback(lambda data: uri.LiteralFileURI("".join(data)))
+d.addCallback(lambda data: uri.LiteralFileURI(b"".join(data)))
d.addCallback(lambda u: u.to_string())
d.addCallback(self._build_results)
return d
@@ -1500,7 +1500,7 @@ class AssistedUploader(object):
Returns a Deferred that will fire with the UploadResults instance.
"""
-precondition(isinstance(storage_index, str), storage_index)
+precondition(isinstance(storage_index, bytes), storage_index)
self._started = time.time()
eu = IEncryptedUploadable(encrypted_uploadable)
eu.set_upload_status(self._upload_status)
@@ -1653,7 +1653,7 @@ class BaseUploadable(object):
def set_default_encoding_parameters(self, default_params):
assert isinstance(default_params, dict)
for k,v in default_params.items():
-precondition(isinstance(k, str), k, v)
+precondition(isinstance(k, (bytes, unicode)), k, v)
precondition(isinstance(v, int), k, v)
if "k" in default_params:
self.default_encoding_param_k = default_params["k"]
@@ -1773,7 +1773,7 @@ class FileName(FileHandle):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
-assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
+assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
FileHandle.__init__(self, open(filename, "rb"), convergence=convergence)
def close(self):
FileHandle.close(self)
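The type-check changes in this file follow from str meaning bytes on Python 2 but text on Python 3. Where a value really is raw bytes (a storage index, a convergence secret), the check must say bytes; where either kind of string is acceptable, past.builtins.unicode (an alias for the text type on both versions) widens the check. A minimal sketch (illustration only; the sample values are made up):

    from past.builtins import unicode  # str on Python 3, unicode on Python 2

    storage_index = b"x" * 16
    assert isinstance(storage_index, bytes)      # raw bytes, on 2 and 3

    for key in (b"k", u"happy"):                 # parameter keys may be either
        assert isinstance(key, (bytes, unicode))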

View File

@@ -479,10 +479,12 @@ class GridTestMixin(object):
def _record_webports_and_baseurls(self):
    self.g._check_clients()
-   self.client_webports = [c.getServiceNamed("webish").getPortnum()
-                           for c in self.g.clients]
-   self.client_baseurls = [c.getServiceNamed("webish").getURL()
-                           for c in self.g.clients]
+   if PY2:
+       # Temporarily disabled on Python 3 until Nevow is gone:
+       self.client_webports = [c.getServiceNamed("webish").getPortnum()
+                               for c in self.g.clients]
+       self.client_baseurls = [c.getServiceNamed("webish").getURL()
+                               for c in self.g.clients]
def get_client_config(self, i=0):
    self.g._check_clients()
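The PY2 flag used above comes from future.utils and is simply a boolean, so version-specific setup can be skipped at runtime. A minimal sketch of the gating pattern (illustration only, not part of the commit):

    from future.utils import PY2

    if PY2:
        pass  # Nevow-dependent web setup would run here on Python 2
    else:
        print("Python 3: web test setup skipped until Nevow is gone")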

View File

@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from past.builtins import chr as byteschr, long
from zope.interface import implementer
from twisted.trial import unittest
from twisted.internet import defer
@@ -15,7 +28,7 @@ class LostPeerError(Exception):
pass
def flip_bit(good): # flips the last bit
-return good[:-1] + chr(ord(good[-1]) ^ 0x01)
+return good[:-1] + byteschr(ord(good[-1]) ^ 0x01)
@implementer(IStorageBucketWriter, IStorageBucketReader)
class FakeBucketReaderWriterProxy(object):
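The byteschr alias matters because two things changed in Python 3: indexing bytes yields an int, and the builtin chr() returns text. past.builtins.chr (imported as byteschr in the header above) keeps the Python 2 behaviour of producing a one-byte bytes value. A minimal sketch (illustration only, not part of the commit):

    from past.builtins import chr as byteschr

    assert byteschr(65) == b"A"   # one-byte bytes value, on Python 2 and 3
    assert chr(65) == u"A"        # the builtin returns text on Python 3
    assert b"abc"[-1] == 99       # indexing bytes gives an int on Python 3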
@@ -158,7 +171,7 @@ class FakeBucketReaderWriterProxy(object):
def make_data(length):
data = "happy happy joy joy" * 100
data = b"happy happy joy joy" * 100
assert length <= len(data)
return data[:length]
@@ -173,32 +186,32 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
if _TMP % K != 0:
_TMP += (K - (_TMP % K))
TAIL_SEGSIZE = _TMP
-_TMP = SIZE / SEGSIZE
+_TMP = SIZE // SEGSIZE
if SIZE % SEGSIZE != 0:
_TMP += 1
NUM_SEGMENTS = _TMP
mindict = { 'segment_size': SEGSIZE,
-'crypttext_root_hash': '0'*hashutil.CRYPTO_VAL_SIZE,
-'share_root_hash': '1'*hashutil.CRYPTO_VAL_SIZE }
-optional_consistent = { 'crypttext_hash': '2'*hashutil.CRYPTO_VAL_SIZE,
-'codec_name': "crs",
-'codec_params': "%d-%d-%d" % (SEGSIZE, K, M),
-'tail_codec_params': "%d-%d-%d" % (TAIL_SEGSIZE, K, M),
+'crypttext_root_hash': b'0'*hashutil.CRYPTO_VAL_SIZE,
+'share_root_hash': b'1'*hashutil.CRYPTO_VAL_SIZE }
+optional_consistent = { 'crypttext_hash': b'2'*hashutil.CRYPTO_VAL_SIZE,
+'codec_name': b"crs",
+'codec_params': b"%d-%d-%d" % (SEGSIZE, K, M),
+'tail_codec_params': b"%d-%d-%d" % (TAIL_SEGSIZE, K, M),
'num_segments': NUM_SEGMENTS,
'size': SIZE,
'needed_shares': K,
'total_shares': M,
'plaintext_hash': "anything",
'plaintext_root_hash': "anything", }
'plaintext_hash': b"anything",
'plaintext_root_hash': b"anything", }
# optional_inconsistent = { 'crypttext_hash': ('2'*(hashutil.CRYPTO_VAL_SIZE-1), "", 77),
optional_inconsistent = { 'crypttext_hash': (77,),
'codec_name': ("digital fountain", ""),
'codec_params': ("%d-%d-%d" % (SEGSIZE, K-1, M),
"%d-%d-%d" % (SEGSIZE-1, K, M),
"%d-%d-%d" % (SEGSIZE, K, M-1)),
'tail_codec_params': ("%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
"%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
"%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
'codec_name': (b"digital fountain", b""),
'codec_params': (b"%d-%d-%d" % (SEGSIZE, K-1, M),
b"%d-%d-%d" % (SEGSIZE-1, K, M),
b"%d-%d-%d" % (SEGSIZE, K, M-1)),
'tail_codec_params': (b"%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
b"%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
b"%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
'num_segments': (NUM_SEGMENTS-1,),
'size': (SIZE-1,),
'needed_shares': (K-1,),
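Two Python 3 changes drive the edits in this hunk: the / operator became true division (so the segment count needs //), and since Python 3.5 %-formatting works directly on bytes, which is what makes the b"%d-%d-%d" parameter strings possible. A minimal sketch (illustration only; the sizes are made up):

    from __future__ import division  # makes / behave the same on Python 2

    SIZE, SEGSIZE = 1000, 21

    whole_segments = SIZE // SEGSIZE         # floor division: 47, an int
    assert isinstance(whole_segments, int)
    assert SIZE / SEGSIZE != whole_segments  # true division yields a float

    params = b"%d-%d-%d" % (SEGSIZE, 25, 100)  # bytes %-formatting (3.5+)
    assert params == b"21-25-100"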
@@ -209,7 +222,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
uebhash = hashutil.uri_extension_hash(uebstring)
fb = FakeBucketReaderWriterProxy()
fb.put_uri_extension(uebstring)
-verifycap = uri.CHKFileVerifierURI(storage_index='x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
+verifycap = uri.CHKFileVerifierURI(storage_index=b'x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
vup = checker.ValidatedExtendedURIProxy(fb, verifycap)
return vup.start()
@@ -232,7 +245,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
def test_reject_insufficient(self):
dl = []
-for k in self.mindict.iterkeys():
+for k in self.mindict.keys():
insuffdict = self.mindict.copy()
del insuffdict[k]
d = self._test_reject(insuffdict)
@@ -241,7 +254,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
def test_accept_optional(self):
dl = []
-for k in self.optional_consistent.iterkeys():
+for k in self.optional_consistent.keys():
mydict = self.mindict.copy()
mydict[k] = self.optional_consistent[k]
d = self._test_accept(mydict)
@@ -250,7 +263,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
def test_reject_optional(self):
dl = []
-for k in self.optional_inconsistent.iterkeys():
+for k in self.optional_inconsistent.keys():
for v in self.optional_inconsistent[k]:
mydict = self.mindict.copy()
mydict[k] = v
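dict.iterkeys() is gone in Python 3; dict.keys() (a list on 2, a lazy view on 3) iterates the same way on both, which is all these loops need. A minimal sketch (illustration only; the dict is made up):

    d = {'needed_shares': 25, 'total_shares': 100}
    for k in d.keys():   # or simply: for k in d:
        print(k, d[k])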
@@ -264,7 +277,7 @@ class Encode(unittest.TestCase):
data = make_data(datalen)
# force use of multiple segments
e = encode.Encoder()
-u = upload.Data(data, convergence="some convergence string")
+u = upload.Data(data, convergence=b"some convergence string")
u.set_default_encoding_parameters({'max_segment_size': max_segment_size,
'k': 25, 'happy': 75, 'n': 100})
eu = upload.EncryptAnUploadable(u)
@@ -294,7 +307,7 @@ class Encode(unittest.TestCase):
def _check(res):
verifycap = res
-self.failUnless(isinstance(verifycap.uri_extension_hash, str))
+self.failUnless(isinstance(verifycap.uri_extension_hash, bytes))
self.failUnlessEqual(len(verifycap.uri_extension_hash), 32)
for i,peer in enumerate(all_shareholders):
self.failUnless(peer.closed)
@@ -398,7 +411,7 @@ class Roundtrip(GridTestMixin, unittest.TestCase):
self.basedir = self.mktemp()
self.set_up_grid()
self.c0 = self.g.clients[0]
DATA = "p"*size
DATA = b"p"*size
d = self.upload(DATA)
d.addCallback(lambda n: download_to_data(n))
def _downloaded(newdata):

View File

@@ -13,7 +13,8 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
-# Don't import bytes or str, to prevent leaks.
+# Don't import bytes or str, to prevent future's newbytes leaking and
+# breaking code that only expects normal bytes.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401
str = unicode
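The "leak" the expanded comment describes: on Python 2, future.builtins provides backported wrapper types (e.g. newbytes), not the native str/bytes type, so importing bytes there makes isinstance() checks reject native byte strings arriving from unported modules. A sketch of the failure mode, assuming the future package's documented behaviour (illustration only, not part of the commit):

    from future.utils import PY2

    if PY2:
        from future.builtins import bytes as newbytes
        native = b"raw"                          # a native Python 2 str
        assert not isinstance(native, newbytes)  # rejected by the wrapper type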

View File

@@ -85,6 +85,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_crypto",
"allmydata.test.test_deferredutil",
"allmydata.test.test_dictutil",
"allmydata.test.test_encode",
"allmydata.test.test_encodingutil",
"allmydata.test.test_happiness",
"allmydata.test.test_hashtree",