Merge branch '3374.codec-monitor-python-3-take-2' into 3397.test-storage-python-3
commit 1e5f7a9379
.gitignore
@@ -9,6 +9,7 @@ venv*
 *~
 *.DS_Store
 .*.kate-swp
+*.bak

 /build/
 /support/
src/allmydata/codec.py
@@ -1,4 +1,16 @@
 # -*- test-case-name: allmydata.test.test_encode_share -*-
+"""
+CRS encoding and decoding.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from zope.interface import implementer
 from twisted.internet import defer
@@ -9,7 +21,7 @@ import zfec

 @implementer(ICodecEncoder)
 class CRSEncoder(object):
-    ENCODER_TYPE = "crs"
+    ENCODER_TYPE = b"crs"

     def set_params(self, data_size, required_shares, max_shares):
         assert required_shares <= max_shares
@@ -27,8 +39,8 @@ class CRSEncoder(object):
         return (self.data_size, self.required_shares, self.max_shares)

     def get_serialized_params(self):
-        return "%d-%d-%d" % (self.data_size, self.required_shares,
+        return b"%d-%d-%d" % (self.data_size, self.required_shares,
                              self.max_shares)

     def get_block_size(self):
         return self.share_size
@@ -37,7 +49,7 @@ class CRSEncoder(object):
         precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)

         if desired_share_ids is None:
-            desired_share_ids = range(self.max_shares)
+            desired_share_ids = list(range(self.max_shares))

         for inshare in inshares:
             assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
@@ -71,5 +83,5 @@ class CRSDecoder(object):
         return defer.succeed(data)

 def parse_params(serializedparams):
-    pieces = serializedparams.split("-")
+    pieces = serializedparams.split(b"-")
     return int(pieces[0]), int(pieces[1]), int(pieces[2])
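For context on the bytes changes above: get_serialized_params() now returns a byte string, and the module-level parse_params() reverses it. A minimal round-trip sketch (the parameter values are illustrative, and zfec must be installed):

from allmydata.codec import CRSEncoder, parse_params

enc = CRSEncoder()
enc.set_params(1000, 3, 10)                # data_size, required_shares, max_shares
serialized = enc.get_serialized_params()   # b"1000-3-10" on both Python 2 and 3
assert parse_params(serialized) == enc.get_params() == (1000, 3, 10)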
src/allmydata/interfaces.py
@@ -1,3 +1,19 @@
+"""
+Interfaces for Tahoe-LAFS.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Don't import object/str/dict/etc. types, so we don't break any
+    # interfaces. Not importing open() because it triggers bogus flake8 error.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min  # noqa: F401
+
 from past.builtins import long

 from zope.interface import Interface, Attribute
@@ -58,7 +74,7 @@ class RIBucketReader(RemoteInterface):
     def read(offset=Offset, length=ReadSize):
         return ShareData

-    def advise_corrupt_share(reason=str):
+    def advise_corrupt_share(reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -71,7 +87,7 @@ class RIBucketReader(RemoteInterface):
         """


-TestVector = ListOf(TupleOf(Offset, ReadSize, str, str))
+TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes))
 # elements are (offset, length, operator, specimen)
 # operator is one of "lt, le, eq, ne, ge, gt"
 # nop always passes and is used to fetch data while writing.
@@ -89,13 +105,13 @@ ReadData = ListOf(ShareData)


 class RIStorageServer(RemoteInterface):
-    __remote_name__ = "RIStorageServer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStorageServer.tahoe.allmydata.com"

     def get_version():
         """
         Return a dictionary of version information.
         """
-        return DictOf(str, Any())
+        return DictOf(bytes, Any())

     def allocate_buckets(storage_index=StorageIndex,
                          renew_secret=LeaseRenewSecret,
@@ -277,8 +293,8 @@ class RIStorageServer(RemoteInterface):
         """
         return TupleOf(bool, DictOf(int, ReadData))

-    def advise_corrupt_share(share_type=str, storage_index=StorageIndex,
-                             shnum=int, reason=str):
+    def advise_corrupt_share(share_type=bytes, storage_index=StorageIndex,
+                             shnum=int, reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -2859,7 +2875,7 @@ UploadResults = Any() #DictOf(str, str)


 class RIEncryptedUploadable(RemoteInterface):
-    __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"
+    __remote_name__ = b"RIEncryptedUploadable.tahoe.allmydata.com"

     def get_size():
         return Offset
@@ -2875,7 +2891,7 @@ class RIEncryptedUploadable(RemoteInterface):


 class RICHKUploadHelper(RemoteInterface):
-    __remote_name__ = "RIUploadHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIUploadHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2888,7 +2904,7 @@ class RICHKUploadHelper(RemoteInterface):


 class RIHelper(RemoteInterface):
-    __remote_name__ = "RIHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2915,7 +2931,7 @@ class RIHelper(RemoteInterface):


 class RIStatsProvider(RemoteInterface):
-    __remote_name__ = "RIStatsProvider.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsProvider.tahoe.allmydata.com"
     """
     Provides access to statistics and monitoring information.
     """
@@ -2932,7 +2948,7 @@ class RIStatsProvider(RemoteInterface):


 class RIStatsGatherer(RemoteInterface):
-    __remote_name__ = "RIStatsGatherer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsGatherer.tahoe.allmydata.com"
     """
     Provides a monitoring service for centralised collection of stats
     """
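Background on the str-to-bytes changes in these schemas: on Python 3, text (str) and bytes are distinct types, so constraints and remote-interface names that describe wire-level byte strings have to name bytes explicitly; on Python 2 the two were largely interchangeable. A small illustration of the distinction (Python 3 semantics, not part of the diff):

assert isinstance("RIStorageServer.tahoe.allmydata.com", str)       # text string
assert not isinstance("RIStorageServer.tahoe.allmydata.com", bytes)
assert isinstance(b"RIStorageServer.tahoe.allmydata.com", bytes)    # wire-level value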
src/allmydata/monitor.py
@@ -1,7 +1,21 @@
+"""
+Manage status of long-running operations.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from zope.interface import Interface, implementer
 from allmydata.util import observer


 class IMonitor(Interface):
     """I manage status, progress, and cancellation for long-running operations.
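The from __future__ / future.utils boilerplate repeated at the top of each ported module gives Python 2 the Python 3 builtins, so the same code runs on both interpreters. An illustrative sketch of what that buys (assumes the future package is installed on Python 2):

from future.utils import PY2
if PY2:
    from builtins import range  # noqa: F401

r = range(10 ** 9)              # lazy on both interpreters; no huge list is built
assert not isinstance(r, list)
assert len(r) == 10 ** 9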
src/allmydata/test/test_codec.py
@@ -1,8 +1,21 @@
+"""
+Tests for allmydata.codec.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 from twisted.trial import unittest
 from twisted.python import log
-from allmydata.codec import CRSEncoder, CRSDecoder
+from allmydata.codec import CRSEncoder, CRSDecoder, parse_params
 import random
 from allmydata.util import mathutil
@@ -13,6 +26,8 @@ class T(unittest.TestCase):
         enc.set_params(size, required_shares, max_shares)
         params = enc.get_params()
         assert params == (size, required_shares, max_shares)
+        serialized_params = enc.get_serialized_params()
+        self.assertEqual(parse_params(serialized_params), params)
         log.msg("params: %s" % (params,))
         d = enc.encode(data0s)
         def _done_encoding_all(shares_and_shareids):
@@ -23,7 +38,7 @@ class T(unittest.TestCase):
         d.addCallback(_done_encoding_all)
         if fewer_shares is not None:
             # also validate that the desired_shareids= parameter works
-            desired_shareids = random.sample(range(max_shares), fewer_shares)
+            desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
             d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
             def _check_fewer_shares(some_shares_and_their_shareids):
                 (some_shares, their_shareids) = some_shares_and_their_shareids
@@ -38,11 +53,11 @@ class T(unittest.TestCase):
             return d1

         def _check_data(decoded_shares):
-            self.failUnlessEqual(len(''.join(decoded_shares)), len(''.join(data0s)))
+            self.failUnlessEqual(len(b''.join(decoded_shares)), len(b''.join(data0s)))
             self.failUnlessEqual(len(decoded_shares), len(data0s))
             for (i, (x, y)) in enumerate(zip(data0s, decoded_shares)):
                 self.failUnlessEqual(x, y, "%s: %r != %r.... first share was %r" % (str(i), x, y, data0s[0],))
-            self.failUnless(''.join(decoded_shares) == ''.join(data0s), "%s" % ("???",))
+            self.failUnless(b''.join(decoded_shares) == b''.join(data0s), "%s" % ("???",))
             # 0data0sclipped = tuple(data0s)
             # data0sclipped[-1] =
             # self.failUnless(tuple(decoded_shares) == tuple(data0s))
@@ -59,7 +74,7 @@ class T(unittest.TestCase):
         def _decode_some_random(res):
             log.msg("_decode_some_random")
             # use a randomly-selected minimal subset
-            l = random.sample(zip(self.shares, self.shareids), required_shares)
+            l = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             some_shares = [ x[0] for x in l ]
             some_shareids = [ x[1] for x in l ]
             return _decode((some_shares, some_shareids))
@@ -70,10 +85,10 @@ class T(unittest.TestCase):
             log.msg("_decode_multiple")
             # make sure we can re-use the decoder object
             shares1 = random.sample(self.shares, required_shares)
-            sharesl1 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl1 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares1 = [ x[0] for x in sharesl1 ]
             shareids1 = [ x[1] for x in sharesl1 ]
-            sharesl2 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl2 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares2 = [ x[0] for x in sharesl2 ]
             shareids2 = [ x[1] for x in sharesl2 ]
             dec = CRSDecoder()
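On the list(...) wrappers added above: under Python 3, zip() returns a one-shot iterator rather than a list, and random.sample() requires a sequence, so sampling straight from zip() raises TypeError there. A small demonstration with illustrative values:

import random

pairs = zip([b"a", b"b", b"c"], [0, 1, 2])
# random.sample(pairs, 2)                  # TypeError on Python 3: not a sequence
chosen = random.sample(list(pairs), 2)     # works on both Python 2 and 3
assert len(chosen) == 2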
src/allmydata/test/test_monitor.py (new file)
@@ -0,0 +1,52 @@
+"""
+Tests for allmydata.monitor.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from twisted.trial import unittest
+
+from allmydata.monitor import Monitor, OperationCancelledError
+
+
+class MonitorTests(unittest.TestCase):
+    """Tests for the Monitor class."""
+
+    def test_cancellation(self):
+        """The monitor can be cancelled."""
+        m = Monitor()
+        self.assertFalse(m.is_cancelled())
+        m.raise_if_cancelled()
+        m.cancel()
+        self.assertTrue(m.is_cancelled())
+        with self.assertRaises(OperationCancelledError):
+            m.raise_if_cancelled()
+
+    def test_status(self):
+        """The monitor can have its status set."""
+        m = Monitor()
+        self.assertEqual(m.get_status(), None)
+        m.set_status("discombobulated")
+        self.assertEqual(m.get_status(), "discombobulated")
+
+    def test_finish(self):
+        """The monitor can finish."""
+        m = Monitor()
+        self.assertFalse(m.is_finished())
+        d = m.when_done()
+        self.assertNoResult(d)
+
+        result = m.finish(300)
+        self.assertEqual(result, 300)
+        self.assertEqual(m.get_status(), 300)
+        self.assertTrue(m.is_finished())
+
+        d.addBoth(self.assertEqual, 300)
+        return d
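The new test module doubles as documentation for the Monitor API; a minimal usage sketch based only on the calls exercised above:

from allmydata.monitor import Monitor

m = Monitor()
m.set_status("working")
d = m.when_done()            # Deferred that fires with whatever finish() returns
result = m.finish("done")    # finish() also records its argument as the status
assert result == "done" and m.is_finished()
d.addCallback(print)         # prints: done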
src/allmydata/util/_python3.py
@@ -24,6 +24,7 @@ if PY2:

 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
+    "allmydata.codec",
     "allmydata.crypto",
     "allmydata.crypto.aes",
     "allmydata.crypto.ed25519",
@@ -32,6 +33,8 @@ PORTED_MODULES = [
     "allmydata.crypto.util",
     "allmydata.hashtree",
     "allmydata.immutable.happiness_upload",
+    "allmydata.interfaces",
+    "allmydata.monitor",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
     "allmydata.test.common_py3",
@@ -68,6 +71,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
+    "allmydata.test.test_codec",
     "allmydata.test.test_configutil",
     "allmydata.test.test_connection_status",
     "allmydata.test.test_crawler",
@@ -81,6 +85,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_humanreadable",
     "allmydata.test.test_iputil",
     "allmydata.test.test_log",
+    "allmydata.test.test_monitor",
     "allmydata.test.test_netstring",
     "allmydata.test.test_observer",
     "allmydata.test.test_pipeline",
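The PORTED_MODULES / PORTED_TEST_MODULES entries register the newly ported modules and tests with the project's Python 3 tracking list. A hypothetical illustration (not the project's actual tooling) of how such a registry can drive an import smoke test:

import importlib

for name in ["allmydata.codec", "allmydata.interfaces", "allmydata.monitor"]:
    importlib.import_module(name)    # raises ImportError if the port regressed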