Merge 'origin/master' into 3391.codecov-yml

Commit 3b2b6fe646 by Sajith Sasidharan, 2020-09-08 18:05:20 -04:00
65 changed files with 1604 additions and 1432 deletions

View File

@@ -1,95 +0,0 @@
# adapted from https://packaging.python.org/en/latest/appveyor/
environment:
matrix:
# For Python versions available on Appveyor, see
# http://www.appveyor.com/docs/installed-software#python
- PYTHON: "C:\\Python27"
- PYTHON: "C:\\Python27-x64"
# DISTUTILS_USE_SDK: "1"
# TOX_TESTENV_PASSENV: "DISTUTILS_USE_SDK INCLUDE LIB"
install:
- |
%PYTHON%\python.exe -m pip install -U pip
%PYTHON%\python.exe -m pip install wheel tox==3.9.0 virtualenv
# note:
# %PYTHON% has: python.exe
# %PYTHON%\Scripts has: pip.exe, tox.exe (and others installed by bare pip)
# We have a custom "build" system. We don't need MSBuild or whatever.
build: off
# Do not build feature branch with open pull requests. This is documented but
# it's not clear it does anything.
skip_branch_with_pr: true
# This, perhaps, is effective.
branches:
# whitelist
only:
- 'master'
skip_commits:
files:
# The Windows builds are unaffected by news fragments.
- 'newsfragments/*'
# Also, all this build junk.
- '.circleci/*'
- '.lgtm.yml'
- '.travis.yml'
# we run from C:\projects\tahoe-lafs
test_script:
# Put your test command here.
# Note that you must use the environment variable %PYTHON% to refer to
# the interpreter you're using - Appveyor does not do anything special
# to put the Python version you want to use on PATH.
- |
%PYTHON%\Scripts\tox.exe -e coverage
%PYTHON%\Scripts\tox.exe -e pyinstaller
# To verify that the resultant PyInstaller-generated binary executes
# cleanly (i.e., that it terminates with an exit code of 0 and isn't
# failing due to import/packaging-related errors, etc.).
- dist\Tahoe-LAFS\tahoe.exe --version
after_test:
# This builds the main tahoe wheel, and wheels for all dependencies.
# Again, you only need build.cmd if you're building C extensions for
# 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct
# interpreter. If _trial_temp still exists, the "pip wheel" fails on
# _trial_temp\local_dir (not sure why).
- |
copy _trial_temp\test.log trial_test_log.txt
rd /s /q _trial_temp
%PYTHON%\python.exe setup.py bdist_wheel
%PYTHON%\python.exe -m pip wheel -w dist .
- |
%PYTHON%\python.exe -m pip install codecov "coverage ~= 4.5"
%PYTHON%\python.exe -m coverage xml -o coverage.xml -i
%PYTHON%\python.exe -m codecov -X search -X gcov -f coverage.xml
artifacts:
# bdist_wheel puts your built wheel in the dist directory
# "pip wheel -w dist ." puts all the dependency wheels there too
# this gives us a zipfile with everything
- path: 'dist\*'
- path: trial_test_log.txt
name: Trial test.log
- path: eliot.log
name: Eliot test log
on_failure:
# Artifacts are not normally uploaded when the job fails. To get the test
# logs, we have to push them ourselves.
- ps: Push-AppveyorArtifact _trial_temp\test.log -Filename trial.log
- ps: Push-AppveyorArtifact eliot.log -Filename eliot.log
#on_success:
# You can use this step to upload your artifacts to a public website.
# See Appveyor's documentation for more details. Or you can simply
# access your wheels from the Appveyor "artifacts" tab for your build.

View File

@@ -211,7 +211,8 @@ jobs:
     environment:
       <<: *UTF_8_ENVIRONMENT
-      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
+      # We don't do coverage since it makes PyPy far too slow:
+      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
 c-locale:
@@ -285,7 +286,7 @@ jobs:
       # this reporter on Python 3.  So drop that and just specify the
       # reporter.
       TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
-      TAHOE_LAFS_TOX_ENVIRONMENT: "py36"
+      TAHOE_LAFS_TOX_ENVIRONMENT: "py36-coverage"
 ubuntu-20.04:
@@ -508,6 +509,7 @@ jobs:
     environment:
       DISTRO: "ubuntu"
       TAG: "20.04"
+      PYTHON_VERSION: "2.7"
 build-image-centos-8:

View File

@@ -68,6 +68,10 @@ export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
 export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
 export PIP_NO_INDEX="1"
+# Make output unbuffered, so progress reports from subunitv2-file get streamed
+# and notify CircleCI we're still alive.
+export PYTHONUNBUFFERED=1
 if [ "${ALLOWED_FAILURE}" = "yes" ]; then
     alternative="true"
 else

View File

@@ -49,8 +49,8 @@ jobs:
       - name: Display tool versions
         run: python misc/build_helpers/show-tool-versions.py
-      - name: Run "tox -e coverage"
-        run: tox -e coverage
+      - name: Run "tox -e py27-coverage"
+        run: tox -e py27-coverage
       - name: Upload eliot.log in case of failure
         uses: actions/upload-artifact@v1

.gitignore (3 changes)
View File

@@ -1,4 +1,4 @@
-venv
+venv*
 # vim swap files
 *.swp
@@ -9,6 +9,7 @@ venv
 *~
 *.DS_Store
 .*.kate-swp
+*.bak
 /build/
 /support/

View File

@@ -36,7 +36,7 @@ people are Release Maintainers:
 - [ ] documentation is ready (see above)
 - [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
 - [ ] build code locally:
-      tox -e py27,codechecks,coverage,deprecations,docs,integration,upcoming-deprecations
+      tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations
 - [ ] created tarballs (they'll be in dist/ for later comparison)
       tox -e tarballs
 - [ ] release version is reporting itself as intended version

newsfragments/3355.other (new file)
View File

@@ -0,0 +1 @@
+The "coverage" tox environment has been replaced by the "py27-coverage" and "py36-coverage" environments.

The remaining newsfragment additions are empty placeholder files:

newsfragments/3367.minor
newsfragments/3374.minor
newsfragments/3377.minor
newsfragments/3381.minor
newsfragments/3387.minor
newsfragments/3388.minor
newsfragments/3392.minor
newsfragments/3393.minor
newsfragments/3395.minor
newsfragments/3396.minor
newsfragments/3397.minor
newsfragments/3401.minor
newsfragments/3403.minor

View File

@@ -116,6 +116,11 @@ install_requires = [
     # know works on Python 2.7.
     "eliot ~= 1.7",
+    # Pyrsistent 0.17.0 (which we use by way of Eliot) has dropped
+    # Python 2 entirely; stick to the version known to work for us.
+    # XXX: drop this bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3404
+    "pyrsistent < 0.17.0",
     # A great way to define types of values.
     # XXX: drop the upper bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3390
     "attrs >= 18.2.0, < 20",

View File

@@ -2,7 +2,10 @@ import os, stat, time, weakref
 from base64 import urlsafe_b64encode
 from functools import partial
 from errno import ENOENT, EPERM
-from ConfigParser import NoSectionError
+try:
+    from ConfigParser import NoSectionError
+except ImportError:
+    from configparser import NoSectionError
 from foolscap.furl import (
     decode_furl,

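Aside: the try/except shim above is the standard pattern for the ConfigParser/configparser stdlib rename. six, already a dependency of this codebase, offers an equivalent one-liner; this is a sketch of the alternative, not what the commit uses:

    from six.moves.configparser import NoSectionError  # ConfigParser on Python 2, configparser on Python 3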
View File

@@ -1,4 +1,16 @@
-# -*- test-case-name: allmydata.test.test_encode_share -*-
+"""
+CRS encoding and decoding.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from zope.interface import implementer
 from twisted.internet import defer
@@ -9,7 +21,7 @@ import zfec
 @implementer(ICodecEncoder)
 class CRSEncoder(object):
-    ENCODER_TYPE = "crs"
+    ENCODER_TYPE = b"crs"
     def set_params(self, data_size, required_shares, max_shares):
         assert required_shares <= max_shares
@@ -27,7 +39,7 @@ class CRSEncoder(object):
         return (self.data_size, self.required_shares, self.max_shares)
     def get_serialized_params(self):
-        return "%d-%d-%d" % (self.data_size, self.required_shares,
-                             self.max_shares)
+        return b"%d-%d-%d" % (self.data_size, self.required_shares,
+                              self.max_shares)
     def get_block_size(self):
@@ -37,7 +49,7 @@ class CRSEncoder(object):
         precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)
         if desired_share_ids is None:
-            desired_share_ids = range(self.max_shares)
+            desired_share_ids = list(range(self.max_shares))
         for inshare in inshares:
             assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
@@ -71,5 +83,5 @@ class CRSDecoder(object):
         return defer.succeed(data)
 def parse_params(serializedparams):
-    pieces = serializedparams.split("-")
+    pieces = serializedparams.split(b"-")
     return int(pieces[0]), int(pieces[1]), int(pieces[2])

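For orientation, a minimal round trip through the codec API shown above. The sizes are made up, and it assumes CRSDecoder exposes the same set_params() signature as the encoder; per the test changes later in this commit, encode() returns a Deferred firing with (shares, shareids):

    from allmydata.codec import CRSEncoder, CRSDecoder, parse_params

    enc = CRSEncoder()
    enc.set_params(8, 2, 4)   # data_size=8, required_shares=2, max_shares=4
    assert parse_params(enc.get_serialized_params()) == (8, 2, 4)

    d = enc.encode([b"abcd", b"efgh"])   # required_shares blocks, each share_size bytes

    def _decode(shares_and_ids):
        shares, shareids = shares_and_ids
        dec = CRSDecoder()
        dec.set_params(8, 2, 4)
        # any required_shares (here 2) of the max_shares shares suffice
        return dec.decode(shares[:2], shareids[:2])
    d.addCallback(_decode)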
View File

@@ -1,4 +1,6 @@
 """Directory Node implementation."""
+from past.builtins import unicode
+
 import time
 from zope.interface import implementer
@@ -227,7 +229,7 @@ def pack_children(childrenx, writekey, deep_immutable=False):
     return _pack_normalized_children(children, writekey=writekey, deep_immutable=deep_immutable)
-ZERO_LEN_NETSTR=netstring('')
+ZERO_LEN_NETSTR=netstring(b'')
 def _pack_normalized_children(children, writekey, deep_immutable=False):
     """Take a dict that maps:
         children[unicode_nfc_name] = (IFileSystemNode, metadata_dict)

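The netstring() helper length-prefixes its argument, which is why the zero-length constant above must now be built from b'' under Python 3; a quick check:

    from allmydata.util.netstring import netstring

    assert netstring(b"abc") == b"3:abc,"
    assert netstring(b"") == b"0:,"   # the ZERO_LEN_NETSTR value above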
View File

@@ -4,7 +4,7 @@ from foolscap.api import eventually
 from allmydata.interfaces import NotEnoughSharesError, NoSharesError
 from allmydata.util import log
 from allmydata.util.dictutil import DictOfSets
-from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
-     BadSegmentNumberError
+from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
+     BadSegmentNumberError
 class SegmentFetcher(object):

View File

@@ -5,7 +5,7 @@ from foolscap.api import eventually
 from allmydata.util import base32, log
 from twisted.internet import reactor
-from share import Share, CommonShare
+from .share import Share, CommonShare
 def incidentally(res, f, *args, **kwargs):
     """Add me to a Deferred chain like this:

View File

@@ -13,10 +13,10 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
      NotEnoughHashesError
 # local imports
-from finder import ShareFinder
-from fetcher import SegmentFetcher
-from segmentation import Segmentation
-from common import BadCiphertextHashError
+from .finder import ShareFinder
+from .fetcher import SegmentFetcher
+from .segmentation import Segmentation
+from .common import BadCiphertextHashError
 class IDownloadStatusHandlingConsumer(Interface):
     def set_download_status_read_event(read_ev):

View File

@@ -9,7 +9,7 @@ from allmydata.util import log
 from allmydata.util.spans import overlap
 from allmydata.interfaces import DownloadStopped
-from common import BadSegmentNumberError, WrongSegmentError
+from .common import BadSegmentNumberError, WrongSegmentError
 @implementer(IPushProducer)
 class Segmentation(object):

View File

@@ -13,7 +13,7 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
 from allmydata.immutable.layout import make_write_bucket_proxy
 from allmydata.util.observer import EventStreamObserver
-from common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
+from .common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
 class LayoutInvalid(Exception):

View File

@@ -171,7 +171,7 @@ class WriteBucketProxy(object):
     def put_block(self, segmentnum, data):
         offset = self._offsets['data'] + segmentnum * self._block_size
         assert offset + len(data) <= self._offsets['uri_extension']
-        assert isinstance(data, str)
+        assert isinstance(data, bytes)
         if segmentnum < self._num_segments-1:
             precondition(len(data) == self._block_size,
                          len(data), self._block_size)
@@ -185,7 +185,7 @@ class WriteBucketProxy(object):
     def put_crypttext_hashes(self, hashes):
         offset = self._offsets['crypttext_hash_tree']
         assert isinstance(hashes, list)
-        data = "".join(hashes)
+        data = b"".join(hashes)
         precondition(len(data) == self._segment_hash_size,
                      len(data), self._segment_hash_size)
         precondition(offset + len(data) <= self._offsets['block_hashes'],
@@ -196,7 +196,7 @@ class WriteBucketProxy(object):
     def put_block_hashes(self, blockhashes):
         offset = self._offsets['block_hashes']
         assert isinstance(blockhashes, list)
-        data = "".join(blockhashes)
+        data = b"".join(blockhashes)
         precondition(len(data) == self._segment_hash_size,
                      len(data), self._segment_hash_size)
         precondition(offset + len(data) <= self._offsets['share_hashes'],
@@ -209,7 +209,7 @@ class WriteBucketProxy(object):
         # as 2+32=34 bytes each
         offset = self._offsets['share_hashes']
         assert isinstance(sharehashes, list)
-        data = "".join([struct.pack(">H", hashnum) + hashvalue
-                        for hashnum,hashvalue in sharehashes])
+        data = b"".join([struct.pack(">H", hashnum) + hashvalue
+                         for hashnum,hashvalue in sharehashes])
         precondition(len(data) == self._share_hashtree_size,
                      len(data), self._share_hashtree_size)
@@ -220,7 +220,7 @@ class WriteBucketProxy(object):
     def put_uri_extension(self, data):
         offset = self._offsets['uri_extension']
-        assert isinstance(data, str)
+        assert isinstance(data, bytes)
         precondition(len(data) <= self._uri_extension_size_max,
                      len(data), self._uri_extension_size_max)
         length = struct.pack(self.fieldstruct, len(data))

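As the "2+32=34 bytes each" comment in the share-hashes hunk says, every entry is a big-endian uint16 hash number followed by a 32-byte hash value; a quick sanity check with a made-up entry:

    import struct

    entry = struct.pack(">H", 7) + b"\x00" * 32   # hypothetical hashnum 7, zero hash
    assert len(entry) == 34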
View File

@@ -1,3 +1,5 @@
+from past.builtins import long
+
 import os, time, weakref, itertools
 from zope.interface import implementer
 from twisted.python import failure
@@ -26,7 +28,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
 from allmydata.immutable import layout
 from six.moves import cStringIO as StringIO
-from happiness_upload import share_placement, calculate_happiness
+from .happiness_upload import share_placement, calculate_happiness
 from ..util.eliotutil import (
     log_call_deferred,

View File

@ -1,3 +1,18 @@
"""
Interfaces for Tahoe-LAFS.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# Don't import object/str/dict/etc. types, so we don't break any
# interfaces. Not importing open() because it triggers bogus flake8 error.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min # noqa: F401
from past.builtins import long from past.builtins import long
@ -59,7 +74,7 @@ class RIBucketReader(RemoteInterface):
def read(offset=Offset, length=ReadSize): def read(offset=Offset, length=ReadSize):
return ShareData return ShareData
def advise_corrupt_share(reason=str): def advise_corrupt_share(reason=bytes):
"""Clients who discover hash failures in shares that they have """Clients who discover hash failures in shares that they have
downloaded from me will use this method to inform me about the downloaded from me will use this method to inform me about the
failures. I will record their concern so that my operator can failures. I will record their concern so that my operator can
@ -72,7 +87,7 @@ class RIBucketReader(RemoteInterface):
""" """
TestVector = ListOf(TupleOf(Offset, ReadSize, str, str)) TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes))
# elements are (offset, length, operator, specimen) # elements are (offset, length, operator, specimen)
# operator is one of "lt, le, eq, ne, ge, gt" # operator is one of "lt, le, eq, ne, ge, gt"
# nop always passes and is used to fetch data while writing. # nop always passes and is used to fetch data while writing.
@ -90,13 +105,13 @@ ReadData = ListOf(ShareData)
class RIStorageServer(RemoteInterface): class RIStorageServer(RemoteInterface):
__remote_name__ = "RIStorageServer.tahoe.allmydata.com" __remote_name__ = b"RIStorageServer.tahoe.allmydata.com"
def get_version(): def get_version():
""" """
Return a dictionary of version information. Return a dictionary of version information.
""" """
return DictOf(str, Any()) return DictOf(bytes, Any())
def allocate_buckets(storage_index=StorageIndex, def allocate_buckets(storage_index=StorageIndex,
renew_secret=LeaseRenewSecret, renew_secret=LeaseRenewSecret,
@ -278,8 +293,8 @@ class RIStorageServer(RemoteInterface):
""" """
return TupleOf(bool, DictOf(int, ReadData)) return TupleOf(bool, DictOf(int, ReadData))
def advise_corrupt_share(share_type=str, storage_index=StorageIndex, def advise_corrupt_share(share_type=bytes, storage_index=StorageIndex,
shnum=int, reason=str): shnum=int, reason=bytes):
"""Clients who discover hash failures in shares that they have """Clients who discover hash failures in shares that they have
downloaded from me will use this method to inform me about the downloaded from me will use this method to inform me about the
failures. I will record their concern so that my operator can failures. I will record their concern so that my operator can
@ -2860,7 +2875,7 @@ UploadResults = Any() #DictOf(str, str)
class RIEncryptedUploadable(RemoteInterface): class RIEncryptedUploadable(RemoteInterface):
__remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com" __remote_name__ = b"RIEncryptedUploadable.tahoe.allmydata.com"
def get_size(): def get_size():
return Offset return Offset
@ -2876,7 +2891,7 @@ class RIEncryptedUploadable(RemoteInterface):
class RICHKUploadHelper(RemoteInterface): class RICHKUploadHelper(RemoteInterface):
__remote_name__ = "RIUploadHelper.tahoe.allmydata.com" __remote_name__ = b"RIUploadHelper.tahoe.allmydata.com"
def get_version(): def get_version():
""" """
@ -2889,7 +2904,7 @@ class RICHKUploadHelper(RemoteInterface):
class RIHelper(RemoteInterface): class RIHelper(RemoteInterface):
__remote_name__ = "RIHelper.tahoe.allmydata.com" __remote_name__ = b"RIHelper.tahoe.allmydata.com"
def get_version(): def get_version():
""" """
@ -2916,7 +2931,7 @@ class RIHelper(RemoteInterface):
class RIStatsProvider(RemoteInterface): class RIStatsProvider(RemoteInterface):
__remote_name__ = "RIStatsProvider.tahoe.allmydata.com" __remote_name__ = b"RIStatsProvider.tahoe.allmydata.com"
""" """
Provides access to statistics and monitoring information. Provides access to statistics and monitoring information.
""" """
@ -2933,7 +2948,7 @@ class RIStatsProvider(RemoteInterface):
class RIStatsGatherer(RemoteInterface): class RIStatsGatherer(RemoteInterface):
__remote_name__ = "RIStatsGatherer.tahoe.allmydata.com" __remote_name__ = b"RIStatsGatherer.tahoe.allmydata.com"
""" """
Provides a monitoring service for centralised collection of stats Provides a monitoring service for centralised collection of stats
""" """

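The pattern above in miniature: a foolscap RemoteInterface declares its wire name and argument/return constraints, and this commit moves both to bytes so Python 2 and Python 3 peers agree on the serialized form. A hedged sketch, not one of Tahoe's real interfaces:

    from foolscap.api import RemoteInterface, DictOf, Any

    class RIExample(RemoteInterface):
        __remote_name__ = b"RIExample.tahoe.allmydata.com"

        def get_version():
            return DictOf(bytes, Any())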
View File

@@ -1,7 +1,21 @@
+"""
+Manage status of long-running operations.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from zope.interface import Interface, implementer
 from allmydata.util import observer
 class IMonitor(Interface):
     """I manage status, progress, and cancellation for long-running operations.

View File

@@ -1,3 +1,4 @@
+from past.utils import old_div
 import struct
 from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \
@@ -180,11 +181,11 @@ def pack_offsets(verification_key_length, signature_length,
 def pack_share(prefix, verification_key, signature,
                share_hash_chain, block_hash_tree,
                share_data, encprivkey):
-    share_hash_chain_s = "".join([struct.pack(">H32s", i, share_hash_chain[i])
-                                  for i in sorted(share_hash_chain.keys())])
+    share_hash_chain_s = b"".join([struct.pack(">H32s", i, share_hash_chain[i])
+                                   for i in sorted(share_hash_chain.keys())])
     for h in block_hash_tree:
         assert len(h) == 32
-    block_hash_tree_s = "".join(block_hash_tree)
+    block_hash_tree_s = b"".join(block_hash_tree)
     offsets = pack_offsets(len(verification_key),
                            len(signature),
@@ -192,7 +193,7 @@ def pack_share(prefix, verification_key, signature,
                            len(block_hash_tree_s),
                            len(share_data),
                            len(encprivkey))
-    final_share = "".join([prefix,
+    final_share = b"".join([prefix,
                            offsets,
                            verification_key,
                            signature,
@@ -255,7 +256,7 @@ class SDMFSlotWriteProxy(object):
                                                  self._required_shares)
         assert expected_segment_size == segment_size
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)
         # This is meant to mimic how SDMF files were built before MDMF
         # entered the picture: we generate each share in its entirety,
@@ -296,7 +297,7 @@ class SDMFSlotWriteProxy(object):
                                            salt)
         else:
             checkstring = checkstring_or_seqnum
-        self._testvs = [(0, len(checkstring), "eq", checkstring)]
+        self._testvs = [(0, len(checkstring), b"eq", checkstring)]
     def get_checkstring(self):
@@ -306,7 +307,7 @@ class SDMFSlotWriteProxy(object):
         """
         if self._testvs:
             return self._testvs[0][3]
-        return ""
+        return b""
     def put_block(self, data, segnum, salt):
@@ -343,7 +344,7 @@ class SDMFSlotWriteProxy(object):
             assert len(h) == HASH_SIZE
         # serialize the blockhashes, then set them.
-        blockhashes_s = "".join(blockhashes)
+        blockhashes_s = b"".join(blockhashes)
         self._share_pieces['block_hash_tree'] = blockhashes_s
         return defer.succeed(None)
@@ -354,11 +355,11 @@ class SDMFSlotWriteProxy(object):
         Add the share hash chain to the share.
         """
         assert isinstance(sharehashes, dict)
-        for h in sharehashes.itervalues():
+        for h in sharehashes.values():
             assert len(h) == HASH_SIZE
         # serialize the sharehashes, then set them.
-        sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
-                                 for i in sorted(sharehashes.keys())])
+        sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
+                                  for i in sorted(sharehashes.keys())])
         self._share_pieces['share_hash_chain'] = sharehashes_s
@@ -383,7 +384,7 @@ class SDMFSlotWriteProxy(object):
         assert len(salt) == SALT_SIZE
         self._share_pieces['salt'] = salt
-        self._share_pieces['sharedata'] = ""
+        self._share_pieces['sharedata'] = b""
     def get_signable(self):
@@ -519,7 +520,7 @@ class SDMFSlotWriteProxy(object):
         # to the remote server in one write.
         offsets = self._pack_offsets()
         prefix = self.get_signable()
-        final_share = "".join([prefix,
+        final_share = b"".join([prefix,
                                offsets,
                                self._share_pieces['verification_key'],
                                self._share_pieces['signature'],
@@ -537,7 +538,7 @@ class SDMFSlotWriteProxy(object):
             # yet, so we assume that we are writing a new share, and set
             # a test vector that will allow a new share to be written.
             self._testvs = []
-            self._testvs.append(tuple([0, 1, "eq", ""]))
+            self._testvs.append(tuple([0, 1, b"eq", b""]))
         tw_vectors = {}
         tw_vectors[self.shnum] = (self._testvs, datavs, None)
@@ -788,7 +789,7 @@ class MDMFSlotWriteProxy(object):
         # and also because it provides a useful amount of bounds checking.
         self._num_segments = mathutil.div_ceil(self._data_length,
                                                self._segment_size)
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)
         # We also calculate the share size, to help us with block
         # constraints later.
         tail_size = self._data_length % self._segment_size
@@ -797,7 +798,7 @@ class MDMFSlotWriteProxy(object):
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
         # We already know where the sharedata starts; right after the end
         # of the header (which is defined as the signable part + the offsets)
@@ -868,7 +869,7 @@ class MDMFSlotWriteProxy(object):
         else:
             checkstring = seqnum_or_checkstring
-        if checkstring == "":
+        if checkstring == b"":
             # We special-case this, since len("") = 0, but we need
             # length of 1 for the case of an empty share to work on the
             # storage server, which is what a checkstring that is the
@@ -876,7 +877,7 @@ class MDMFSlotWriteProxy(object):
             self._testvs = []
         else:
             self._testvs = []
-            self._testvs.append((0, len(checkstring), "eq", checkstring))
+            self._testvs.append((0, len(checkstring), b"eq", checkstring))
     def __repr__(self):
@@ -893,7 +894,7 @@ class MDMFSlotWriteProxy(object):
         if self._root_hash:
             roothash = self._root_hash
         else:
-            roothash = "\x00" * 32
+            roothash = b"\x00" * 32
         return struct.pack(MDMFCHECKSTRING,
                            1,
                            self._seqnum,
@@ -964,7 +965,7 @@ class MDMFSlotWriteProxy(object):
         assert isinstance(blockhashes, list)
-        blockhashes_s = "".join(blockhashes)
+        blockhashes_s = b"".join(blockhashes)
         self._offsets['EOF'] = self._offsets['block_hash_tree'] + len(blockhashes_s)
         self._writevs.append(tuple([self._offsets['block_hash_tree'],
@@ -998,7 +999,7 @@ class MDMFSlotWriteProxy(object):
         if "verification_key" in self._offsets:
             raise LayoutInvalid("You must write the share hash chain "
                                 "before you write the signature")
-        sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
-                                 for i in sorted(sharehashes.keys())])
+        sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
+                                  for i in sorted(sharehashes.keys())])
         self._offsets['signature'] = self._offsets['share_hash_chain'] + \
             len(sharehashes_s)
@@ -1149,7 +1150,7 @@ class MDMFSlotWriteProxy(object):
         tw_vectors = {}
         if not self._testvs:
             self._testvs = []
-            self._testvs.append(tuple([0, 1, "eq", ""]))
+            self._testvs.append(tuple([0, 1, b"eq", b""]))
         if not self._written:
             # Write a new checkstring to the share when we write it, so
             # that we have something to check later.
@@ -1157,7 +1158,7 @@ class MDMFSlotWriteProxy(object):
             datavs.append((0, new_checkstring))
             def _first_write():
                 self._written = True
-                self._testvs = [(0, len(new_checkstring), "eq", new_checkstring)]
+                self._testvs = [(0, len(new_checkstring), b"eq", new_checkstring)]
             on_success = _first_write
         tw_vectors[self.shnum] = (self._testvs, datavs, None)
         d = self._storage_server.slot_testv_and_readv_and_writev(
@@ -1194,7 +1195,7 @@ class MDMFSlotReadProxy(object):
                  storage_server,
                  storage_index,
                  shnum,
-                 data="",
+                 data=b"",
                  data_is_everything=False):
         # Start the initialization process.
         self._storage_server = storage_server
@@ -1238,7 +1239,7 @@ class MDMFSlotReadProxy(object):
         # None if there isn't any cached data, but the way we index the
         # cached data requires a string, so convert None to "".
         if self._data == None:
-            self._data = ""
+            self._data = b""
     def _maybe_fetch_offsets_and_header(self, force_remote=False):
@@ -1317,7 +1318,7 @@ class MDMFSlotReadProxy(object):
         self._segment_size = segsize
         self._data_length = datalen
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)
         # We can upload empty files, and need to account for this fact
         # so as to avoid zero-division and zero-modulo errors.
         if datalen > 0:
@@ -1329,7 +1330,7 @@ class MDMFSlotReadProxy(object):
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
         return encoding_parameters
@@ -1416,7 +1417,7 @@ class MDMFSlotReadProxy(object):
                 # when we fetched the header
                 data = results[self.shnum]
                 if not data:
-                    data = ""
+                    data = b""
                 else:
                     if len(data) != 1:
                         raise BadShareError("got %d vectors, not 1" % len(data))
@@ -1425,7 +1426,7 @@ class MDMFSlotReadProxy(object):
             else:
                 data = results[self.shnum]
                 if not data:
-                    salt = data = ""
+                    salt = data = b""
                 else:
                     salt_and_data = results[self.shnum][0]
                     salt = salt_and_data[:SALT_SIZE]
@@ -1743,7 +1744,7 @@ class MDMFSlotReadProxy(object):
     def _read(self, readvs, force_remote=False):
-        unsatisfiable = filter(lambda x: x[0] + x[1] > len(self._data), readvs)
+        unsatisfiable = list(filter(lambda x: x[0] + x[1] > len(self._data), readvs))
         # TODO: It's entirely possible to tweak this so that it just
         # fulfills the requests that it can, and not demand that all
         # requests are satisfiable before running it.

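past.utils.old_div, introduced at the top of this file, preserves Python 2 division semantics under Python 3: floor division when both operands are integers, true division otherwise:

    from past.utils import old_div

    assert old_div(7, 2) == 3       # int / int floors, as on Python 2
    assert old_div(7.0, 2) == 3.5   # floats still divide exactly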
View File

@@ -2,12 +2,17 @@
 This module contains classes and functions to implement and manage
 a node for Tahoe-LAFS.
 """
+from past.builtins import unicode
+
 import datetime
 import os.path
 import re
 import types
 import errno
-import ConfigParser
+try:
+    import ConfigParser
+except ImportError:
+    import configparser as ConfigParser
 import tempfile
 from io import BytesIO
 from base64 import b32decode, b32encode
@@ -67,7 +72,7 @@ def _common_valid_config():
 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
-for thing, things_version in get_package_versions().iteritems():
+for thing, things_version in get_package_versions().items():
     app_versions.add_version(thing, str(things_version))
 # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
@@ -272,7 +277,10 @@ class _Config(object):
         self.config = configparser
         nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
-        self.nickname = nickname_utf8.decode("utf-8")
+        if isinstance(nickname_utf8, bytes):  # Python 2
+            self.nickname = nickname_utf8.decode("utf-8")
+        else:
+            self.nickname = nickname_utf8
         assert type(self.nickname) is unicode
     def validate(self, valid_config_sections):

View File

@@ -1,3 +1,13 @@
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # We omit anything that might end up in pickle, just in case.
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min  # noqa: F401
 import time, os, pickle, struct
 from allmydata.storage.crawler import ShareCrawler
 from allmydata.storage.shares import get_share_file

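The trimmed builtins import above matters because this file pickles crawler state: on Python 2 the future package's backported types are subclasses that pickle under future.types.* module paths, which an unported reader could fail to import. On Python 3 the same imports are no-ops:

    import future.builtins

    # On Python 3, future.builtins re-exports the stdlib builtins unchanged.
    assert future.builtins.str is str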
View File

@@ -1,3 +1,5 @@
+from future.utils import bytes_to_native_str
+
 import os, stat, struct, time
 from foolscap.api import Referenceable
@@ -85,7 +87,7 @@ class ShareFile(object):
         seekpos = self._data_offset+offset
         actuallength = max(0, min(length, self._lease_offset-seekpos))
         if actuallength == 0:
-            return ""
+            return b""
         with open(self.home, 'rb') as f:
             f.seek(seekpos)
             return f.read(actuallength)
@@ -298,7 +300,9 @@ class BucketReader(Referenceable):
     def __repr__(self):
         return "<%s %s %s>" % (self.__class__.__name__,
-                               base32.b2a(self.storage_index[:8])[:12],
+                               bytes_to_native_str(
+                                   base32.b2a(self.storage_index[:8])[:12]
+                               ),
                                self.shnum)
     def remote_read(self, offset, length):
@@ -309,7 +313,7 @@ class BucketReader(Referenceable):
         return data
     def remote_advise_corrupt_share(self, reason):
-        return self.ss.remote_advise_corrupt_share("immutable",
+        return self.ss.remote_advise_corrupt_share(b"immutable",
                                                    self.storage_index,
                                                    self.shnum,
                                                    reason)

View File

@@ -48,8 +48,9 @@ class MutableShareFile(object):
     # our sharefiles share with a recognizable string, plus some random
     # binary data to reduce the chance that a regular text file will look
    # like a sharefile.
-    MAGIC = "Tahoe mutable container v1\n" + "\x75\x09\x44\x03\x8e"
+    MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
     assert len(MAGIC) == 32
+    assert isinstance(MAGIC, bytes)
     MAX_SIZE = MAX_MUTABLE_SHARE_SIZE
     # TODO: decide upon a policy for max share size
@@ -86,7 +87,7 @@ class MutableShareFile(object):
                              self.MAGIC, my_nodeid, write_enabler,
                              data_length, extra_lease_offset,
                              )
-            leases = ("\x00" * self.LEASE_SIZE) * 4
+            leases = (b"\x00" * self.LEASE_SIZE) * 4
             f.write(header + leases)
             # data goes here, empty after creation
             f.write(struct.pack(">L", num_extra_leases))
@@ -112,7 +113,7 @@ class MutableShareFile(object):
         # start beyond the end of the data return an empty string.
         length = max(0, data_length-offset)
         if length == 0:
-            return ""
+            return b""
         precondition(offset+length <= data_length)
         f.seek(self.DATA_OFFSET+offset)
         data = f.read(length)
@@ -154,7 +155,7 @@ class MutableShareFile(object):
         # Zero out the old lease info (in order to minimize the chance that
         # it could accidentally be exposed to a reader later, re #1528).
         f.seek(old_extra_lease_offset)
-        f.write('\x00' * leases_size)
+        f.write(b'\x00' * leases_size)
         f.flush()
         # An interrupt here will corrupt the leases.
@@ -193,7 +194,7 @@ class MutableShareFile(object):
         # Fill any newly exposed empty space with 0's.
         if offset > data_length:
             f.seek(self.DATA_OFFSET+data_length)
-            f.write('\x00'*(offset - data_length))
+            f.write(b'\x00'*(offset - data_length))
             f.flush()
         new_data_length = offset+length
@@ -325,10 +326,10 @@ class MutableShareFile(object):
         modified = 0
         remaining = 0
         blank_lease = LeaseInfo(owner_num=0,
-                                renew_secret="\x00"*32,
-                                cancel_secret="\x00"*32,
+                                renew_secret=b"\x00"*32,
+                                cancel_secret=b"\x00"*32,
                                 expiration_time=0,
-                                nodeid="\x00"*20)
+                                nodeid=b"\x00"*20)
         with open(self.home, 'rb+') as f:
             for (leasenum,lease) in self._enumerate_leases(f):
                 accepting_nodeids.add(lease.nodeid)
@@ -420,18 +421,18 @@ class MutableShareFile(object):
         # self._change_container_size() here.
 def testv_compare(a, op, b):
-    assert op in ("lt", "le", "eq", "ne", "ge", "gt")
-    if op == "lt":
+    assert op in (b"lt", b"le", b"eq", b"ne", b"ge", b"gt")
+    if op == b"lt":
         return a < b
-    if op == "le":
+    if op == b"le":
         return a <= b
-    if op == "eq":
+    if op == b"eq":
         return a == b
-    if op == "ne":
+    if op == b"ne":
         return a != b
-    if op == "ge":
+    if op == b"ge":
         return a >= b
-    if op == "gt":
+    if op == b"gt":
         return a > b
     # never reached
@@ -440,7 +441,7 @@ class EmptyShare(object):
     def check_testv(self, testv):
         test_good = True
         for (offset, length, operator, specimen) in testv:
-            data = ""
+            data = b""
             if not testv_compare(data, operator, specimen):
                 test_good = False
                 break

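Illustrative values for how a test vector gates a write via testv_compare() above, now that the operators are bytes (the import path is assumed from context, since the dump elides filenames):

    from allmydata.storage.mutable import testv_compare  # assumed location of the function above

    testv = (0, 3, b"eq", b"abc")   # (offset, length, operator, specimen)
    data = b"abcdef"
    offset, length, op, specimen = testv
    assert testv_compare(data[offset:offset+length], op, specimen)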
View File

@@ -1,3 +1,4 @@
+from future.utils import bytes_to_native_str
 import os, re, struct, time
 import weakref
 import six
@@ -51,6 +52,7 @@ class StorageServer(service.MultiService, Referenceable):
         service.MultiService.__init__(self)
         assert isinstance(nodeid, bytes)
         assert len(nodeid) == 20
+        assert isinstance(nodeid, bytes)
         self.my_nodeid = nodeid
         self.storedir = storedir
         sharedir = os.path.join(storedir, "shares")
@@ -398,7 +400,7 @@ class StorageServer(service.MultiService, Referenceable):
         # since all shares get the same lease data, we just grab the leases
         # from the first share
         try:
-            shnum, filename = self._get_bucket_shares(storage_index).next()
+            shnum, filename = next(self._get_bucket_shares(storage_index))
             sf = ShareFile(filename)
             return sf.get_leases()
         except StopIteration:
@@ -676,6 +678,10 @@ class StorageServer(service.MultiService, Referenceable):
     def remote_advise_corrupt_share(self, share_type, storage_index, shnum,
                                     reason):
+        # This is a remote API, I believe, so this has to be bytes for legacy
+        # protocol backwards compatibility reasons.
+        assert isinstance(share_type, bytes)
+        assert isinstance(reason, bytes)
         fileutil.make_dirs(self.corruption_advisory_dir)
         now = time_format.iso_utc(sep="T")
         si_s = si_b2a(storage_index)
@@ -684,11 +690,11 @@ class StorageServer(service.MultiService, Referenceable):
                           "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
         with open(fn, "w") as f:
             f.write("report: Share Corruption\n")
-            f.write("type: %s\n" % share_type)
-            f.write("storage_index: %s\n" % si_s)
+            f.write("type: %s\n" % bytes_to_native_str(share_type))
+            f.write("storage_index: %s\n" % bytes_to_native_str(si_s))
             f.write("share_number: %d\n" % shnum)
             f.write("\n")
-            f.write(reason)
+            f.write(bytes_to_native_str(reason))
             f.write("\n")
         log.msg(format=("client claims corruption in (%(share_type)s) " +
                         "%(si)s-%(shnum)d: %(reason)s"),

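future.utils.bytes_to_native_str returns a native str on both Pythons (it decodes on Python 3 and is a no-op on Python 2), which is why the corruption-advisory writer above wraps each bytes value before writing to a text-mode file:

    from future.utils import bytes_to_native_str

    assert bytes_to_native_str(b"immutable") == "immutable"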
View File

@@ -30,9 +30,12 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
 import re, time, hashlib
-from ConfigParser import (
-    NoSectionError,
-)
+try:
+    from ConfigParser import (
+        NoSectionError,
+    )
+except ImportError:
+    from configparser import NoSectionError
 import attr
 from zope.interface import (
     Attribute,
@@ -534,11 +537,11 @@ class _NullStorage(object):
     which we can't communicate.
     """
     nickname = ""
-    permutation_seed = hashlib.sha256("").digest()
-    tubid = hashlib.sha256("").digest()
+    permutation_seed = hashlib.sha256(b"").digest()
+    tubid = hashlib.sha256(b"").digest()
     storage_server = None
-    lease_seed = hashlib.sha256("").digest()
+    lease_seed = hashlib.sha256(b"").digest()
     name = "<unsupported>"
     longname = "<storage with unsupported protocol>"

View File

@@ -6,6 +6,8 @@ from twisted.python import usage
 from allmydata.util import configutil
 from ..common_util import run_cli, parse_cli
 from ...scripts import create_node
+from ... import client
 def read_config(basedir):
     tahoe_cfg = os.path.join(basedir, "tahoe.cfg")
@@ -33,6 +35,31 @@ class Config(unittest.TestCase):
         e = self.assertRaises(usage.UsageError, parse_cli, verb, *args)
         self.assertIn("option %s not recognized" % (option,), str(e))
+    def test_create_client_config(self):
+        d = self.mktemp()
+        os.mkdir(d)
+        fname = os.path.join(d, 'tahoe.cfg')
+
+        with open(fname, 'w') as f:
+            opts = {"nickname": "nick",
+                    "webport": "tcp:3456",
+                    "hide-ip": False,
+                    "listen": "none",
+                    "shares-needed": "1",
+                    "shares-happy": "1",
+                    "shares-total": "1",
+                    }
+            create_node.write_node_config(f, opts)
+            create_node.write_client_config(f, opts)
+
+        config = configutil.get_config(fname)
+        # should succeed, no exceptions
+        configutil.validate_config(
+            fname,
+            config,
+            client._valid_config(),
+        )
+
     @defer.inlineCallbacks
     def test_client(self):
         basedir = self.mktemp()

View File

@@ -52,7 +52,6 @@ from twisted.internet.defer import inlineCallbacks, returnValue
 from twisted.internet.interfaces import IPullProducer
 from twisted.python import failure
 from twisted.python.filepath import FilePath
-from twisted.application import service
 from twisted.web.error import Error as WebError
 from twisted.internet.interfaces import (
     IStreamServerEndpointStringParser,
@@ -88,6 +87,8 @@ from ..crypto import (
 from .eliotutil import (
     EliotLoggedRunTest,
 )
+# Backwards compatibility imports:
+from .common_py3 import LoggingServiceParent, ShouldFailMixin  # noqa: F401
 TEST_RSA_KEY_SIZE = 522
@@ -780,53 +781,8 @@ def create_mutable_filenode(contents, mdmf=False, all_contents=None):
     return filenode
-class LoggingServiceParent(service.MultiService):
-    def log(self, *args, **kwargs):
-        return log.msg(*args, **kwargs)
 TEST_DATA="\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)
-class ShouldFailMixin(object):
-    def shouldFail(self, expected_failure, which, substring,
-                   callable, *args, **kwargs):
-        """Assert that a function call raises some exception. This is a
-        Deferred-friendly version of TestCase.assertRaises() .
-
-        Suppose you want to verify the following function:
-
-         def broken(a, b, c):
-            if a < 0:
-                raise TypeError('a must not be negative')
-            return defer.succeed(b+c)
-
-        You can use:
-            d = self.shouldFail(TypeError, 'test name',
-                                'a must not be negative',
-                                broken, -4, 5, c=12)
-        in your test method. The 'test name' string will be included in the
-        error message, if any, because Deferred chains frequently make it
-        difficult to tell which assertion was tripped.
-
-        The substring= argument, if not None, must appear in the 'repr'
-        of the message wrapped by this Failure, or the test will fail.
-        """
-        assert substring is None or isinstance(substring, str)
-        d = defer.maybeDeferred(callable, *args, **kwargs)
-        def done(res):
-            if isinstance(res, failure.Failure):
-                res.trap(expected_failure)
-                if substring:
-                    message = repr(res.value.args[0])
-                    self.failUnless(substring in message,
-                                    "%s: substring '%s' not in '%s'"
-                                    % (which, substring, message))
-            else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
-                          (which, expected_failure, res))
-        d.addBoth(done)
-        return d
 class WebErrorMixin(object):
     def explain_web_error(self, f):

View File

@@ -19,11 +19,13 @@ import time
 import signal
 from twisted.internet import defer, reactor
+from twisted.application import service
 from twisted.python import failure
 from twisted.trial import unittest
 from ..util.assertutil import precondition
 from ..util.encodingutil import unicode_platform, get_filesystem_encoding
+from ..util import log
 class TimezoneMixin(object):
@@ -77,6 +79,28 @@ class ShouldFailMixin(object):
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
+        """Assert that a function call raises some exception. This is a
+        Deferred-friendly version of TestCase.assertRaises() .
+
+        Suppose you want to verify the following function:
+
+         def broken(a, b, c):
+            if a < 0:
+                raise TypeError('a must not be negative')
+            return defer.succeed(b+c)
+
+        You can use:
+            d = self.shouldFail(TypeError, 'test name',
+                                'a must not be negative',
+                                broken, -4, 5, c=12)
+        in your test method. The 'test name' string will be included in the
+        error message, if any, because Deferred chains frequently make it
+        difficult to tell which assertion was tripped.
+
+        The substring= argument, if not None, must appear in the 'repr'
+        of the message wrapped by this Failure, or the test will fail.
+        """
         assert substring is None or isinstance(substring, (bytes, unicode))
         d = defer.maybeDeferred(callable, *args, **kwargs)
         def done(res):
@@ -135,3 +159,9 @@ class FakeCanary(object):
         if self.ignore:
             return
         del self.disconnectors[marker]
+
+
+class LoggingServiceParent(service.MultiService):
+    def log(self, *args, **kwargs):
+        return log.msg(*args, **kwargs)

View File

@@ -1,64 +1,7 @@
-import re
 import treq
 from twisted.internet import defer
 from twisted.web.error import Error
-from nevow.testutil import FakeRequest
-from nevow import inevow, context
-
-class WebRenderingMixin(object):
-    # d=page.renderString() or s=page.renderSynchronously() will exercise
-    # docFactory, render_*/data_* . It won't exercise want_json(), or my
-    # renderHTTP() override which tests want_json(). To exercise args=, we
-    # must build a context. Pages which use a return_to= argument need a
-    # context.
-
-    # d=page.renderHTTP(ctx) will exercise my renderHTTP, want_json, and
-    # docFactory/render_*/data_*, but it requires building a context. Since
-    # we're already building a context, it is easy to exercise args= .
-
-    # so, use at least two d=page.renderHTTP(ctx) per page (one for json, one
-    # for html), then use lots of simple s=page.renderSynchronously() to
-    # exercise the fine details (the ones that don't require args=).
-
-    def make_context(self, req):
-        ctx = context.RequestContext(tag=req)
-        ctx.remember(req, inevow.IRequest)
-        ctx.remember(None, inevow.IData)
-        ctx = context.WovenContext(parent=ctx, precompile=False)
-        return ctx
-
-    def render1(self, page, **kwargs):
-        # use this to exercise an overridden renderHTTP, usually for
-        # output=json or render_GET. It always returns a Deferred.
-        req = FakeRequest(**kwargs)
-        req.fields = None
-        ctx = self.make_context(req)
-        d = defer.maybeDeferred(page.renderHTTP, ctx)
-        def _done(res):
-            if isinstance(res, str):
-                return res + req.v
-            return req.v
-        d.addCallback(_done)
-        return d
-
-    def render2(self, page, **kwargs):
-        # use this to exercise the normal Nevow docFactory rendering. It
-        # returns a string. If one of the render_* methods returns a
-        # Deferred, this will throw an exception. (note that
-        # page.renderString is the Deferred-returning equivalent)
-        req = FakeRequest(**kwargs)
-        req.fields = None
-        ctx = self.make_context(req)
-        return page.renderSynchronously(ctx)
-
-    def failUnlessIn(self, substring, s):
-        self.failUnless(substring in s, s)
-
-    def remove_tags(self, s):
-        s = re.sub(r'<[^>]*>', ' ', s)
-        s = re.sub(r'\s+', ' ', s)
-        return s
-
 @defer.inlineCallbacks
 def do_http(method, url, **kwargs):
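Note: the helper's body is cut off in this excerpt. Purely for orientation, a minimal treq-based sketch of what such a Deferred-returning HTTP helper could look like — an assumption, not the commit's actual body:

    from twisted.internet.defer import inlineCallbacks, returnValue

    @inlineCallbacks
    def do_http_sketch(method, url, **kwargs):
        response = yield treq.request(method, url, persistent=False, **kwargs)
        body = yield treq.content(response)          # collect the whole body
        if 400 <= response.code < 600:
            raise Error(response.code, response=body)
        returnValue(body)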

@@ -2,6 +2,8 @@
 Tools aimed at the interaction between tests and Eliot.
 """

+from past.builtins import unicode
+
 __all__ = [
     "RUN_TEST",
     "EliotLoggedRunTest",

@@ -1,8 +1,21 @@
+"""
+Tests for allmydata.codec.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 from twisted.trial import unittest
 from twisted.python import log
-from allmydata.codec import CRSEncoder, CRSDecoder
+from allmydata.codec import CRSEncoder, CRSDecoder, parse_params
 import random
 from allmydata.util import mathutil
@@ -13,6 +26,8 @@ class T(unittest.TestCase):
         enc.set_params(size, required_shares, max_shares)
         params = enc.get_params()
         assert params == (size, required_shares, max_shares)
+        serialized_params = enc.get_serialized_params()
+        self.assertEqual(parse_params(serialized_params), params)
         log.msg("params: %s" % (params,))
         d = enc.encode(data0s)
         def _done_encoding_all(shares_and_shareids):
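Note: the two added lines assert that encoding parameters survive serialization. A self-contained illustration using only the API exercised above (the size/k/m values are arbitrary):

    enc = CRSEncoder()
    enc.set_params(1234, 3, 10)      # size, required_shares, max_shares
    assert parse_params(enc.get_serialized_params()) == (1234, 3, 10)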
@@ -23,7 +38,7 @@ class T(unittest.TestCase):
         d.addCallback(_done_encoding_all)
         if fewer_shares is not None:
             # also validate that the desired_shareids= parameter works
-            desired_shareids = random.sample(range(max_shares), fewer_shares)
+            desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
             d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
             def _check_fewer_shares(some_shares_and_their_shareids):
                 (some_shares, their_shareids) = some_shares_and_their_shareids
@@ -38,11 +53,11 @@ class T(unittest.TestCase):
             return d1
         def _check_data(decoded_shares):
-            self.failUnlessEqual(len(''.join(decoded_shares)), len(''.join(data0s)))
+            self.failUnlessEqual(len(b''.join(decoded_shares)), len(b''.join(data0s)))
             self.failUnlessEqual(len(decoded_shares), len(data0s))
             for (i, (x, y)) in enumerate(zip(data0s, decoded_shares)):
                 self.failUnlessEqual(x, y, "%s: %r != %r.... first share was %r" % (str(i), x, y, data0s[0],))
-            self.failUnless(''.join(decoded_shares) == ''.join(data0s), "%s" % ("???",))
+            self.failUnless(b''.join(decoded_shares) == b''.join(data0s), "%s" % ("???",))
             # 0data0sclipped = tuple(data0s)
             # data0sclipped[-1] =
             # self.failUnless(tuple(decoded_shares) == tuple(data0s))
@@ -59,7 +74,7 @@ class T(unittest.TestCase):
         def _decode_some_random(res):
             log.msg("_decode_some_random")
             # use a randomly-selected minimal subset
-            l = random.sample(zip(self.shares, self.shareids), required_shares)
+            l = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             some_shares = [ x[0] for x in l ]
             some_shareids = [ x[1] for x in l ]
             return _decode((some_shares, some_shareids))
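Note: the list(...) wrappers appear because zip() and range() return lazy iterators on Python 3, while random.sample() needs a sized population; on Python 2 the extra list() is harmless. For instance:

    import random
    random.sample(list(zip("abc", [1, 2, 3])), 2)   # works on Python 2 and 3
    # random.sample(zip("abc", [1, 2, 3]), 2)       # raises TypeError on Python 3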
@@ -70,10 +85,10 @@ class T(unittest.TestCase):
             log.msg("_decode_multiple")
             # make sure we can re-use the decoder object
-            shares1 = random.sample(self.shares, required_shares)
-            sharesl1 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl1 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares1 = [ x[0] for x in sharesl1 ]
             shareids1 = [ x[1] for x in sharesl1 ]
-            sharesl2 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl2 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares2 = [ x[0] for x in sharesl2 ]
             shareids2 = [ x[1] for x in sharesl2 ]
             dec = CRSDecoder()

@@ -1,14 +1,26 @@
+"""
+Tests for allmydata.util.configutil.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Omitted dict, because worried about interactions.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401
+
 import os.path
 from twisted.trial import unittest

 from allmydata.util import configutil
-from allmydata.test.no_network import GridTestMixin
-from ..scripts import create_node
-from .. import client

-class ConfigUtilTests(GridTestMixin, unittest.TestCase):
+class ConfigUtilTests(unittest.TestCase):
     def setUp(self):
         super(ConfigUtilTests, self).setUp()
         self.static_valid_config = configutil.ValidConfiguration(
@@ -20,10 +32,22 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
             lambda section_name, item_name: (section_name, item_name) == ("node", "valid"),
         )

+    def create_tahoe_cfg(self, cfg):
+        d = self.mktemp()
+        os.mkdir(d)
+        fname = os.path.join(d, 'tahoe.cfg')
+        with open(fname, "w") as f:
+            f.write(cfg)
+        return fname
+
     def test_config_utils(self):
-        self.basedir = "cli/ConfigUtilTests/test-config-utils"
-        self.set_up_grid(oneshare=True)
-        tahoe_cfg = os.path.join(self.get_clientdir(i=0), "tahoe.cfg")
+        tahoe_cfg = self.create_tahoe_cfg("""\
+[node]
+nickname = client-0
+web.port = adopt-socket:fd=5
+[storage]
+enabled = false
+""")

         # test that at least one option was read correctly
         config = configutil.get_config(tahoe_cfg)
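Note: the round trip the new helper enables, with values taken from the config literal above:

    config = configutil.get_config(tahoe_cfg)
    assert config.get("node", "nickname") == "client-0"
    assert config.get("storage", "enabled") == "false"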
@@ -45,12 +69,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         self.failUnlessEqual(config.get("node", "descriptor"), descriptor)

     def test_config_validation_success(self):
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\n')

         config = configutil.get_config(fname)
         # should succeed, no exceptions
@@ -66,12 +85,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         validation but are matched by the dynamic validation is considered
         valid.
         """
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\n')

         config = configutil.get_config(fname)
         # should succeed, no exceptions
@@ -82,12 +96,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         )

     def test_config_validation_invalid_item(self):
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\ninvalid = foo\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n')

         config = configutil.get_config(fname)
         e = self.assertRaises(
@@ -103,12 +112,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         A configuration with a section that is matched by neither the static nor
         dynamic validators is rejected.
         """
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\n[invalid]\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n')

         config = configutil.get_config(fname)
         e = self.assertRaises(
@@ -124,12 +128,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         A configuration with a section that is matched by neither the static nor
         dynamic validators is rejected.
         """
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\n[invalid]\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n')

         config = configutil.get_config(fname)
         e = self.assertRaises(
@@ -145,12 +144,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
         A configuration with a section, item pair that is matched by neither the
         static nor dynamic validators is rejected.
         """
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-        with open(fname, 'w') as f:
-            f.write('[node]\nvalid = foo\ninvalid = foo\n')
+        fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n')

         config = configutil.get_config(fname)
         e = self.assertRaises(
@@ -160,28 +154,3 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
             self.dynamic_valid_config,
         )
         self.assertIn("section [node] contains unknown option 'invalid'", str(e))
-
-    def test_create_client_config(self):
-        d = self.mktemp()
-        os.mkdir(d)
-        fname = os.path.join(d, 'tahoe.cfg')
-
-        with open(fname, 'w') as f:
-            opts = {"nickname": "nick",
-                    "webport": "tcp:3456",
-                    "hide-ip": False,
-                    "listen": "none",
-                    "shares-needed": "1",
-                    "shares-happy": "1",
-                    "shares-total": "1",
-                    }
-            create_node.write_node_config(f, opts)
-            create_node.write_client_config(f, opts)
-
-        config = configutil.get_config(fname)
-        # should succeed, no exceptions
-        configutil.validate_config(
-            fname,
-            config,
-            client._valid_config(),
-        )

@@ -0,0 +1,122 @@
+"""
+Tests for allmydata.util.connection_status.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import mock
+
+from twisted.trial import unittest
+
+from ..util import connection_status
+
+class Status(unittest.TestCase):
+    def test_hint_statuses(self):
+        ncs = connection_status._hint_statuses(["h2","h1"],
+                                               {"h1": "hand1", "h4": "hand4"},
+                                               {"h1": "st1", "h2": "st2",
+                                                "h3": "st3"})
+        self.assertEqual(ncs, {"h1 via hand1": "st1",
+                               "h2": "st2"})
+
+    def test_reconnector_connected(self):
+        ci = mock.Mock()
+        ci.connectorStatuses = {"h1": "st1"}
+        ci.connectionHandlers = {"h1": "hand1"}
+        ci.winningHint = "h1"
+        ci.establishedAt = 120
+        ri = mock.Mock()
+        ri.state = "connected"
+        ri.connectionInfo = ci
+        rc = mock.Mock
+        rc.getReconnectionInfo = mock.Mock(return_value=ri)
+        cs = connection_status.from_foolscap_reconnector(rc, 123)
+        self.assertEqual(cs.connected, True)
+        self.assertEqual(cs.summary, "Connected to h1 via hand1")
+        self.assertEqual(cs.non_connected_statuses, {})
+        self.assertEqual(cs.last_connection_time, 120)
+        self.assertEqual(cs.last_received_time, 123)
+
+    def test_reconnector_connected_others(self):
+        ci = mock.Mock()
+        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+        ci.connectionHandlers = {"h1": "hand1"}
+        ci.winningHint = "h1"
+        ci.establishedAt = 120
+        ri = mock.Mock()
+        ri.state = "connected"
+        ri.connectionInfo = ci
+        rc = mock.Mock
+        rc.getReconnectionInfo = mock.Mock(return_value=ri)
+        cs = connection_status.from_foolscap_reconnector(rc, 123)
+        self.assertEqual(cs.connected, True)
+        self.assertEqual(cs.summary, "Connected to h1 via hand1")
+        self.assertEqual(cs.non_connected_statuses, {"h2": "st2"})
+        self.assertEqual(cs.last_connection_time, 120)
+        self.assertEqual(cs.last_received_time, 123)
+
+    def test_reconnector_connected_listener(self):
+        ci = mock.Mock()
+        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+        ci.connectionHandlers = {"h1": "hand1"}
+        ci.listenerStatus = ("listener1", "successful")
+        ci.winningHint = None
+        ci.establishedAt = 120
+        ri = mock.Mock()
+        ri.state = "connected"
+        ri.connectionInfo = ci
+        rc = mock.Mock
+        rc.getReconnectionInfo = mock.Mock(return_value=ri)
+        cs = connection_status.from_foolscap_reconnector(rc, 123)
+        self.assertEqual(cs.connected, True)
+        self.assertEqual(cs.summary, "Connected via listener (listener1)")
+        self.assertEqual(cs.non_connected_statuses,
+                         {"h1 via hand1": "st1", "h2": "st2"})
+        self.assertEqual(cs.last_connection_time, 120)
+        self.assertEqual(cs.last_received_time, 123)
+
+    def test_reconnector_connecting(self):
+        ci = mock.Mock()
+        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+        ci.connectionHandlers = {"h1": "hand1"}
+        ri = mock.Mock()
+        ri.state = "connecting"
+        ri.connectionInfo = ci
+        rc = mock.Mock
+        rc.getReconnectionInfo = mock.Mock(return_value=ri)
+        cs = connection_status.from_foolscap_reconnector(rc, 123)
+        self.assertEqual(cs.connected, False)
+        self.assertEqual(cs.summary, "Trying to connect")
+        self.assertEqual(cs.non_connected_statuses,
+                         {"h1 via hand1": "st1", "h2": "st2"})
+        self.assertEqual(cs.last_connection_time, None)
+        self.assertEqual(cs.last_received_time, 123)
+
+    def test_reconnector_waiting(self):
+        ci = mock.Mock()
+        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+        ci.connectionHandlers = {"h1": "hand1"}
+        ri = mock.Mock()
+        ri.state = "waiting"
+        ri.lastAttempt = 10
+        ri.nextAttempt = 20
+        ri.connectionInfo = ci
+        rc = mock.Mock
+        rc.getReconnectionInfo = mock.Mock(return_value=ri)
+        with mock.patch("time.time", return_value=12):
+            cs = connection_status.from_foolscap_reconnector(rc, 5)
+        self.assertEqual(cs.connected, False)
+        self.assertEqual(cs.summary,
+                         "Reconnecting in 8 seconds (last attempt 2s ago)")
+        self.assertEqual(cs.non_connected_statuses,
+                         {"h1 via hand1": "st1", "h2": "st2"})
+        self.assertEqual(cs.last_connection_time, None)
+        self.assertEqual(cs.last_received_time, 5)
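Note: the expected summary follows from the patched clock: with time.time() fixed at 12, the next attempt is nextAttempt - now = 20 - 12 = 8 seconds away, and the last attempt was now - lastAttempt = 12 - 10 = 2 seconds ago.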

@@ -7,7 +7,6 @@ from foolscap.connections import tcp
 from ..node import PrivacyError, config_from_string
 from ..node import create_connection_handlers
 from ..node import create_main_tub, _tub_portlocation
-from ..util import connection_status
 from ..util.i2p_provider import create as create_i2p_provider
 from ..util.tor_provider import create as create_tor_provider
@@ -463,106 +462,3 @@ class Privacy(unittest.TestCase):
             str(ctx.exception),
             "tub.location includes tcp: hint",
         )
-
-class Status(unittest.TestCase):
-    def test_hint_statuses(self):
-        ncs = connection_status._hint_statuses(["h2","h1"],
-                                               {"h1": "hand1", "h4": "hand4"},
-                                               {"h1": "st1", "h2": "st2",
-                                                "h3": "st3"})
-        self.assertEqual(ncs, {"h1 via hand1": "st1",
-                               "h2": "st2"})
-
-    def test_reconnector_connected(self):
-        ci = mock.Mock()
-        ci.connectorStatuses = {"h1": "st1"}
-        ci.connectionHandlers = {"h1": "hand1"}
-        ci.winningHint = "h1"
-        ci.establishedAt = 120
-        ri = mock.Mock()
-        ri.state = "connected"
-        ri.connectionInfo = ci
-        rc = mock.Mock
-        rc.getReconnectionInfo = mock.Mock(return_value=ri)
-        cs = connection_status.from_foolscap_reconnector(rc, 123)
-        self.assertEqual(cs.connected, True)
-        self.assertEqual(cs.summary, "Connected to h1 via hand1")
-        self.assertEqual(cs.non_connected_statuses, {})
-        self.assertEqual(cs.last_connection_time, 120)
-        self.assertEqual(cs.last_received_time, 123)
-
-    def test_reconnector_connected_others(self):
-        ci = mock.Mock()
-        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
-        ci.connectionHandlers = {"h1": "hand1"}
-        ci.winningHint = "h1"
-        ci.establishedAt = 120
-        ri = mock.Mock()
-        ri.state = "connected"
-        ri.connectionInfo = ci
-        rc = mock.Mock
-        rc.getReconnectionInfo = mock.Mock(return_value=ri)
-        cs = connection_status.from_foolscap_reconnector(rc, 123)
-        self.assertEqual(cs.connected, True)
-        self.assertEqual(cs.summary, "Connected to h1 via hand1")
-        self.assertEqual(cs.non_connected_statuses, {"h2": "st2"})
-        self.assertEqual(cs.last_connection_time, 120)
-        self.assertEqual(cs.last_received_time, 123)
-
-    def test_reconnector_connected_listener(self):
-        ci = mock.Mock()
-        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
-        ci.connectionHandlers = {"h1": "hand1"}
-        ci.listenerStatus = ("listener1", "successful")
-        ci.winningHint = None
-        ci.establishedAt = 120
-        ri = mock.Mock()
-        ri.state = "connected"
-        ri.connectionInfo = ci
-        rc = mock.Mock
-        rc.getReconnectionInfo = mock.Mock(return_value=ri)
-        cs = connection_status.from_foolscap_reconnector(rc, 123)
-        self.assertEqual(cs.connected, True)
-        self.assertEqual(cs.summary, "Connected via listener (listener1)")
-        self.assertEqual(cs.non_connected_statuses,
-                         {"h1 via hand1": "st1", "h2": "st2"})
-        self.assertEqual(cs.last_connection_time, 120)
-        self.assertEqual(cs.last_received_time, 123)
-
-    def test_reconnector_connecting(self):
-        ci = mock.Mock()
-        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
-        ci.connectionHandlers = {"h1": "hand1"}
-        ri = mock.Mock()
-        ri.state = "connecting"
-        ri.connectionInfo = ci
-        rc = mock.Mock
-        rc.getReconnectionInfo = mock.Mock(return_value=ri)
-        cs = connection_status.from_foolscap_reconnector(rc, 123)
-        self.assertEqual(cs.connected, False)
-        self.assertEqual(cs.summary, "Trying to connect")
-        self.assertEqual(cs.non_connected_statuses,
-                         {"h1 via hand1": "st1", "h2": "st2"})
-        self.assertEqual(cs.last_connection_time, None)
-        self.assertEqual(cs.last_received_time, 123)
-
-    def test_reconnector_waiting(self):
-        ci = mock.Mock()
-        ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
-        ci.connectionHandlers = {"h1": "hand1"}
-        ri = mock.Mock()
-        ri.state = "waiting"
-        ri.lastAttempt = 10
-        ri.nextAttempt = 20
-        ri.connectionInfo = ci
-        rc = mock.Mock
-        rc.getReconnectionInfo = mock.Mock(return_value=ri)
-        with mock.patch("time.time", return_value=12):
-            cs = connection_status.from_foolscap_reconnector(rc, 5)
-        self.assertEqual(cs.connected, False)
-        self.assertEqual(cs.summary,
-                         "Reconnecting in 8 seconds (last attempt 2s ago)")
-        self.assertEqual(cs.non_connected_statuses,
-                         {"h1 via hand1": "st1", "h2": "st2"})
-        self.assertEqual(cs.last_connection_time, None)
-        self.assertEqual(cs.last_received_time, 5)

@@ -0,0 +1,52 @@
+"""
+Tests for allmydata.monitor.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from twisted.trial import unittest
+
+from allmydata.monitor import Monitor, OperationCancelledError
+
+
+class MonitorTests(unittest.TestCase):
+    """Tests for the Monitor class."""
+
+    def test_cancellation(self):
+        """The monitor can be cancelled."""
+        m = Monitor()
+        self.assertFalse(m.is_cancelled())
+        m.raise_if_cancelled()
+        m.cancel()
+        self.assertTrue(m.is_cancelled())
+        with self.assertRaises(OperationCancelledError):
+            m.raise_if_cancelled()
+
+    def test_status(self):
+        """The monitor can have its status set."""
+        m = Monitor()
+        self.assertEqual(m.get_status(), None)
+        m.set_status("discombobulated")
+        self.assertEqual(m.get_status(), "discombobulated")
+
+    def test_finish(self):
+        """The monitor can finish."""
+        m = Monitor()
+        self.assertFalse(m.is_finished())
+        d = m.when_done()
+        self.assertNoResult(d)
+
+        result = m.finish(300)
+        self.assertEqual(result, 300)
+        self.assertEqual(m.get_status(), 300)
+        self.assertTrue(m.is_finished())
+
+        d.addBoth(self.assertEqual, 300)
+        return d
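Note: a hedged sketch of how an operation might drive this interface; count_to is hypothetical, and only the Monitor methods exercised by the tests above are used:

    def count_to(monitor, n):
        for i in range(n):
            monitor.raise_if_cancelled()      # abort early if cancelled
            monitor.set_status("at %d" % i)   # report progress
        return monitor.finish("done")         # fires when_done() Deferreds

    m = Monitor()
    result = count_to(m, 10)                  # -> "done"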

[One file's diff is suppressed because it is too large.]
@@ -1,8 +1,19 @@
 """
 Tests for twisted.storage that uses Web APIs.
+
+Partially ported to Python 3.
 """

 from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Omitted list since it broke a test on Python 2. Shouldn't require further
+    # work; when we switch to Python 3 we'll be dropping this anyway.
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min  # noqa: F401

 import time
 import os.path
@@ -18,7 +29,10 @@ from twisted.web.template import flattenString
 # We need to use `nevow.inevow.IRequest` for now for compatibility
 # with the code in web/common.py. Once nevow bits are gone from
 # web/common.py, we can use `twisted.web.iweb.IRequest` here.
-from nevow.inevow import IRequest
+if PY2:
+    from nevow.inevow import IRequest
+else:
+    from twisted.web.iweb import IRequest
 from twisted.web.server import Request
 from twisted.web.test.requesthelper import DummyChannel
@@ -36,11 +50,11 @@ from allmydata.web.storage import (
     StorageStatusElement,
     remove_prefix
 )
-from .test_storage import FakeCanary
+from .common_py3 import FakeCanary

 def remove_tags(s):
-    s = re.sub(r'<[^>]*>', ' ', s)
-    s = re.sub(r'\s+', ' ', s)
+    s = re.sub(br'<[^>]*>', b' ', s)
+    s = re.sub(br'\s+', b' ', s)
     return s

 def renderSynchronously(ss):
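Note: rendered pages are bytes now, so the helper's patterns are bytes regexes. Derived directly from the two substitutions above:

    remove_tags(b"<h1>Storage Server Status</h1>")
    # -> b" Storage Server Status "  (tags become spaces, whitespace runs collapse)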
@@ -89,6 +103,7 @@ class MyStorageServer(StorageServer):
         self.bucket_counter = MyBucketCountingCrawler(self, statefile)
         self.bucket_counter.setServiceParent(self)

+
 class BucketCounter(unittest.TestCase, pollmixin.PollMixin):

     def setUp(self):
@@ -100,7 +115,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
     def test_bucket_counter(self):
         basedir = "storage/BucketCounter/bucket_counter"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         # to make sure we capture the bucket-counting-crawler in the middle
         # of a cycle, we reach in and reduce its maximum slice time to 0. We
         # also make it start sooner than usual.
@@ -113,12 +128,12 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
         # this sample is before the crawler has started doing anything
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Accepting new shares: Yes", s)
-        self.failUnlessIn("Reserved space: - 0 B (0)", s)
-        self.failUnlessIn("Total buckets: Not computed yet", s)
-        self.failUnlessIn("Next crawl in", s)
+        self.failUnlessIn(b"Accepting new shares: Yes", s)
+        self.failUnlessIn(b"Reserved space: - 0 B (0)", s)
+        self.failUnlessIn(b"Total buckets: Not computed yet", s)
+        self.failUnlessIn(b"Next crawl in", s)

         # give the bucket-counting-crawler one tick to get started. The
         # cpu_slice=0 will force it to yield right after it processes the
@@ -137,8 +152,8 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
             ss.bucket_counter.cpu_slice = 100.0 # finish as fast as possible
             html = renderSynchronously(w)
             s = remove_tags(html)
-            self.failUnlessIn(" Current crawl ", s)
-            self.failUnlessIn(" (next work in ", s)
+            self.failUnlessIn(b" Current crawl ", s)
+            self.failUnlessIn(b" (next work in ", s)
         d.addCallback(_check)

         # now give it enough time to complete a full cycle
@@ -149,15 +164,15 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
             ss.bucket_counter.cpu_slice = orig_cpu_slice
             html = renderSynchronously(w)
             s = remove_tags(html)
-            self.failUnlessIn("Total buckets: 0 (the number of", s)
-            self.failUnless("Next crawl in 59 minutes" in s or "Next crawl in 60 minutes" in s, s)
+            self.failUnlessIn(b"Total buckets: 0 (the number of", s)
+            self.failUnless(b"Next crawl in 59 minutes" in s or "Next crawl in 60 minutes" in s, s)
         d.addCallback(_check2)
         return d

     def test_bucket_counter_cleanup(self):
         basedir = "storage/BucketCounter/bucket_counter_cleanup"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         # to make sure we capture the bucket-counting-crawler in the middle
         # of a cycle, we reach in and reduce its maximum slice time to 0.
         ss.bucket_counter.slow_start = 0
@@ -190,16 +205,16 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
         def _check2(ignored):
             ss.bucket_counter.cpu_slice = orig_cpu_slice
             s = ss.bucket_counter.get_state()
-            self.failIf(-12 in s["bucket-counts"], s["bucket-counts"].keys())
+            self.failIf(-12 in s["bucket-counts"], list(s["bucket-counts"].keys()))
             self.failIf("bogusprefix!" in s["storage-index-samples"],
-                        s["storage-index-samples"].keys())
+                        list(s["storage-index-samples"].keys()))
         d.addCallback(_check2)
         return d

     def test_bucket_counter_eta(self):
         basedir = "storage/BucketCounter/bucket_counter_eta"
         fileutil.make_dirs(basedir)
-        ss = MyStorageServer(basedir, "\x00" * 20)
+        ss = MyStorageServer(basedir, b"\x00" * 20)
         ss.bucket_counter.slow_start = 0
         # these will be fired inside finished_prefix()
         hooks = ss.bucket_counter.hook_ds = [defer.Deferred() for i in range(3)]
@@ -211,20 +226,20 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
             # no ETA is available yet
             html = renderSynchronously(w)
             s = remove_tags(html)
-            self.failUnlessIn("complete (next work", s)
+            self.failUnlessIn(b"complete (next work", s)

         def _check_2(ignored):
             # one prefix has finished, so an ETA based upon that elapsed time
             # should be available.
             html = renderSynchronously(w)
             s = remove_tags(html)
-            self.failUnlessIn("complete (ETA ", s)
+            self.failUnlessIn(b"complete (ETA ", s)

         def _check_3(ignored):
             # two prefixes have finished
             html = renderSynchronously(w)
             s = remove_tags(html)
-            self.failUnlessIn("complete (ETA ", s)
+            self.failUnlessIn(b"complete (ETA ", s)
             d.callback("done")

         hooks[0].addCallback(_check_1).addErrback(d.errback)
@@ -275,27 +290,27 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
     def make_shares(self, ss):
         def make(si):
-            return (si, hashutil.tagged_hash("renew", si),
-                    hashutil.tagged_hash("cancel", si))
+            return (si, hashutil.tagged_hash(b"renew", si),
+                    hashutil.tagged_hash(b"cancel", si))
         def make_mutable(si):
-            return (si, hashutil.tagged_hash("renew", si),
-                    hashutil.tagged_hash("cancel", si),
-                    hashutil.tagged_hash("write-enabler", si))
+            return (si, hashutil.tagged_hash(b"renew", si),
+                    hashutil.tagged_hash(b"cancel", si),
+                    hashutil.tagged_hash(b"write-enabler", si))
         def make_extra_lease(si, num):
-            return (hashutil.tagged_hash("renew-%d" % num, si),
-                    hashutil.tagged_hash("cancel-%d" % num, si))
+            return (hashutil.tagged_hash(b"renew-%d" % num, si),
+                    hashutil.tagged_hash(b"cancel-%d" % num, si))

-        immutable_si_0, rs0, cs0 = make("\x00" * 16)
-        immutable_si_1, rs1, cs1 = make("\x01" * 16)
+        immutable_si_0, rs0, cs0 = make(b"\x00" * 16)
+        immutable_si_1, rs1, cs1 = make(b"\x01" * 16)
         rs1a, cs1a = make_extra_lease(immutable_si_1, 1)
-        mutable_si_2, rs2, cs2, we2 = make_mutable("\x02" * 16)
-        mutable_si_3, rs3, cs3, we3 = make_mutable("\x03" * 16)
+        mutable_si_2, rs2, cs2, we2 = make_mutable(b"\x02" * 16)
+        mutable_si_3, rs3, cs3, we3 = make_mutable(b"\x03" * 16)
         rs3a, cs3a = make_extra_lease(mutable_si_3, 1)
         sharenums = [0]
         canary = FakeCanary()
         # note: 'tahoe debug dump-share' will not handle this file, since the
         # inner contents are not a valid CHK share
-        data = "\xff" * 1000
+        data = b"\xff" * 1000

         a,w = ss.remote_allocate_buckets(immutable_si_0, rs0, cs0, sharenums,
                                          1000, canary)
@@ -322,7 +337,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
     def test_basic(self):
         basedir = "storage/LeaseCrawler/basic"
         fileutil.make_dirs(basedir)
-        ss = InstrumentedStorageServer(basedir, "\x00" * 20)
+        ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
         # make it start sooner than usual.
         lc = ss.lease_checker
         lc.slow_start = 0
@@ -339,7 +354,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
                           storage_index_to_dir(immutable_si_0),
                           "not-a-share")
         f = open(fn, "wb")
-        f.write("I am not a share.\n")
+        f.write(b"I am not a share.\n")
         f.close()

         # this is before the crawl has started, so we're not in a cycle yet
@@ -398,25 +413,25 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html_in_cycle(html):
             s = remove_tags(html)
-            self.failUnlessIn("So far, this cycle has examined "
-                              "1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
-            self.failUnlessIn("and has recovered: "
-                              "0 shares, 0 buckets (0 mutable / 0 immutable), "
-                              "0 B (0 B / 0 B)", s)
-            self.failUnlessIn("If expiration were enabled, "
-                              "we would have recovered: "
-                              "0 shares, 0 buckets (0 mutable / 0 immutable),"
-                              " 0 B (0 B / 0 B) by now", s)
-            self.failUnlessIn("and the remainder of this cycle "
-                              "would probably recover: "
-                              "0 shares, 0 buckets (0 mutable / 0 immutable),"
-                              " 0 B (0 B / 0 B)", s)
-            self.failUnlessIn("and the whole cycle would probably recover: "
-                              "0 shares, 0 buckets (0 mutable / 0 immutable),"
-                              " 0 B (0 B / 0 B)", s)
-            self.failUnlessIn("if we were strictly using each lease's default "
-                              "31-day lease lifetime", s)
-            self.failUnlessIn("this cycle would be expected to recover: ", s)
+            self.failUnlessIn(b"So far, this cycle has examined "
+                              b"1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
+            self.failUnlessIn(b"and has recovered: "
+                              b"0 shares, 0 buckets (0 mutable / 0 immutable), "
+                              b"0 B (0 B / 0 B)", s)
+            self.failUnlessIn(b"If expiration were enabled, "
+                              b"we would have recovered: "
+                              b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+                              b" 0 B (0 B / 0 B) by now", s)
+            self.failUnlessIn(b"and the remainder of this cycle "
+                              b"would probably recover: "
+                              b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+                              b" 0 B (0 B / 0 B)", s)
+            self.failUnlessIn(b"and the whole cycle would probably recover: "
+                              b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+                              b" 0 B (0 B / 0 B)", s)
+            self.failUnlessIn(b"if we were strictly using each lease's default "
+                              b"31-day lease lifetime", s)
+            self.failUnlessIn(b"this cycle would be expected to recover: ", s)
         d.addCallback(_check_html_in_cycle)

         # wait for the crawler to finish the first cycle. Nothing should have
@@ -473,11 +488,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("recovered: 0 shares, 0 buckets "
-                              "(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
-            self.failUnlessIn("and saw a total of 4 shares, 4 buckets "
-                              "(2 mutable / 2 immutable),", s)
-            self.failUnlessIn("but expiration was not enabled", s)
+            self.failUnlessIn(b"recovered: 0 shares, 0 buckets "
+                              b"(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
+            self.failUnlessIn(b"and saw a total of 4 shares, 4 buckets "
+                              b"(2 mutable / 2 immutable),", s)
+            self.failUnlessIn(b"but expiration was not enabled", s)
         d.addCallback(_check_html)
         d.addCallback(lambda ign: renderJSON(webstatus))
         def _check_json(raw):
@@ -505,7 +520,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         fileutil.make_dirs(basedir)
         # setting expiration_time to 2000 means that any lease which is more
         # than 2000s old will be expired.
-        ss = InstrumentedStorageServer(basedir, "\x00" * 20,
+        ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
                                        expiration_enabled=True,
                                        expiration_mode="age",
                                        expiration_override_lease_duration=2000)
@@ -578,11 +593,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
             # predictor thinks we'll have 5 shares and that we'll delete them
             # all. This part of the test depends upon the SIs landing right
             # where they do now.
-            self.failUnlessIn("The remainder of this cycle is expected to "
-                              "recover: 4 shares, 4 buckets", s)
-            self.failUnlessIn("The whole cycle is expected to examine "
-                              "5 shares in 5 buckets and to recover: "
-                              "5 shares, 5 buckets", s)
+            self.failUnlessIn(b"The remainder of this cycle is expected to "
+                              b"recover: 4 shares, 4 buckets", s)
+            self.failUnlessIn(b"The whole cycle is expected to examine "
+                              b"5 shares in 5 buckets and to recover: "
+                              b"5 shares, 5 buckets", s)
         d.addCallback(_check_html_in_cycle)

         # wait for the crawler to finish the first cycle. Two shares should
@@ -632,9 +647,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("Expiration Enabled: expired leases will be removed", s)
-            self.failUnlessIn("Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
-            self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
+            self.failUnlessIn(b"Expiration Enabled: expired leases will be removed", s)
+            self.failUnlessIn(b"Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
+            self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
         d.addCallback(_check_html)
         return d
@@ -645,7 +660,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         # is more than 2000s old will be expired.
         now = time.time()
         then = int(now - 2000)
-        ss = InstrumentedStorageServer(basedir, "\x00" * 20,
+        ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
                                        expiration_enabled=True,
                                        expiration_mode="cutoff-date",
                                        expiration_cutoff_date=then)
@@ -722,11 +737,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
             # predictor thinks we'll have 5 shares and that we'll delete them
             # all. This part of the test depends upon the SIs landing right
             # where they do now.
-            self.failUnlessIn("The remainder of this cycle is expected to "
-                              "recover: 4 shares, 4 buckets", s)
-            self.failUnlessIn("The whole cycle is expected to examine "
-                              "5 shares in 5 buckets and to recover: "
-                              "5 shares, 5 buckets", s)
+            self.failUnlessIn(b"The remainder of this cycle is expected to "
+                              b"recover: 4 shares, 4 buckets", s)
+            self.failUnlessIn(b"The whole cycle is expected to examine "
+                              b"5 shares in 5 buckets and to recover: "
+                              b"5 shares, 5 buckets", s)
         d.addCallback(_check_html_in_cycle)

         # wait for the crawler to finish the first cycle. Two shares should
@@ -778,12 +793,13 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("Expiration Enabled:"
-                              " expired leases will be removed", s)
-            date = time.strftime("%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then))
-            substr = "Leases created or last renewed before %s will be considered expired." % date
+            self.failUnlessIn(b"Expiration Enabled:"
+                              b" expired leases will be removed", s)
+            date = time.strftime(
+                u"%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then)).encode("ascii")
+            substr = b"Leases created or last renewed before %s will be considered expired." % date
             self.failUnlessIn(substr, s)
-            self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
+            self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
         d.addCallback(_check_html)
         return d
@@ -792,7 +808,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         fileutil.make_dirs(basedir)
         now = time.time()
         then = int(now - 2000)
-        ss = StorageServer(basedir, "\x00" * 20,
+        ss = StorageServer(basedir, b"\x00" * 20,
                            expiration_enabled=True,
                            expiration_mode="cutoff-date",
                            expiration_cutoff_date=then,
@@ -840,7 +856,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("The following sharetypes will be expired: immutable.", s)
+            self.failUnlessIn(b"The following sharetypes will be expired: immutable.", s)
         d.addCallback(_check_html)
         return d
@@ -849,7 +865,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         fileutil.make_dirs(basedir)
         now = time.time()
         then = int(now - 2000)
-        ss = StorageServer(basedir, "\x00" * 20,
+        ss = StorageServer(basedir, b"\x00" * 20,
                            expiration_enabled=True,
                            expiration_mode="cutoff-date",
                            expiration_cutoff_date=then,
@@ -897,7 +913,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         d.addCallback(lambda ign: renderDeferred(webstatus))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("The following sharetypes will be expired: mutable.", s)
+            self.failUnlessIn(b"The following sharetypes will be expired: mutable.", s)
         d.addCallback(_check_html)
         return d
@@ -905,14 +921,14 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         basedir = "storage/LeaseCrawler/bad_mode"
         fileutil.make_dirs(basedir)
         e = self.failUnlessRaises(ValueError,
-                                  StorageServer, basedir, "\x00" * 20,
+                                  StorageServer, basedir, b"\x00" * 20,
                                   expiration_mode="bogus")
         self.failUnlessIn("GC mode 'bogus' must be 'age' or 'cutoff-date'", str(e))

     def test_limited_history(self):
         basedir = "storage/LeaseCrawler/limited_history"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         # make it start sooner than usual.
         lc = ss.lease_checker
         lc.slow_start = 0
@@ -944,7 +960,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
     def test_unpredictable_future(self):
         basedir = "storage/LeaseCrawler/unpredictable_future"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         # make it start sooner than usual.
         lc = ss.lease_checker
         lc.slow_start = 0
@@ -1007,7 +1023,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
     def test_no_st_blocks(self):
         basedir = "storage/LeaseCrawler/no_st_blocks"
         fileutil.make_dirs(basedir)
-        ss = No_ST_BLOCKS_StorageServer(basedir, "\x00" * 20,
+        ss = No_ST_BLOCKS_StorageServer(basedir, b"\x00" * 20,
                                         expiration_mode="age",
                                         expiration_override_lease_duration=-1000)
         # a negative expiration_time= means the "configured-"
@@ -1046,7 +1062,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         ]
         basedir = "storage/LeaseCrawler/share_corruption"
         fileutil.make_dirs(basedir)
-        ss = InstrumentedStorageServer(basedir, "\x00" * 20)
+        ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
         w = StorageStatus(ss)
         # make it start sooner than usual.
         lc = ss.lease_checker
@@ -1064,7 +1080,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         fn = os.path.join(ss.sharedir, storage_index_to_dir(first), "0")
         f = open(fn, "rb+")
         f.seek(0)
-        f.write("BAD MAGIC")
+        f.write(b"BAD MAGIC")
         f.close()
         # if get_share_file() doesn't see the correct mutable magic, it
         # assumes the file is an immutable share, and then
@@ -1073,7 +1089,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         # UnknownImmutableContainerVersionError.

         # also create an empty bucket
-        empty_si = base32.b2a("\x04"*16)
+        empty_si = base32.b2a(b"\x04"*16)
         empty_bucket_dir = os.path.join(ss.sharedir,
                                         storage_index_to_dir(empty_si))
         fileutil.make_dirs(empty_bucket_dir)
@@ -1094,7 +1110,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
             rec = so_far["space-recovered"]
             self.failUnlessEqual(rec["examined-buckets"], 1)
             self.failUnlessEqual(rec["examined-shares"], 0)
-            self.failUnlessEqual(so_far["corrupt-shares"], [(first_b32, 0)])
+            [(actual_b32, i)] = so_far["corrupt-shares"]
+            actual_b32 = actual_b32.encode("ascii")
+            self.failUnlessEqual((actual_b32, i), (first_b32, 0))
         d.addCallback(_after_first_bucket)
         d.addCallback(lambda ign: renderJSON(w))
@@ -1103,13 +1121,15 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
             # grr. json turns all dict keys into strings.
             so_far = data["lease-checker"]["cycle-to-date"]
             corrupt_shares = so_far["corrupt-shares"]
-            # it also turns all tuples into lists
-            self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
+            # it also turns all tuples into lists, and result is unicode:
+            [(actual_b32, i)] = corrupt_shares
+            actual_b32 = actual_b32.encode("ascii")
+            self.failUnlessEqual([actual_b32, i], [first_b32, 0])
         d.addCallback(_check_json)
         d.addCallback(lambda ign: renderDeferred(w))
         def _check_html(html):
             s = remove_tags(html)
-            self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
+            self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
         d.addCallback(_check_html)

         def _wait():
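Note: the decode-and-compare steps exist because JSON has neither tuples nor bytes, so a round trip changes both shape and type:

    import json
    json.loads(json.dumps({"corrupt-shares": [("aaaa", 0)]}))
    # -> {"corrupt-shares": [["aaaa", 0]]}: the tuple comes back as a list,
    # and the text comes back as unicode rather than native/byte strings.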
@@ -1122,19 +1142,22 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
             rec = last["space-recovered"]
             self.failUnlessEqual(rec["examined-buckets"], 5)
             self.failUnlessEqual(rec["examined-shares"], 3)
-            self.failUnlessEqual(last["corrupt-shares"], [(first_b32, 0)])
+            [(actual_b32, i)] = last["corrupt-shares"]
+            actual_b32 = actual_b32.encode("ascii")
+            self.failUnlessEqual((actual_b32, i), (first_b32, 0))
         d.addCallback(_after_first_cycle)
         d.addCallback(lambda ign: renderJSON(w))
         def _check_json_history(raw):
             data = json.loads(raw)
             last = data["lease-checker"]["history"]["0"]
-            corrupt_shares = last["corrupt-shares"]
-            self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
+            [(actual_b32, i)] = last["corrupt-shares"]
+            actual_b32 = actual_b32.encode("ascii")
+            self.failUnlessEqual([actual_b32, i], [first_b32, 0])
         d.addCallback(_check_json_history)
         d.addCallback(lambda ign: renderDeferred(w))
         def _check_html_history(html):
             s = remove_tags(html)
-            self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
+            self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
         d.addCallback(_check_html_history)

         def _cleanup(res):
@@ -1156,23 +1179,23 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
     def test_no_server(self):
         w = StorageStatus(None)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>No Storage Server Running</h1>", html)
+        self.failUnlessIn(b"<h1>No Storage Server Running</h1>", html)

     def test_status(self):
         basedir = "storage/WebStatus/status"
         fileutil.make_dirs(basedir)
-        nodeid = "\x00" * 20
+        nodeid = b"\x00" * 20
         ss = StorageServer(basedir, nodeid)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss, "nickname")
         d = renderDeferred(w)
         def _check_html(html):
-            self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+            self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
             s = remove_tags(html)
-            self.failUnlessIn("Server Nickname: nickname", s)
-            self.failUnlessIn("Server Nodeid: %s" % base32.b2a(nodeid), s)
-            self.failUnlessIn("Accepting new shares: Yes", s)
-            self.failUnlessIn("Reserved space: - 0 B (0)", s)
+            self.failUnlessIn(b"Server Nickname: nickname", s)
+            self.failUnlessIn(b"Server Nodeid: %s" % base32.b2a(nodeid), s)
+            self.failUnlessIn(b"Accepting new shares: Yes", s)
+            self.failUnlessIn(b"Reserved space: - 0 B (0)", s)
         d.addCallback(_check_html)
         d.addCallback(lambda ign: renderJSON(w))
         def _check_json(raw):
@@ -1195,15 +1218,15 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
         # (test runs on all platforms).
         basedir = "storage/WebStatus/status_no_disk_stats"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Accepting new shares: Yes", s)
-        self.failUnlessIn("Total disk space: ?", s)
-        self.failUnlessIn("Space Available to Tahoe: ?", s)
+        self.failUnlessIn(b"Accepting new shares: Yes", s)
+        self.failUnlessIn(b"Total disk space: ?", s)
+        self.failUnlessIn(b"Space Available to Tahoe: ?", s)
         self.failUnless(ss.get_available_space() is None)

     def test_status_bad_disk_stats(self):
@@ -1215,15 +1238,15 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
         # show that no shares will be accepted, and get_available_space() should be 0.
         basedir = "storage/WebStatus/status_bad_disk_stats"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20)
+        ss = StorageServer(basedir, b"\x00" * 20)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Accepting new shares: No", s)
-        self.failUnlessIn("Total disk space: ?", s)
-        self.failUnlessIn("Space Available to Tahoe: ?", s)
+        self.failUnlessIn(b"Accepting new shares: No", s)
+        self.failUnlessIn(b"Total disk space: ?", s)
+        self.failUnlessIn(b"Space Available to Tahoe: ?", s)
         self.failUnlessEqual(ss.get_available_space(), 0)

     def test_status_right_disk_stats(self):
@@ -1235,7 +1258,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
         basedir = "storage/WebStatus/status_right_disk_stats"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20, reserved_space=reserved)
+        ss = StorageServer(basedir, b"\x00" * 20, reserved_space=reserved)
         expecteddir = ss.sharedir

         def call_get_disk_stats(whichdir, reserved_space=0):
@@ -1256,48 +1279,48 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Total disk space: 5.00 GB", s)
-        self.failUnlessIn("Disk space used: - 1.00 GB", s)
-        self.failUnlessIn("Disk space free (root): 4.00 GB", s)
-        self.failUnlessIn("Disk space free (non-root): 3.00 GB", s)
-        self.failUnlessIn("Reserved space: - 1.00 GB", s)
-        self.failUnlessIn("Space Available to Tahoe: 2.00 GB", s)
+        self.failUnlessIn(b"Total disk space: 5.00 GB", s)
+        self.failUnlessIn(b"Disk space used: - 1.00 GB", s)
+        self.failUnlessIn(b"Disk space free (root): 4.00 GB", s)
+        self.failUnlessIn(b"Disk space free (non-root): 3.00 GB", s)
+        self.failUnlessIn(b"Reserved space: - 1.00 GB", s)
+        self.failUnlessIn(b"Space Available to Tahoe: 2.00 GB", s)
         self.failUnlessEqual(ss.get_available_space(), 2*GB)

     def test_readonly(self):
         basedir = "storage/WebStatus/readonly"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20, readonly_storage=True)
+        ss = StorageServer(basedir, b"\x00" * 20, readonly_storage=True)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Accepting new shares: No", s)
+        self.failUnlessIn(b"Accepting new shares: No", s)

     def test_reserved(self):
         basedir = "storage/WebStatus/reserved"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
+        ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
+        self.failUnlessIn(b"Reserved space: - 10.00 MB (10000000)", s)

     def test_huge_reserved(self):
         basedir = "storage/WebStatus/reserved"
         fileutil.make_dirs(basedir)
-        ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
+        ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
         ss.setServiceParent(self.s)
         w = StorageStatus(ss)
         html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
         s = remove_tags(html)
-        self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
+        self.failUnlessIn(b"Reserved space: - 10.00 MB (10000000)", s)

     def test_util(self):
         w = StorageStatusElement(None, None)

View File

@ -980,6 +980,8 @@ class CountingDataUploadable(upload.Data):
class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase): class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
timeout = 180
def test_connections(self): def test_connections(self):
self.basedir = "system/SystemTest/test_connections" self.basedir = "system/SystemTest/test_connections"
d = self.set_up_nodes() d = self.set_up_nodes()
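
The new class-level timeout is twisted.trial's per-test timeout knob; 180
seconds gives these slow system tests more headroom than trial's default
(120 seconds, if I recall correctly). A hedged sketch with a made-up test:

    from twisted.trial import unittest

    class SlowSystemTest(unittest.TestCase):   # hypothetical example
        timeout = 180   # seconds before trial aborts each test method
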

View File

@ -1,3 +1,17 @@
"""
Tests for allmydata.uri.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401
import os import os
from twisted.trial import unittest from twisted.trial import unittest
@ -40,24 +54,24 @@ class Literal(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.get_verify_cap(), None) self.failUnlessReallyEqual(u.get_verify_cap(), None)
def test_empty(self): def test_empty(self):
data = "" # This data is some *very* small data! data = b"" # This data is some *very* small data!
return self._help_test(data) return self._help_test(data)
def test_pack(self): def test_pack(self):
data = "This is some small data" data = b"This is some small data"
return self._help_test(data) return self._help_test(data)
def test_nonascii(self): def test_nonascii(self):
data = "This contains \x00 and URI:LIT: and \n, oh my." data = b"This contains \x00 and URI:LIT: and \n, oh my."
return self._help_test(data) return self._help_test(data)
class Compare(testutil.ReallyEqualMixin, unittest.TestCase): class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
def test_compare(self): def test_compare(self):
lit1 = uri.LiteralFileURI("some data") lit1 = uri.LiteralFileURI(b"some data")
fileURI = 'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834' fileURI = b'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834'
chk1 = uri.CHKFileURI.init_from_string(fileURI) chk1 = uri.CHKFileURI.init_from_string(fileURI)
chk2 = uri.CHKFileURI.init_from_string(fileURI) chk2 = uri.CHKFileURI.init_from_string(fileURI)
unk = uri.UnknownURI("lafs://from_the_future") unk = uri.UnknownURI(b"lafs://from_the_future")
self.failIfEqual(lit1, chk1) self.failIfEqual(lit1, chk1)
self.failUnlessReallyEqual(chk1, chk2) self.failUnlessReallyEqual(chk1, chk2)
self.failIfEqual(chk1, "not actually a URI") self.failIfEqual(chk1, "not actually a URI")
@ -66,21 +80,24 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(len(s), 3) # since chk1==chk2 self.failUnlessReallyEqual(len(s), 3) # since chk1==chk2
def test_is_uri(self): def test_is_uri(self):
lit1 = uri.LiteralFileURI("some data").to_string() lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_uri(lit1)) self.failUnless(uri.is_uri(lit1))
self.failIf(uri.is_uri(None)) self.failIf(uri.is_uri(None))
def test_is_literal_file_uri(self): def test_is_literal_file_uri(self):
lit1 = uri.LiteralFileURI("some data").to_string() lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_literal_file_uri(lit1)) self.failUnless(uri.is_literal_file_uri(lit1))
self.failIf(uri.is_literal_file_uri(None)) self.failIf(uri.is_literal_file_uri(None))
self.failIf(uri.is_literal_file_uri("foo")) self.failIf(uri.is_literal_file_uri("foo"))
self.failIf(uri.is_literal_file_uri("ro.foo")) self.failIf(uri.is_literal_file_uri("ro.foo"))
self.failIf(uri.is_literal_file_uri("URI:LITfoo")) self.failIf(uri.is_literal_file_uri(b"URI:LITfoo"))
self.failUnless(uri.is_literal_file_uri("ro.URI:LIT:foo")) self.failUnless(uri.is_literal_file_uri("ro.URI:LIT:foo"))
self.failUnless(uri.is_literal_file_uri("imm.URI:LIT:foo")) self.failUnless(uri.is_literal_file_uri("imm.URI:LIT:foo"))
def test_has_uri_prefix(self): def test_has_uri_prefix(self):
self.failUnless(uri.has_uri_prefix(b"URI:foo"))
self.failUnless(uri.has_uri_prefix(b"ro.URI:foo"))
self.failUnless(uri.has_uri_prefix(b"imm.URI:foo"))
self.failUnless(uri.has_uri_prefix("URI:foo")) self.failUnless(uri.has_uri_prefix("URI:foo"))
self.failUnless(uri.has_uri_prefix("ro.URI:foo")) self.failUnless(uri.has_uri_prefix("ro.URI:foo"))
self.failUnless(uri.has_uri_prefix("imm.URI:foo")) self.failUnless(uri.has_uri_prefix("imm.URI:foo"))
@ -89,9 +106,9 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase): class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self): def test_pack(self):
key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key) storage_index = hashutil.storage_index_hash(key)
uri_extension_hash = hashutil.uri_extension_hash("stuff") uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25 needed_shares = 25
total_shares = 100 total_shares = 100
size = 1234 size = 1234
@ -138,26 +155,26 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.to_string(), u2imm.to_string()) self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())
v = u.get_verify_cap() v = u.get_verify_cap()
self.failUnless(isinstance(v.to_string(), str)) self.failUnless(isinstance(v.to_string(), bytes))
self.failUnless(v.is_readonly()) self.failUnless(v.is_readonly())
self.failIf(v.is_mutable()) self.failIf(v.is_mutable())
v2 = uri.from_string(v.to_string()) v2 = uri.from_string(v.to_string())
self.failUnlessReallyEqual(v, v2) self.failUnlessReallyEqual(v, v2)
v3 = uri.CHKFileVerifierURI(storage_index="\x00"*16, v3 = uri.CHKFileVerifierURI(storage_index=b"\x00"*16,
uri_extension_hash="\x00"*32, uri_extension_hash=b"\x00"*32,
needed_shares=3, needed_shares=3,
total_shares=10, total_shares=10,
size=1234) size=1234)
self.failUnless(isinstance(v3.to_string(), str)) self.failUnless(isinstance(v3.to_string(), bytes))
self.failUnless(v3.is_readonly()) self.failUnless(v3.is_readonly())
self.failIf(v3.is_mutable()) self.failIf(v3.is_mutable())
def test_pack_badly(self): def test_pack_badly(self):
key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key) storage_index = hashutil.storage_index_hash(key)
uri_extension_hash = hashutil.uri_extension_hash("stuff") uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25 needed_shares = 25
total_shares = 100 total_shares = 100
size = 1234 size = 1234
@ -186,35 +203,37 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
class Extension(testutil.ReallyEqualMixin, unittest.TestCase): class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self): def test_pack(self):
data = {"stuff": "value", data = {b"stuff": b"value",
"size": 12, b"size": 12,
"needed_shares": 3, b"needed_shares": 3,
"big_hash": hashutil.tagged_hash("foo", "bar"), b"big_hash": hashutil.tagged_hash(b"foo", b"bar"),
} }
ext = uri.pack_extension(data) ext = uri.pack_extension(data)
d = uri.unpack_extension(ext) d = uri.unpack_extension(ext)
self.failUnlessReallyEqual(d["stuff"], "value") self.failUnlessReallyEqual(d[b"stuff"], b"value")
self.failUnlessReallyEqual(d["size"], 12) self.failUnlessReallyEqual(d[b"size"], 12)
self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash("foo", "bar")) self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
readable = uri.unpack_extension_readable(ext) readable = uri.unpack_extension_readable(ext)
self.failUnlessReallyEqual(readable["needed_shares"], 3) self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
self.failUnlessReallyEqual(readable["stuff"], "value") self.failUnlessReallyEqual(readable[b"stuff"], b"value")
self.failUnlessReallyEqual(readable["size"], 12) self.failUnlessReallyEqual(readable[b"size"], 12)
self.failUnlessReallyEqual(readable["big_hash"], self.failUnlessReallyEqual(readable[b"big_hash"],
base32.b2a(hashutil.tagged_hash("foo", "bar"))) base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
self.failUnlessReallyEqual(readable["UEB_hash"], self.failUnlessReallyEqual(readable[b"UEB_hash"],
base32.b2a(hashutil.uri_extension_hash(ext))) base32.b2a(hashutil.uri_extension_hash(ext)))
class Unknown(testutil.ReallyEqualMixin, unittest.TestCase): class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
def test_from_future(self): def test_from_future(self):
# any URI type that we don't recognize should be treated as unknown # any URI type that we don't recognize should be treated as unknown
future_uri = "I am a URI from the future. Whatever you do, don't " future_uri = b"I am a URI from the future. Whatever you do, don't "
u = uri.from_string(future_uri) u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI)) self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri) self.failUnlessReallyEqual(u.to_string(), future_uri)
self.failUnless(u.get_readonly() is None) self.failUnless(u.get_readonly() is None)
self.failUnless(u.get_error() is None) self.failUnless(u.get_error() is None)
future_uri_unicode = future_uri.decode("utf-8")
self.assertEqual(future_uri, uri.from_string(future_uri_unicode).to_string())
u2 = uri.UnknownURI(future_uri, error=CapConstraintError("...")) u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
self.failUnlessReallyEqual(u.to_string(), future_uri) self.failUnlessReallyEqual(u.to_string(), future_uri)
@ -222,7 +241,7 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(isinstance(u2.get_error(), CapConstraintError)) self.failUnless(isinstance(u2.get_error(), CapConstraintError))
# Future caps might have non-ASCII chars in them. (Or maybe not, who can tell about the future?) # Future caps might have non-ASCII chars in them. (Or maybe not, who can tell about the future?)
future_uri = u"I am a cap from the \u263A future. Whatever you ".encode('utf-8') future_uri = u"I am a cap from the \u263A future. Whatever you ".encode("utf-8")
u = uri.from_string(future_uri) u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI)) self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri) self.failUnlessReallyEqual(u.to_string(), future_uri)
@ -236,15 +255,15 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
class Constraint(testutil.ReallyEqualMixin, unittest.TestCase): class Constraint(testutil.ReallyEqualMixin, unittest.TestCase):
def test_constraint(self): def test_constraint(self):
bad = "http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/" bad = b"http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/"
self.failUnlessRaises(uri.BadURIError, uri.DirectoryURI.init_from_string, bad) self.failUnlessRaises(uri.BadURIError, uri.DirectoryURI.init_from_string, bad)
fileURI = 'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834' fileURI = b'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834'
uri.CHKFileURI.init_from_string(fileURI) uri.CHKFileURI.init_from_string(fileURI)
class Mutable(testutil.ReallyEqualMixin, unittest.TestCase): class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
def setUp(self): def setUp(self):
self.writekey = "\x01" * 16 self.writekey = b"\x01" * 16
self.fingerprint = "\x02" * 32 self.fingerprint = b"\x02" * 32
self.readkey = hashutil.ssk_readkey_hash(self.writekey) self.readkey = hashutil.ssk_readkey_hash(self.writekey)
self.storage_index = hashutil.ssk_storage_index_hash(self.readkey) self.storage_index = hashutil.ssk_storage_index_hash(self.readkey)
@ -410,28 +429,29 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint) u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint)
cap = u1.to_string() cap = u1.to_string()
cap2 = cap+":I COME FROM THE FUTURE" cap2 = cap+b":I COME FROM THE FUTURE"
u2 = uri.WriteableMDMFFileURI.init_from_string(cap2) u2 = uri.WriteableMDMFFileURI.init_from_string(cap2)
self.failUnlessReallyEqual(self.writekey, u2.writekey) self.failUnlessReallyEqual(self.writekey, u2.writekey)
self.failUnlessReallyEqual(self.fingerprint, u2.fingerprint) self.failUnlessReallyEqual(self.fingerprint, u2.fingerprint)
self.failIf(u2.is_readonly()) self.failIf(u2.is_readonly())
self.failUnless(u2.is_mutable()) self.failUnless(u2.is_mutable())
cap3 = cap+":"+os.urandom(40) # parse *that*!
cap3 = cap+b":" + os.urandom(40)
u3 = uri.WriteableMDMFFileURI.init_from_string(cap3) u3 = uri.WriteableMDMFFileURI.init_from_string(cap3)
self.failUnlessReallyEqual(self.writekey, u3.writekey) self.failUnlessReallyEqual(self.writekey, u3.writekey)
self.failUnlessReallyEqual(self.fingerprint, u3.fingerprint) self.failUnlessReallyEqual(self.fingerprint, u3.fingerprint)
self.failIf(u3.is_readonly()) self.failIf(u3.is_readonly())
self.failUnless(u3.is_mutable()) self.failUnless(u3.is_mutable())
cap4 = u1.get_readonly().to_string()+":ooh scary future stuff" cap4 = u1.get_readonly().to_string()+b":ooh scary future stuff"
u4 = uri.from_string_mutable_filenode(cap4) u4 = uri.from_string_mutable_filenode(cap4)
self.failUnlessReallyEqual(self.readkey, u4.readkey) self.failUnlessReallyEqual(self.readkey, u4.readkey)
self.failUnlessReallyEqual(self.fingerprint, u4.fingerprint) self.failUnlessReallyEqual(self.fingerprint, u4.fingerprint)
self.failUnless(u4.is_readonly()) self.failUnless(u4.is_readonly())
self.failUnless(u4.is_mutable()) self.failUnless(u4.is_mutable())
cap5 = u1.get_verify_cap().to_string()+":spoilers!" cap5 = u1.get_verify_cap().to_string()+b":spoilers!"
u5 = uri.from_string(cap5) u5 = uri.from_string(cap5)
self.failUnlessReallyEqual(self.storage_index, u5.storage_index) self.failUnlessReallyEqual(self.storage_index, u5.storage_index)
self.failUnlessReallyEqual(self.fingerprint, u5.fingerprint) self.failUnlessReallyEqual(self.fingerprint, u5.fingerprint)
@ -468,8 +488,8 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase): class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self): def test_pack(self):
writekey = "\x01" * 16 writekey = b"\x01" * 16
fingerprint = "\x02" * 32 fingerprint = b"\x02" * 32
n = uri.WriteableSSKFileURI(writekey, fingerprint) n = uri.WriteableSSKFileURI(writekey, fingerprint)
u1 = uri.DirectoryURI(n) u1 = uri.DirectoryURI(n)
@ -536,8 +556,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
u1.get_verify_cap()._filenode_uri) u1.get_verify_cap()._filenode_uri)
def test_immutable(self): def test_immutable(self):
readkey = "\x01" * 16 readkey = b"\x01" * 16
uri_extension_hash = hashutil.uri_extension_hash("stuff") uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 3 needed_shares = 3
total_shares = 10 total_shares = 10
size = 1234 size = 1234
@ -548,7 +568,7 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
total_shares=total_shares, total_shares=total_shares,
size=size) size=size)
fncap = fnuri.to_string() fncap = fnuri.to_string()
self.failUnlessReallyEqual(fncap, "URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234") self.failUnlessReallyEqual(fncap, b"URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
u1 = uri.ImmutableDirectoryURI(fnuri) u1 = uri.ImmutableDirectoryURI(fnuri)
self.failUnless(u1.is_readonly()) self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable()) self.failIf(u1.is_mutable())
@ -587,20 +607,20 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IVerifierURI.providedBy(u2_verifier)) self.failUnless(IVerifierURI.providedBy(u2_verifier))
u2vs = u2_verifier.to_string() u2vs = u2_verifier.to_string()
# URI:DIR2-CHK-Verifier:$key:$ueb:$k:$n:$size # URI:DIR2-CHK-Verifier:$key:$ueb:$k:$n:$size
self.failUnless(u2vs.startswith("URI:DIR2-CHK-Verifier:"), u2vs) self.failUnless(u2vs.startswith(b"URI:DIR2-CHK-Verifier:"), u2vs)
u2_verifier_fileuri = u2_verifier.get_filenode_cap() u2_verifier_fileuri = u2_verifier.get_filenode_cap()
self.failUnless(IVerifierURI.providedBy(u2_verifier_fileuri)) self.failUnless(IVerifierURI.providedBy(u2_verifier_fileuri))
u2vfs = u2_verifier_fileuri.to_string() u2vfs = u2_verifier_fileuri.to_string()
# URI:CHK-Verifier:$key:$ueb:$k:$n:$size # URI:CHK-Verifier:$key:$ueb:$k:$n:$size
self.failUnlessReallyEqual(u2vfs, fnuri.get_verify_cap().to_string()) self.failUnlessReallyEqual(u2vfs, fnuri.get_verify_cap().to_string())
self.failUnlessReallyEqual(u2vs[len("URI:DIR2-"):], u2vfs[len("URI:"):]) self.failUnlessReallyEqual(u2vs[len(b"URI:DIR2-"):], u2vfs[len(b"URI:"):])
self.failUnless(str(u2_verifier)) self.failUnless(str(u2_verifier))
def test_literal(self): def test_literal(self):
u0 = uri.LiteralFileURI("data") u0 = uri.LiteralFileURI(b"data")
u1 = uri.LiteralDirectoryURI(u0) u1 = uri.LiteralDirectoryURI(u0)
self.failUnless(str(u1)) self.failUnless(str(u1))
self.failUnlessReallyEqual(u1.to_string(), "URI:DIR2-LIT:mrqxiyi") self.failUnlessReallyEqual(u1.to_string(), b"URI:DIR2-LIT:mrqxiyi")
self.failUnless(u1.is_readonly()) self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable()) self.failIf(u1.is_mutable())
self.failUnless(IURI.providedBy(u1)) self.failUnless(IURI.providedBy(u1))
@ -608,11 +628,11 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IDirnodeURI.providedBy(u1)) self.failUnless(IDirnodeURI.providedBy(u1))
self.failUnlessReallyEqual(u1.get_verify_cap(), None) self.failUnlessReallyEqual(u1.get_verify_cap(), None)
self.failUnlessReallyEqual(u1.get_storage_index(), None) self.failUnlessReallyEqual(u1.get_storage_index(), None)
self.failUnlessReallyEqual(u1.abbrev_si(), "<LIT>") self.failUnlessReallyEqual(u1.abbrev_si(), b"<LIT>")
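
The expected b"URI:DIR2-LIT:mrqxiyi" above is just the payload b"data" in
Tahoe's lowercase, unpadded base32. Assuming base32.b2a matches RFC 4648 minus
the padding and the upper case, the digits can be reproduced with the stdlib:

    import base64
    assert base64.b32encode(b"data").rstrip(b"=").lower() == b"mrqxiyi"
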
def test_mdmf(self): def test_mdmf(self):
writekey = "\x01" * 16 writekey = b"\x01" * 16
fingerprint = "\x02" * 32 fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint) uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1) d1 = uri.MDMFDirectoryURI(uri1)
self.failIf(d1.is_readonly()) self.failIf(d1.is_readonly())
@ -635,8 +655,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessIsInstance(d3, uri.UnknownURI) self.failUnlessIsInstance(d3, uri.UnknownURI)
def test_mdmf_attenuation(self): def test_mdmf_attenuation(self):
writekey = "\x01" * 16 writekey = b"\x01" * 16
fingerprint = "\x02" * 32 fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint) uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1) d1 = uri.MDMFDirectoryURI(uri1)
@ -676,8 +696,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
def test_mdmf_verifier(self): def test_mdmf_verifier(self):
# I'm not sure what I want to write here yet. # I'm not sure what I want to write here yet.
writekey = "\x01" * 16 writekey = b"\x01" * 16
fingerprint = "\x02" * 32 fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint) uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1) d1 = uri.MDMFDirectoryURI(uri1)
v1 = d1.get_verify_cap() v1 = d1.get_verify_cap()

View File

@ -1,3 +1,22 @@
"""
URIs (kinda sorta, really they're capabilities?).
Ported to Python 3.
Methods ending in to_string() actually return bytes; possibly this should be
fixed in a follow-up port.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# Don't import bytes, to prevent leaks.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import unicode, long
import re import re
@ -24,10 +43,10 @@ class BadURIError(CapConstraintError):
# - make variable and method names consistently use _uri for an URI string, # - make variable and method names consistently use _uri for an URI string,
# and _cap for a Cap object (decoded URI) # and _cap for a Cap object (decoded URI)
BASE32STR_128bits = '(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits) BASE32STR_128bits = b'(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits)
BASE32STR_256bits = '(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits) BASE32STR_256bits = b'(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits)
NUMBER='([0-9]+)' NUMBER=b'([0-9]+)'
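
Turning these pattern fragments into byte strings matters because Python 3
refuses to mix bytes patterns with text subjects: a compiled bytes regex only
matches bytes. A small sketch with a simplified cap shape (not the real,
longer pattern):

    import re
    NUMBER = b'([0-9]+)'
    SIMPLE_RE = re.compile(b'^URI:CHK:' + NUMBER + b'$')
    assert SIMPLE_RE.match(b'URI:CHK:345834')
    # SIMPLE_RE.match(u'URI:CHK:345834') raises TypeError on Python 3:
    # "cannot use a bytes pattern on a string-like object"
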
class _BaseURI(object): class _BaseURI(object):
@ -53,10 +72,10 @@ class _BaseURI(object):
@implementer(IURI, IImmutableFileURI) @implementer(IURI, IImmutableFileURI)
class CHKFileURI(_BaseURI): class CHKFileURI(_BaseURI):
BASE_STRING='URI:CHK:' BASE_STRING=b'URI:CHK:'
STRING_RE=re.compile('^URI:CHK:'+BASE32STR_128bits+':'+ STRING_RE=re.compile(b'^URI:CHK:'+BASE32STR_128bits+b':'+
BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER+ BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER+
'$') b'$')
def __init__(self, key, uri_extension_hash, needed_shares, total_shares, def __init__(self, key, uri_extension_hash, needed_shares, total_shares,
size): size):
@ -82,7 +101,7 @@ class CHKFileURI(_BaseURI):
assert isinstance(self.total_shares, int) assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long)) assert isinstance(self.size, (int,long))
return ('URI:CHK:%s:%s:%d:%d:%d' % return (b'URI:CHK:%s:%s:%d:%d:%d' %
(base32.b2a(self.key), (base32.b2a(self.key),
base32.b2a(self.uri_extension_hash), base32.b2a(self.uri_extension_hash),
self.needed_shares, self.needed_shares,
@ -112,9 +131,9 @@ class CHKFileURI(_BaseURI):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class CHKFileVerifierURI(_BaseURI): class CHKFileVerifierURI(_BaseURI):
BASE_STRING='URI:CHK-Verifier:' BASE_STRING=b'URI:CHK-Verifier:'
STRING_RE=re.compile('^URI:CHK-Verifier:'+BASE32STR_128bits+':'+ STRING_RE=re.compile(b'^URI:CHK-Verifier:'+BASE32STR_128bits+b':'+
BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER) BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER)
def __init__(self, storage_index, uri_extension_hash, def __init__(self, storage_index, uri_extension_hash,
needed_shares, total_shares, size): needed_shares, total_shares, size):
@ -138,7 +157,7 @@ class CHKFileVerifierURI(_BaseURI):
assert isinstance(self.total_shares, int) assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long)) assert isinstance(self.size, (int,long))
return ('URI:CHK-Verifier:%s:%s:%d:%d:%d' % return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
(si_b2a(self.storage_index), (si_b2a(self.storage_index),
base32.b2a(self.uri_extension_hash), base32.b2a(self.uri_extension_hash),
self.needed_shares, self.needed_shares,
@ -161,12 +180,12 @@ class CHKFileVerifierURI(_BaseURI):
@implementer(IURI, IImmutableFileURI) @implementer(IURI, IImmutableFileURI)
class LiteralFileURI(_BaseURI): class LiteralFileURI(_BaseURI):
BASE_STRING='URI:LIT:' BASE_STRING=b'URI:LIT:'
STRING_RE=re.compile('^URI:LIT:'+base32.BASE32STR_anybytes+'$') STRING_RE=re.compile(b'^URI:LIT:'+base32.BASE32STR_anybytes+b'$')
def __init__(self, data=None): def __init__(self, data=None):
if data is not None: if data is not None:
assert isinstance(data, str) assert isinstance(data, bytes)
self.data = data self.data = data
@classmethod @classmethod
@ -177,7 +196,7 @@ class LiteralFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1))) return cls(base32.a2b(mo.group(1)))
def to_string(self): def to_string(self):
return 'URI:LIT:%s' % base32.b2a(self.data) return b'URI:LIT:%s' % base32.b2a(self.data)
def is_readonly(self): def is_readonly(self):
return True return True
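
These to_string() bodies now rely on %-interpolation of byte strings, which
Python 2 always had and Python 3 regained in 3.5 (PEP 461); both %s and %d
work on bytes:

    assert b'URI:LIT:%s' % b'mrqxiyi' == b'URI:LIT:mrqxiyi'
    assert b'%d:%d' % (3, 10) == b'3:10'
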
@ -202,9 +221,9 @@ class LiteralFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI) @implementer(IURI, IMutableFileURI)
class WriteableSSKFileURI(_BaseURI): class WriteableSSKFileURI(_BaseURI):
BASE_STRING='URI:SSK:' BASE_STRING=b'URI:SSK:'
STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+ STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+
BASE32STR_256bits+'$') BASE32STR_256bits+b'$')
def __init__(self, writekey, fingerprint): def __init__(self, writekey, fingerprint):
self.writekey = writekey self.writekey = writekey
@ -221,9 +240,9 @@ class WriteableSSKFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.writekey, str) assert isinstance(self.writekey, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
return 'URI:SSK:%s:%s' % (base32.b2a(self.writekey), return b'URI:SSK:%s:%s' % (base32.b2a(self.writekey),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
def __repr__(self): def __repr__(self):
@ -251,8 +270,8 @@ class WriteableSSKFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI) @implementer(IURI, IMutableFileURI)
class ReadonlySSKFileURI(_BaseURI): class ReadonlySSKFileURI(_BaseURI):
BASE_STRING='URI:SSK-RO:' BASE_STRING=b'URI:SSK-RO:'
STRING_RE=re.compile('^URI:SSK-RO:'+BASE32STR_128bits+':'+BASE32STR_256bits+'$') STRING_RE=re.compile(b'^URI:SSK-RO:'+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')
def __init__(self, readkey, fingerprint): def __init__(self, readkey, fingerprint):
self.readkey = readkey self.readkey = readkey
@ -268,9 +287,9 @@ class ReadonlySSKFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.readkey, str) assert isinstance(self.readkey, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
return 'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey), return b'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
def __repr__(self): def __repr__(self):
@ -298,8 +317,8 @@ class ReadonlySSKFileURI(_BaseURI):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class SSKVerifierURI(_BaseURI): class SSKVerifierURI(_BaseURI):
BASE_STRING='URI:SSK-Verifier:' BASE_STRING=b'URI:SSK-Verifier:'
STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'$') STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')
def __init__(self, storage_index, fingerprint): def __init__(self, storage_index, fingerprint):
assert len(storage_index) == 16 assert len(storage_index) == 16
@ -314,9 +333,9 @@ class SSKVerifierURI(_BaseURI):
return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.storage_index, str) assert isinstance(self.storage_index, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
return 'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index), return b'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
def is_readonly(self): def is_readonly(self):
@ -335,8 +354,8 @@ class SSKVerifierURI(_BaseURI):
@implementer(IURI, IMutableFileURI) @implementer(IURI, IMutableFileURI)
class WriteableMDMFFileURI(_BaseURI): class WriteableMDMFFileURI(_BaseURI):
BASE_STRING='URI:MDMF:' BASE_STRING=b'URI:MDMF:'
STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, writekey, fingerprint): def __init__(self, writekey, fingerprint):
self.writekey = writekey self.writekey = writekey
@ -353,9 +372,9 @@ class WriteableMDMFFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.writekey, str) assert isinstance(self.writekey, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
ret = 'URI:MDMF:%s:%s' % (base32.b2a(self.writekey), ret = b'URI:MDMF:%s:%s' % (base32.b2a(self.writekey),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
return ret return ret
@ -384,8 +403,8 @@ class WriteableMDMFFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI) @implementer(IURI, IMutableFileURI)
class ReadonlyMDMFFileURI(_BaseURI): class ReadonlyMDMFFileURI(_BaseURI):
BASE_STRING='URI:MDMF-RO:' BASE_STRING=b'URI:MDMF-RO:'
STRING_RE=re.compile('^' +BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') STRING_RE=re.compile(b'^' +BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, readkey, fingerprint): def __init__(self, readkey, fingerprint):
self.readkey = readkey self.readkey = readkey
@ -402,9 +421,9 @@ class ReadonlyMDMFFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.readkey, str) assert isinstance(self.readkey, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
ret = 'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey), ret = b'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
return ret return ret
@ -433,8 +452,8 @@ class ReadonlyMDMFFileURI(_BaseURI):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class MDMFVerifierURI(_BaseURI): class MDMFVerifierURI(_BaseURI):
BASE_STRING='URI:MDMF-Verifier:' BASE_STRING=b'URI:MDMF-Verifier:'
STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, storage_index, fingerprint): def __init__(self, storage_index, fingerprint):
assert len(storage_index) == 16 assert len(storage_index) == 16
@ -449,9 +468,9 @@ class MDMFVerifierURI(_BaseURI):
return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2))) return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self): def to_string(self):
assert isinstance(self.storage_index, str) assert isinstance(self.storage_index, bytes)
assert isinstance(self.fingerprint, str) assert isinstance(self.fingerprint, bytes)
ret = 'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index), ret = b'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index),
base32.b2a(self.fingerprint)) base32.b2a(self.fingerprint))
return ret return ret
@ -494,12 +513,12 @@ class _DirectoryBaseURI(_BaseURI):
return self.BASE_STRING+bits return self.BASE_STRING+bits
def abbrev(self): def abbrev(self):
return self._filenode_uri.to_string().split(':')[2][:5] return self._filenode_uri.to_string().split(b':')[2][:5]
def abbrev_si(self): def abbrev_si(self):
si = self._filenode_uri.get_storage_index() si = self._filenode_uri.get_storage_index()
if si is None: if si is None:
return "<LIT>" return b"<LIT>"
return base32.b2a(si)[:5] return base32.b2a(si)[:5]
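
abbrev() and abbrev_si() now split and slice byte strings; the shape they rely
on still works as long as the separator is bytes too (the cap below is made
up):

    cap = b"URI:DIR2:abcdefghij:klmnopqrst"
    assert cap.split(b':')[2][:5] == b"abcde"
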
def is_mutable(self): def is_mutable(self):
@ -518,8 +537,8 @@ class _DirectoryBaseURI(_BaseURI):
@implementer(IDirectoryURI) @implementer(IDirectoryURI)
class DirectoryURI(_DirectoryBaseURI): class DirectoryURI(_DirectoryBaseURI):
BASE_STRING='URI:DIR2:' BASE_STRING=b'URI:DIR2:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=WriteableSSKFileURI INNER_URI_CLASS=WriteableSSKFileURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -537,8 +556,8 @@ class DirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI) @implementer(IReadonlyDirectoryURI)
class ReadonlyDirectoryURI(_DirectoryBaseURI): class ReadonlyDirectoryURI(_DirectoryBaseURI):
BASE_STRING='URI:DIR2-RO:' BASE_STRING=b'URI:DIR2-RO:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=ReadonlySSKFileURI INNER_URI_CLASS=ReadonlySSKFileURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -571,8 +590,8 @@ class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):
class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI): class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):
BASE_STRING='URI:DIR2-CHK:' BASE_STRING=b'URI:DIR2-CHK:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=CHKFileURI INNER_URI_CLASS=CHKFileURI
def get_verify_cap(self): def get_verify_cap(self):
@ -581,8 +600,8 @@ class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):
class LiteralDirectoryURI(_ImmutableDirectoryBaseURI): class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
BASE_STRING='URI:DIR2-LIT:' BASE_STRING=b'URI:DIR2-LIT:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=LiteralFileURI INNER_URI_CLASS=LiteralFileURI
def get_verify_cap(self): def get_verify_cap(self):
@ -593,8 +612,8 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
@implementer(IDirectoryURI) @implementer(IDirectoryURI)
class MDMFDirectoryURI(_DirectoryBaseURI): class MDMFDirectoryURI(_DirectoryBaseURI):
BASE_STRING='URI:DIR2-MDMF:' BASE_STRING=b'URI:DIR2-MDMF:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=WriteableMDMFFileURI INNER_URI_CLASS=WriteableMDMFFileURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -615,8 +634,8 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI) @implementer(IReadonlyDirectoryURI)
class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI): class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):
BASE_STRING='URI:DIR2-MDMF-RO:' BASE_STRING=b'URI:DIR2-MDMF-RO:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=ReadonlyMDMFFileURI INNER_URI_CLASS=ReadonlyMDMFFileURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -653,8 +672,8 @@ def wrap_dirnode_cap(filecap):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class MDMFDirectoryURIVerifier(_DirectoryBaseURI): class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
BASE_STRING='URI:DIR2-MDMF-Verifier:' BASE_STRING=b'URI:DIR2-MDMF-Verifier:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=MDMFVerifierURI INNER_URI_CLASS=MDMFVerifierURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -678,8 +697,8 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class DirectoryURIVerifier(_DirectoryBaseURI): class DirectoryURIVerifier(_DirectoryBaseURI):
BASE_STRING='URI:DIR2-Verifier:' BASE_STRING=b'URI:DIR2-Verifier:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=SSKVerifierURI INNER_URI_CLASS=SSKVerifierURI
def __init__(self, filenode_uri=None): def __init__(self, filenode_uri=None):
@ -702,8 +721,8 @@ class DirectoryURIVerifier(_DirectoryBaseURI):
@implementer(IVerifierURI) @implementer(IVerifierURI)
class ImmutableDirectoryURIVerifier(DirectoryURIVerifier): class ImmutableDirectoryURIVerifier(DirectoryURIVerifier):
BASE_STRING='URI:DIR2-CHK-Verifier:' BASE_STRING=b'URI:DIR2-CHK-Verifier:'
BASE_STRING_RE=re.compile('^'+BASE_STRING) BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=CHKFileVerifierURI INNER_URI_CLASS=CHKFileVerifierURI
@ -725,12 +744,15 @@ class UnknownURI(object):
return None return None
ALLEGED_READONLY_PREFIX = 'ro.' ALLEGED_READONLY_PREFIX = b'ro.'
ALLEGED_IMMUTABLE_PREFIX = 'imm.' ALLEGED_IMMUTABLE_PREFIX = b'imm.'
def from_string(u, deep_immutable=False, name=u"<unknown name>"): def from_string(u, deep_immutable=False, name=u"<unknown name>"):
if not isinstance(u, str): """Create URI from either unicode or byte string."""
raise TypeError("URI must be str: %r" % (u,)) if isinstance(u, unicode):
u = u.encode("utf-8")
if not isinstance(u, bytes):
raise TypeError("URI must be unicode string or bytes: %r" % (u,))
# We allow and check ALLEGED_READONLY_PREFIX or ALLEGED_IMMUTABLE_PREFIX # We allow and check ALLEGED_READONLY_PREFIX or ALLEGED_IMMUTABLE_PREFIX
# on all URIs, even though we would only strictly need to do so for caps of # on all URIs, even though we would only strictly need to do so for caps of
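
The new preamble canonicalizes any text cap to UTF-8 bytes before the prefix
dispatch below. The same pattern in isolation, as a standalone Python 3 sketch
(the helper name is hypothetical):

    def _to_cap_bytes(u):
        # mirror of from_string()'s accept-unicode-or-bytes check
        if isinstance(u, str):
            u = u.encode("utf-8")
        if not isinstance(u, bytes):
            raise TypeError("URI must be unicode string or bytes: %r" % (u,))
        return u

    assert _to_cap_bytes(u"URI:LIT:mrqxiyi") == b"URI:LIT:mrqxiyi"
    assert _to_cap_bytes(b"URI:LIT:mrqxiyi") == b"URI:LIT:mrqxiyi"
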
@ -748,62 +770,62 @@ def from_string(u, deep_immutable=False, name=u"<unknown name>"):
error = None error = None
try: try:
if s.startswith('URI:CHK:'): if s.startswith(b'URI:CHK:'):
return CHKFileURI.init_from_string(s) return CHKFileURI.init_from_string(s)
elif s.startswith('URI:CHK-Verifier:'): elif s.startswith(b'URI:CHK-Verifier:'):
return CHKFileVerifierURI.init_from_string(s) return CHKFileVerifierURI.init_from_string(s)
elif s.startswith('URI:LIT:'): elif s.startswith(b'URI:LIT:'):
return LiteralFileURI.init_from_string(s) return LiteralFileURI.init_from_string(s)
elif s.startswith('URI:SSK:'): elif s.startswith(b'URI:SSK:'):
if can_be_writeable: if can_be_writeable:
return WriteableSSKFileURI.init_from_string(s) return WriteableSSKFileURI.init_from_string(s)
kind = "URI:SSK file writecap" kind = "URI:SSK file writecap"
elif s.startswith('URI:SSK-RO:'): elif s.startswith(b'URI:SSK-RO:'):
if can_be_mutable: if can_be_mutable:
return ReadonlySSKFileURI.init_from_string(s) return ReadonlySSKFileURI.init_from_string(s)
kind = "URI:SSK-RO readcap to a mutable file" kind = "URI:SSK-RO readcap to a mutable file"
elif s.startswith('URI:SSK-Verifier:'): elif s.startswith(b'URI:SSK-Verifier:'):
return SSKVerifierURI.init_from_string(s) return SSKVerifierURI.init_from_string(s)
elif s.startswith('URI:MDMF:'): elif s.startswith(b'URI:MDMF:'):
if can_be_writeable: if can_be_writeable:
return WriteableMDMFFileURI.init_from_string(s) return WriteableMDMFFileURI.init_from_string(s)
kind = "URI:MDMF file writecap" kind = "URI:MDMF file writecap"
elif s.startswith('URI:MDMF-RO:'): elif s.startswith(b'URI:MDMF-RO:'):
if can_be_mutable: if can_be_mutable:
return ReadonlyMDMFFileURI.init_from_string(s) return ReadonlyMDMFFileURI.init_from_string(s)
kind = "URI:MDMF-RO readcap to a mutable file" kind = "URI:MDMF-RO readcap to a mutable file"
elif s.startswith('URI:MDMF-Verifier:'): elif s.startswith(b'URI:MDMF-Verifier:'):
return MDMFVerifierURI.init_from_string(s) return MDMFVerifierURI.init_from_string(s)
elif s.startswith('URI:DIR2:'): elif s.startswith(b'URI:DIR2:'):
if can_be_writeable: if can_be_writeable:
return DirectoryURI.init_from_string(s) return DirectoryURI.init_from_string(s)
kind = "URI:DIR2 directory writecap" kind = "URI:DIR2 directory writecap"
elif s.startswith('URI:DIR2-RO:'): elif s.startswith(b'URI:DIR2-RO:'):
if can_be_mutable: if can_be_mutable:
return ReadonlyDirectoryURI.init_from_string(s) return ReadonlyDirectoryURI.init_from_string(s)
kind = "URI:DIR2-RO readcap to a mutable directory" kind = "URI:DIR2-RO readcap to a mutable directory"
elif s.startswith('URI:DIR2-Verifier:'): elif s.startswith(b'URI:DIR2-Verifier:'):
return DirectoryURIVerifier.init_from_string(s) return DirectoryURIVerifier.init_from_string(s)
elif s.startswith('URI:DIR2-CHK:'): elif s.startswith(b'URI:DIR2-CHK:'):
return ImmutableDirectoryURI.init_from_string(s) return ImmutableDirectoryURI.init_from_string(s)
elif s.startswith('URI:DIR2-CHK-Verifier:'): elif s.startswith(b'URI:DIR2-CHK-Verifier:'):
return ImmutableDirectoryURIVerifier.init_from_string(s) return ImmutableDirectoryURIVerifier.init_from_string(s)
elif s.startswith('URI:DIR2-LIT:'): elif s.startswith(b'URI:DIR2-LIT:'):
return LiteralDirectoryURI.init_from_string(s) return LiteralDirectoryURI.init_from_string(s)
elif s.startswith('URI:DIR2-MDMF:'): elif s.startswith(b'URI:DIR2-MDMF:'):
if can_be_writeable: if can_be_writeable:
return MDMFDirectoryURI.init_from_string(s) return MDMFDirectoryURI.init_from_string(s)
kind = "URI:DIR2-MDMF directory writecap" kind = "URI:DIR2-MDMF directory writecap"
elif s.startswith('URI:DIR2-MDMF-RO:'): elif s.startswith(b'URI:DIR2-MDMF-RO:'):
if can_be_mutable: if can_be_mutable:
return ReadonlyMDMFDirectoryURI.init_from_string(s) return ReadonlyMDMFDirectoryURI.init_from_string(s)
kind = "URI:DIR2-MDMF-RO readcap to a mutable directory" kind = "URI:DIR2-MDMF-RO readcap to a mutable directory"
elif s.startswith('URI:DIR2-MDMF-Verifier:'): elif s.startswith(b'URI:DIR2-MDMF-Verifier:'):
return MDMFDirectoryURIVerifier.init_from_string(s) return MDMFDirectoryURIVerifier.init_from_string(s)
elif s.startswith('x-tahoe-future-test-writeable:') and not can_be_writeable: elif s.startswith(b'x-tahoe-future-test-writeable:') and not can_be_writeable:
# For testing how future writeable caps would behave in read-only contexts. # For testing how future writeable caps would behave in read-only contexts.
kind = "x-tahoe-future-test-writeable: testing cap" kind = "x-tahoe-future-test-writeable: testing cap"
elif s.startswith('x-tahoe-future-test-mutable:') and not can_be_mutable: elif s.startswith(b'x-tahoe-future-test-mutable:') and not can_be_mutable:
# For testing how future mutable readcaps would behave in immutable contexts. # For testing how future mutable readcaps would behave in immutable contexts.
kind = "x-tahoe-future-test-mutable: testing cap" kind = "x-tahoe-future-test-mutable: testing cap"
else: else:
@ -829,18 +851,22 @@ def is_uri(s):
return False return False
def is_literal_file_uri(s): def is_literal_file_uri(s):
if not isinstance(s, str): if isinstance(s, unicode):
s = s.encode("utf-8")
if not isinstance(s, bytes):
return False return False
return (s.startswith('URI:LIT:') or return (s.startswith(b'URI:LIT:') or
s.startswith(ALLEGED_READONLY_PREFIX + 'URI:LIT:') or s.startswith(ALLEGED_READONLY_PREFIX + b'URI:LIT:') or
s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:LIT:')) s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))
def has_uri_prefix(s): def has_uri_prefix(s):
if not isinstance(s, str): if isinstance(s, unicode):
s = s.encode("utf-8")
if not isinstance(s, bytes):
return False return False
return (s.startswith("URI:") or return (s.startswith(b"URI:") or
s.startswith(ALLEGED_READONLY_PREFIX + 'URI:') or s.startswith(ALLEGED_READONLY_PREFIX + b'URI:') or
s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:')) s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:'))
# These take the same keyword arguments as from_string above. # These take the same keyword arguments as from_string above.
@ -850,26 +876,26 @@ def from_string_dirnode(s, **kwargs):
_assert(IDirnodeURI.providedBy(u)) _assert(IDirnodeURI.providedBy(u))
return u return u
registerAdapter(from_string_dirnode, str, IDirnodeURI) registerAdapter(from_string_dirnode, bytes, IDirnodeURI)
def from_string_filenode(s, **kwargs): def from_string_filenode(s, **kwargs):
u = from_string(s, **kwargs) u = from_string(s, **kwargs)
_assert(IFileURI.providedBy(u)) _assert(IFileURI.providedBy(u))
return u return u
registerAdapter(from_string_filenode, str, IFileURI) registerAdapter(from_string_filenode, bytes, IFileURI)
def from_string_mutable_filenode(s, **kwargs): def from_string_mutable_filenode(s, **kwargs):
u = from_string(s, **kwargs) u = from_string(s, **kwargs)
_assert(IMutableFileURI.providedBy(u)) _assert(IMutableFileURI.providedBy(u))
return u return u
registerAdapter(from_string_mutable_filenode, str, IMutableFileURI) registerAdapter(from_string_mutable_filenode, bytes, IMutableFileURI)
def from_string_verifier(s, **kwargs): def from_string_verifier(s, **kwargs):
u = from_string(s, **kwargs) u = from_string(s, **kwargs)
_assert(IVerifierURI.providedBy(u)) _assert(IVerifierURI.providedBy(u))
return u return u
registerAdapter(from_string_verifier, str, IVerifierURI) registerAdapter(from_string_verifier, bytes, IVerifierURI)
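
These registerAdapter() calls are re-keyed from str to bytes so that adapting
a raw cap (now a byte string) to IDirnodeURI and friends keeps working. A
self-contained sketch of the same Twisted mechanism with a made-up interface:

    from zope.interface import Interface, implementer
    from twisted.python.components import registerAdapter

    class ICap(Interface):
        """Hypothetical stand-in for IDirnodeURI etc."""

    @implementer(ICap)
    class Cap(object):
        def __init__(self, raw):
            self.raw = raw

    registerAdapter(Cap, bytes, ICap)          # keyed on bytes, as above
    assert ICap(b"URI:DIR2:...").raw == b"URI:DIR2:..."
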
def pack_extension(data): def pack_extension(data):
@ -877,34 +903,36 @@ def pack_extension(data):
for k in sorted(data.keys()): for k in sorted(data.keys()):
value = data[k] value = data[k]
if isinstance(value, (int, long)): if isinstance(value, (int, long)):
value = "%d" % value value = b"%d" % value
assert isinstance(value, str), k if isinstance(k, unicode):
assert re.match(r'^[a-zA-Z_\-]+$', k) k = k.encode("utf-8")
pieces.append(k + ':' + hashutil.netstring(value)) assert isinstance(value, bytes), k
uri_extension = ''.join(pieces) assert re.match(br'^[a-zA-Z_\-]+$', k)
pieces.append(k + b':' + hashutil.netstring(value))
uri_extension = b''.join(pieces)
return uri_extension return uri_extension
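
pack_extension() frames each value as a netstring. Assuming
hashutil.netstring() produces the classic "<length>:<payload>," framing, the
whole encoding can be sketched standalone:

    def netstring(s):
        # assumed equivalent of hashutil.netstring()
        return b"%d:%s," % (len(s), s)

    data = {b"size": b"12", b"stuff": b"value"}
    pieces = [k + b':' + netstring(data[k]) for k in sorted(data)]
    assert b''.join(pieces) == b'size:2:12,stuff:5:value,'
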
def unpack_extension(data): def unpack_extension(data):
d = {} d = {}
while data: while data:
colon = data.index(':') colon = data.index(b':')
key = data[:colon] key = data[:colon]
data = data[colon+1:] data = data[colon+1:]
colon = data.index(':') colon = data.index(b':')
number = data[:colon] number = data[:colon]
length = int(number) length = int(number)
data = data[colon+1:] data = data[colon+1:]
value = data[:length] value = data[:length]
assert data[length] == ',' assert data[length:length+1] == b','
data = data[length+1:] data = data[length+1:]
d[key] = value d[key] = value
# convert certain things to numbers # convert certain things to numbers
for intkey in ('size', 'segment_size', 'num_segments', for intkey in (b'size', b'segment_size', b'num_segments',
'needed_shares', 'total_shares'): b'needed_shares', b'total_shares'):
if intkey in d: if intkey in d:
d[intkey] = int(d[intkey]) d[intkey] = int(d[intkey])
return d return d
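
The change from data[length] == ',' to data[length:length+1] == b',' dodges a
classic porting trap: indexing a bytes object yields an int on Python 3, while
slicing yields bytes on both major versions:

    data = b"value,"
    assert data[5:6] == b","    # slice: bytes on Python 2 and 3
    assert data[5] == 0x2C      # index: the integer 44 on Python 3
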
@ -912,9 +940,9 @@ def unpack_extension(data):
def unpack_extension_readable(data): def unpack_extension_readable(data):
unpacked = unpack_extension(data) unpacked = unpack_extension(data)
unpacked["UEB_hash"] = hashutil.uri_extension_hash(data) unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
for k in sorted(unpacked.keys()): for k in sorted(unpacked.keys()):
if 'hash' in k: if b'hash' in k:
unpacked[k] = base32.b2a(unpacked[k]) unpacked[k] = base32.b2a(unpacked[k])
return unpacked return unpacked

View File

@ -20,10 +20,11 @@ from __future__ import print_function
from future.utils import PY2 from future.utils import PY2
if PY2: if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401 from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# Keep these sorted alphabetically, to reduce merge conflicts: # Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [ PORTED_MODULES = [
"allmydata.codec",
"allmydata.crypto", "allmydata.crypto",
"allmydata.crypto.aes", "allmydata.crypto.aes",
"allmydata.crypto.ed25519", "allmydata.crypto.ed25519",
@ -32,13 +33,19 @@ PORTED_MODULES = [
"allmydata.crypto.util", "allmydata.crypto.util",
"allmydata.hashtree", "allmydata.hashtree",
"allmydata.immutable.happiness_upload", "allmydata.immutable.happiness_upload",
"allmydata.interfaces",
"allmydata.monitor",
"allmydata.storage.crawler", "allmydata.storage.crawler",
"allmydata.storage.expirer",
"allmydata.test.common_py3", "allmydata.test.common_py3",
"allmydata.uri",
"allmydata.util._python3", "allmydata.util._python3",
"allmydata.util.abbreviate", "allmydata.util.abbreviate",
"allmydata.util.assertutil", "allmydata.util.assertutil",
"allmydata.util.base32", "allmydata.util.base32",
"allmydata.util.base62", "allmydata.util.base62",
"allmydata.util.configutil",
"allmydata.util.connection_status",
"allmydata.util.deferredutil", "allmydata.util.deferredutil",
"allmydata.util.fileutil", "allmydata.util.fileutil",
"allmydata.util.dictutil", "allmydata.util.dictutil",
@ -64,6 +71,9 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_abbreviate", "allmydata.test.test_abbreviate",
"allmydata.test.test_base32", "allmydata.test.test_base32",
"allmydata.test.test_base62", "allmydata.test.test_base62",
"allmydata.test.test_codec",
"allmydata.test.test_configutil",
"allmydata.test.test_connection_status",
"allmydata.test.test_crawler", "allmydata.test.test_crawler",
"allmydata.test.test_crypto", "allmydata.test.test_crypto",
"allmydata.test.test_deferredutil", "allmydata.test.test_deferredutil",
@ -75,13 +85,17 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_humanreadable", "allmydata.test.test_humanreadable",
"allmydata.test.test_iputil", "allmydata.test.test_iputil",
"allmydata.test.test_log", "allmydata.test.test_log",
"allmydata.test.test_monitor",
"allmydata.test.test_netstring", "allmydata.test.test_netstring",
"allmydata.test.test_observer", "allmydata.test.test_observer",
"allmydata.test.test_pipeline", "allmydata.test.test_pipeline",
"allmydata.test.test_python3", "allmydata.test.test_python3",
"allmydata.test.test_spans", "allmydata.test.test_spans",
"allmydata.test.test_statistics", "allmydata.test.test_statistics",
"allmydata.test.test_storage",
"allmydata.test.test_storage_web",
"allmydata.test.test_time_format", "allmydata.test.test_time_format",
"allmydata.test.test_uri",
"allmydata.test.test_util", "allmydata.test.test_util",
"allmydata.test.test_version", "allmydata.test.test_version",
] ]

View File

@ -1,8 +1,32 @@
"""
Read/write config files.
from ConfigParser import SafeConfigParser Configuration is returned as native strings.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# We don't import open(), because we want files opened with "r" or "w" to
# read/write native strs.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
if PY2:
# In theory configparser also works on Python 2, but then the code gets the
# wrong exception types and they don't get handled. So just use the native
# parser for now.
from ConfigParser import SafeConfigParser
else:
from configparser import SafeConfigParser
import attr import attr
class UnknownConfigError(Exception): class UnknownConfigError(Exception):
""" """
An unknown config item was found. An unknown config item was found.
@ -12,11 +36,16 @@ class UnknownConfigError(Exception):
def get_config(tahoe_cfg): def get_config(tahoe_cfg):
"""Load the config, returning a SafeConfigParser.
Configuration is returned as native strings.
"""
config = SafeConfigParser() config = SafeConfigParser()
with open(tahoe_cfg, "rb") as f: with open(tahoe_cfg, "r") as f:
# Skip any initial Byte Order Mark. Since this is an ordinary file, we # On Python 2, where we read in bytes, skip any initial Byte Order
# don't need to handle incomplete reads, and can assume seekability. # Mark. Since this is an ordinary file, we don't need to handle
if f.read(3) != '\xEF\xBB\xBF': # incomplete reads, and can assume seekability.
if PY2 and f.read(3) != b'\xEF\xBB\xBF':
f.seek(0) f.seek(0)
config.readfp(f) config.readfp(f)
return config return config
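
The BOM handling peeks at the first three bytes and rewinds when no UTF-8 BOM
is present, and now only bothers on Python 2, where the file yields raw native
strings. The pattern in isolation, against an in-memory file:

    import io

    def skip_utf8_bom(f):
        # peek three bytes; rewind unless they are the UTF-8 BOM
        if f.read(3) != b'\xEF\xBB\xBF':
            f.seek(0)

    f = io.BytesIO(b'\xEF\xBB\xBF[node]\nnickname = demo\n')
    skip_utf8_bom(f)
    assert f.read().startswith(b'[node]')
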
@ -28,7 +57,7 @@ def set_config(config, section, option, value):
assert config.get(section, option) == value assert config.get(section, option) == value
def write_config(tahoe_cfg, config): def write_config(tahoe_cfg, config):
with open(tahoe_cfg, "wb") as f: with open(tahoe_cfg, "w") as f:
config.write(f) config.write(f)
def validate_config(fname, cfg, valid_config): def validate_config(fname, cfg, valid_config):

View File

@ -1,3 +1,18 @@
"""
Parse connection status from Foolscap.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import time import time
from zope.interface import implementer from zope.interface import implementer
from ..interfaces import IConnectionStatus from ..interfaces import IConnectionStatus
@ -37,9 +52,12 @@ def _hint_statuses(which, handlers, statuses):
def from_foolscap_reconnector(rc, last_received): def from_foolscap_reconnector(rc, last_received):
ri = rc.getReconnectionInfo() ri = rc.getReconnectionInfo()
# See foolscap/reconnector.py, ReconnectionInfo, for details about # See foolscap/reconnector.py, ReconnectionInfo, for details about possible
# possible states. # states. The returned result is a native string, it seems, so convert to
# unicode.
state = ri.state state = ri.state
if isinstance(state, bytes): # Python 2
state = str(state, "ascii")
if state == "unstarted": if state == "unstarted":
return ConnectionStatus.unstarted() return ConnectionStatus.unstarted()
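
The decode step above is a one-liner, but worth isolating, since Foolscap may
hand back bytes on Python 2 and text on Python 3:

    def _native_state(state):
        # hypothetical helper mirroring the normalization above
        if isinstance(state, bytes):
            state = str(state, "ascii")
        return state

    assert _native_state(b"connected") == "connected"
    assert _native_state("connected") == "connected"
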

View File

@ -1,31 +0,0 @@
"""
Implement a work-around for <https://github.com/twisted/nevow/issues/106>.
"""
from __future__ import (
print_function,
unicode_literals,
absolute_import,
division,
)
from nevow import inevow
from twisted.internet import defer
def renderHTTP(self, ctx):
request = inevow.IRequest(ctx)
if self.real_prepath_len is not None:
request.postpath = request.prepath + request.postpath
request.prepath = request.postpath[:self.real_prepath_len]
del request.postpath[:self.real_prepath_len]
result = defer.maybeDeferred(self.original.render, request).addCallback(
self._handle_NOT_DONE_YET, request)
return result
def patch():
"""
Monkey-patch the proposed fix into place.
"""
from nevow.appserver import OldResourceAdapter
OldResourceAdapter.renderHTTP = renderHTTP

View File

@ -15,11 +15,15 @@ from allmydata.interfaces import ExistingChildError, NoSuchChildError, \
EmptyPathnameComponentError, MustBeDeepImmutableError, \ EmptyPathnameComponentError, MustBeDeepImmutableError, \
MustBeReadonlyError, MustNotBeUnknownRWError, SDMF_VERSION, MDMF_VERSION MustBeReadonlyError, MustNotBeUnknownRWError, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable.common import UnrecoverableFileError from allmydata.mutable.common import UnrecoverableFileError
from allmydata.util import abbreviate
from allmydata.util.hashutil import timing_safe_compare from allmydata.util.hashutil import timing_safe_compare
from allmydata.util.time_format import format_time, format_delta from allmydata.util.time_format import format_time, format_delta
from allmydata.util.encodingutil import to_bytes, quote_output from allmydata.util.encodingutil import to_bytes, quote_output
# Originally part of this module, so still part of its API:
from .common_py3 import ( # noqa: F401
get_arg, abbreviate_time, MultiFormatResource, WebError
)
def get_filenode_metadata(filenode):
    metadata = {'mutable': filenode.is_mutable()}
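The noqa-annotated re-export above keeps historical import paths working. A sketch of what it guarantees, assuming the allmydata package is importable:

    # Both names resolve to the same function object after the re-export.
    from allmydata.web.common import get_arg as via_old_path
    from allmydata.web.common_py3 import get_arg as via_new_path
    assert via_old_path is via_new_path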
@@ -104,24 +108,6 @@ def get_root(ctx_or_req):
    link = "/".join([".."] * depth)
    return link
def get_arg(ctx_or_req, argname, default=None, multiple=False):
"""Extract an argument from either the query args (req.args) or the form
body fields (req.fields). If multiple=False, this returns a single value
(or the default, which defaults to None), and the query args take
precedence. If multiple=True, this returns a tuple of arguments (possibly
empty), starting with all those in the query args.
"""
req = IRequest(ctx_or_req)
results = []
if argname in req.args:
results.extend(req.args[argname])
if req.fields and argname in req.fields:
results.append(req.fields[argname].value)
if multiple:
return tuple(results)
if results:
return results[0]
return default
def convert_children_json(nodemaker, children_json):
    """I convert the JSON output of GET?t=json into the dict-of-nodes input
@@ -141,20 +127,6 @@ def convert_children_json(nodemaker, children_json):
        children[namex] = (childnode, metadata)
    return children
def abbreviate_time(data):
# 1.23s, 790ms, 132us
if data is None:
return ""
s = float(data)
if s >= 10:
return abbreviate.abbreviate_time(data)
if s >= 1.0:
return "%.2fs" % s
if s >= 0.01:
return "%.0fms" % (1000*s)
if s >= 0.001:
return "%.1fms" % (1000*s)
return "%.0fus" % (1000000*s)
def compute_rate(bytes, seconds):
    if bytes is None:
@@ -219,10 +191,6 @@ def render_time(t):
def render_time_attr(t):
    return format_time(time.localtime(t))
class WebError(Exception):
def __init__(self, text, code=http.BAD_REQUEST):
self.text = text
self.code = code
# XXX: to make UnsupportedMethod return 501 NOT_IMPLEMENTED instead of 500
# Internal Server Error, we either need to do that ICanHandleException trick,
@@ -421,62 +389,6 @@ class MultiFormatPage(Page):
        return lambda ctx: renderer(IRequest(ctx))
class MultiFormatResource(resource.Resource, object):
"""
``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
a number of different formats.
Rendered format is controlled by a query argument (given by
``self.formatArgument``). Different resources may support different
formats but ``json`` is a pretty common one. ``html`` is the default
format if nothing else is given as the ``formatDefault``.
"""
formatArgument = "t"
formatDefault = None
def render(self, req):
"""
Dispatch to a renderer for a particular format, as selected by a query
argument.
A renderer for the format given by the query argument matching
``formatArgument`` will be selected and invoked. render_HTML will be
used as a default if no format is selected (either by query arguments
or by ``formatDefault``).
:return: The result of the selected renderer.
"""
t = get_arg(req, self.formatArgument, self.formatDefault)
renderer = self._get_renderer(t)
return renderer(req)
def _get_renderer(self, fmt):
"""
Get the renderer for the indicated format.
:param str fmt: The format. If a method with a prefix of ``render_``
and a suffix of this format (upper-cased) is found, it will be
used.
:return: A callable which takes a twisted.web Request and renders a
response.
"""
renderer = None
if fmt is not None:
try:
renderer = getattr(self, "render_{}".format(fmt.upper()))
except AttributeError:
raise WebError(
"Unknown {} value: {!r}".format(self.formatArgument, fmt),
)
if renderer is None:
renderer = self.render_HTML
return renderer
class SlotsSequenceElement(template.Element):
    """
    ``SlotsSequenceElement`` is a minimal port of nevow's sequence renderer for

View File

@@ -0,0 +1,120 @@
"""
Common utilities that are available from Python 3.
Can eventually be merged back into allmydata.web.common.
"""
from future.utils import PY2
if PY2:
from nevow.inevow import IRequest as INevowRequest
else:
INevowRequest = None
from twisted.web import resource, http
from twisted.web.iweb import IRequest
from allmydata.util import abbreviate
class WebError(Exception):
def __init__(self, text, code=http.BAD_REQUEST):
self.text = text
self.code = code
def get_arg(ctx_or_req, argname, default=None, multiple=False):
"""Extract an argument from either the query args (req.args) or the form
body fields (req.fields). If multiple=False, this returns a single value
(or the default, which defaults to None), and the query args take
precedence. If multiple=True, this returns a tuple of arguments (possibly
empty), starting with all those in the query args.
"""
results = []
if PY2:
req = INevowRequest(ctx_or_req)
if argname in req.args:
results.extend(req.args[argname])
if req.fields and argname in req.fields:
results.append(req.fields[argname].value)
else:
req = IRequest(ctx_or_req)
if argname in req.args:
results.extend(req.args[argname])
if multiple:
return tuple(results)
if results:
return results[0]
return default
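A sketch of how get_arg behaves on the Python 3 branch, using a stub request; the stub is an assumption for illustration, real callers pass a twisted.web request:

    from zope.interface import implementer
    from twisted.web.iweb import IRequest

    @implementer(IRequest)
    class FakeRequest(object):
        # Provides only the attribute the Python 3 branch consults.
        def __init__(self, args):
            self.args = args    # query arguments: name -> list of values

    req = FakeRequest({b"t": [b"json"]})
    assert get_arg(req, b"t") == b"json"              # first query value wins
    assert get_arg(req, b"t", multiple=True) == (b"json",)
    assert get_arg(req, b"missing", b"x") == b"x"     # falls back to default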
class MultiFormatResource(resource.Resource, object):
"""
``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
a number of different formats.
Rendered format is controlled by a query argument (given by
``self.formatArgument``). Different resources may support different
formats but ``json`` is a pretty common one. ``html`` is the default
format if nothing else is given as the ``formatDefault``.
"""
formatArgument = "t"
formatDefault = None
def render(self, req):
"""
Dispatch to a renderer for a particular format, as selected by a query
argument.
A renderer for the format given by the query argument matching
``formatArgument`` will be selected and invoked. render_HTML will be
used as a default if no format is selected (either by query arguments
or by ``formatDefault``).
:return: The result of the selected renderer.
"""
t = get_arg(req, self.formatArgument, self.formatDefault)
renderer = self._get_renderer(t)
return renderer(req)
def _get_renderer(self, fmt):
"""
Get the renderer for the indicated format.
:param str fmt: The format. If a method with a prefix of ``render_``
and a suffix of this format (upper-cased) is found, it will be
used.
:return: A callable which takes a twisted.web Request and renders a
response.
"""
renderer = None
if fmt is not None:
try:
renderer = getattr(self, "render_{}".format(fmt.upper()))
except AttributeError:
raise WebError(
"Unknown {} value: {!r}".format(self.formatArgument, fmt),
)
if renderer is None:
renderer = self.render_HTML
return renderer
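A minimal hypothetical subclass illustrating the dispatch convention described in the docstring; the resource and its payloads are invented:

    class ThingResource(MultiFormatResource):
        # ?t=json selects render_JSON; a bare request falls back through
        # formatDefault to render_HTML; an unrecognized value raises
        # WebError.
        formatDefault = "html"

        def render_HTML(self, req):
            return b"<html><body>thing</body></html>"

        def render_JSON(self, req):
            return b'{"thing": true}'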
def abbreviate_time(data):
# 1.23s, 790ms, 132us
if data is None:
return ""
s = float(data)
if s >= 10:
return abbreviate.abbreviate_time(data)
if s >= 1.0:
return "%.2fs" % s
if s >= 0.01:
return "%.0fms" % (1000*s)
if s >= 0.001:
return "%.1fms" % (1000*s)
return "%.0fus" % (1000000*s)

View File

@@ -53,7 +53,6 @@ from allmydata.web.common import (
    get_mutable_type,
    get_filenode_metadata,
    render_time,
-    MultiFormatPage,
    MultiFormatResource,
    SlotsSequenceElement,
)
@@ -1213,7 +1212,7 @@ class ManifestElement(ReloadableMonitorElement):
class ManifestResults(MultiFormatResource, ReloadMixin):
-    # Control MultiFormatPage
+    # Control MultiFormatResource
    formatArgument = "output"
    formatDefault = "html"
@@ -1268,8 +1267,9 @@ class ManifestResults(MultiFormatResource, ReloadMixin):
        return json.dumps(status, indent=1)
-class DeepSizeResults(MultiFormatPage):
-    # Control MultiFormatPage
+class DeepSizeResults(MultiFormatResource):
+    # Control MultiFormatResource
    formatArgument = "output"
    formatDefault = "html"

View File

@@ -54,11 +54,6 @@ from .logs import (
    create_log_resources,
)
# Hotfix work-around https://github.com/twisted/nevow/issues/106
from . import _nevow_106
_nevow_106.patch()
del _nevow_106
SCHEME = b"tahoe-lafs"
class IToken(ICredentials):

View File

@@ -8,7 +8,7 @@ from twisted.web.template import (
    renderer,
    renderElement
)
-from allmydata.web.common import (
+from allmydata.web.common_py3 import (
    abbreviate_time,
    MultiFormatResource
)

tox.ini
View File

@@ -44,13 +44,32 @@ usedevelop = False
# We use extras=test to get things like "mock" that are required for our unit
# tests.
extras = test
commands =
trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
tahoe --version
-[testenv:py36]
+setenv =
# Define TEST_SUITE in the environment as an aid to constructing the
# correct test command below.
!py36: TEST_SUITE = allmydata
py36: TEST_SUITE = allmydata.test.python3_tests
commands =
-    trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata.test.python3_tests}
+    # As an aid to debugging, dump all of the Python packages and their
# versions that are installed in the test environment. This is
# particularly useful to get from CI runs - though hopefully the
# version pinning we do limits the variability of this output
pip freeze
# The tahoe script isn't sufficiently ported for this to succeed on
# Python 3.x yet.
!py36: tahoe --version
!coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}
# measuring coverage is somewhat slower than not measuring coverage
# so only do it on request.
coverage: coverage run -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:{env:TEST_SUITE}}
coverage: coverage combine
coverage: coverage xml
[testenv:integration]
setenv =
@@ -61,19 +80,6 @@ commands =
    coverage combine
    coverage report
[testenv:coverage]
# coverage (with --branch) takes about 65% longer to run
commands =
# As an aid to debugging, dump all of the Python packages and their
# versions that are installed in the test environment. This is
# particularly useful to get from CI runs - though hopefully the
# version pinning we do limits the variability of this output
# somewhat.
pip freeze
tahoe --version
coverage run --branch -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:allmydata}
coverage combine
coverage xml
[testenv:codechecks]
# On macOS, git inside of towncrier needs $HOME.
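The coverage factor above maps onto coverage.py's public API roughly as follows. This is a sketch, not the exact trial invocation; run_test_suite is a hypothetical stand-in, and combine() only has work to do when parallel data files exist:

    import coverage

    def run_test_suite():
        # Hypothetical stand-in for the twisted.trial run.
        assert abs(1.5 - 3 / 2) < 1e-9

    cov = coverage.Coverage()   # picks up config, e.g. from .coveragerc
    cov.start()
    run_test_suite()
    cov.stop()
    cov.save()
    cov.combine()                             # like `coverage combine`
    cov.xml_report(outfile="coverage.xml")    # like `coverage xml`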