Merge remote-tracking branch 'origin/master' into 3581.unicode_to_argv.1

commit 8ffb083d38
@@ -29,7 +29,7 @@ workflows:
       - "debian-9": &DOCKERHUB_CONTEXT
           context: "dockerhub-auth"

-      - "debian-8":
+      - "debian-10":
           <<: *DOCKERHUB_CONTEXT
           requires:
             - "debian-9"
@@ -86,11 +86,6 @@ workflows:
       # integration tests.
       - "debian-9"

-      # Generate the underlying data for a visualization to aid with Python 3
-      # porting.
-      - "build-porting-depgraph":
-          <<: *DOCKERHUB_CONTEXT
-
       - "typechecks":
           <<: *DOCKERHUB_CONTEXT

@@ -107,7 +102,7 @@ workflows:
           - "master"

     jobs:
-      - "build-image-debian-8":
+      - "build-image-debian-10":
           <<: *DOCKERHUB_CONTEXT
       - "build-image-debian-9":
           <<: *DOCKERHUB_CONTEXT
@@ -277,11 +272,11 @@ jobs:
           fi


-  debian-8:
+  debian-10:
     <<: *DEBIAN
     docker:
      - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/debian:8-py2.7"
+        image: "tahoelafsci/debian:10-py2.7"
        user: "nobody"


@@ -451,33 +446,6 @@ jobs:
           # them in parallel.
           nix-build --cores 3 --max-jobs 2 nix/

-  # Generate up-to-date data for the dependency graph visualizer.
-  build-porting-depgraph:
-    # Get a system in which we can easily install Tahoe-LAFS and all its
-    # dependencies. The dependency graph analyzer works by executing the code.
-    # It's Python, what do you expect?
-    <<: *DEBIAN
-
-    steps:
-      - "checkout"
-
-      - add_ssh_keys:
-          fingerprints:
-            # Jean-Paul Calderone <exarkun@twistedmatrix.com> (CircleCI depgraph key)
-            # This lets us push to tahoe-lafs/tahoe-depgraph in the next step.
-            - "86:38:18:a7:c0:97:42:43:18:46:55:d6:21:b0:5f:d4"
-
-      - run:
-          name: "Setup Python Environment"
-          command: |
-            /tmp/venv/bin/pip install -e /tmp/project
-
-      - run:
-          name: "Generate dependency graph data"
-          command: |
-            . /tmp/venv/bin/activate
-            ./misc/python3/depgraph.sh
-
   typechecks:
     docker:
       - <<: *DOCKERHUB_AUTH
@@ -529,12 +497,12 @@ jobs:
           docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}


-  build-image-debian-8:
+  build-image-debian-10:
     <<: *BUILD_IMAGE

     environment:
       DISTRO: "debian"
-      TAG: "8"
+      TAG: "10"
       PYTHON_VERSION: "2.7"


@@ -46,7 +46,7 @@ class ProvisioningTool(rend.Page):
         req = inevow.IRequest(ctx)

         def getarg(name, astype=int):
-            if req.method != "POST":
+            if req.method != b"POST":
                 return None
             if name in req.fields:
                 return astype(req.fields[name].value)
@@ -0,0 +1 @@
+Debian 8 support has been replaced with Debian 10 support.
@@ -454,7 +454,7 @@ class FakeCHKFileNode(object):  # type: ignore # incomplete implementation
         return self.storage_index

     def check(self, monitor, verify=False, add_lease=False):
-        s = StubServer("\x00"*20)
+        s = StubServer(b"\x00"*20)
         r = CheckResults(self.my_uri, self.storage_index,
                          healthy=True, recoverable=True,
                          count_happiness=10,
@@ -588,12 +588,12 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
         self.file_types[self.storage_index] = version
         initial_contents = self._get_initial_contents(contents)
         data = initial_contents.read(initial_contents.get_size())
-        data = "".join(data)
+        data = b"".join(data)
         self.all_contents[self.storage_index] = data
         return defer.succeed(self)
     def _get_initial_contents(self, contents):
         if contents is None:
-            return MutableData("")
+            return MutableData(b"")

         if IMutableUploadable.providedBy(contents):
             return contents
|
@ -647,7 +647,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
|
||||||
def raise_error(self):
|
def raise_error(self):
|
||||||
pass
|
pass
|
||||||
def get_writekey(self):
|
def get_writekey(self):
|
||||||
return "\x00"*16
|
return b"\x00"*16
|
||||||
def get_size(self):
|
def get_size(self):
|
||||||
return len(self.all_contents[self.storage_index])
|
return len(self.all_contents[self.storage_index])
|
||||||
def get_current_size(self):
|
def get_current_size(self):
|
||||||
|
@ -666,7 +666,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
|
||||||
return self.file_types[self.storage_index]
|
return self.file_types[self.storage_index]
|
||||||
|
|
||||||
def check(self, monitor, verify=False, add_lease=False):
|
def check(self, monitor, verify=False, add_lease=False):
|
||||||
s = StubServer("\x00"*20)
|
s = StubServer(b"\x00"*20)
|
||||||
r = CheckResults(self.my_uri, self.storage_index,
|
r = CheckResults(self.my_uri, self.storage_index,
|
||||||
healthy=True, recoverable=True,
|
healthy=True, recoverable=True,
|
||||||
count_happiness=10,
|
count_happiness=10,
|
||||||
|
@ -677,7 +677,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
|
||||||
count_recoverable_versions=1,
|
count_recoverable_versions=1,
|
||||||
count_unrecoverable_versions=0,
|
count_unrecoverable_versions=0,
|
||||||
servers_responding=[s],
|
servers_responding=[s],
|
||||||
sharemap={"seq1-abcd-sh0": [s]},
|
sharemap={b"seq1-abcd-sh0": [s]},
|
||||||
count_wrong_shares=0,
|
count_wrong_shares=0,
|
||||||
list_corrupt_shares=[],
|
list_corrupt_shares=[],
|
||||||
count_corrupt_shares=0,
|
count_corrupt_shares=0,
|
||||||
|
@ -731,7 +731,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
|
||||||
def overwrite(self, new_contents):
|
def overwrite(self, new_contents):
|
||||||
assert not self.is_readonly()
|
assert not self.is_readonly()
|
||||||
new_data = new_contents.read(new_contents.get_size())
|
new_data = new_contents.read(new_contents.get_size())
|
||||||
new_data = "".join(new_data)
|
new_data = b"".join(new_data)
|
||||||
self.all_contents[self.storage_index] = new_data
|
self.all_contents[self.storage_index] = new_data
|
||||||
return defer.succeed(None)
|
return defer.succeed(None)
|
||||||
def modify(self, modifier):
|
def modify(self, modifier):
|
||||||
|
@ -762,7 +762,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
|
||||||
def update(self, data, offset):
|
def update(self, data, offset):
|
||||||
assert not self.is_readonly()
|
assert not self.is_readonly()
|
||||||
def modifier(old, servermap, first_time):
|
def modifier(old, servermap, first_time):
|
||||||
new = old[:offset] + "".join(data.read(data.get_size()))
|
new = old[:offset] + b"".join(data.read(data.get_size()))
|
||||||
new += old[len(new):]
|
new += old[len(new):]
|
||||||
return new
|
return new
|
||||||
return self.modify(modifier)
|
return self.modify(modifier)
|
||||||
|
@ -881,6 +881,8 @@ class WebErrorMixin(object):
|
||||||
body = yield response.content()
|
body = yield response.content()
|
||||||
self.assertEquals(response.code, code)
|
self.assertEquals(response.code, code)
|
||||||
if response_substring is not None:
|
if response_substring is not None:
|
||||||
|
if isinstance(response_substring, unicode):
|
||||||
|
response_substring = response_substring.encode("utf-8")
|
||||||
self.assertIn(response_substring, body)
|
self.assertIn(response_substring, body)
|
||||||
returnValue(body)
|
returnValue(body)
|
||||||
|
|
||||||
|
|
|
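The two added lines exist because `body` above is bytes: on Python 3, testing a native string for membership in a bytes object raises instead of quietly returning False. A minimal illustration (not part of the diff):

    body = b"something went wrong"
    assert b"wrong" in body  # bytes in bytes: fine on both Pythons
    # "wrong" in body        # Python 3: TypeError ("a bytes-like object is
    #                        # required, not 'str'"), which is why
    #                        # response_substring is encoded above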
@@ -203,6 +203,14 @@ def flip_one_bit(s, offset=0, size=None):
 class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
+        # Make sure unicode strings are a consistent type. Specifically there's
+        # Future newstr (backported Unicode type) vs. Python 2 native unicode
+        # type. They're equal, and _logically_ the same type, but have
+        # different types in practice.
+        if a.__class__ == future_str:
+            a = unicode(a)
+        if b.__class__ == future_str:
+            b = unicode(b)
         self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))


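For context on the hunk above: the `future` library's backported text type (`future.types.newstr`) compares equal to Python 2's native `unicode` but is a distinct class, so a bare `type(a) == type(b)` check fails spuriously. A sketch of the behavior being normalized (Python 2 with the `future` package installed; not part of the diff):

    from builtins import str as future_str   # future's backported text type
    from past.builtins import unicode

    a = future_str(u"hello")
    b = u"hello"
    assert a == b                       # equal by value...
    assert type(a) is not type(b)       # ...but newstr is not native unicode
    assert type(unicode(a)) is type(b)  # converting makes the types comparable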
@@ -6,29 +6,43 @@ Tools aimed at the interaction between tests and Eliot.
 # Can't use `builtins.str` because it's not JSON encodable:
 # `exceptions.TypeError: <class 'future.types.newstr.newstr'> is not JSON-encodeable`
 from past.builtins import unicode as str
-from future.utils import PY3
+from future.utils import PY2
+from six import ensure_text

 __all__ = [
     "RUN_TEST",
     "EliotLoggedRunTest",
-    "eliot_logged_test",
 ]

+try:
+    from typing import Callable
+except ImportError:
+    pass
+
 from functools import (
-    wraps,
     partial,
+    wraps,
 )

 import attr

+from zope.interface import (
+    implementer,
+)
+
 from eliot import (
     ActionType,
     Field,
+    MemoryLogger,
+    ILogger,
+)
+from eliot.testing import (
+    swap_logger,
+    check_for_errors,
 )
-from eliot.testing import capture_logging

-from twisted.internet.defer import (
-    maybeDeferred,
+from twisted.python.monkey import (
+    MonkeyPatcher,
 )

 from ..util.jsonbytes import BytesJSONEncoder
@@ -48,92 +62,12 @@ RUN_TEST = ActionType(
 )


-def eliot_logged_test(f):
-    """
-    Decorate a test method to run in a dedicated Eliot action context.
-
-    The action will finish after the test is done (after the returned Deferred
-    fires, if a Deferred is returned). It will note the name of the test
-    being run.
-
-    All messages emitted by the test will be validated. They will still be
-    delivered to the global logger.
-    """
-    # A convenient, mutable container into which nested functions can write
-    # state to be shared among them.
-    class storage(object):
-        pass
-
-    # On Python 3, we want to use our custom JSON encoder when validating
-    # messages can be encoded to JSON:
-    if PY3:
-        capture = lambda f : capture_logging(None, encoder_=BytesJSONEncoder)(f)
-    else:
-        capture = lambda f : capture_logging(None)(f)
-
-    @wraps(f)
-    def run_and_republish(self, *a, **kw):
-        # Unfortunately the only way to get at the global/default logger...
-        # This import is delayed here so that we get the *current* default
-        # logger at the time the decorated function is run.
-        from eliot._output import _DEFAULT_LOGGER as default_logger
-
-        def republish():
-            # This is called as a cleanup function after capture_logging has
-            # restored the global/default logger to its original state. We
-            # can now emit messages that go to whatever global destinations
-            # are installed.
-
-            # storage.logger.serialize() seems like it would make more sense
-            # than storage.logger.messages here. However, serialize()
-            # explodes, seemingly as a result of double-serializing the logged
-            # messages. I don't understand this.
-            for msg in storage.logger.messages:
-                default_logger.write(msg)
-
-            # And now that we've re-published all of the test's messages, we
-            # can finish the test's action.
-            storage.action.finish()
-
-        @capture
-        def run(self, logger):
-            # Record the MemoryLogger for later message extraction.
-            storage.logger = logger
-            # Give the test access to the logger as well. It would be just
-            # fine to pass this as a keyword argument to `f` but implementing
-            # that now will give me conflict headaches so I'm not doing it.
-            self.eliot_logger = logger
-            return f(self, *a, **kw)
-
-        # Arrange for all messages written to the memory logger that
-        # `capture_logging` installs to be re-written to the global/default
-        # logger so they might end up in a log file somewhere, if someone
-        # wants. This has to be done in a cleanup function (or later) because
-        # capture_logging restores the original logger in a cleanup function.
-        # We install our cleanup function here, before we call run, so that it
-        # runs *after* the cleanup function capture_logging installs (cleanup
-        # functions are a stack).
-        self.addCleanup(republish)
-
-        # Begin an action that should comprise all messages from the decorated
-        # test method.
-        with RUN_TEST(name=self.id()).context() as action:
-            # When the test method Deferred fires, the RUN_TEST action is
-            # done. However, we won't have re-published the MemoryLogger
-            # messages into the global/default logger when this Deferred
-            # fires. So we need to delay finishing the action until that has
-            # happened. Record the action so we can do that.
-            storage.action = action
-
-            # Support both Deferred-returning and non-Deferred-returning
-            # tests.
-            d = maybeDeferred(run, self)
-
-        # Let the test runner do its thing.
-        return d
-
-    return run_and_republish
+# On Python 3, we want to use our custom JSON encoder when validating messages
+# can be encoded to JSON:
+if PY2:
+    _memory_logger = MemoryLogger
+else:
+    _memory_logger = lambda: MemoryLogger(encoder=BytesJSONEncoder)


 @attr.s
|
@ -174,10 +108,91 @@ class EliotLoggedRunTest(object):
|
||||||
def id(self):
|
def id(self):
|
||||||
return self.case.id()
|
return self.case.id()
|
||||||
|
|
||||||
@eliot_logged_test
|
def run(self, result):
|
||||||
def run(self, result=None):
|
"""
|
||||||
|
Run the test case in the context of a distinct Eliot action.
|
||||||
|
|
||||||
|
The action will finish after the test is done. It will note the name of
|
||||||
|
the test being run.
|
||||||
|
|
||||||
|
All messages emitted by the test will be validated. They will still be
|
||||||
|
delivered to the global logger.
|
||||||
|
"""
|
||||||
|
# The idea here is to decorate the test method itself so that all of
|
||||||
|
# the extra logic happens at the point where test/application logic is
|
||||||
|
# expected to be. This `run` method is more like test infrastructure
|
||||||
|
# and things do not go well when we add too much extra behavior here.
|
||||||
|
# For example, exceptions raised here often just kill the whole
|
||||||
|
# runner.
|
||||||
|
patcher = MonkeyPatcher()
|
||||||
|
|
||||||
|
# So, grab the test method.
|
||||||
|
name = self.case._testMethodName
|
||||||
|
original = getattr(self.case, name)
|
||||||
|
decorated = with_logging(ensure_text(self.case.id()), original)
|
||||||
|
patcher.addPatch(self.case, name, decorated)
|
||||||
|
try:
|
||||||
|
# Patch it in
|
||||||
|
patcher.patch()
|
||||||
|
# Then use the rest of the machinery to run it.
|
||||||
return self._run_tests_with_factory(
|
return self._run_tests_with_factory(
|
||||||
self.case,
|
self.case,
|
||||||
self.handlers,
|
self.handlers,
|
||||||
self.last_resort,
|
self.last_resort,
|
||||||
).run(result)
|
).run(result)
|
||||||
|
finally:
|
||||||
|
# Clean up the patching for idempotency or something.
|
||||||
|
patcher.restore()
|
||||||
|
|
||||||
|
|
||||||
|
def with_logging(
|
||||||
|
test_id, # type: str
|
||||||
|
test_method, # type: Callable
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Decorate a test method with additional log-related behaviors.
|
||||||
|
|
||||||
|
1. The test method will run in a distinct Eliot action.
|
||||||
|
2. Typed log messages will be validated.
|
||||||
|
3. Logged tracebacks will be added as errors.
|
||||||
|
|
||||||
|
:param test_id: The full identifier of the test being decorated.
|
||||||
|
:param test_method: The method itself.
|
||||||
|
"""
|
||||||
|
@wraps(test_method)
|
||||||
|
def run_with_logging(*args, **kwargs):
|
||||||
|
validating_logger = _memory_logger()
|
||||||
|
original = swap_logger(None)
|
||||||
|
try:
|
||||||
|
swap_logger(_TwoLoggers(original, validating_logger))
|
||||||
|
with RUN_TEST(name=test_id):
|
||||||
|
try:
|
||||||
|
return test_method(*args, **kwargs)
|
||||||
|
finally:
|
||||||
|
check_for_errors(validating_logger)
|
||||||
|
finally:
|
||||||
|
swap_logger(original)
|
||||||
|
return run_with_logging
|
||||||
|
|
||||||
|
|
||||||
|
@implementer(ILogger)
|
||||||
|
class _TwoLoggers(object):
|
||||||
|
"""
|
||||||
|
Log to two loggers.
|
||||||
|
|
||||||
|
A single logger can have multiple destinations so this isn't typically a
|
||||||
|
useful thing to do. However, MemoryLogger has inline validation instead
|
||||||
|
of destinations. That means this *is* useful to simultaneously write to
|
||||||
|
the normal places and validate all written log messages.
|
||||||
|
"""
|
||||||
|
def __init__(self, a, b):
|
||||||
|
"""
|
||||||
|
:param ILogger a: One logger
|
||||||
|
:param ILogger b: Another logger
|
||||||
|
"""
|
||||||
|
self._a = a # type: ILogger
|
||||||
|
self._b = b # type: ILogger
|
||||||
|
|
||||||
|
def write(self, dictionary, serializer=None):
|
||||||
|
self._a.write(dictionary, serializer)
|
||||||
|
self._b.write(dictionary, serializer)
|
||||||
|
|
|
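The `with_logging` wrapper above replaces the old Deferred-republishing scheme with a simpler idea: route every Eliot message through both the real logger and a validating `MemoryLogger`. A condensed sketch of that flow, using the names defined in the hunk (not part of the diff):

    from eliot.testing import check_for_errors, swap_logger

    validating = _memory_logger()     # MemoryLogger, validates on write
    original = swap_logger(None)      # detach the current global logger
    try:
        swap_logger(_TwoLoggers(original, validating))
        # ... test code logs here; every message reaches both loggers ...
    finally:
        swap_logger(original)         # always restore the global logger
    check_for_errors(validating)      # raise if any message failed validation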
@@ -18,17 +18,25 @@ if PY2:
 from sys import stdout
 import logging

+from unittest import (
+    skip,
+)
+
 from fixtures import (
     TempDir,
 )
 from testtools import (
     TestCase,
 )
+from testtools import (
+    TestResult,
+)
 from testtools.matchers import (
     Is,
     IsInstance,
     MatchesStructure,
     Equals,
+    HasLength,
     AfterPreprocessing,
 )
 from testtools.twistedsupport import (
|
@ -38,12 +46,16 @@ from testtools.twistedsupport import (
|
||||||
|
|
||||||
from eliot import (
|
from eliot import (
|
||||||
Message,
|
Message,
|
||||||
|
MessageType,
|
||||||
|
fields,
|
||||||
FileDestination,
|
FileDestination,
|
||||||
|
MemoryLogger,
|
||||||
)
|
)
|
||||||
from eliot.twisted import DeferredContext
|
from eliot.twisted import DeferredContext
|
||||||
from eliot.testing import (
|
from eliot.testing import (
|
||||||
capture_logging,
|
capture_logging,
|
||||||
assertHasAction,
|
assertHasAction,
|
||||||
|
swap_logger,
|
||||||
)
|
)
|
||||||
|
|
||||||
from twisted.internet.defer import (
|
from twisted.internet.defer import (
|
||||||
|
@@ -173,6 +185,62 @@ class EliotLoggingTests(TestCase):
             ),
         )

+    def test_validation_failure(self):
+        """
+        If a test emits a log message that fails validation then an error is added
+        to the result.
+        """
+        # Make sure we preserve the original global Eliot state.
+        original = swap_logger(MemoryLogger())
+        self.addCleanup(lambda: swap_logger(original))
+
+        class ValidationFailureProbe(SyncTestCase):
+            def test_bad_message(self):
+                # This message does not validate because "Hello" is not an
+                # int.
+                MSG = MessageType("test:eliotutil", fields(foo=int))
+                MSG(foo="Hello").write()
+
+        result = TestResult()
+        case = ValidationFailureProbe("test_bad_message")
+        case.run(result)
+
+        self.assertThat(
+            result.errors,
+            HasLength(1),
+        )
+
+    def test_skip_cleans_up(self):
+        """
+        After a skipped test the global Eliot logging state is restored.
+        """
+        # Save the logger that's active before we do anything so that we can
+        # restore it later. Also install another logger so we can compare it
+        # to the active logger later.
+        expected = MemoryLogger()
+        original = swap_logger(expected)
+
+        # Restore it, whatever else happens.
+        self.addCleanup(lambda: swap_logger(original))
+
+        class SkipProbe(SyncTestCase):
+            @skip("It's a skip test.")
+            def test_skipped(self):
+                pass
+
+        case = SkipProbe("test_skipped")
+        case.run()
+
+        # Retrieve the logger that's active now that the skipped test is done
+        # so we can check it against the expected value.
+        actual = swap_logger(MemoryLogger())
+        self.assertThat(
+            actual,
+            Is(expected),
+        )
+
+
 class LogCallDeferredTests(TestCase):
     """
     Tests for ``log_call_deferred``.
@@ -126,6 +126,42 @@ class HashUtilTests(unittest.TestCase):
             base32.a2b(b"2ckv3dfzh6rgjis6ogfqhyxnzy"),
         )

+    def test_convergence_hasher_tag(self):
+        """
+        ``_convergence_hasher_tag`` constructs the convergence hasher tag from a
+        unique prefix, the required, total, and segment size parameters, and a
+        convergence secret.
+        """
+        self.assertEqual(
+            b"allmydata_immutable_content_to_key_with_added_secret_v1+"
+            b"16:\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42,"
+            b"9:3,10,1024,",
+            hashutil._convergence_hasher_tag(
+                k=3,
+                n=10,
+                segsize=1024,
+                convergence=b"\x42" * 16,
+            ),
+        )
+
+    def test_convergence_hasher_out_of_bounds(self):
+        """
+        ``_convergence_hasher_tag`` raises ``ValueError`` if k or n is not between
+        1 and 256 inclusive or if k is greater than n.
+        """
+        segsize = 1024
+        secret = b"\x42" * 16
+        for bad_k in (0, 2, 257):
+            with self.assertRaises(ValueError):
+                hashutil._convergence_hasher_tag(
+                    k=bad_k, n=1, segsize=segsize, convergence=secret,
+                )
+        for bad_n in (0, 1, 257):
+            with self.assertRaises(ValueError):
+                hashutil._convergence_hasher_tag(
+                    k=2, n=bad_n, segsize=segsize, convergence=secret,
+                )
+
     def test_known_answers(self):
         """
         Verify backwards compatibility by comparing hash outputs for some
@@ -491,12 +491,16 @@ class JSONBytes(unittest.TestCase):
     """Tests for BytesJSONEncoder."""

     def test_encode_bytes(self):
-        """BytesJSONEncoder can encode bytes."""
+        """BytesJSONEncoder can encode bytes.
+
+        Bytes are presumed to be UTF-8 encoded.
+        """
+        snowman = u"def\N{SNOWMAN}\uFF00"
         data = {
-            b"hello": [1, b"cd"],
+            b"hello": [1, b"cd", {b"abc": [123, snowman.encode("utf-8")]}],
         }
         expected = {
-            u"hello": [1, u"cd"],
+            u"hello": [1, u"cd", {u"abc": [123, snowman]}],
         }
         # Bytes get passed through as if they were UTF-8 Unicode:
         encoded = jsonbytes.dumps(data)

(File diff suppressed because it is too large.)
@@ -196,5 +196,6 @@ PORTED_TEST_MODULES = [
     "allmydata.test.web.test_root",
     "allmydata.test.web.test_status",
     "allmydata.test.web.test_util",
+    "allmydata.test.web.test_web",
     "allmydata.test.web.test_webish",
 ]
@@ -176,10 +176,44 @@ def convergence_hash(k, n, segsize, data, convergence):
     return h.digest()


-def convergence_hasher(k, n, segsize, convergence):
+def _convergence_hasher_tag(k, n, segsize, convergence):
+    """
+    Create the convergence hashing tag.
+
+    :param int k: Required shares (in [1..256]).
+    :param int n: Total shares (in [1..256]).
+    :param int segsize: Maximum segment size.
+    :param bytes convergence: The convergence secret.
+
+    :return bytes: The bytestring to use as a tag in the convergence hash.
+    """
     assert isinstance(convergence, bytes)
+    if k > n:
+        raise ValueError(
+            "k > n not allowed; k = {}, n = {}".format(k, n),
+        )
+    if k < 1 or n < 1:
+        # It doesn't make sense to have zero shares. Zero shares carry no
+        # information, cannot encode any part of the application data.
+        raise ValueError(
+            "k, n < 1 not allowed; k = {}, n = {}".format(k, n),
+        )
+    if k > 256 or n > 256:
+        # ZFEC supports encoding application data into a maximum of 256
+        # shares. If we ignore the limitations of ZFEC, it may be fine to use
+        # a configuration with more shares than that and it may be fine to
+        # construct a convergence tag from such a configuration. Since ZFEC
+        # is the only supported encoder, though, this is moot for now.
+        raise ValueError(
+            "k, n > 256 not allowed; k = {}, n = {}".format(k, n),
+        )
     param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
     tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
+    return tag
+
+
+def convergence_hasher(k, n, segsize, convergence):
+    tag = _convergence_hasher_tag(k, n, segsize, convergence)
     return tagged_hasher(tag, KEYLEN)


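The tag returned by `_convergence_hasher_tag` is just the version prefix plus two netstring-framed fields, as the new test in `test_hashutil` spells out. A sketch of how that test's expected value is assembled, assuming tahoe's `netstring()` produces `b"<decimal length>:<bytes>,"` framing (not part of the diff):

    def netstring(s):
        # frame a byte string as <decimal length>:<bytes>,
        return b"%d:%s," % (len(s), s)

    secret = b"\x42" * 16
    tag = (b"allmydata_immutable_content_to_key_with_added_secret_v1+"
           + netstring(secret)          # b"16:BBBBBBBBBBBBBBBB,"
           + netstring(b"3,10,1024"))   # b"9:3,10,1024,"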
@@ -13,20 +13,34 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401


 import json


+def _bytes_to_unicode(obj):
+    """Convert any bytes objects to unicode, recursively."""
+    if isinstance(obj, bytes):
+        return obj.decode("utf-8")
+    if isinstance(obj, dict):
+        new_obj = {}
+        for k, v in obj.items():
+            if isinstance(k, bytes):
+                k = k.decode("utf-8")
+            v = _bytes_to_unicode(v)
+            new_obj[k] = v
+        return new_obj
+    if isinstance(obj, (list, set, tuple)):
+        return [_bytes_to_unicode(i) for i in obj]
+    return obj
+
+
 class BytesJSONEncoder(json.JSONEncoder):
     """
     A JSON encoder than can also encode bytes.

     The bytes are assumed to be UTF-8 encoded Unicode strings.
     """
-    def default(self, o):
-        if isinstance(o, bytes):
-            return o.decode("utf-8")
-        return json.JSONEncoder.default(self, o)
+    def iterencode(self, o, **kwargs):
+        return json.JSONEncoder.iterencode(self, _bytes_to_unicode(o), **kwargs)


 def dumps(obj, *args, **kwargs):
@@ -34,13 +48,6 @@ def dumps(obj, *args, **kwargs):

     The bytes are assumed to be UTF-8 encoded Unicode strings.
     """
-    if isinstance(obj, dict):
-        new_obj = {}
-        for k, v in obj.items():
-            if isinstance(k, bytes):
-                k = k.decode("utf-8")
-            new_obj[k] = v
-        obj = new_obj
     return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs)


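The move from `default()` to `iterencode()` is the substantive change here: `json.JSONEncoder.default()` is only consulted for *values* the encoder cannot serialize, so a bytes dictionary *key* raises `TypeError` before `default()` is ever reached. A short demonstration (not part of the diff):

    import json

    class OldStyle(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, bytes):
                return o.decode("utf-8")
            return json.JSONEncoder.default(self, o)

    json.dumps({"x": b"value"}, cls=OldStyle)  # works: the bytes value reaches default()
    # json.dumps({b"x": 1}, cls=OldStyle)      # TypeError: keys must be str, int, ...
    # Overriding iterencode() and rewriting the whole object first handles both.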
@@ -432,7 +432,7 @@ class DeepCheckResultsRenderer(MultiFormatResource):
             return CheckResultsRenderer(self._client,
                                         r.get_results_for_storage_index(si))
         except KeyError:
-            raise WebError("No detailed results for SI %s" % html.escape(name),
+            raise WebError("No detailed results for SI %s" % html.escape(str(name, "utf-8")),
                            http.NOT_FOUND)

     @render_exception
@@ -186,7 +186,7 @@ def convert_children_json(nodemaker, children_json):
     children = {}
     if children_json:
         data = json.loads(children_json)
-        for (namex, (ctype, propdict)) in data.iteritems():
+        for (namex, (ctype, propdict)) in data.items():
             namex = unicode(namex)
             writecap = to_bytes(propdict.get("rw_uri"))
             readcap = to_bytes(propdict.get("ro_uri"))
@@ -283,8 +283,8 @@ def render_time_attr(t):
 # actual exception). The latter is growing increasingly annoying.

 def should_create_intermediate_directories(req):
-    t = get_arg(req, "t", "").strip()
-    return bool(req.method in ("PUT", "POST") and
+    t = unicode(get_arg(req, "t", "").strip(), "ascii")
+    return bool(req.method in (b"PUT", b"POST") and
                 t not in ("delete", "rename", "rename-form", "check"))

 def humanize_exception(exc):
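This hunk is the template for many below it: under Python 3, Twisted hands resources a bytes `Request.method` (and bytes URL path segments), and comparing bytes against a native string is simply False rather than an error. A two-line illustration (not part of the diff):

    method = b"POST"   # what twisted.web provides on Python 3
    method == "POST"   # False on Python 3 (bytes never equal str), True on Python 2
    method == b"POST"  # True on both Pythons, hence the b"" literals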
@@ -674,7 +674,7 @@ def url_for_string(req, url_string):
     and the given URL string.
     """
     url = DecodedURL.from_text(url_string.decode("utf-8"))
-    if url.host == b"":
+    if not url.host:
         root = req.URLPath()
         netloc = root.netloc.split(b":", 1)
         if len(netloc) == 1:
@@ -40,8 +40,12 @@ def get_arg(req, argname, default=None, multiple=False):
     results = []
     if argname in req.args:
         results.extend(req.args[argname])
-    if req.fields and argname in req.fields:
-        results.append(req.fields[argname].value)
+    argname_unicode = unicode(argname, "utf-8")
+    if req.fields and argname_unicode in req.fields:
+        value = req.fields[argname_unicode].value
+        if isinstance(value, unicode):
+            value = value.encode("utf-8")
+        results.append(value)
     if multiple:
         return tuple(results)
     if results:
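After this change `get_arg` keeps a uniform contract: query arguments arrive as bytes in `req.args`, parsed form fields arrive as text, and everything is normalized to bytes on the way out. A sketch of the resulting usage (hypothetical values; not part of the diff):

    # Query string ?t=json on Python 3: req.args == {b"t": [b"json"]}
    t = get_arg(req, b"t", b"")
    assert isinstance(t, bytes)  # callers then decode explicitly,
    t = unicode(t, "ascii")      # as the surrounding hunks do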
@@ -79,7 +83,13 @@ class MultiFormatResource(resource.Resource, object):
         if isinstance(t, bytes):
             t = unicode(t, "ascii")
         renderer = self._get_renderer(t)
-        return renderer(req)
+        result = renderer(req)
+        # On Python 3, json.dumps() returns Unicode for example, but
+        # twisted.web expects bytes. Instead of updating every single render
+        # method, just handle Unicode one time here.
+        if isinstance(result, unicode):
+            result = result.encode("utf-8")
+        return result

     def _get_renderer(self, fmt):
         """
@@ -1,3 +1,11 @@
+"""
+TODO: When porting to Python 3, the filename handling logic seems wrong. On
+Python 3 filename will _already_ be correctly decoded. So only decode if it's
+bytes.
+
+Also there's a lot of code duplication I think.
+"""
+
 from past.builtins import unicode

 from urllib.parse import quote as url_quote
@@ -135,7 +143,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
         nonterminal = not terminal #len(req.postpath) > 0

-        t = get_arg(req, b"t", b"").strip()
+        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
         if isinstance(node_or_failure, Failure):
             f = node_or_failure
             f.trap(NoSuchChildError)
@@ -150,10 +158,10 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         else:
             # terminal node
             terminal_requests = (
-                ("POST", "mkdir"),
-                ("PUT", "mkdir"),
-                ("POST", "mkdir-with-children"),
-                ("POST", "mkdir-immutable")
+                (b"POST", "mkdir"),
+                (b"PUT", "mkdir"),
+                (b"POST", "mkdir-with-children"),
+                (b"POST", "mkdir-immutable")
             )
             if (req.method, t) in terminal_requests:
                 # final directory
@@ -182,8 +190,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
                 )
                 return d
             leaf_requests = (
-                ("PUT",""),
-                ("PUT","uri"),
+                (b"PUT",""),
+                (b"PUT","uri"),
             )
             if (req.method, t) in leaf_requests:
                 # we were trying to find the leaf filenode (to put a new
@@ -224,7 +232,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
         if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
             si = self.node.get_storage_index()
-            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
+            if si and req.setETag(b'DIR:%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
                 return b""

         if not t:
@@ -255,7 +263,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

     @render_exception
     def render_PUT(self, req):
-        t = get_arg(req, b"t", b"").strip()
+        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
         replace = parse_replace_arg(get_arg(req, "replace", "true"))

         if t == "mkdir":
@@ -364,7 +372,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         return d

     def _POST_upload(self, req):
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "utf-8")
         contents = req.fields["file"]
         assert contents.filename is None or isinstance(contents.filename, str)
         name = get_arg(req, "name")
@@ -374,7 +382,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         if not name:
             # this prohibts empty, missing, and all-whitespace filenames
             raise WebError("upload requires a name")
-        assert isinstance(name, str)
-        name = name.decode(charset)
+        if isinstance(name, bytes):
+            name = name.decode(charset)
         if "/" in name:
             raise WebError("name= may not contain a slash", http.BAD_REQUEST)
@@ -413,7 +421,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
         name = get_arg(req, "name")
         if not name:
             raise WebError("set-uri requires a name")
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
         name = name.decode(charset)
         replace = parse_replace_arg(get_arg(req, "replace", "true"))

|
@ -436,8 +444,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
|
||||||
# a slightly confusing error message if someone does a POST
|
# a slightly confusing error message if someone does a POST
|
||||||
# without a name= field. For our own HTML this isn't a big
|
# without a name= field. For our own HTML this isn't a big
|
||||||
# deal, because we create the 'unlink' POST buttons ourselves.
|
# deal, because we create the 'unlink' POST buttons ourselves.
|
||||||
name = ''
|
name = b''
|
||||||
charset = get_arg(req, "_charset", "utf-8")
|
charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
|
||||||
name = name.decode(charset)
|
name = name.decode(charset)
|
||||||
d = self.node.delete(name)
|
d = self.node.delete(name)
|
||||||
d.addCallback(lambda res: "thing unlinked")
|
d.addCallback(lambda res: "thing unlinked")
|
||||||
|
@@ -453,7 +461,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
             return self._POST_relink(req)

     def _POST_relink(self, req):
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
         replace = parse_replace_arg(get_arg(req, "replace", "true"))

         from_name = get_arg(req, "from_name")
|
||||||
# TODO test handling of bad JSON
|
# TODO test handling of bad JSON
|
||||||
raise
|
raise
|
||||||
cs = {}
|
cs = {}
|
||||||
for name, (file_or_dir, mddict) in children.iteritems():
|
for name, (file_or_dir, mddict) in children.items():
|
||||||
name = unicode(name) # json returns str *or* unicode
|
name = unicode(name) # json returns str *or* unicode
|
||||||
writecap = mddict.get('rw_uri')
|
writecap = mddict.get('rw_uri')
|
||||||
if writecap is not None:
|
if writecap is not None:
|
||||||
writecap = str(writecap)
|
writecap = writecap.encode("utf-8")
|
||||||
readcap = mddict.get('ro_uri')
|
readcap = mddict.get('ro_uri')
|
||||||
if readcap is not None:
|
if readcap is not None:
|
||||||
readcap = str(readcap)
|
readcap = readcap.encode("utf-8")
|
||||||
cs[name] = (writecap, readcap, mddict.get('metadata'))
|
cs[name] = (writecap, readcap, mddict.get('metadata'))
|
||||||
d = self.node.set_children(cs, replace)
|
d = self.node.set_children(cs, replace)
|
||||||
d.addCallback(lambda res: "Okay so I did it.")
|
d.addCallback(lambda res: "Okay so I did it.")
|
||||||
|
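The `str(writecap)` to `writecap.encode("utf-8")` change matters because `json.loads` always yields text: on Python 2, `str()` coerced an ASCII cap to a byte string, but on Python 3 it would leave it as text. A one-line illustration (hypothetical cap value; not part of the diff):

    writecap = u"URI:CHK:aaaa:bbbb:3:10:1024"  # json.loads returns text
    writecap.encode("utf-8")                   # bytes on both Pythons
    # str(writecap) is bytes on Python 2 but still text on Python 3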
@@ -1144,8 +1152,8 @@ def _slashify_path(path):
     in it
     """
     if not path:
-        return ""
-    return "/".join([p.encode("utf-8") for p in path])
+        return b""
+    return b"/".join([p.encode("utf-8") for p in path])


 def _cap_to_link(root, path, cap):
|
||||||
req.setHeader("content-type", "text/plain")
|
req.setHeader("content-type", "text/plain")
|
||||||
lines = []
|
lines = []
|
||||||
is_finished = self.monitor.is_finished()
|
is_finished = self.monitor.is_finished()
|
||||||
lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
|
lines.append(b"finished: " + {True: b"yes", False: b"no"}[is_finished])
|
||||||
for path, cap in self.monitor.get_status()["manifest"]:
|
for path, cap in self.monitor.get_status()["manifest"]:
|
||||||
lines.append(_slashify_path(path) + " " + cap)
|
lines.append(_slashify_path(path) + b" " + cap)
|
||||||
return "\n".join(lines) + "\n"
|
return b"\n".join(lines) + b"\n"
|
||||||
|
|
||||||
def render_JSON(self, req):
|
def render_JSON(self, req):
|
||||||
req.setHeader("content-type", "text/plain")
|
req.setHeader("content-type", "text/plain")
|
||||||
|
@ -1290,7 +1298,7 @@ class DeepSizeResults(MultiFormatResource):
|
||||||
+ stats.get("size-mutable-files", 0)
|
+ stats.get("size-mutable-files", 0)
|
||||||
+ stats.get("size-directories", 0))
|
+ stats.get("size-directories", 0))
|
||||||
output += "size: %d\n" % total
|
output += "size: %d\n" % total
|
||||||
return output
|
return output.encode("utf-8")
|
||||||
render_TEXT = render_HTML
|
render_TEXT = render_HTML
|
||||||
|
|
||||||
def render_JSON(self, req):
|
def render_JSON(self, req):
|
||||||
|
@ -1315,7 +1323,7 @@ class DeepStatsResults(Resource, object):
|
||||||
req.setHeader("content-type", "text/plain")
|
req.setHeader("content-type", "text/plain")
|
||||||
s = self.monitor.get_status().copy()
|
s = self.monitor.get_status().copy()
|
||||||
s["finished"] = self.monitor.is_finished()
|
s["finished"] = self.monitor.is_finished()
|
||||||
return json.dumps(s, indent=1)
|
return json.dumps(s, indent=1).encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
@implementer(IPushProducer)
|
@implementer(IPushProducer)
|
||||||
|
|
|
@@ -127,7 +127,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
                            http.NOT_IMPLEMENTED)
         if not t:
             return self.replace_me_with_a_child(req, self.client, replace)
-        if t == "uri":
+        if t == b"uri":
             return self.replace_me_with_a_childcap(req, self.client, replace)

         raise WebError("PUT to a file: bad t=%s" % t)
@@ -188,8 +188,8 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             # if the client already has the ETag then we can
             # short-circuit the whole process.
             si = self.node.get_storage_index()
-            if si and req.setETag('%s-%s' % (base32.b2a(si), t or "")):
-                return ""
+            if si and req.setETag(b'%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
+                return b""

         if not t:
             # just get the contents
@@ -281,7 +281,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             assert self.parentnode and self.name
             return self.replace_me_with_a_child(req, self.client, replace)

-        if t == "uri":
+        if t == b"uri":
             if not replace:
                 raise ExistingChildError()
             assert self.parentnode and self.name
@@ -309,7 +309,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             assert self.parentnode and self.name
             d = self.replace_me_with_a_formpost(req, self.client, replace)
         else:
-            raise WebError("POST to file: bad t=%s" % t)
+            raise WebError("POST to file: bad t=%s" % unicode(t, "ascii"))

         return handle_when_done(req, d)

@@ -439,7 +439,7 @@ class FileDownloader(Resource, object):
         # bytes we were given in the URL. See the comment in
         # FileNodeHandler.render_GET for the sad details.
         req.setHeader("content-disposition",
-                      'attachment; filename="%s"' % self.filename)
+                      b'attachment; filename="%s"' % self.filename)

         filesize = self.filenode.get_size()
         assert isinstance(filesize, (int,long)), filesize
@@ -475,8 +475,8 @@ class FileDownloader(Resource, object):
             size = contentsize

         req.setHeader("content-length", b"%d" % contentsize)
-        if req.method == "HEAD":
-            return ""
+        if req.method == b"HEAD":
+            return b""

         d = self.filenode.read(req, first, size)

@@ -1,5 +1,6 @@

-import os, urllib
+import os
+from urllib.parse import quote as urlquote

 from twisted.python.filepath import FilePath
 from twisted.web.template import tags as T, Element, renderElement, XMLFile, renderer
@@ -180,7 +181,7 @@ class MoreInfoElement(Element):
         else:
             return ""
         root = self.get_root(req)
-        quoted_uri = urllib.quote(node.get_uri())
+        quoted_uri = urlquote(node.get_uri())
         text_plain_url = "%s/file/%s/@@named=/raw.txt" % (root, quoted_uri)
         return T.li("Raw data as ", T.a("text/plain", href=text_plain_url))

|
||||||
@renderer
|
@renderer
|
||||||
def check_form(self, req, tag):
|
def check_form(self, req, tag):
|
||||||
node = self.original
|
node = self.original
|
||||||
quoted_uri = urllib.quote(node.get_uri())
|
quoted_uri = urlquote(node.get_uri())
|
||||||
target = self.get_root(req) + "/uri/" + quoted_uri
|
target = self.get_root(req) + "/uri/" + quoted_uri
|
||||||
if IDirectoryNode.providedBy(node):
|
if IDirectoryNode.providedBy(node):
|
||||||
target += "/"
|
target += "/"
|
||||||
|
@ -236,8 +237,8 @@ class MoreInfoElement(Element):
|
||||||
def overwrite_form(self, req, tag):
|
def overwrite_form(self, req, tag):
|
||||||
node = self.original
|
node = self.original
|
||||||
root = self.get_root(req)
|
root = self.get_root(req)
|
||||||
action = "%s/uri/%s" % (root, urllib.quote(node.get_uri()))
|
action = "%s/uri/%s" % (root, urlquote(node.get_uri()))
|
||||||
done_url = "%s/uri/%s?t=info" % (root, urllib.quote(node.get_uri()))
|
done_url = "%s/uri/%s?t=info" % (root, urlquote(node.get_uri()))
|
||||||
overwrite = T.form(action=action, method="post",
|
overwrite = T.form(action=action, method="post",
|
||||||
enctype="multipart/form-data")(
|
enctype="multipart/form-data")(
|
||||||
T.fieldset(
|
T.fieldset(
|
||||||
|
|
|
@@ -1,3 +1,4 @@
+from past.builtins import unicode

 import time
 from hyperlink import (
@@ -101,12 +102,12 @@ class OphandleTable(resource.Resource, service.Service):
     def getChild(self, name, req):
         ophandle = name
         if ophandle not in self.handles:
-            raise WebError("unknown/expired handle '%s'" % escape(ophandle),
+            raise WebError("unknown/expired handle '%s'" % escape(unicode(ophandle, "utf-8")),
                            NOT_FOUND)
         (monitor, renderer, when_added) = self.handles[ophandle]

         t = get_arg(req, "t", "status")
-        if t == "cancel" and req.method == "POST":
+        if t == b"cancel" and req.method == b"POST":
             monitor.cancel()
             # return the status anyways, but release the handle
             self._release_ophandle(ophandle)
@@ -151,7 +152,7 @@ class ReloadMixin(object):
     @renderer
     def refresh(self, req, tag):
         if self.monitor.is_finished():
-            return ""
+            return b""
         tag.attributes["http-equiv"] = "refresh"
         tag.attributes["content"] = str(self.REFRESH_TIME)
         return tag
@@ -1,4 +1,5 @@
 from future.utils import PY3
+from past.builtins import unicode

 import os
 import time
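`past.builtins.unicode`, imported above and throughout these hunks, resolves to the native `unicode` type on Python 2 and to `str` on Python 3, so `unicode(some_bytes, "utf-8")` is a portable spelling of "decode these bytes to text" (not part of the diff):

    from past.builtins import unicode
    unicode(b"abc", "utf-8")  # u"abc" on Python 2, "abc" on Python 3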
|
@ -97,7 +98,7 @@ class URIHandler(resource.Resource, object):
|
||||||
either "PUT /uri" to create an unlinked file, or
|
either "PUT /uri" to create an unlinked file, or
|
||||||
"PUT /uri?t=mkdir" to create an unlinked directory
|
"PUT /uri?t=mkdir" to create an unlinked directory
|
||||||
"""
|
"""
|
||||||
t = get_arg(req, "t", "").strip()
|
t = unicode(get_arg(req, "t", "").strip(), "utf-8")
|
||||||
if t == "":
|
if t == "":
|
||||||
file_format = get_format(req, "CHK")
|
file_format = get_format(req, "CHK")
|
||||||
mutable_type = get_mutable_type(file_format)
|
mutable_type = get_mutable_type(file_format)
|
||||||
|
@@ -120,7 +121,7 @@ class URIHandler(resource.Resource, object):
         unlinked file or "POST /uri?t=mkdir" to create a
         new directory
         """
-        t = get_arg(req, "t", "").strip()
+        t = unicode(get_arg(req, "t", "").strip(), "ascii")
         if t in ("", "upload"):
             file_format = get_format(req)
             mutable_type = get_mutable_type(file_format)
@@ -177,7 +178,7 @@ class FileHandler(resource.Resource, object):

     @exception_to_child
     def getChild(self, name, req):
-        if req.method not in ("GET", "HEAD"):
+        if req.method not in (b"GET", b"HEAD"):
             raise WebError("/file can only be used with GET or HEAD")
         # 'name' must be a file URI
         try:
@@ -200,7 +201,7 @@ class IncidentReporter(MultiFormatResource):

     @render_exception
     def render(self, req):
-        if req.method != "POST":
+        if req.method != b"POST":
             raise WebError("/report_incident can only be used with POST")

         log.msg(format="User reports incident through web page: %(details)s",
@@ -255,11 +256,11 @@ class Root(MultiFormatResource):
         if not path:
             # Render "/" path.
             return self
-        if path == "helper_status":
+        if path == b"helper_status":
             # the Helper isn't attached until after the Tub starts, so this child
             # needs to created on each request
             return status.HelperStatus(self._client.helper)
-        if path == "storage":
+        if path == b"storage":
             # Storage isn't initialized until after the web hierarchy is
             # constructed so this child needs to be created later than
             # `__init__`.
@@ -293,7 +294,7 @@ class Root(MultiFormatResource):
                 self._describe_server(server)
                 for server
                 in broker.get_known_servers()
-            ))
+            ), key=lambda o: sorted(o.items()))


     def _describe_server(self, server):
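The added key= matters because Python 3 refuses to order dicts, so sorting the generator of server descriptions would raise TypeError; sorting each dict's items first yields comparable tuples and a deterministic order. A self-contained sketch with invented descriptions:

descriptions = [{"nickname": "carol", "version": "1"},
                {"nickname": "alice", "version": "2"}]
try:
    sorted(descriptions)
except TypeError:
    pass  # "'<' not supported between instances of 'dict'"
stable = sorted(descriptions, key=lambda o: sorted(o.items()))
assert stable[0]["nickname"] == "alice"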
@@ -284,7 +284,7 @@ def _find_overlap(events, start_key, end_key):
     rows = []
     for ev in events:
         ev = ev.copy()
-        if ev.has_key('server'):
+        if 'server' in ev:
             ev["serverid"] = ev["server"].get_longname()
             del ev["server"]
         # find an empty slot in the rows
@@ -362,8 +362,8 @@ def _find_overlap_requests(events):
 def _color(server):
     h = hashlib.sha256(server.get_serverid()).digest()
     def m(c):
-        return min(ord(c) / 2 + 0x80, 0xff)
-    return "#%02x%02x%02x" % (m(h[0]), m(h[1]), m(h[2]))
+        return min(ord(c) // 2 + 0x80, 0xff)
+    return "#%02x%02x%02x" % (m(h[0:1]), m(h[1:2]), m(h[2:3]))

 class _EventJson(Resource, object):
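Both fixes in _color are classic Python 3 bytes issues: indexing bytes yields an int (so ord() on it raises TypeError), while a one-byte slice stays bytes, and / between ints produces a float. In isolation:

import hashlib

h = hashlib.sha256(b"some-server-id").digest()
assert isinstance(h[0], int)      # indexing bytes gives an int on Python 3
assert isinstance(h[0:1], bytes)  # slicing keeps a length-1 bytes value

def m(c):
    # ord() accepts a length-1 bytes on Python 3 and a one-character
    # str on Python 2; // keeps the division integral on both.
    return min(ord(c) // 2 + 0x80, 0xff)

assert m(h[0:1]) <= 0xff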
@@ -426,7 +426,7 @@ class DownloadStatusPage(Resource, object):
         """
         super(DownloadStatusPage, self).__init__()
         self._download_status = download_status
-        self.putChild("event_json", _EventJson(self._download_status))
+        self.putChild(b"event_json", _EventJson(self._download_status))

     @render_exception
     def render_GET(self, req):
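putChild is another bytes boundary: Twisted keys its child-resource table by the bytes path segment taken from the request URL, so a child registered under a native str is never found on Python 3. A quick check:

from twisted.web.resource import Resource

root = Resource()
root.putChild(b"event_json", Resource())
# Twisted looks children up by the bytes segment from the request path:
assert b"event_json" in root.children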
@@ -1288,14 +1288,14 @@ class Status(MultiFormatResource):
         # final URL segment will be an empty string. Resources can
         # thus know if they were requested with or without a final
         # slash."
-        if not path and request.postpath != ['']:
+        if not path and request.postpath != [b'']:
             return self

         h = self.history
         try:
-            stype, count_s = path.split("-")
+            stype, count_s = path.split(b"-")
         except ValueError:
-            raise WebError("no '-' in '{}'".format(path))
+            raise WebError("no '-' in '{}'".format(unicode(path, "utf-8")))
         count = int(count_s)
         stype = unicode(stype, "ascii")
         if stype == "up":
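The extra decode in the WebError message keeps Python 3's repr out of user-visible text; interpolating bytes into a str embeds b'...' rather than the characters:

path = b"bogus"
assert "no '-' in '{}'".format(path) == "no '-' in 'b'bogus''"
assert "no '-' in '{}'".format(str(path, "utf-8")) == "no '-' in 'bogus'"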
@@ -1,5 +1,6 @@
+from past.builtins import unicode

-import urllib
+from urllib.parse import quote as urlquote

 from twisted.web import http
 from twisted.internet import defer
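urllib.quote became urllib.parse.quote in Python 3 (on Python 2 this import is presumably satisfied by the future library's backported stdlib, which this codebase uses); a dependency-free version of the same straddle:

try:
    from urllib.parse import quote as urlquote   # Python 3
except ImportError:
    from urllib import quote as urlquote         # Python 2

assert urlquote("URI:CHK:abc/def") == "URI%3ACHK%3Aabc/def"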
@@ -65,8 +66,8 @@ def POSTUnlinkedCHK(req, client):
         # if when_done= is provided, return a redirect instead of our
         # usual upload-results page
         def _done(upload_results, redir_to):
-            if "%(uri)s" in redir_to:
-                redir_to = redir_to.replace("%(uri)s", urllib.quote(upload_results.get_uri()))
+            if b"%(uri)s" in redir_to:
+                redir_to = redir_to.replace(b"%(uri)s", urlquote(upload_results.get_uri()).encode("utf-8"))
             return url_for_string(req, redir_to)
         d.addCallback(_done, when_done)
     else:
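One subtlety here: quote() returns a native str on Python 3, while redir_to arrived from the request as bytes, so the quoted URI has to be re-encoded before replace(). In isolation:

from urllib.parse import quote as urlquote

redir_to = b"/done?cap=%(uri)s"
cap = b"URI:CHK:abc"
if b"%(uri)s" in redir_to:
    redir_to = redir_to.replace(b"%(uri)s", urlquote(cap).encode("utf-8"))
assert redir_to == b"/done?cap=URI%3ACHK%3Aabc"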
@@ -118,8 +119,8 @@ class UploadResultsElement(status.UploadResultsRendererMixin):
     def download_link(self, req, tag):
         d = self.upload_results()
         d.addCallback(lambda res:
-                      tags.a("/uri/" + res.get_uri(),
-                             href="/uri/" + urllib.quote(res.get_uri())))
+                      tags.a("/uri/" + unicode(res.get_uri(), "utf-8"),
+                             href="/uri/" + urlquote(unicode(res.get_uri(), "utf-8"))))
         return d

@@ -158,7 +159,7 @@ def POSTUnlinkedCreateDirectory(req, client):
     redirect = get_arg(req, "redirect_to_result", "false")
     if boolean_of_arg(redirect):
         def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
             req.setResponseCode(http.SEE_OTHER) # 303
             req.setHeader('location', new_url)
             return ''
@@ -176,7 +177,7 @@ def POSTUnlinkedCreateDirectoryWithChildren(req, client):
     redirect = get_arg(req, "redirect_to_result", "false")
     if boolean_of_arg(redirect):
         def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
             req.setResponseCode(http.SEE_OTHER) # 303
             req.setHeader('location', new_url)
             return ''
@@ -194,7 +195,7 @@ def POSTUnlinkedCreateImmutableDirectory(req, client):
     redirect = get_arg(req, "redirect_to_result", "false")
    if boolean_of_arg(redirect):
        def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
            req.setResponseCode(http.SEE_OTHER) # 303
            req.setHeader('location', new_url)
            return ''
@@ -44,6 +44,43 @@ from .web.storage_plugins import (
     StoragePlugins,
 )


+if PY2:
+    FileUploadFieldStorage = FieldStorage
+else:
+    class FileUploadFieldStorage(FieldStorage):
+        """
+        Do terrible things to ensure files are still bytes.
+
+        On Python 2, uploaded files were always bytes. On Python 3, there's a
+        heuristic: if the filename is set on a field, it's assumed to be a file
+        upload and therefore bytes. If no filename is set, it's Unicode.
+
+        Unfortunately, we always want it to be bytes, and Tahoe-LAFS also
+        enables setting the filename not via the MIME filename, but via a
+        separate field called "name".
+
+        Thus we need to do this ridiculous workaround. Mypy doesn't like it
+        either, thus the ``# type: ignore`` below.
+
+        Source for idea:
+        https://mail.python.org/pipermail/python-dev/2017-February/147402.html
+        """
+        @property   # type: ignore
+        def filename(self):
+            if self.name == "file" and not self._mime_filename:
+                # We use the file field to upload files, see directory.py's
+                # _POST_upload. Lack of _mime_filename means we need to trick
+                # FieldStorage into thinking there is a filename so it'll
+                # return bytes.
+                return "unknown-filename"
+            return self._mime_filename
+
+        @filename.setter
+        def filename(self, value):
+            self._mime_filename = value
+
+
 class TahoeLAFSRequest(Request, object):
     """
     ``TahoeLAFSRequest`` adds several features to a Twisted Web ``Request``
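The workaround hinges on the fact that a subclass may replace a base class's plain instance attribute with a property: the assignment to self.filename inside FieldStorage's __init__ is routed through the subclass's setter. Reduced to its essentials, with invented names:

class Base(object):
    def __init__(self, filename):
        self.filename = filename   # intercepted by the property below

class AlwaysNamed(Base):
    @property
    def filename(self):
        # Report a stand-in name when none was supplied, which is what
        # nudges cgi.FieldStorage into treating the payload as bytes.
        return self._filename if self._filename else "unknown-filename"

    @filename.setter
    def filename(self, value):
        self._filename = value

assert AlwaysNamed(None).filename == "unknown-filename"
assert AlwaysNamed("photo.png").filename == "photo.png"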
@@ -94,7 +131,8 @@ class TahoeLAFSRequest(Request, object):
             headers['content-length'] = str(self.content.tell())
             self.content.seek(0)

-            self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'})
+            self.fields = FileUploadFieldStorage(
+                self.content, headers, environ={'REQUEST_METHOD': 'POST'})
             self.content.seek(0)

             self._tahoeLAFSSecurityPolicy()
@@ -211,7 +249,7 @@ class WebishServer(service.MultiService):
             # use to test ophandle expiration.
             self._operations = OphandleTable(clock)
             self._operations.setServiceParent(self)
-            self.root.putChild("operations", self._operations)
+            self.root.putChild(b"operations", self._operations)

         self.root.putChild(b"storage-plugins", StoragePlugins(client))

@@ -220,7 +258,7 @@ class WebishServer(service.MultiService):
         self.site = TahoeLAFSSite(tempdir, self.root)
         self.staticdir = staticdir # so tests can check
         if staticdir:
-            self.root.putChild("static", static.File(staticdir))
+            self.root.putChild(b"static", static.File(staticdir))
         if re.search(r'^\d', webport):
             webport = "tcp:"+webport # twisted warns about bare "0" or "3456"
         # strports must be native strings.