Merge remote-tracking branch 'origin/master' into 3581.unicode_to_argv.1

commit 8ffb083d38
@@ -29,7 +29,7 @@ workflows:
       - "debian-9": &DOCKERHUB_CONTEXT
          context: "dockerhub-auth"

-      - "debian-8":
+      - "debian-10":
          <<: *DOCKERHUB_CONTEXT
          requires:
            - "debian-9"
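The hunk above leans on YAML anchors and merge keys: &DOCKERHUB_CONTEXT names the mapping introduced under "debian-9", and <<: *DOCKERHUB_CONTEXT splices it into other jobs, which is why the renamed "debian-10" entry needs nothing beyond the one-line merge. A minimal sketch of the expansion, assuming PyYAML is available (CircleCI resolves anchors the same way):

import yaml

doc = """
jobs:
  - "debian-9": &DOCKERHUB_CONTEXT
      context: "dockerhub-auth"
  - "debian-10":
      <<: *DOCKERHUB_CONTEXT
"""
# The merge key copies the anchored mapping into "debian-10".
print(yaml.safe_load(doc))
# {'jobs': [{'debian-9': {'context': 'dockerhub-auth'}},
#           {'debian-10': {'context': 'dockerhub-auth'}}]}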
@@ -86,11 +86,6 @@ workflows:
          # integration tests.
          - "debian-9"

-      # Generate the underlying data for a visualization to aid with Python 3
-      # porting.
-      - "build-porting-depgraph":
-          <<: *DOCKERHUB_CONTEXT
-
      - "typechecks":
          <<: *DOCKERHUB_CONTEXT

@@ -107,7 +102,7 @@ workflows:
            - "master"

    jobs:
-      - "build-image-debian-8":
+      - "build-image-debian-10":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-debian-9":
          <<: *DOCKERHUB_CONTEXT
@@ -277,11 +272,11 @@ jobs:
      fi


-  debian-8:
+  debian-10:
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/debian:8-py2.7"
+        image: "tahoelafsci/debian:10-py2.7"
        user: "nobody"

@@ -451,33 +446,6 @@ jobs:
      # them in parallel.
      nix-build --cores 3 --max-jobs 2 nix/

-  # Generate up-to-date data for the dependency graph visualizer.
-  build-porting-depgraph:
-    # Get a system in which we can easily install Tahoe-LAFS and all its
-    # dependencies.  The dependency graph analyzer works by executing the code.
-    # It's Python, what do you expect?
-    <<: *DEBIAN
-
-    steps:
-      - "checkout"
-
-      - add_ssh_keys:
-          fingerprints:
-            # Jean-Paul Calderone <exarkun@twistedmatrix.com> (CircleCI depgraph key)
-            # This lets us push to tahoe-lafs/tahoe-depgraph in the next step.
-            - "86:38:18:a7:c0:97:42:43:18:46:55:d6:21:b0:5f:d4"
-
-      - run:
-          name: "Setup Python Environment"
-          command: |
-            /tmp/venv/bin/pip install -e /tmp/project
-
-      - run:
-          name: "Generate dependency graph data"
-          command: |
-            . /tmp/venv/bin/activate
-            ./misc/python3/depgraph.sh
-
  typechecks:
    docker:
      - <<: *DOCKERHUB_AUTH
@@ -529,12 +497,12 @@ jobs:
            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}


-  build-image-debian-8:
+  build-image-debian-10:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "debian"
-      TAG: "8"
+      TAG: "10"
      PYTHON_VERSION: "2.7"

@@ -46,7 +46,7 @@ class ProvisioningTool(rend.Page):
        req = inevow.IRequest(ctx)

        def getarg(name, astype=int):
-            if req.method != "POST":
+            if req.method != b"POST":
                return None
            if name in req.fields:
                return astype(req.fields[name].value)
@@ -0,0 +1 @@
+Debian 8 support has been replaced with Debian 10 support.
@@ -454,7 +454,7 @@ class FakeCHKFileNode(object):  # type: ignore # incomplete implementation
        return self.storage_index

    def check(self, monitor, verify=False, add_lease=False):
-        s = StubServer("\x00"*20)
+        s = StubServer(b"\x00"*20)
        r = CheckResults(self.my_uri, self.storage_index,
                         healthy=True, recoverable=True,
                         count_happiness=10,
@@ -588,12 +588,12 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
        self.file_types[self.storage_index] = version
        initial_contents = self._get_initial_contents(contents)
        data = initial_contents.read(initial_contents.get_size())
-        data = "".join(data)
+        data = b"".join(data)
        self.all_contents[self.storage_index] = data
        return defer.succeed(self)
    def _get_initial_contents(self, contents):
        if contents is None:
-            return MutableData("")
+            return MutableData(b"")

        if IMutableUploadable.providedBy(contents):
            return contents
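The mechanical pattern in these test-double hunks is the usual Python 3 porting move: joining or comparing mixed bytes and str raises TypeError, so literals feeding byte-oriented code grow a b"" prefix. A standalone illustration:

chunks = [b"ab", b"cd"]       # what read() yields: a list of bytes
print(b"".join(chunks))       # b'abcd'
try:
    "".join(chunks)           # a str joiner cannot take bytes items
except TypeError as e:
    print(e)                  # sequence item 0: expected str instance, bytes found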
@@ -647,7 +647,7 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
    def raise_error(self):
        pass
    def get_writekey(self):
-        return "\x00"*16
+        return b"\x00"*16
    def get_size(self):
        return len(self.all_contents[self.storage_index])
    def get_current_size(self):
@@ -666,7 +666,7 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
        return self.file_types[self.storage_index]

    def check(self, monitor, verify=False, add_lease=False):
-        s = StubServer("\x00"*20)
+        s = StubServer(b"\x00"*20)
        r = CheckResults(self.my_uri, self.storage_index,
                         healthy=True, recoverable=True,
                         count_happiness=10,
@@ -677,7 +677,7 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
                         count_recoverable_versions=1,
                         count_unrecoverable_versions=0,
                         servers_responding=[s],
-                         sharemap={"seq1-abcd-sh0": [s]},
+                         sharemap={b"seq1-abcd-sh0": [s]},
                         count_wrong_shares=0,
                         list_corrupt_shares=[],
                         count_corrupt_shares=0,
@@ -731,7 +731,7 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
    def overwrite(self, new_contents):
        assert not self.is_readonly()
        new_data = new_contents.read(new_contents.get_size())
-        new_data = "".join(new_data)
+        new_data = b"".join(new_data)
        self.all_contents[self.storage_index] = new_data
        return defer.succeed(None)
    def modify(self, modifier):
@@ -762,7 +762,7 @@ class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
    def update(self, data, offset):
        assert not self.is_readonly()
        def modifier(old, servermap, first_time):
-            new = old[:offset] + "".join(data.read(data.get_size()))
+            new = old[:offset] + b"".join(data.read(data.get_size()))
            new += old[len(new):]
            return new
        return self.modify(modifier)
@@ -881,6 +881,8 @@ class WebErrorMixin(object):
        body = yield response.content()
        self.assertEquals(response.code, code)
        if response_substring is not None:
+            if isinstance(response_substring, unicode):
+                response_substring = response_substring.encode("utf-8")
            self.assertIn(response_substring, body)
        returnValue(body)

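The two added lines normalize the expectation to the body's type: response.content() yields bytes, so a text substring has to be UTF-8 encoded before assertIn can find it. A minimal sketch of the same normalization (on Python 3, `str` plays the role `unicode` plays on Python 2):

expected = u"sn\N{SNOWMAN}wman"                              # a text expectation
body = u"prefix sn\N{SNOWMAN}wman suffix".encode("utf-8")    # bytes, like an HTTP body
if isinstance(expected, str):
    expected = expected.encode("utf-8")
assert expected in body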
@@ -203,6 +203,14 @@ def flip_one_bit(s, offset=0, size=None):
class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
+        # Make sure unicode strings are a consistent type. Specifically there's
+        # Future newstr (backported Unicode type) vs. Python 2 native unicode
+        # type. They're equal, and _logically_ the same type, but have
+        # different types in practice.
+        if a.__class__ == future_str:
+            a = unicode(a)
+        if b.__class__ == future_str:
+            b = unicode(b)
        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))

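A sketch of the Python 2 wrinkle the new lines guard against, assuming the `future` backport is installed: its newstr compares equal to native unicode but is a distinct class, so a bare type comparison would fail spuriously until both sides are converted:

from future.types.newstr import newstr  # `future`'s backported str type

a = newstr(u"x")
print(a == u"x")                   # True: logically the same string
print(a.__class__ == type(u"x"))   # False on Python 2: distinct classes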
@@ -6,29 +6,43 @@ Tools aimed at the interaction between tests and Eliot.
 # Can't use `builtins.str` because it's not JSON encodable:
 # `exceptions.TypeError: <class 'future.types.newstr.newstr'> is not JSON-encodeable`
 from past.builtins import unicode as str
-from future.utils import PY3
+from future.utils import PY2
+from six import ensure_text

 __all__ = [
     "RUN_TEST",
     "EliotLoggedRunTest",
     "eliot_logged_test",
 ]

+try:
+    from typing import Callable
+except ImportError:
+    pass
+
 from functools import (
-    wraps,
     partial,
+    wraps,
 )

 import attr

+from zope.interface import (
+    implementer,
+)
+
 from eliot import (
     ActionType,
     Field,
+    MemoryLogger,
+    ILogger,
 )
+from eliot.testing import (
+    swap_logger,
+    check_for_errors,
+)
-from eliot.testing import capture_logging

-from twisted.internet.defer import (
-    maybeDeferred,
+from twisted.python.monkey import (
+    MonkeyPatcher,
 )

 from ..util.jsonbytes import BytesJSONEncoder
@@ -48,92 +62,12 @@ RUN_TEST = ActionType(
 )


-def eliot_logged_test(f):
-    """
-    Decorate a test method to run in a dedicated Eliot action context.
-
-    The action will finish after the test is done (after the returned Deferred
-    fires, if a Deferred is returned).  It will note the name of the test
-    being run.
-
-    All messages emitted by the test will be validated.  They will still be
-    delivered to the global logger.
-    """
-    # A convenient, mutable container into which nested functions can write
-    # state to be shared among them.
-    class storage(object):
-        pass
-
-    # On Python 3, we want to use our custom JSON encoder when validating
-    # messages can be encoded to JSON:
-    if PY3:
-        capture = lambda f : capture_logging(None, encoder_=BytesJSONEncoder)(f)
-    else:
-        capture = lambda f : capture_logging(None)(f)
-
-    @wraps(f)
-    def run_and_republish(self, *a, **kw):
-        # Unfortunately the only way to get at the global/default logger...
-        # This import is delayed here so that we get the *current* default
-        # logger at the time the decorated function is run.
-        from eliot._output import _DEFAULT_LOGGER as default_logger
-
-        def republish():
-            # This is called as a cleanup function after capture_logging has
-            # restored the global/default logger to its original state.  We
-            # can now emit messages that go to whatever global destinations
-            # are installed.
-
-            # storage.logger.serialize() seems like it would make more sense
-            # than storage.logger.messages here.  However, serialize()
-            # explodes, seemingly as a result of double-serializing the logged
-            # messages.  I don't understand this.
-            for msg in storage.logger.messages:
-                default_logger.write(msg)
-
-            # And now that we've re-published all of the test's messages, we
-            # can finish the test's action.
-            storage.action.finish()
-
-        @capture
-        def run(self, logger):
-            # Record the MemoryLogger for later message extraction.
-            storage.logger = logger
-            # Give the test access to the logger as well.  It would be just
-            # fine to pass this as a keyword argument to `f` but implementing
-            # that now will give me conflict headaches so I'm not doing it.
-            self.eliot_logger = logger
-            return f(self, *a, **kw)
-
-        # Arrange for all messages written to the memory logger that
-        # `capture_logging` installs to be re-written to the global/default
-        # logger so they might end up in a log file somewhere, if someone
-        # wants.  This has to be done in a cleanup function (or later) because
-        # capture_logging restores the original logger in a cleanup function.
-        # We install our cleanup function here, before we call run, so that it
-        # runs *after* the cleanup function capture_logging installs (cleanup
-        # functions are a stack).
-        self.addCleanup(republish)
-
-        # Begin an action that should comprise all messages from the decorated
-        # test method.
-        with RUN_TEST(name=self.id()).context() as action:
-            # When the test method Deferred fires, the RUN_TEST action is
-            # done.  However, we won't have re-published the MemoryLogger
-            # messages into the global/default logger when this Deferred
-            # fires.  So we need to delay finishing the action until that has
-            # happened.  Record the action so we can do that.
-            storage.action = action
-
-            # Support both Deferred-returning and non-Deferred-returning
-            # tests.
-            d = maybeDeferred(run, self)
-
-            # Let the test runner do its thing.
-            return d
-
-    return run_and_republish
+# On Python 3, we want to use our custom JSON encoder when validating messages
+# can be encoded to JSON:
+if PY2:
+    _memory_logger = MemoryLogger
+else:
+    _memory_logger = lambda: MemoryLogger(encoder=BytesJSONEncoder)


 @attr.s
@@ -174,10 +108,91 @@ class EliotLoggedRunTest(object):
     def id(self):
         return self.case.id()

-    @eliot_logged_test
-    def run(self, result=None):
+    def run(self, result):
+        """
+        Run the test case in the context of a distinct Eliot action.
+
+        The action will finish after the test is done.  It will note the name of
+        the test being run.
+
+        All messages emitted by the test will be validated.  They will still be
+        delivered to the global logger.
+        """
+        # The idea here is to decorate the test method itself so that all of
+        # the extra logic happens at the point where test/application logic is
+        # expected to be.  This `run` method is more like test infrastructure
+        # and things do not go well when we add too much extra behavior here.
+        # For example, exceptions raised here often just kill the whole
+        # runner.
+        patcher = MonkeyPatcher()
+
+        # So, grab the test method.
+        name = self.case._testMethodName
+        original = getattr(self.case, name)
+        decorated = with_logging(ensure_text(self.case.id()), original)
+        patcher.addPatch(self.case, name, decorated)
+        try:
+            # Patch it in
+            patcher.patch()
+            # Then use the rest of the machinery to run it.
+            return self._run_tests_with_factory(
+                self.case,
+                self.handlers,
+                self.last_resort,
+            ).run(result)
+        finally:
+            # Clean up the patching for idempotency or something.
+            patcher.restore()
+
+
+def with_logging(
+        test_id,      # type: str
+        test_method,  # type: Callable
+):
+    """
+    Decorate a test method with additional log-related behaviors.
+
+    1. The test method will run in a distinct Eliot action.
+    2. Typed log messages will be validated.
+    3. Logged tracebacks will be added as errors.
+
+    :param test_id: The full identifier of the test being decorated.
+    :param test_method: The method itself.
+    """
+    @wraps(test_method)
+    def run_with_logging(*args, **kwargs):
+        validating_logger = _memory_logger()
+        original = swap_logger(None)
+        try:
+            swap_logger(_TwoLoggers(original, validating_logger))
+            with RUN_TEST(name=test_id):
+                try:
+                    return test_method(*args, **kwargs)
+                finally:
+                    check_for_errors(validating_logger)
+        finally:
+            swap_logger(original)
+    return run_with_logging
+
+
+@implementer(ILogger)
+class _TwoLoggers(object):
+    """
+    Log to two loggers.
+
+    A single logger can have multiple destinations so this isn't typically a
+    useful thing to do.  However, MemoryLogger has inline validation instead
+    of destinations.  That means this *is* useful to simultaneously write to
+    the normal places and validate all written log messages.
+    """
+    def __init__(self, a, b):
+        """
+        :param ILogger a: One logger
+        :param ILogger b: Another logger
+        """
+        self._a = a  # type: ILogger
+        self._b = b  # type: ILogger
+
+    def write(self, dictionary, serializer=None):
+        self._a.write(dictionary, serializer)
+        self._b.write(dictionary, serializer)
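The fan-out is what makes the validating logger practical: messages still reach the real destinations while the in-memory copy is checked for problems. A small demonstration of the validation that check_for_errors builds on, assuming only that eliot is installed:

from eliot import MemoryLogger, MessageType, fields

MSG = MessageType("demo:msg", fields(foo=int))

logger = MemoryLogger()
MSG(foo="not an int").write(logger)   # violates the declared int field
try:
    logger.validate()                 # what check_for_errors() triggers
except Exception as e:
    print("validation failed:", e)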
@@ -18,17 +18,25 @@ if PY2:
 from sys import stdout
 import logging

+from unittest import (
+    skip,
+)
+
 from fixtures import (
     TempDir,
 )
 from testtools import (
     TestCase,
 )
+from testtools import (
+    TestResult,
+)
 from testtools.matchers import (
     Is,
     IsInstance,
     MatchesStructure,
     Equals,
+    HasLength,
     AfterPreprocessing,
 )
 from testtools.twistedsupport import (
@@ -38,12 +46,16 @@ from testtools.twistedsupport import (

 from eliot import (
     Message,
     MessageType,
+    fields,
     FileDestination,
+    MemoryLogger,
 )
 from eliot.twisted import DeferredContext
 from eliot.testing import (
     capture_logging,
     assertHasAction,
+    swap_logger,
 )

 from twisted.internet.defer import (
@@ -173,6 +185,62 @@ class EliotLoggingTests(TestCase):
         ),
     )

+    def test_validation_failure(self):
+        """
+        If a test emits a log message that fails validation then an error is added
+        to the result.
+        """
+        # Make sure we preserve the original global Eliot state.
+        original = swap_logger(MemoryLogger())
+        self.addCleanup(lambda: swap_logger(original))
+
+        class ValidationFailureProbe(SyncTestCase):
+            def test_bad_message(self):
+                # This message does not validate because "Hello" is not an
+                # int.
+                MSG = MessageType("test:eliotutil", fields(foo=int))
+                MSG(foo="Hello").write()
+
+        result = TestResult()
+        case = ValidationFailureProbe("test_bad_message")
+        case.run(result)
+
+        self.assertThat(
+            result.errors,
+            HasLength(1),
+        )
+
+    def test_skip_cleans_up(self):
+        """
+        After a skipped test the global Eliot logging state is restored.
+        """
+        # Save the logger that's active before we do anything so that we can
+        # restore it later.  Also install another logger so we can compare it
+        # to the active logger later.
+        expected = MemoryLogger()
+        original = swap_logger(expected)
+
+        # Restore it, whatever else happens.
+        self.addCleanup(lambda: swap_logger(original))
+
+        class SkipProbe(SyncTestCase):
+            @skip("It's a skip test.")
+            def test_skipped(self):
+                pass
+
+        case = SkipProbe("test_skipped")
+        case.run()
+
+        # Retrieve the logger that's active now that the skipped test is done
+        # so we can check it against the expected value.
+        actual = swap_logger(MemoryLogger())
+        self.assertThat(
+            actual,
+            Is(expected),
+        )
+

 class LogCallDeferredTests(TestCase):
     """
     Tests for ``log_call_deferred``.
@@ -126,6 +126,42 @@ class HashUtilTests(unittest.TestCase):
             base32.a2b(b"2ckv3dfzh6rgjis6ogfqhyxnzy"),
         )

+    def test_convergence_hasher_tag(self):
+        """
+        ``_convergence_hasher_tag`` constructs the convergence hasher tag from a
+        unique prefix, the required, total, and segment size parameters, and a
+        convergence secret.
+        """
+        self.assertEqual(
+            b"allmydata_immutable_content_to_key_with_added_secret_v1+"
+            b"16:\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42,"
+            b"9:3,10,1024,",
+            hashutil._convergence_hasher_tag(
+                k=3,
+                n=10,
+                segsize=1024,
+                convergence=b"\x42" * 16,
+            ),
+        )
+
+    def test_convergence_hasher_out_of_bounds(self):
+        """
+        ``_convergence_hasher_tag`` raises ``ValueError`` if k or n is not between
+        1 and 256 inclusive or if k is greater than n.
+        """
+        segsize = 1024
+        secret = b"\x42" * 16
+        for bad_k in (0, 2, 257):
+            with self.assertRaises(ValueError):
+                hashutil._convergence_hasher_tag(
+                    k=bad_k, n=1, segsize=segsize, convergence=secret,
+                )
+        for bad_n in (0, 1, 257):
+            with self.assertRaises(ValueError):
+                hashutil._convergence_hasher_tag(
+                    k=2, n=bad_n, segsize=segsize, convergence=secret,
+                )
+
     def test_known_answers(self):
         """
         Verify backwards compatibility by comparing hash outputs for some
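The expected constant in test_convergence_hasher_tag decomposes into netstrings: a fixed prefix, the length-prefixed convergence secret, and the length-prefixed "k,n,segsize" parameters. A self-contained sketch reproducing it (netstring and the prefix constant are restated from the diff, not imported):

def netstring(s):
    # length-prefixed, comma-terminated framing, e.g. b"3:abc,"
    return b"%d:%s," % (len(s), s)

PREFIX = b"allmydata_immutable_content_to_key_with_added_secret_v1+"

def convergence_tag(k, n, segsize, secret):
    return PREFIX + netstring(secret) + netstring(b"%d,%d,%d" % (k, n, segsize))

tag = convergence_tag(k=3, n=10, segsize=1024, secret=b"\x42" * 16)
assert tag == (
    b"allmydata_immutable_content_to_key_with_added_secret_v1+"
    b"16:" + b"\x42" * 16 + b"," +
    b"9:3,10,1024,"
)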
@@ -491,12 +491,16 @@ class JSONBytes(unittest.TestCase):
     """Tests for BytesJSONEncoder."""

     def test_encode_bytes(self):
-        """BytesJSONEncoder can encode bytes."""
+        """BytesJSONEncoder can encode bytes.
+
+        Bytes are presumed to be UTF-8 encoded.
+        """
+        snowman = u"def\N{SNOWMAN}\uFF00"
         data = {
-            b"hello": [1, b"cd"],
+            b"hello": [1, b"cd", {b"abc": [123, snowman.encode("utf-8")]}],
         }
         expected = {
-            u"hello": [1, u"cd"],
+            u"hello": [1, u"cd", {u"abc": [123, snowman]}],
         }
         # Bytes get passed through as if they were UTF-8 Unicode:
         encoded = jsonbytes.dumps(data)
[File diff suppressed because it is too large]
@@ -196,5 +196,6 @@ PORTED_TEST_MODULES = [
     "allmydata.test.web.test_root",
     "allmydata.test.web.test_status",
     "allmydata.test.web.test_util",
+    "allmydata.test.web.test_web",
     "allmydata.test.web.test_webish",
 ]
@@ -176,10 +176,44 @@ def convergence_hash(k, n, segsize, data, convergence):
     return h.digest()


-def convergence_hasher(k, n, segsize, convergence):
+def _convergence_hasher_tag(k, n, segsize, convergence):
+    """
+    Create the convergence hashing tag.
+
+    :param int k: Required shares (in [1..256]).
+    :param int n: Total shares (in [1..256]).
+    :param int segsize: Maximum segment size.
+    :param bytes convergence: The convergence secret.
+
+    :return bytes: The bytestring to use as a tag in the convergence hash.
+    """
     assert isinstance(convergence, bytes)
+    if k > n:
+        raise ValueError(
+            "k > n not allowed; k = {}, n = {}".format(k, n),
+        )
+    if k < 1 or n < 1:
+        # It doesn't make sense to have zero shares.  Zero shares carry no
+        # information, cannot encode any part of the application data.
+        raise ValueError(
+            "k, n < 1 not allowed; k = {}, n = {}".format(k, n),
+        )
+    if k > 256 or n > 256:
+        # ZFEC supports encoding application data into a maximum of 256
+        # shares.  If we ignore the limitations of ZFEC, it may be fine to use
+        # a configuration with more shares than that and it may be fine to
+        # construct a convergence tag from such a configuration.  Since ZFEC
+        # is the only supported encoder, though, this is moot for now.
+        raise ValueError(
+            "k, n > 256 not allowed; k = {}, n = {}".format(k, n),
+        )
     param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
     tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
+    return tag
+
+
+def convergence_hasher(k, n, segsize, convergence):
+    tag = _convergence_hasher_tag(k, n, segsize, convergence)
     return tagged_hasher(tag, KEYLEN)
@@ -13,20 +13,34 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import json


+def _bytes_to_unicode(obj):
+    """Convert any bytes objects to unicode, recursively."""
+    if isinstance(obj, bytes):
+        return obj.decode("utf-8")
+    if isinstance(obj, dict):
+        new_obj = {}
+        for k, v in obj.items():
+            if isinstance(k, bytes):
+                k = k.decode("utf-8")
+            v = _bytes_to_unicode(v)
+            new_obj[k] = v
+        return new_obj
+    if isinstance(obj, (list, set, tuple)):
+        return [_bytes_to_unicode(i) for i in obj]
+    return obj
+
+
 class BytesJSONEncoder(json.JSONEncoder):
     """
     A JSON encoder that can also encode bytes.

     The bytes are assumed to be UTF-8 encoded Unicode strings.
     """
-    def default(self, o):
-        if isinstance(o, bytes):
-            return o.decode("utf-8")
-        return json.JSONEncoder.default(self, o)
+    def iterencode(self, o, **kwargs):
+        return json.JSONEncoder.iterencode(self, _bytes_to_unicode(o), **kwargs)


 def dumps(obj, *args, **kwargs):
|
@ -34,13 +48,6 @@ def dumps(obj, *args, **kwargs):
|
|||
|
||||
The bytes are assumed to be UTF-8 encoded Unicode strings.
|
||||
"""
|
||||
if isinstance(obj, dict):
|
||||
new_obj = {}
|
||||
for k, v in obj.items():
|
||||
if isinstance(k, bytes):
|
||||
k = k.decode("utf-8")
|
||||
new_obj[k] = v
|
||||
obj = new_obj
|
||||
return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs)
|
||||
|
||||
|
||||
|
|
|
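The dict special case could be deleted from dumps() because _bytes_to_unicode (previous hunk) now rewrites keys and values recursively before encoding; the iterencode override is needed at all because json.JSONEncoder.default() is never consulted for dict keys. A standalone sketch of the behavior, assuming bytes are UTF-8:

import json

def bytes_to_unicode(obj):
    # Recursively decode bytes (assumed UTF-8), including dict keys.
    if isinstance(obj, bytes):
        return obj.decode("utf-8")
    if isinstance(obj, dict):
        return {bytes_to_unicode(k): bytes_to_unicode(v) for k, v in obj.items()}
    if isinstance(obj, (list, set, tuple)):
        return [bytes_to_unicode(i) for i in obj]
    return obj

print(json.dumps(bytes_to_unicode({b"hello": [1, b"cd"]})))
# {"hello": [1, "cd"]}

try:
    json.dumps({b"hello": 1})   # bytes keys fail even with a default() hook
except TypeError as e:
    print(e)                    # keys must be str, int, float, bool or None, not bytes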
@@ -432,7 +432,7 @@ class DeepCheckResultsRenderer(MultiFormatResource):
            return CheckResultsRenderer(self._client,
                                        r.get_results_for_storage_index(si))
        except KeyError:
-            raise WebError("No detailed results for SI %s" % html.escape(name),
+            raise WebError("No detailed results for SI %s" % html.escape(str(name, "utf-8")),
                           http.NOT_FOUND)

    @render_exception
@@ -186,7 +186,7 @@ def convert_children_json(nodemaker, children_json):
    children = {}
    if children_json:
        data = json.loads(children_json)
-        for (namex, (ctype, propdict)) in data.iteritems():
+        for (namex, (ctype, propdict)) in data.items():
            namex = unicode(namex)
            writecap = to_bytes(propdict.get("rw_uri"))
            readcap = to_bytes(propdict.get("ro_uri"))
@@ -283,8 +283,8 @@ def render_time_attr(t):
# actual exception). The latter is growing increasingly annoying.

def should_create_intermediate_directories(req):
-    t = get_arg(req, "t", "").strip()
-    return bool(req.method in ("PUT", "POST") and
+    t = unicode(get_arg(req, "t", "").strip(), "ascii")
+    return bool(req.method in (b"PUT", b"POST") and
                t not in ("delete", "rename", "rename-form", "check"))

def humanize_exception(exc):
@@ -674,7 +674,7 @@ def url_for_string(req, url_string):
    and the given URL string.
    """
    url = DecodedURL.from_text(url_string.decode("utf-8"))
-    if url.host == b"":
+    if not url.host:
        root = req.URLPath()
        netloc = root.netloc.split(b":", 1)
        if len(netloc) == 1:
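The replaced comparison was a genuine Python 3 bug: DecodedURL.host is text, so testing it against b"" was always False and relative URLs never took this branch. A sketch, assuming hyperlink is installed:

from hyperlink import DecodedURL

url = DecodedURL.from_text(u"/just/a/path")
print(repr(url.host))    # u'' -- no host in a relative URL
print(url.host == b"")   # False on Python 3: text never equals bytes
print(not url.host)      # True -- the fixed check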
@@ -40,8 +40,12 @@ def get_arg(req, argname, default=None, multiple=False):
    results = []
    if argname in req.args:
        results.extend(req.args[argname])
-    if req.fields and argname in req.fields:
-        results.append(req.fields[argname].value)
+    argname_unicode = unicode(argname, "utf-8")
+    if req.fields and argname_unicode in req.fields:
+        value = req.fields[argname_unicode].value
+        if isinstance(value, unicode):
+            value = value.encode("utf-8")
+        results.append(value)
    if multiple:
        return tuple(results)
    if results:
@@ -79,7 +83,13 @@ class MultiFormatResource(resource.Resource, object):
        if isinstance(t, bytes):
            t = unicode(t, "ascii")
        renderer = self._get_renderer(t)
-        return renderer(req)
+        result = renderer(req)
+        # On Python 3, json.dumps() returns Unicode for example, but
+        # twisted.web expects bytes.  Instead of updating every single render
+        # method, just handle Unicode one time here.
+        if isinstance(result, unicode):
+            result = result.encode("utf-8")
+        return result

    def _get_renderer(self, fmt):
        """
@@ -1,3 +1,11 @@
+"""
+TODO: When porting to Python 3, the filename handling logic seems wrong.  On
+Python 3 filename will _already_ be correctly decoded.  So only decode if it's
+bytes.
+
+Also there's a lot of code duplication I think.
+"""
+
 from past.builtins import unicode

 from urllib.parse import quote as url_quote
@@ -135,7 +143,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
        nonterminal = not terminal  #len(req.postpath) > 0

-        t = get_arg(req, b"t", b"").strip()
+        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
@@ -150,10 +158,10 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        else:
            # terminal node
            terminal_requests = (
-                ("POST", "mkdir"),
-                ("PUT", "mkdir"),
-                ("POST", "mkdir-with-children"),
-                ("POST", "mkdir-immutable")
+                (b"POST", "mkdir"),
+                (b"PUT", "mkdir"),
+                (b"POST", "mkdir-with-children"),
+                (b"POST", "mkdir-immutable")
            )
            if (req.method, t) in terminal_requests:
                # final directory
@@ -182,8 +190,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            )
            return d
        leaf_requests = (
-            ("PUT",""),
-            ("PUT","uri"),
+            (b"PUT",""),
+            (b"PUT","uri"),
        )
        if (req.method, t) in leaf_requests:
            # we were trying to find the leaf filenode (to put a new
@@ -224,7 +232,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            si = self.node.get_storage_index()
-            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
+            if si and req.setETag(b'DIR:%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
                return b""

        if not t:
@@ -255,7 +263,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_PUT(self, req):
-        t = get_arg(req, b"t", b"").strip()
+        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
@@ -364,7 +372,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        return d

    def _POST_upload(self, req):
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
@@ -374,7 +382,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        if not name:
            # this prohibts empty, missing, and all-whitespace filenames
            raise WebError("upload requires a name")
-        assert isinstance(name, str)
-        name = name.decode(charset)
+        if isinstance(name, bytes):
+            name = name.decode(charset)
        if "/" in name:
            raise WebError("name= may not contain a slash", http.BAD_REQUEST)
@@ -413,7 +421,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        name = get_arg(req, "name")
        if not name:
            raise WebError("set-uri requires a name")
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        name = name.decode(charset)
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

@@ -436,8 +444,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            # a slightly confusing error message if someone does a POST
            # without a name= field. For our own HTML this isn't a big
            # deal, because we create the 'unlink' POST buttons ourselves.
-            name = ''
-        charset = get_arg(req, "_charset", "utf-8")
+            name = b''
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        name = name.decode(charset)
        d = self.node.delete(name)
        d.addCallback(lambda res: "thing unlinked")
@@ -453,7 +461,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        return self._POST_relink(req)

    def _POST_relink(self, req):
-        charset = get_arg(req, "_charset", "utf-8")
+        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
@@ -624,14 +632,14 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            # TODO test handling of bad JSON
            raise
        cs = {}
-        for name, (file_or_dir, mddict) in children.iteritems():
+        for name, (file_or_dir, mddict) in children.items():
            name = unicode(name)  # json returns str *or* unicode
            writecap = mddict.get('rw_uri')
            if writecap is not None:
-                writecap = str(writecap)
+                writecap = writecap.encode("utf-8")
            readcap = mddict.get('ro_uri')
            if readcap is not None:
-                readcap = str(readcap)
+                readcap = readcap.encode("utf-8")
            cs[name] = (writecap, readcap, mddict.get('metadata'))
        d = self.node.set_children(cs, replace)
        d.addCallback(lambda res: "Okay so I did it.")
@@ -1144,8 +1152,8 @@ def _slashify_path(path):
    in it
    """
    if not path:
-        return ""
-    return "/".join([p.encode("utf-8") for p in path])
+        return b""
+    return b"/".join([p.encode("utf-8") for p in path])


def _cap_to_link(root, path, cap):
@@ -1234,10 +1242,10 @@ class ManifestResults(MultiFormatResource, ReloadMixin):
        req.setHeader("content-type", "text/plain")
        lines = []
        is_finished = self.monitor.is_finished()
-        lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
+        lines.append(b"finished: " + {True: b"yes", False: b"no"}[is_finished])
        for path, cap in self.monitor.get_status()["manifest"]:
-            lines.append(_slashify_path(path) + " " + cap)
-        return "\n".join(lines) + "\n"
+            lines.append(_slashify_path(path) + b" " + cap)
+        return b"\n".join(lines) + b"\n"

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
@@ -1290,7 +1298,7 @@ class DeepSizeResults(MultiFormatResource):
                 + stats.get("size-mutable-files", 0)
                 + stats.get("size-directories", 0))
        output += "size: %d\n" % total
-        return output
+        return output.encode("utf-8")
    render_TEXT = render_HTML

    def render_JSON(self, req):
@@ -1315,7 +1323,7 @@ class DeepStatsResults(Resource, object):
        req.setHeader("content-type", "text/plain")
        s = self.monitor.get_status().copy()
        s["finished"] = self.monitor.is_finished()
-        return json.dumps(s, indent=1)
+        return json.dumps(s, indent=1).encode("utf-8")


@implementer(IPushProducer)
@@ -127,7 +127,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
                           http.NOT_IMPLEMENTED)
        if not t:
            return self.replace_me_with_a_child(req, self.client, replace)
-        if t == "uri":
+        if t == b"uri":
            return self.replace_me_with_a_childcap(req, self.client, replace)

        raise WebError("PUT to a file: bad t=%s" % t)
@@ -188,8 +188,8 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            # if the client already has the ETag then we can
            # short-circuit the whole process.
            si = self.node.get_storage_index()
-            if si and req.setETag('%s-%s' % (base32.b2a(si), t or "")):
-                return ""
+            if si and req.setETag(b'%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
+                return b""

        if not t:
            # just get the contents
@@ -281,7 +281,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            return self.replace_me_with_a_child(req, self.client, replace)

-        if t == "uri":
+        if t == b"uri":
            if not replace:
                raise ExistingChildError()
            assert self.parentnode and self.name
@@ -309,7 +309,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            d = self.replace_me_with_a_formpost(req, self.client, replace)
        else:
-            raise WebError("POST to file: bad t=%s" % t)
+            raise WebError("POST to file: bad t=%s" % unicode(t, "ascii"))

        return handle_when_done(req, d)
@@ -439,7 +439,7 @@ class FileDownloader(Resource, object):
            # bytes we were given in the URL.  See the comment in
            # FileNodeHandler.render_GET for the sad details.
            req.setHeader("content-disposition",
-                          'attachment; filename="%s"' % self.filename)
+                          b'attachment; filename="%s"' % self.filename)

        filesize = self.filenode.get_size()
        assert isinstance(filesize, (int,long)), filesize
@@ -475,8 +475,8 @@ class FileDownloader(Resource, object):
            size = contentsize

        req.setHeader("content-length", b"%d" % contentsize)
-        if req.method == "HEAD":
-            return ""
+        if req.method == b"HEAD":
+            return b""

        d = self.filenode.read(req, first, size)
@@ -1,5 +1,6 @@

-import os, urllib
+import os
+from urllib.parse import quote as urlquote

 from twisted.python.filepath import FilePath
 from twisted.web.template import tags as T, Element, renderElement, XMLFile, renderer
@@ -180,7 +181,7 @@ class MoreInfoElement(Element):
        else:
            return ""
        root = self.get_root(req)
-        quoted_uri = urllib.quote(node.get_uri())
+        quoted_uri = urlquote(node.get_uri())
        text_plain_url = "%s/file/%s/@@named=/raw.txt" % (root, quoted_uri)
        return T.li("Raw data as ", T.a("text/plain", href=text_plain_url))
@@ -196,7 +197,7 @@ class MoreInfoElement(Element):
    @renderer
    def check_form(self, req, tag):
        node = self.original
-        quoted_uri = urllib.quote(node.get_uri())
+        quoted_uri = urlquote(node.get_uri())
        target = self.get_root(req) + "/uri/" + quoted_uri
        if IDirectoryNode.providedBy(node):
            target += "/"
@@ -236,8 +237,8 @@ class MoreInfoElement(Element):
    def overwrite_form(self, req, tag):
        node = self.original
        root = self.get_root(req)
-        action = "%s/uri/%s" % (root, urllib.quote(node.get_uri()))
-        done_url = "%s/uri/%s?t=info" % (root, urllib.quote(node.get_uri()))
+        action = "%s/uri/%s" % (root, urlquote(node.get_uri()))
+        done_url = "%s/uri/%s?t=info" % (root, urlquote(node.get_uri()))
        overwrite = T.form(action=action, method="post",
                           enctype="multipart/form-data")(
            T.fieldset(
@@ -1,3 +1,4 @@
+from past.builtins import unicode

 import time
 from hyperlink import (
@@ -101,12 +102,12 @@ class OphandleTable(resource.Resource, service.Service):
    def getChild(self, name, req):
        ophandle = name
        if ophandle not in self.handles:
-            raise WebError("unknown/expired handle '%s'" % escape(ophandle),
+            raise WebError("unknown/expired handle '%s'" % escape(unicode(ophandle, "utf-8")),
                           NOT_FOUND)
        (monitor, renderer, when_added) = self.handles[ophandle]

        t = get_arg(req, "t", "status")
-        if t == "cancel" and req.method == "POST":
+        if t == b"cancel" and req.method == b"POST":
            monitor.cancel()
            # return the status anyways, but release the handle
            self._release_ophandle(ophandle)
@@ -151,7 +152,7 @@ class ReloadMixin(object):
    @renderer
    def refresh(self, req, tag):
        if self.monitor.is_finished():
-            return ""
+            return b""
        tag.attributes["http-equiv"] = "refresh"
        tag.attributes["content"] = str(self.REFRESH_TIME)
        return tag
@@ -1,4 +1,5 @@
 from future.utils import PY3
+from past.builtins import unicode

 import os
 import time
|
@ -97,7 +98,7 @@ class URIHandler(resource.Resource, object):
|
|||
either "PUT /uri" to create an unlinked file, or
|
||||
"PUT /uri?t=mkdir" to create an unlinked directory
|
||||
"""
|
||||
t = get_arg(req, "t", "").strip()
|
||||
t = unicode(get_arg(req, "t", "").strip(), "utf-8")
|
||||
if t == "":
|
||||
file_format = get_format(req, "CHK")
|
||||
mutable_type = get_mutable_type(file_format)
|
||||
|
@ -120,7 +121,7 @@ class URIHandler(resource.Resource, object):
|
|||
unlinked file or "POST /uri?t=mkdir" to create a
|
||||
new directory
|
||||
"""
|
||||
t = get_arg(req, "t", "").strip()
|
||||
t = unicode(get_arg(req, "t", "").strip(), "ascii")
|
||||
if t in ("", "upload"):
|
||||
file_format = get_format(req)
|
||||
mutable_type = get_mutable_type(file_format)
|
||||
|
@ -177,7 +178,7 @@ class FileHandler(resource.Resource, object):
|
|||
|
||||
@exception_to_child
|
||||
def getChild(self, name, req):
|
||||
if req.method not in ("GET", "HEAD"):
|
||||
if req.method not in (b"GET", b"HEAD"):
|
||||
raise WebError("/file can only be used with GET or HEAD")
|
||||
# 'name' must be a file URI
|
||||
try:
|
||||
|
@ -200,7 +201,7 @@ class IncidentReporter(MultiFormatResource):
|
|||
|
||||
@render_exception
|
||||
def render(self, req):
|
||||
if req.method != "POST":
|
||||
if req.method != b"POST":
|
||||
raise WebError("/report_incident can only be used with POST")
|
||||
|
||||
log.msg(format="User reports incident through web page: %(details)s",
|
||||
|
@ -255,11 +256,11 @@ class Root(MultiFormatResource):
|
|||
if not path:
|
||||
# Render "/" path.
|
||||
return self
|
||||
if path == "helper_status":
|
||||
if path == b"helper_status":
|
||||
# the Helper isn't attached until after the Tub starts, so this child
|
||||
# needs to created on each request
|
||||
return status.HelperStatus(self._client.helper)
|
||||
if path == "storage":
|
||||
if path == b"storage":
|
||||
# Storage isn't initialized until after the web hierarchy is
|
||||
# constructed so this child needs to be created later than
|
||||
# `__init__`.
|
||||
|
@@ -293,7 +294,7 @@ class Root(MultiFormatResource):
                self._describe_server(server)
                for server
                in broker.get_known_servers()
-        ))
+        ), key=lambda o: sorted(o.items()))

    def _describe_server(self, server):
@@ -284,7 +284,7 @@ def _find_overlap(events, start_key, end_key):
    rows = []
    for ev in events:
        ev = ev.copy()
-        if ev.has_key('server'):
+        if 'server' in ev:
            ev["serverid"] = ev["server"].get_longname()
            del ev["server"]
        # find an empty slot in the rows
@@ -362,8 +362,8 @@ def _find_overlap_requests(events):
def _color(server):
    h = hashlib.sha256(server.get_serverid()).digest()
    def m(c):
-        return min(ord(c) / 2 + 0x80, 0xff)
-    return "#%02x%02x%02x" % (m(h[0]), m(h[1]), m(h[2]))
+        return min(ord(c) // 2 + 0x80, 0xff)
+    return "#%02x%02x%02x" % (m(h[0:1]), m(h[1:2]), m(h[2:3]))

class _EventJson(Resource, object):
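Two Python 3 changes meet in _color: `/` became true division (hence `//`), and indexing bytes yields an int rather than a one-byte string, so the one-byte slices keep ord() working on both Pythons:

h = b"\xab\xcd\xef"
print(h[0])                                 # 171 -- an int on Python 3
print(h[0:1])                               # b'\xab' -- bytes on Python 2 and 3
print(min(ord(h[0:1]) // 2 + 0x80, 0xff))   # 213, the same on both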
@@ -426,7 +426,7 @@ class DownloadStatusPage(Resource, object):
        """
        super(DownloadStatusPage, self).__init__()
        self._download_status = download_status
-        self.putChild("event_json", _EventJson(self._download_status))
+        self.putChild(b"event_json", _EventJson(self._download_status))

    @render_exception
    def render_GET(self, req):
@@ -1288,14 +1288,14 @@ class Status(MultiFormatResource):
        # final URL segment will be an empty string.  Resources can
        # thus know if they were requested with or without a final
        # slash."
-        if not path and request.postpath != ['']:
+        if not path and request.postpath != [b'']:
            return self

        h = self.history
        try:
-            stype, count_s = path.split("-")
+            stype, count_s = path.split(b"-")
        except ValueError:
-            raise WebError("no '-' in '{}'".format(path))
+            raise WebError("no '-' in '{}'".format(unicode(path, "utf-8")))
        count = int(count_s)
+        stype = unicode(stype, "ascii")
        if stype == "up":
@@ -1,5 +1,6 @@
+from past.builtins import unicode

-import urllib
+from urllib.parse import quote as urlquote

 from twisted.web import http
 from twisted.internet import defer
@@ -65,8 +66,8 @@ def POSTUnlinkedCHK(req, client):
        # if when_done= is provided, return a redirect instead of our
        # usual upload-results page
        def _done(upload_results, redir_to):
-            if "%(uri)s" in redir_to:
-                redir_to = redir_to.replace("%(uri)s", urllib.quote(upload_results.get_uri()))
+            if b"%(uri)s" in redir_to:
+                redir_to = redir_to.replace(b"%(uri)s", urlquote(upload_results.get_uri()).encode("utf-8"))
            return url_for_string(req, redir_to)
        d.addCallback(_done, when_done)
    else:
@@ -118,8 +119,8 @@ class UploadResultsElement(status.UploadResultsRendererMixin):
    def download_link(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res:
-                      tags.a("/uri/" + res.get_uri(),
-                             href="/uri/" + urllib.quote(res.get_uri())))
+                      tags.a("/uri/" + unicode(res.get_uri(), "utf-8"),
+                             href="/uri/" + urlquote(unicode(res.get_uri(), "utf-8"))))
        return d
@@ -158,7 +159,7 @@ def POSTUnlinkedCreateDirectory(req, client):
    redirect = get_arg(req, "redirect_to_result", "false")
    if boolean_of_arg(redirect):
        def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
            req.setResponseCode(http.SEE_OTHER)  # 303
            req.setHeader('location', new_url)
            return ''
@@ -176,7 +177,7 @@ def POSTUnlinkedCreateDirectoryWithChildren(req, client):
    redirect = get_arg(req, "redirect_to_result", "false")
    if boolean_of_arg(redirect):
        def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
            req.setResponseCode(http.SEE_OTHER)  # 303
            req.setHeader('location', new_url)
            return ''
@@ -194,7 +195,7 @@ def POSTUnlinkedCreateImmutableDirectory(req, client):
    redirect = get_arg(req, "redirect_to_result", "false")
    if boolean_of_arg(redirect):
        def _then_redir(res):
-            new_url = "uri/" + urllib.quote(res.get_uri())
+            new_url = "uri/" + urlquote(res.get_uri())
            req.setResponseCode(http.SEE_OTHER)  # 303
            req.setHeader('location', new_url)
            return ''
@@ -44,6 +44,43 @@ from .web.storage_plugins import (
     StoragePlugins,
 )

+
+if PY2:
+    FileUploadFieldStorage = FieldStorage
+else:
+    class FileUploadFieldStorage(FieldStorage):
+        """
+        Do terrible things to ensure files are still bytes.
+
+        On Python 2, uploaded files were always bytes.  On Python 3, there's a
+        heuristic: if the filename is set on a field, it's assumed to be a file
+        upload and therefore bytes.  If no filename is set, it's Unicode.
+
+        Unfortunately, we always want it to be bytes, and Tahoe-LAFS also
+        enables setting the filename not via the MIME filename, but via a
+        separate field called "name".
+
+        Thus we need to do this ridiculous workaround.  Mypy doesn't like it
+        either, thus the ``# type: ignore`` below.
+
+        Source for idea:
+        https://mail.python.org/pipermail/python-dev/2017-February/147402.html
+        """
+        @property  # type: ignore
+        def filename(self):
+            if self.name == "file" and not self._mime_filename:
+                # We use the file field to upload files, see directory.py's
+                # _POST_upload.  Lack of _mime_filename means we need to trick
+                # FieldStorage into thinking there is a filename so it'll
+                # return bytes.
+                return "unknown-filename"
+            return self._mime_filename
+
+        @filename.setter
+        def filename(self, value):
+            self._mime_filename = value
+
+
 class TahoeLAFSRequest(Request, object):
     """
     ``TahoeLAFSRequest`` adds several features to a Twisted Web ``Request``
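The heuristic this workaround fights can be seen with cgi.FieldStorage directly (on Python 3, before the cgi module's removal in 3.13): a part carrying a MIME filename decodes to bytes, a part without one decodes to str. A minimal demonstration with a hand-built multipart body:

import io
from cgi import FieldStorage

body = (
    b"--xyz\r\n"
    b'Content-Disposition: form-data; name="file"; filename="a.txt"\r\n'
    b"\r\n"
    b"file bytes here\r\n"
    b"--xyz\r\n"
    b'Content-Disposition: form-data; name="name"\r\n'
    b"\r\n"
    b"a.txt\r\n"
    b"--xyz--\r\n"
)
environ = {
    "REQUEST_METHOD": "POST",
    "CONTENT_TYPE": "multipart/form-data; boundary=xyz",
    "CONTENT_LENGTH": str(len(body)),
}
fields = FieldStorage(fp=io.BytesIO(body), environ=environ)
print(type(fields["file"].value))  # <class 'bytes'> -- filename present
print(type(fields["name"].value))  # <class 'str'>  -- no filename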
@@ -94,7 +131,8 @@ class TahoeLAFSRequest(Request, object):
            headers['content-length'] = str(self.content.tell())
            self.content.seek(0)

-            self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'})
+            self.fields = FileUploadFieldStorage(
+                self.content, headers, environ={'REQUEST_METHOD': 'POST'})
            self.content.seek(0)

        self._tahoeLAFSSecurityPolicy()
@@ -211,7 +249,7 @@ class WebishServer(service.MultiService):
            # use to test ophandle expiration.
            self._operations = OphandleTable(clock)
            self._operations.setServiceParent(self)
-            self.root.putChild("operations", self._operations)
+            self.root.putChild(b"operations", self._operations)

        self.root.putChild(b"storage-plugins", StoragePlugins(client))
@@ -220,7 +258,7 @@ class WebishServer(service.MultiService):
        self.site = TahoeLAFSSite(tempdir, self.root)
        self.staticdir = staticdir  # so tests can check
        if staticdir:
-            self.root.putChild("static", static.File(staticdir))
+            self.root.putChild(b"static", static.File(staticdir))
        if re.search(r'^\d', webport):
            webport = "tcp:"+webport  # twisted warns about bare "0" or "3456"
            # strports must be native strings.