Merge remote-tracking branch 'origin/master' into 3714-cli-testing-coverage
commit 1fa6ce97e1
@@ -33,7 +33,6 @@ jobs:
             python-version: 2.7

     steps:

       # See https://github.com/actions/checkout. A fetch-depth of 0
       # fetches all tags and branches.
       - name: Check out Tahoe-LAFS sources

@@ -182,6 +181,9 @@ jobs:
           - windows-latest
         python-version:
           - 2.7
+        include:
+          - os: ubuntu-latest
+            python-version: 3.6

     steps:

@@ -239,9 +241,14 @@ jobs:
       - name: Display tool versions
         run: python misc/build_helpers/show-tool-versions.py

-      - name: Run "tox -e integration"
+      - name: Run "Python 2 integration tests"
+        if: ${{ matrix.python-version == '2.7' }}
         run: tox -e integration

+      - name: Run "Python 3 integration tests"
+        if: ${{ matrix.python-version != '2.7' }}
+        run: tox -e integration3
+
       - name: Upload eliot.log in case of failure
         uses: actions/upload-artifact@v1
         if: failure()
@@ -1,5 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 import shutil
 from time import sleep

@@ -28,7 +38,7 @@ from twisted.internet.error import (
 import pytest
 import pytest_twisted

-from util import (
+from .util import (
     _CollectOutputProtocol,
     _MagicTextProtocol,
     _DumpOutputProtocol,
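Every integration module touched by this merge gains the same compatibility preamble shown above. A minimal sketch of why it is structured this way (the specific builtins imported here are illustrative, the real files pull in the full list):

from __future__ import division, print_function, unicode_literals

from future.utils import PY2
if PY2:
    # On Python 2 only, rebind builtins to their python-future equivalents so
    # the module behaves the same under both interpreters; on Python 3 the
    # native builtins are already correct and this import is skipped.
    from future.builtins import open, str, dict, range  # noqa: F401

# With the preamble in place, division and printing behave identically on
# Python 2.7 and Python 3.
assert 3 / 2 == 1.5
print("integration preamble loaded")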
@@ -5,6 +5,15 @@
 # You can safely skip any of these tests, it'll just appear to "take
 # longer" to start the first test as the fixtures get built

+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+

 def test_create_flogger(flog_gatherer):
     print("Created flog_gatherer")
@@ -1,9 +1,21 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 from os.path import join

 from twisted.internet.error import ProcessTerminated

-import util
+from . import util

 import pytest_twisted

@@ -42,4 +54,4 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto
     assert isinstance(e, ProcessTerminated)

     output = proto.output.getvalue()
-    assert "shares could be placed on only" in output
+    assert b"shares could be placed on only" in output
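The collector behind `proto.output` now accumulates raw bytes (see the BytesIO change in the test harness further down), so substring checks must use bytes literals; a native-string pattern would fail on Python 3. A small illustration with a made-up output value:

# Hypothetical captured output; on Python 3 child-process output is bytes.
output = b"shares could be placed on only 2 server(s)"

assert b"shares could be placed on only" in output
try:
    "shares could be placed on only" in output   # str pattern against bytes
except TypeError:
    pass                                         # raises TypeError on Python 3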
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import (
     print_function,
     unicode_literals,

@@ -5,12 +8,18 @@ from __future__ import (
     division,
 )

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from six import ensure_text
+
 import json

 from os.path import (
     join,
 )
-from urlparse import (
+from urllib.parse import (
     urlsplit,
 )

@@ -68,7 +77,7 @@ def _connect_client(reactor, api_auth_token, ws_url):
     factory = WebSocketClientFactory(
         url=ws_url,
         headers={
-            "Authorization": "{} {}".format(SCHEME, api_auth_token),
+            "Authorization": "{} {}".format(str(SCHEME, "ascii"), api_auth_token),
         }
     )
     factory.protocol = _StreamingLogClientProtocol

@@ -127,7 +136,7 @@ def _test_streaming_logs(reactor, temp_dir, alice):
     node_url = cfg.get_config_from_file("node.url")
     api_auth_token = cfg.get_private_config("api_auth_token")

-    ws_url = node_url.replace("http://", "ws://")
+    ws_url = ensure_text(node_url).replace("http://", "ws://")
     log_url = ws_url + "private/logs/v1"

     print("Connecting to {}".format(log_url))
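`node.url` is read straight from a file and may come back as bytes, so `six.ensure_text` (which decodes UTF-8 by default and passes text through unchanged) normalises it before the string replacement. The URL below is illustrative only:

from six import ensure_text

node_url = b"http://127.0.0.1:3456/"          # hypothetical node.url contents
ws_url = ensure_text(node_url).replace("http://", "ws://")
assert ws_url == "ws://127.0.0.1:3456/"
assert ensure_text("already text") == "already text"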
@@ -1,12 +1,22 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 from os.path import join

 import pytest
 import pytest_twisted

-import util
+from . import util

 from twisted.python.filepath import (
     FilePath,

@@ -55,7 +65,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
     cap = proto.output.getvalue().strip().split()[-1]
     print("TEH CAP!", cap)

-    proto = util._CollectOutputProtocol()
+    proto = util._CollectOutputProtocol(capture_stderr=False)
     reactor.spawnProcess(
         proto,
         sys.executable,

@@ -68,7 +78,7 @@ def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_ne
     yield proto.done

     dave_got = proto.output.getvalue().strip()
-    assert dave_got == open(gold_path, 'r').read().strip()
+    assert dave_got == open(gold_path, 'rb').read().strip()


 @pytest_twisted.inlineCallbacks

@@ -100,7 +110,7 @@ def _create_anonymous_node(reactor, name, control_port, request, temp_dir, flog_
     # Which services should this client connect to?
     write_introducer(node_dir, "default", introducer_furl)
     with node_dir.child('tahoe.cfg').open('w') as f:
-        f.write('''
+        node_config = '''
 [node]
 nickname = %(name)s
 web.port = %(web_port)s

@@ -125,7 +135,9 @@ shares.total = 2
             'log_furl': flog_gatherer,
             'control_port': control_port,
             'local_port': control_port + 1000,
-        })
+        }
+        node_config = node_config.encode("utf-8")
+        f.write(node_config)

     print("running")
     yield util._run_node(reactor, node_dir.path, request, None)
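Twisted's FilePath.open() opens the file in binary mode, so on Python 3 the interpolated configuration text has to be encoded before it is written; the write also moves to after the interpolation rather than acting on the raw template. A minimal standalone sketch of the pattern (values and path are made up):

node_config = '''
[node]
nickname = %(name)s
web.port = %(web_port)s
''' % {
    'name': 'carol',                              # hypothetical node name
    'web_port': 'tcp:9980:interface=localhost',   # hypothetical port spec
}
node_config = node_config.encode("utf-8")
with open('/tmp/example-tahoe.cfg', 'wb') as f:   # binary handle expects bytes
    f.write(node_config)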
@@ -7,17 +7,26 @@ Most of the tests have cursory asserts and encode 'what the WebAPI did
 at the time of testing' -- not necessarily a cohesive idea of what the
 WebAPI *should* do in every situation. It's not clear the latter
 exists anywhere, however.

+Ported to Python 3.
 """

-from past.builtins import unicode
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import time
-import json
-import urllib2
+from urllib.parse import unquote as url_unquote, quote as url_quote

 import allmydata.uri
+from allmydata.util import jsonbytes as json

-import util
+from . import util

 import requests
 import html5lib
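`urllib2` only exists on Python 2; its `quote`/`unquote` helpers live in `urllib.parse` on Python 3, and JSON serialisation now goes through `allmydata.util.jsonbytes`, which tolerates bytes. A quick check of the renamed helpers (the capability string is a placeholder):

from urllib.parse import quote as url_quote, unquote as url_unquote

cap = "URI:DIR2:example-writekey:example-fingerprint"   # placeholder value
encoded = url_quote(cap, safe="")
assert "%3A" in encoded                  # ':' is percent-encoded
assert url_unquote(encoded) == cap       # round-trips back to the original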
@@ -66,7 +75,7 @@ def test_upload_download(alice):
             u"filename": u"boom",
         }
     )
-    assert data == FILE_CONTENTS
+    assert str(data, "utf-8") == FILE_CONTENTS


 def test_put(alice):

@@ -97,7 +106,7 @@ def test_helper_status(storage_nodes):
     resp = requests.get(url)
     assert resp.status_code >= 200 and resp.status_code < 300
     dom = BeautifulSoup(resp.content, "html5lib")
-    assert unicode(dom.h1.string) == u"Helper Status"
+    assert str(dom.h1.string) == u"Helper Status"


 def test_deep_stats(alice):

@@ -117,10 +126,10 @@ def test_deep_stats(alice):

     # when creating a directory, we'll be re-directed to a URL
     # containing our writecap..
-    uri = urllib2.unquote(resp.url)
+    uri = url_unquote(resp.url)
     assert 'URI:DIR2:' in uri
     dircap = uri[uri.find("URI:DIR2:"):].rstrip('/')
-    dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(urllib2.quote(dircap)))
+    dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(url_quote(dircap)))

     # POST a file into this directory
     FILE_CONTENTS = u"a file in a directory"

@@ -147,7 +156,7 @@ def test_deep_stats(alice):
     k, data = d
     assert k == u"dirnode"
     assert len(data['children']) == 1
-    k, child = data['children'].values()[0]
+    k, child = list(data['children'].values())[0]
     assert k == u"filenode"
     assert child['size'] == len(FILE_CONTENTS)

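On Python 3 `dict.values()` returns a view object, which cannot be indexed, so the values are materialised into a list first. Illustration with a toy children mapping:

children = {u"foo.txt": (u"filenode", {u"size": 21})}   # toy stand-in

# Python 2: children.values()[0] worked because values() returned a list.
# Python 3: values() is a view, so wrap it in list(...) before indexing.
k, child = list(children.values())[0]
assert k == u"filenode"
assert child[u"size"] == 21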
@@ -198,11 +207,11 @@ def test_status(alice):

     print("Uploaded data, cap={}".format(cap))
     resp = requests.get(
-        util.node_url(alice.node_dir, u"uri/{}".format(urllib2.quote(cap))),
+        util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap))),
     )

     print("Downloaded {} bytes of data".format(len(resp.content)))
-    assert resp.content == FILE_CONTENTS
+    assert str(resp.content, "ascii") == FILE_CONTENTS

     resp = requests.get(
         util.node_url(alice.node_dir, "status"),

@@ -221,12 +230,12 @@ def test_status(alice):
             continue
         resp = requests.get(util.node_url(alice.node_dir, href))
         if href.startswith(u"/status/up"):
-            assert "File Upload Status" in resp.content
-            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
+            assert b"File Upload Status" in resp.content
+            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                 found_upload = True
         elif href.startswith(u"/status/down"):
-            assert "File Download Status" in resp.content
-            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
+            assert b"File Download Status" in resp.content
+            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                 found_download = True

                 # download the specialized event information

@@ -299,7 +308,7 @@ def test_directory_deep_check(alice):
     print("Uploaded data1, cap={}".format(cap1))

     resp = requests.get(
-        util.node_url(alice.node_dir, u"uri/{}".format(urllib2.quote(cap0))),
+        util.node_url(alice.node_dir, u"uri/{}".format(url_quote(cap0))),
         params={u"t": u"info"},
     )

@@ -400,9 +409,9 @@ def test_directory_deep_check(alice):
     for _ in range(5):
         resp = requests.get(deepcheck_uri)
         dom = BeautifulSoup(resp.content, "html5lib")
-        if dom.h1 and u'Results' in unicode(dom.h1.string):
+        if dom.h1 and u'Results' in str(dom.h1.string):
             break
-        if dom.h2 and dom.h2.a and u"Reload" in unicode(dom.h2.a.string):
+        if dom.h2 and dom.h2.a and u"Reload" in str(dom.h2.a.string):
             dom = None
         time.sleep(1)
     assert dom is not None, "Operation never completed"

@@ -440,7 +449,7 @@ def test_introducer_info(introducer):
     resp = requests.get(
         util.node_url(introducer.node_dir, u""),
     )
-    assert "Introducer" in resp.content
+    assert b"Introducer" in resp.content

     resp = requests.get(
         util.node_url(introducer.node_dir, u""),

@@ -513,6 +522,6 @@ def test_mkdir_with_children(alice):
         params={u"t": "mkdir-with-children"},
         data=json.dumps(meta),
     )
-    assert resp.startswith("URI:DIR2")
+    assert resp.startswith(b"URI:DIR2")
     cap = allmydata.uri.from_string(resp)
     assert isinstance(cap, allmydata.uri.DirectoryURI)
@@ -1,11 +1,21 @@
-from past.builtins import unicode
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import sys
 import time
 import json
 from os import mkdir, environ
 from os.path import exists, join
-from six.moves import StringIO
+from io import StringIO, BytesIO
 from functools import partial
 from subprocess import check_output

@@ -57,9 +67,10 @@ class _CollectOutputProtocol(ProcessProtocol):
     self.output, and callback's on done with all of it after the
     process exits (for any reason).
     """
-    def __init__(self):
+    def __init__(self, capture_stderr=True):
         self.done = Deferred()
-        self.output = StringIO()
+        self.output = BytesIO()
+        self.capture_stderr = capture_stderr

     def processEnded(self, reason):
         if not self.done.called:

@@ -73,8 +84,9 @@ class _CollectOutputProtocol(ProcessProtocol):
         self.output.write(data)

     def errReceived(self, data):
-        print("ERR: {}".format(data))
-        self.output.write(data)
+        print("ERR: {!r}".format(data))
+        if self.capture_stderr:
+            self.output.write(data)


 class _DumpOutputProtocol(ProcessProtocol):

@@ -94,9 +106,11 @@ class _DumpOutputProtocol(ProcessProtocol):
             self.done.errback(reason)

     def outReceived(self, data):
+        data = str(data, sys.stdout.encoding)
         self._out.write(data)

     def errReceived(self, data):
+        data = str(data, sys.stdout.encoding)
         self._out.write(data)


@@ -116,6 +130,7 @@ class _MagicTextProtocol(ProcessProtocol):
         self.exited.callback(None)

     def outReceived(self, data):
+        data = str(data, sys.stdout.encoding)
         sys.stdout.write(data)
         self._output.write(data)
         if not self.magic_seen.called and self._magic_text in self._output.getvalue():

@@ -123,6 +138,7 @@ class _MagicTextProtocol(ProcessProtocol):
             self.magic_seen.callback(self)

     def errReceived(self, data):
+        data = str(data, sys.stderr.encoding)
         sys.stdout.write(data)

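The harness now handles child-process output in two forms: `_CollectOutputProtocol` accumulates raw bytes (and can be told to ignore stderr), while the protocols that echo to the terminal decode with the terminal's encoding first. A self-contained sketch of the bytes-collecting pattern, independent of the project's helpers:

from io import BytesIO

from twisted.internet.defer import Deferred
from twisted.internet.protocol import ProcessProtocol


class CollectBytes(ProcessProtocol):
    """
    Illustrative stand-in (not the project's helper): gather a child
    process's output as bytes and fire a Deferred with it on exit.
    """
    def __init__(self, capture_stderr=True):
        self.done = Deferred()
        self.output = BytesIO()
        self.capture_stderr = capture_stderr

    def outReceived(self, data):
        self.output.write(data)        # data arrives as bytes

    def errReceived(self, data):
        if self.capture_stderr:        # optionally drop noisy stderr
            self.output.write(data)

    def processEnded(self, reason):
        if not self.done.called:
            self.done.callback(self.output.getvalue())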
@@ -263,9 +279,9 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
             '--hostname', 'localhost',
             '--listen', 'tcp',
             '--webport', web_port,
-            '--shares-needed', unicode(needed),
-            '--shares-happy', unicode(happy),
-            '--shares-total', unicode(total),
+            '--shares-needed', str(needed),
+            '--shares-happy', str(happy),
+            '--shares-total', str(total),
             '--helper',
         ]
         if not storage:

@@ -282,7 +298,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
                 config,
                 u'node',
                 u'log_gatherer.furl',
-                flog_gatherer.decode("utf-8"),
+                flog_gatherer,
             )
             write_config(FilePath(config_path), config)
         created_d.addCallback(created)

@@ -528,7 +544,8 @@ def generate_ssh_key(path):
     key = RSAKey.generate(2048)
     key.write_private_key_file(path)
     with open(path + ".pub", "wb") as f:
-        f.write(b"%s %s" % (key.get_name(), key.get_base64()))
+        s = "%s %s" % (key.get_name(), key.get_base64())
+        f.write(s.encode("ascii"))


 def run_in_thread(f):
@@ -928,7 +928,8 @@ class _Client(node.Node, pollmixin.PollMixin):
         random data in "api_auth_token" which must be echoed to API
         calls.
         """
-        return self.config.get_private_config('api_auth_token')
+        return self.config.get_private_config(
+            'api_auth_token').encode("ascii")

     def _create_auth_token(self):
         """
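The accessor now returns bytes rather than text, and the unit-test change further down (`assertEqual(b"deadbeef", token)`) matches. A trivial round-trip check with a made-up token value:

stored = "deadbeef"                    # hypothetical private-config contents
token = stored.encode("ascii")         # what get_auth_token() now hands back
assert token == b"deadbeef"
assert token.decode("ascii") == stored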
@@ -80,5 +80,5 @@ class AccountFileChecker(object):
             return defer.fail(error.UnauthorizedLogin())

         d = defer.maybeDeferred(creds.checkPassword, correct)
-        d.addCallback(self._cbPasswordMatch, str(creds.username))
+        d.addCallback(self._cbPasswordMatch, creds.username)
         return d

@@ -1011,8 +1011,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=username)
         if noisy: self.log(".__init__(%r, %r, %r)" % (client, rootnode, username), level=NOISY)

-        self.channelLookup["session"] = session.SSHSession
-        self.subsystemLookup["sftp"] = FileTransferServer
+        self.channelLookup[b"session"] = session.SSHSession
+        self.subsystemLookup[b"sftp"] = FileTransferServer

         self._client = client
         self._root = rootnode
@@ -6,7 +6,7 @@ from __future__ import unicode_literals
 from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-import warnings
 import os, sys
 from six.moves import StringIO
 import six

@@ -183,10 +183,12 @@ def _maybe_enable_eliot_logging(options, reactor):
     # Pass on the options so we can dispatch the subcommand.
     return options

+PYTHON_3_WARNING = ("Support for Python 3 is an incomplete work-in-progress."
+                    " Use at your own risk.")
+
 def run():
     if six.PY3:
-        warnings.warn("Support for Python 3 is an incomplete work-in-progress."
-                      " Use at your own risk.")
+        print(PYTHON_3_WARNING, file=sys.stderr)

     if sys.platform == "win32":
         from allmydata.windows.fixups import initialize
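Moving the text into a module-level constant and printing it to stderr (instead of warnings.warn) gives the system tests something exact to match: they run the CLI in a subprocess and, as the later hunks show, now accept either empty stderr or exactly this warning. A small illustration of that check:

PYTHON_3_WARNING = ("Support for Python 3 is an incomplete work-in-progress."
                    " Use at your own risk.")

# Hypothetical stderr captured from a child `tahoe` process on Python 3.
err = (PYTHON_3_WARNING + "\n").encode("ascii")
assert err.strip() in (b"", PYTHON_3_WARNING.encode("ascii"))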
@@ -65,7 +65,7 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
         avatarId = self.checker.requestAvatarId(key_credentials)
         return self.assertFailure(avatarId, error.UnauthorizedLogin)

-    def test_password_auth_user(self):
+    def test_password_auth_user_with_ssh_key(self):
         """
         AccountFileChecker.requestAvatarId returns a Deferred that fires with
         UnauthorizedLogin if called with an SSHPrivateKey object for a username

@@ -76,6 +76,29 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
         avatarId = self.checker.requestAvatarId(key_credentials)
         return self.assertFailure(avatarId, error.UnauthorizedLogin)

+    def test_password_auth_user_with_correct_password(self):
+        """
+        AccountFileChecker.requestAvatarId returns a Deferred that fires with
+        the user if the correct password is given.
+        """
+        key_credentials = credentials.UsernamePassword(b"alice", b"password")
+        d = self.checker.requestAvatarId(key_credentials)
+        def authenticated(avatarId):
+            self.assertEqual(
+                (b"alice",
+                 b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"),
+                (avatarId.username, avatarId.rootcap))
+        return d
+
+    def test_password_auth_user_with_wrong_password(self):
+        """
+        AccountFileChecker.requestAvatarId returns a Deferred that fires with
+        UnauthorizedLogin if the wrong password is given.
+        """
+        key_credentials = credentials.UsernamePassword(b"alice", b"WRONG")
+        avatarId = self.checker.requestAvatarId(key_credentials)
+        return self.assertFailure(avatarId, error.UnauthorizedLogin)
+
     def test_unrecognized_key(self):
         """
         AccountFileChecker.requestAvatarId returns a Deferred that fires with
@@ -415,7 +415,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
             f.write("deadbeef")

         token = c.get_auth_token()
-        self.assertEqual("deadbeef", token)
+        self.assertEqual(b"deadbeef", token)

     @defer.inlineCallbacks
     def test_web_staticdir(self):

@@ -43,6 +43,7 @@ from allmydata.monitor import Monitor
 from allmydata.mutable.common import NotWriteableError
 from allmydata.mutable import layout as mutable_layout
 from allmydata.mutable.publish import MutableData
+from allmydata.scripts.runner import PYTHON_3_WARNING

 from foolscap.api import DeadReferenceError, fireEventually, flushEventualQueue
 from twisted.python.failure import Failure

@@ -2635,7 +2636,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             out, err, rc_or_sig = res
             self.failUnlessEqual(rc_or_sig, 0, str(res))
             if check_stderr:
-                self.failUnlessEqual(err, b"")
+                self.assertIn(err.strip(), (b"", PYTHON_3_WARNING.encode("ascii")))

         d.addCallback(_run_in_subprocess, "create-alias", "newalias")
         d.addCallback(_check_succeeded)

@@ -2655,7 +2656,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         def _check_ls(res):
             out, err, rc_or_sig = res
             self.failUnlessEqual(rc_or_sig, 0, str(res))
-            self.failUnlessEqual(err, b"", str(res))
+            self.assertIn(err.strip(), (b"", PYTHON_3_WARNING.encode("ascii")))
             self.failUnlessIn(b"tahoe-moved", out)
             self.failIfIn(b"tahoe-file", out)
         d.addCallback(_check_ls)
@@ -14,6 +14,7 @@ import os
 from twisted.trial import unittest
 from twisted.internet import defer, error
 from six.moves import StringIO
+from six import ensure_str
 import mock
 from ..util import tor_provider
 from ..scripts import create_node, runner

@@ -185,7 +186,8 @@ class CreateOnion(unittest.TestCase):
                                                          protocol)))
         txtorcon = mock.Mock()
         ehs = mock.Mock()
-        ehs.private_key = b"privkey"
+        # This appears to be a native string in the real txtorcon object...
+        ehs.private_key = ensure_str("privkey")
         ehs.hostname = "ONION.onion"
         txtorcon.EphemeralHiddenService = mock.Mock(return_value=ehs)
         ehs.add_to_tor = mock.Mock(return_value=defer.succeed(None))
@@ -218,7 +218,7 @@ class FakeDisplayableServer(StubServer):  # type: ignore # tahoe-lafs/ticket/35
         return self.connected
     def get_version(self):
         return {
-            "application-version": "1.0"
+            b"application-version": b"1.0"
         }
     def get_permutation_seed(self):
         return b""

@@ -16,6 +16,21 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

+PORTED_INTEGRATION_TESTS = [
+    "integration.test_aaa_aardvark",
+    "integration.test_servers_of_happiness",
+    "integration.test_sftp",
+    "integration.test_streaming_logs",
+    "integration.test_tor",
+    "integration.test_web",
+]
+
+PORTED_INTEGRATION_MODULES = [
+    "integration",
+    "integration.conftest",
+    "integration.util",
+]
+
 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
     "allmydata",
@@ -211,6 +211,8 @@ def create_config(reactor, cli_config):
                                        "tor_onion.privkey")
         privkeyfile = os.path.join(private_dir, "tor_onion.privkey")
         with open(privkeyfile, "wb") as f:
+            if isinstance(privkey, str):
+                privkey = privkey.encode("ascii")
             f.write(privkey)

     # tahoe_config_tor: this is a dictionary of keys/values to add to the

@@ -318,7 +318,7 @@ class Root(MultiFormatResource):
         }
         version = server.get_version()
         if version is not None:
-            description[u"version"] = version["application-version"]
+            description[u"version"] = version[b"application-version"]

         return description


@@ -1173,7 +1173,8 @@ class MapupdateStatusElement(Element):
     def privkey_from(self, req, tag):
         server = self._update_status.get_privkey_from()
         if server:
-            return tag(tags.li("Got privkey from: [%s]" % server.get_name()))
+            return tag(tags.li("Got privkey from: [%s]" % str(
+                server.get_name(), "utf-8")))
         else:
             return tag

tox.ini
@@ -18,7 +18,7 @@ python =
     twisted = 1

 [tox]
-envlist = typechecks,codechecks,codechecks3,py{27,36,37,38,39}-{coverage},pypy27,pypy3
+envlist = typechecks,codechecks,codechecks3,py{27,36,37,38,39}-{coverage},pypy27,pypy3,integration,integration3
 minversion = 2.4

 [testenv]

@@ -97,6 +97,18 @@ commands =
     coverage report


+[testenv:integration3]
+basepython = python3
+setenv =
+    COVERAGE_PROCESS_START=.coveragerc
+commands =
+    python --version
+    # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
+    python3 -b -m pytest --timeout=1800 --coverage -v {posargs:integration}
+    coverage combine
+    coverage report
+
+
 [testenv:codechecks]
 basepython = python2.7
 # On macOS, git inside of towncrier needs $HOME.

@@ -269,6 +281,8 @@ deps =
     # PyInstaller 4.0 drops Python 2 support. When we finish porting to
     # Python 3 we can reconsider this constraint.
     pyinstaller < 4.0
+    # 2021.5.13 broke on Windows. See https://github.com/erocarrera/pefile/issues/318
+    pefile < 2021.5.13 ; platform_system == "Windows"
 # Setting PYTHONHASHSEED to a known value assists with reproducible builds.
 # See https://pyinstaller.readthedocs.io/en/stable/advanced-topics.html#creating-a-reproducible-build
 setenv=PYTHONHASHSEED=1