WIP

commit 7a5900aaaf
parent ccb06c47e1
@@ -11,7 +11,8 @@ from allmydata.util import log
 from allmydata.util import fileutil, iputil, observer
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.fileutil import abspath_expanduser_unicode
-from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
+from allmydata.util.encodingutil import get_filesystem_encoding, quote_output, \
+     quote_local_unicode_path

 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
@@ -50,7 +51,7 @@ class OldConfigError(Exception):
         return ("Found pre-Tahoe-LAFS-v1.3 configuration file(s):\n"
                 "%s\n"
                 "See docs/historical/configuration.rst."
-                % "\n".join([quote_output(fname) for fname in self.args[0]]))
+                % "\n".join([quote_local_unicode_path(fname) for fname in self.args[0]]))

 class OldConfigOptionError(Exception):
     pass
@@ -74,8 +75,8 @@ class Node(service.MultiService):
         self.basedir = abspath_expanduser_unicode(unicode(basedir))
         self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
         self._tub_ready_observerlist = observer.OneShotObserverList()
-        fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
-        open(os.path.join(self.basedir, "private", "README"), "w").write(PRIV_README)
+        fileutil.make_dirs(os.path.join(self.basedir, u"private"), 0700)
+        open(os.path.join(self.basedir, u"private", u"README"), "w").write(PRIV_README)

         # creates self.config
         self.read_config()
@@ -143,7 +144,7 @@ class Node(service.MultiService):
         self.error_about_old_config_files()
         self.config = ConfigParser.SafeConfigParser()

-        tahoe_cfg = os.path.join(self.basedir, "tahoe.cfg")
+        tahoe_cfg = os.path.join(self.basedir, u"tahoe.cfg")
         try:
             f = open(tahoe_cfg, "rb")
             try:
@@ -181,7 +182,7 @@ class Node(service.MultiService):
                      'no_storage', 'readonly_storage', 'sizelimit',
                      'debug_discard_storage', 'run_helper']:
             if name not in self.GENERATED_FILES:
-                fullfname = os.path.join(self.basedir, name)
+                fullfname = os.path.join(self.basedir, unicode(name))
                 if os.path.exists(fullfname):
                     oldfnames.add(fullfname)
         if oldfnames:
@@ -190,7 +191,7 @@ class Node(service.MultiService):
             raise e

     def create_tub(self):
-        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
+        certfile = os.path.join(self.basedir, u"private", self.CERTFILE)
         self.tub = Tub(certFile=certfile)
         self.tub.setOption("logLocalFailures", True)
         self.tub.setOption("logRemoteFailures", True)
@@ -248,7 +249,7 @@ class Node(service.MultiService):
        config file that resides within the subdirectory named 'private'), and
        return it.
        """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
        open(privname, "w").write(value)

    def get_private_config(self, name, default=_None):
@@ -257,7 +258,7 @@ class Node(service.MultiService):
        and return it. Return a default, or raise an error if one was not
        given.
        """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
        try:
            return fileutil.read(privname)
        except EnvironmentError:
@@ -280,7 +281,7 @@ class Node(service.MultiService):
        If 'default' is a string, use it as a default value. If not, treat it
        as a zero-argument callable that is expected to return a string.
        """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
        try:
            value = fileutil.read(privname)
        except EnvironmentError:
@@ -373,16 +374,16 @@ class Node(service.MultiService):
                    ob.formatTime = newmeth
        # TODO: twisted >2.5.0 offers maxRotatedFiles=50

-        lgfurl_file = os.path.join(self.basedir, "private", "logport.furl").encode(get_filesystem_encoding())
-        self.tub.setOption("logport-furlfile", lgfurl_file)
+        logport_furl_file = os.path.join(self.basedir, u"private", u"logport.furl")
+        self.tub.setOption("logport-furlfile", logport_furl_file.encode(get_filesystem_encoding()))
        lgfurl = self.get_config("node", "log_gatherer.furl", "")
        if lgfurl:
            # this is in addition to the contents of log-gatherer-furlfile
            self.tub.setOption("log-gatherer-furl", lgfurl)
-            self.tub.setOption("log-gatherer-furlfile",
-                               os.path.join(self.basedir, "log_gatherer.furl"))
+            log_gatherer_furl_file = os.path.join(self.basedir, u"log_gatherer.furl")
+            self.tub.setOption("log-gatherer-furlfile", log_gatherer_furl_file.encode(get_filesystem_encoding()))
        self.tub.setOption("bridge-twisted-logs", True)
-        incident_dir = os.path.join(self.basedir, "logs", "incidents")
+        incident_dir = os.path.join(self.basedir, u"logs", u"incidents")
        foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))

    def log(self, *args, **kwargs):

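The Node changes above apply one pattern in two halves: build paths as unicode internally (a single u"" component is enough, because Python 2's os.path.join() promotes the whole result to unicode), and encode to the filesystem encoding only at byte-oriented boundaries such as Foolscap's furl-file options. A minimal Python 2 sketch of both halves, with made-up paths:

# Python 2 sketch; the basedir and filenames here are hypothetical.
import os.path, sys

basedir = "example-basedir"   # may still arrive as a byte string

# One u"" component promotes the joined path to unicode
# (byte-string parts are decoded as ASCII along the way).
p = os.path.join(basedir, u"private", u"logport.furl")
assert isinstance(p, unicode)

# Encode only at the boundary, where an API insists on bytes
# (the diff uses Tahoe's get_filesystem_encoding() helper there).
encoded = p.encode(sys.getfilesystemencoding() or "utf-8")
assert isinstance(encoded, str)
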
@@ -2,7 +2,8 @@ import os.path, re, fnmatch
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
     DEFAULT_ALIAS, BaseOptions
-from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
+from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output, \
+    quote_local_unicode_path

 NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")

@@ -528,7 +529,7 @@ def get(options):
             pass
         else:
             print >>options.stderr, "%s retrieved and written to %s" % \
-                  (options.from_file, options.to_file)
+                  (quote_output(options.from_file), quote_local_unicode_path(options.to_file))
     return rc

 def put(options):

@@ -37,12 +37,12 @@ verify-cap for the file that uses the share.

 def dump_share(options):
     from allmydata.storage.mutable import MutableShareFile
-    from allmydata.util.encodingutil import quote_output
+    from allmydata.util.encodingutil import quote_local_unicode_path

     out = options.stdout

     # check the version, to see if we have a mutable or immutable share
-    print >>out, "share filename: %s" % quote_output(options['filename'])
+    print >>out, "share filename: %s" % quote_local_unicode_path(options['filename'])

     f = open(options['filename'], "rb")
     prefix = f.read(32)
@@ -649,11 +649,11 @@ def find_shares(options):

     out = options.stdout
     sharedir = storage_index_to_dir(si_a2b(options.si_s))
-    for d in options.nodedirs:
-        d = os.path.join(d, "storage", "shares", sharedir)
-        if os.path.exists(d):
-            for shnum in listdir_unicode(d):
-                print >>out, quote_local_unicode_path(os.path.join(d, shnum), quotemarks=False)
+    for nodedir in options.nodedirs:
+        abs_sharedir = os.path.join(nodedir, u"storage", u"shares", sharedir)
+        if os.path.exists(abs_sharedir):
+            for shnum in listdir_unicode(abs_sharedir):
+                print >>out, quote_local_unicode_path(os.path.join(abs_sharedir, shnum), quotemarks=False)

     return 0

@@ -712,7 +712,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
     from allmydata.mutable.common import NeedMoreDataError
     from allmydata.immutable.layout import ReadBucketProxy
     from allmydata.util import base32
-    from allmydata.util.encodingutil import quote_output
+    from allmydata.util.encodingutil import quote_local_unicode_path
     import struct

     f = open(abs_sharefile, "rb")
@@ -755,7 +755,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_local_unicode_path(abs_sharefile))
         elif share_type == "MDMF":
             from allmydata.mutable.layout import MDMFSlotReadProxy
             fake_shnum = 0
@@ -784,9 +784,9 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             print >>out, "MDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_local_unicode_path(abs_sharefile))
         else:
-            print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
+            print >>out, "UNKNOWN mutable %s" % quote_local_unicode_path(abs_sharefile)

     elif struct.unpack(">L", prefix[:4]) == (1,):
         # immutable
@@ -818,10 +818,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):

         print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
                                                    ueb_hash, expiration,
-                                                   quote_output(abs_sharefile))
+                                                   quote_local_unicode_path(abs_sharefile))

     else:
-        print >>out, "UNKNOWN really-unknown %s" % quote_output(abs_sharefile)
+        print >>out, "UNKNOWN really-unknown %s" % quote_local_unicode_path(abs_sharefile)

     f.close()

@@ -831,18 +831,18 @@ def catalog_shares(options):
     out = options.stdout
     err = options.stderr
     now = time.time()
-    for d in options.nodedirs:
-        d = os.path.join(d, "storage", "shares")
+    for nodedir in options.nodedirs:
+        abs_sharedir = os.path.join(nodedir, u"storage", u"shares")
         try:
-            abbrevs = listdir_unicode(d)
+            abbrevs = listdir_unicode(abs_sharedir)
         except EnvironmentError:
             # ignore nodes that have storage turned off altogether
             pass
         else:
             for abbrevdir in sorted(abbrevs):
-                if abbrevdir == "incoming":
+                if abbrevdir == u"incoming":
                     continue
-                abbrevdir = os.path.join(d, abbrevdir)
+                abbrevdir = os.path.join(abs_sharedir, abbrevdir)
                 # this tool may get run against bad disks, so we can't assume
                 # that listdir_unicode will always succeed. Try to catalog as much
                 # as possible.
@@ -864,7 +864,7 @@ def _as_number(s):
         return "not int"

 def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
-    from allmydata.util.encodingutil import listdir_unicode, quote_output
+    from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path

     try:
         for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
@@ -874,10 +874,10 @@ def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
                 describe_share(abs_sharefile, si_s, shnum_s, now,
                                out)
             except:
-                print >>err, "Error processing %s" % quote_output(abs_sharefile)
+                print >>err, "Error processing %s" % quote_local_unicode_path(abs_sharefile)
                 failure.Failure().printTraceback(err)
     except:
-        print >>err, "Error processing %s" % quote_output(si_dir)
+        print >>err, "Error processing %s" % quote_local_unicode_path(si_dir)
         failure.Failure().printTraceback(err)

 class CorruptShareOptions(BaseOptions):

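The repeated quote_output -> quote_local_unicode_path swaps above all have the same motive: these arguments are local filesystem paths, unicode on this branch, and the path-specific helper renders them safely even when the output encoding cannot represent them. A toy stand-in for the idea (not the real encodingutil implementation):

# Toy illustration only; the real helper is
# allmydata.util.encodingutil.quote_local_unicode_path.
def toy_quote_local_unicode_path(path, quotemarks=True):
    # Escape what stdout might choke on, instead of raising
    # UnicodeEncodeError in the middle of a report.
    s = path.encode("ascii", "backslashreplace")
    if quotemarks:
        return "'%s'" % s
    return s

print toy_quote_local_unicode_path(u"/tmp/sh\u00e4res/0")
# prints: '/tmp/sh\xe4res/0'
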
@@ -4,6 +4,8 @@ from cStringIO import StringIO

 from twisted.python import usage

+from allmydata.util.assertutil import precondition
+
 from allmydata.scripts.common import get_default_nodedir
 from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer, admin
 from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
@@ -88,6 +90,8 @@ def runner(argv,
            run_by_human=True,
            stdin=None, stdout=None, stderr=None,
            install_node_control=True, additional_commands=None):
+    for arg in argv:
+        precondition(isinstance(arg, str), argv)

     stdin = stdin or sys.stdin
     stdout = stdout or sys.stdout

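runner() now rejects any non-str argv element up front, using assertutil.precondition() rather than a bare assert. A rough standalone stand-in shows the difference (the real helper lives in allmydata.util.assertutil): unlike assert, it is not stripped by python -O, and the extra arguments land in the error message.

# Simplified stand-in for allmydata.util.assertutil.precondition.
def precondition(cond, *args):
    if not cond:
        raise AssertionError("precondition failed: %r" % (args,))

argv = ["--node-directory", "/tmp/node", "manifest"]   # hypothetical argv
for arg in argv:
    # Fails loudly, with all of argv in the message, if any element is unicode.
    precondition(isinstance(arg, str), argv)
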
@@ -18,4 +18,4 @@ def si_a2b(ascii_storageindex):

 def storage_index_to_dir(storageindex):
     sia = si_b2a(storageindex)
-    return os.path.join(sia[:2], sia)
+    return unicode(os.path.join(sia[:2], sia))

@@ -6,6 +6,7 @@ from twisted.application import service
 from zope.interface import implements
 from allmydata.interfaces import RIStorageServer, IStatsProducer
 from allmydata.util import fileutil, idlib, log, time_format
+from allmydata.util.assertutil import precondition
 import allmydata # for __full_version__

 from allmydata.storage.common import si_b2a, si_a2b, storage_index_to_dir
@@ -47,23 +48,25 @@ class StorageServer(service.MultiService, Referenceable):
                  expiration_cutoff_date=None,
                  expiration_sharetypes=("mutable", "immutable")):
         service.MultiService.__init__(self)
-        assert isinstance(nodeid, str)
-        assert len(nodeid) == 20
+        precondition(isinstance(nodeid, str), nodeid)
+        precondition(len(nodeid) == 20, nodeid)

         self.my_nodeid = nodeid
-        self.storedir = storedir
-        sharedir = os.path.join(storedir, "shares")
+        self.storedir = unicode(storedir)
+        sharedir = os.path.join(storedir, u"shares")
         fileutil.make_dirs(sharedir)
         self.sharedir = sharedir
         # we don't actually create the corruption-advisory dir until necessary
         self.corruption_advisory_dir = os.path.join(storedir,
-                                                    "corruption-advisories")
+                                                    u"corruption-advisories")
         self.reserved_space = int(reserved_space)
         self.no_storage = discard_storage
         self.readonly_storage = readonly_storage
         self.stats_provider = stats_provider
         if self.stats_provider:
             self.stats_provider.register_producer(self)
-        self.incomingdir = os.path.join(sharedir, 'incoming')
+        self.incomingdir = os.path.join(sharedir, u"incoming")
         self._clean_incomplete()
         fileutil.make_dirs(self.incomingdir)
         self._active_writers = weakref.WeakKeyDictionary()
@@ -87,8 +90,8 @@ class StorageServer(service.MultiService, Referenceable):
                       }
         self.add_bucket_counter()

-        statefile = os.path.join(self.storedir, "lease_checker.state")
-        historyfile = os.path.join(self.storedir, "lease_checker.history")
+        statefile = os.path.join(self.storedir, u"lease_checker.state")
+        historyfile = os.path.join(self.storedir, u"lease_checker.history")
         klass = self.LeaseCheckerClass
         self.lease_checker = klass(self, statefile, historyfile,
                                    expiration_enabled, expiration_mode,
@@ -106,7 +109,7 @@ class StorageServer(service.MultiService, Referenceable):
         return bool(set(os.listdir(self.sharedir)) - set(["incoming"]))

     def add_bucket_counter(self):
-        statefile = os.path.join(self.storedir, "bucket_counter.state")
+        statefile = os.path.join(self.storedir, u"bucket_counter.state")
         self.bucket_counter = BucketCountingCrawler(self, statefile)
         self.bucket_counter.setServiceParent(self)

@@ -283,8 +286,8 @@ class StorageServer(service.MultiService, Referenceable):
             sf.add_or_renew_lease(lease_info)

         for shnum in sharenums:
-            incominghome = os.path.join(self.incomingdir, si_dir, "%d" % shnum)
-            finalhome = os.path.join(self.sharedir, si_dir, "%d" % shnum)
+            incominghome = os.path.join(self.incomingdir, si_dir, u"%d" % shnum)
+            finalhome = os.path.join(self.sharedir, si_dir, u"%d" % shnum)
             if os.path.exists(finalhome):
                 # great! we already have it. easy.
                 pass
@@ -491,7 +494,7 @@ class StorageServer(service.MultiService, Referenceable):
         (write_enabler, renew_secret, cancel_secret) = secrets
         my_nodeid = self.my_nodeid
         fileutil.make_dirs(bucketdir)
-        filename = os.path.join(bucketdir, "%d" % sharenum)
+        filename = os.path.join(bucketdir, u"%d" % sharenum)
         share = create_mutable_sharefile(filename, my_nodeid, write_enabler,
                                          self)
         return share
@@ -530,7 +533,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_s = si_b2a(storage_index)
         # windows can't handle colons in the filename
         fn = os.path.join(self.corruption_advisory_dir,
-                          "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
+                          (u"%s--%s-%d" % (now, si_s, shnum)).replace(u":", u""))
         f = open(fn, "w")
         f.write("report: Share Corruption\n")
         f.write("type: %s\n" % share_type)

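One detail in the last StorageServer hunk is easy to miss: the .replace() that strips Windows-hostile colons moved inside os.path.join(), so it now touches only the advisory's basename. The old form stripped ":" from the entire joined path, which on Windows would also mangle a drive letter. A sketch with made-up values, using ntpath so it behaves the same on any platform:

import ntpath

now, si_s, shnum = "2013-05-01T12:30:00Z", "aaaaaaaa", 0   # hypothetical values
name = "%s--%s-%d" % (now, si_s, shnum)
old_fn = ntpath.join(r"C:\advisories", name).replace(":", "")
new_fn = ntpath.join(r"C:\advisories", name.replace(":", ""))
print old_fn   # C\advisories\2013-05-01T123000Z--aaaaaaaa-0  (drive letter mangled)
print new_fn   # C:\advisories\2013-05-01T123000Z--aaaaaaaa-0
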
@@ -24,6 +24,7 @@ from allmydata import uri as tahoe_uri
 from allmydata.client import Client
 from allmydata.storage.server import StorageServer, storage_index_to_dir
 from allmydata.util import fileutil, idlib, hashutil
+from allmydata.util.assertutil import precondition
 from allmydata.util.hashutil import sha1
 from allmydata.test.common_web import HTTPClientGETFactory
 from allmydata.interfaces import IStorageBroker, IServer
@@ -224,6 +225,8 @@ class NoNetworkGrid(service.MultiService):
     def __init__(self, basedir, num_clients=1, num_servers=10,
                  client_config_hooks={}):
         service.MultiService.__init__(self)
+        precondition(isinstance(basedir, unicode), basedir)
+
         self.basedir = basedir
         fileutil.make_dirs(basedir)

@@ -266,8 +269,8 @@ class NoNetworkGrid(service.MultiService):

     def make_server(self, i, readonly=False):
         serverid = hashutil.tagged_hash("serverid", str(i))[:20]
-        serverdir = os.path.join(self.basedir, "servers",
-                                 idlib.shortnodeid_b2a(serverid), "storage")
+        serverdir = os.path.join(self.basedir, u"servers",
+                                 unicode(idlib.shortnodeid_b2a(serverid)), u"storage")
         fileutil.make_dirs(serverdir)
         ss = StorageServer(serverdir, serverid, stats_provider=SimpleStats(),
                            readonly_storage=readonly)
@@ -345,7 +348,7 @@ class GridTestMixin:
     def set_up_grid(self, num_clients=1, num_servers=10,
                     client_config_hooks={}):
         # self.basedir must be set
-        self.g = NoNetworkGrid(self.basedir,
+        self.g = NoNetworkGrid(unicode(self.basedir),
                                num_clients=num_clients,
                                num_servers=num_servers,
                                client_config_hooks=client_config_hooks)

@@ -6,6 +6,8 @@ import urllib, sys

 from mock import Mock, call

+from allmydata.util.assertutil import precondition
+
 import allmydata
 from allmydata.util import fileutil, hashutil, base32, keyutil
 from allmydata import uri
@@ -33,9 +35,8 @@ from allmydata.test.no_network import GridTestMixin
 from twisted.internet import threads # CLI tests use deferToThread
 from twisted.python import usage

-from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
-    get_io_encoding, get_filesystem_encoding
+    get_io_encoding, get_filesystem_encoding, unicode_to_argv

 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s

@@ -52,6 +53,11 @@ class CLITestMixin(ReallyEqualMixin):
                     "--node-directory", unicode_to_argv(self.get_clientdir()),
                     ]
         argv = nodeargs + [verb] + list(args)
+
+        # runner.runner will also check this, but in another thread; this gives a better traceback
+        for arg in argv:
+            precondition(isinstance(arg, str), argv)
+
         stdin = kwargs.get("stdin", "")
         stdout, stderr = StringIO(), StringIO()
         d = threads.deferToThread(runner.runner, argv, run_by_human=False,

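The do_cli() change above pairs with the new precondition loop: every locally-built path must pass through unicode_to_argv() before it reaches argv, since runner() now insists on native byte strings. A sketch of that conversion (toy encoder; the real allmydata.util.encodingutil helper chooses the appropriate io encoding):

import sys

# Toy stand-in for allmydata.util.encodingutil.unicode_to_argv.
def toy_unicode_to_argv(u):
    return u.encode(sys.getfilesystemencoding() or "utf-8")

clientdir = u"/tmp/test-grid/client0"   # unicode, as set_up_grid now provides
argv = ["--node-directory", toy_unicode_to_argv(clientdir), "manifest"]
assert all(isinstance(a, str) for a in argv)
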
@@ -136,7 +136,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         # timestamp to force a check on all files
         def _reset_last_checked(res):
             dbfile = os.path.join(self.get_clientdir(),
-                                  "private", "backupdb.sqlite")
+                                  u"private", u"backupdb.sqlite")
             self.failUnless(os.path.exists(dbfile), dbfile)
             bdb = backupdb.get_backupdb(dbfile)
             bdb.cursor.execute("UPDATE last_upload SET last_checked=0")

@@ -5,7 +5,7 @@ from allmydata.util import fileutil
 from allmydata.scripts.common import get_aliases
 from allmydata.scripts import cli, runner
 from allmydata.test.no_network import GridTestMixin
-from allmydata.util.encodingutil import quote_output, get_io_encoding
+from allmydata.util.encodingutil import quote_output, get_io_encoding, unicode_to_argv
 from .test_cli import CLITestMixin

 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s

@@ -14,7 +14,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):

     def _test_webopen(self, args, expected_url):
         o = runner.Options()
-        o.parseOptions(["--node-directory", self.get_clientdir(), "webopen"]
+        o.parseOptions(["--node-directory", unicode_to_argv(self.get_clientdir()), "webopen"]
                        + list(args))
         urls = []
         rc = cli.webopen(o, urls.append)
@@ -25,7 +25,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
     def test_create(self):
         self.basedir = "cli/CreateAlias/create"
         self.set_up_grid()
-        aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
+        aliasfile = os.path.join(self.get_clientdir(), u"private", u"aliases")

         d = self.do_cli("create-alias", "tahoe")
         def _done((rc,stdout,stderr)):
@@ -39,7 +39,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):

         def _stash_urls(res):
             aliases = get_aliases(self.get_clientdir())
-            node_url_file = os.path.join(self.get_clientdir(), "node.url")
+            node_url_file = os.path.join(self.get_clientdir(), u"node.url")
             nodeurl = fileutil.read(node_url_file).strip()
             self.welcome_url = nodeurl
             uribase = nodeurl + "uri/"

@@ -1,4 +1,4 @@
-import os, sys
+import os
 from twisted.trial import unittest
 from twisted.application import service

@@ -70,7 +70,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
         fileutil.write(os.path.join(basedir, "debug_discard_storage"), "")

         e = self.failUnlessRaises(OldConfigError, client.Client, basedir)
-        abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
+        abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir))
         self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
         self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
         self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])

@@ -4,11 +4,15 @@ from cStringIO import StringIO
 from twisted.trial import unittest
 from twisted.internet import defer
 from twisted.internet import threads # CLI tests use deferToThread

+from allmydata.util.assertutil import precondition
+
 from allmydata.immutable import upload
 from allmydata.mutable.common import UnrecoverableFileError
 from allmydata.mutable.publish import MutableData
 from allmydata.util import idlib
 from allmydata.util import base32
+from allmydata.util.encodingutil import unicode_to_argv
 from allmydata.scripts import runner
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
     IDeepCheckResults, IDeepCheckAndRepairResults
@@ -25,9 +29,10 @@ timeout = 2400 # One of these took 1046.091s on Zandr's ARM box.

 class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
     def _run_cli(self, argv):
+        precondition(argv[0] == "debug", argv)
+
         stdout, stderr = StringIO(), StringIO()
         # this can only do synchronous operations
-        assert argv[0] == "debug"
         runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
         return stdout.getvalue()

@@ -728,6 +733,10 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         return d

     def _run_cli(self, argv, stdin=""):
+        # runner.runner will also check this, but in another thread; this gives a better traceback
+        for arg in argv:
+            precondition(isinstance(arg, str), argv)
+
         #print "CLI:", argv
         stdout, stderr = StringIO(), StringIO()
         d = threads.deferToThread(runner.runner, argv, run_by_human=False,
@@ -758,7 +767,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_manifest_stream1(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            self.root_uri])
         def _check((out,err)):
@@ -786,7 +795,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_manifest_stream2(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--raw",
                            self.root_uri])
@@ -799,7 +808,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_manifest_stream3(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--storage-index",
                            self.root_uri])
@@ -811,7 +820,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_manifest_stream4(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--verify-cap",
                            self.root_uri])
@@ -827,7 +836,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_manifest_stream5(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--repair-cap",
                            self.root_uri])
@@ -843,7 +852,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_stats1(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "stats",
                            self.root_uri])
         def _check3((out,err)):
@@ -863,7 +872,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):

     def do_cli_stats2(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "stats",
                            "--raw",
                            self.root_uri])
@@ -984,9 +993,10 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
         return d

     def _run_cli(self, argv):
+        precondition(argv[0] == "debug", argv)
+
         stdout, stderr = StringIO(), StringIO()
         # this can only do synchronous operations
-        assert argv[0] == "debug"
         runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
         return stdout.getvalue()

@@ -996,7 +1006,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
     def _corrupt_some_shares(self, node):
         for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
             if shnum in (0,1):
-                self._run_cli(["debug", "corrupt-share", sharefile])
+                self._run_cli(["debug", "corrupt-share", unicode_to_argv(sharefile)])

     def _delete_most_shares(self, node):
         self.delete_shares_numbered(node.get_uri(), range(1,10))

@@ -3113,7 +3113,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
         fso = debug.FindSharesOptions()
         storage_index = base32.b2a(n.get_storage_index())
         fso.si_s = storage_index
-        fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
+        fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(storedir))
                         for (i,ss,storedir)
                         in self.iterate_servers()]
         fso.stdout = StringIO()

@@ -7,6 +7,7 @@ from allmydata.test.no_network import NoNetworkGrid
 from allmydata.immutable.upload import Data
 from allmydata.util.consumer import download_to_data

+
 class Harness(unittest.TestCase):
     def setUp(self):
         self.s = service.MultiService()
@@ -16,13 +17,13 @@ class Harness(unittest.TestCase):
         return self.s.stopService()

     def test_create(self):
-        basedir = "no_network/Harness/create"
+        basedir = u"no_network/Harness/create"
         g = NoNetworkGrid(basedir)
         g.startService()
         return g.stopService()

     def test_upload(self):
-        basedir = "no_network/Harness/upload"
+        basedir = u"no_network/Harness/upload"
         g = NoNetworkGrid(basedir)
         g.setServiceParent(self.s)

@@ -1414,7 +1414,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):


     def workdir(self, name):
-        basedir = os.path.join("storage", "MutableServer", name)
+        basedir = os.path.join("storage", "MDMFProxies", name)
         return basedir


@@ -2793,7 +2793,7 @@ class Stats(unittest.TestCase):
         return self.sparent.stopService()

     def workdir(self, name):
-        basedir = os.path.join("storage", "Server", name)
+        basedir = os.path.join("storage", "Stats", name)
         return basedir

     def create(self, name):

@@ -16,7 +16,8 @@ from allmydata.immutable.filenode import ImmutableFileNode
 from allmydata.util import idlib, mathutil
 from allmydata.util import log, base32
 from allmydata.util.verlib import NormalizedVersion
-from allmydata.util.encodingutil import quote_output, unicode_to_argv, get_filesystem_encoding
+from allmydata.util.encodingutil import quote_local_unicode_path, \
+     unicode_to_argv, get_filesystem_encoding
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.consumer import MemoryConsumer, download_to_data
 from allmydata.scripts import runner
@@ -730,6 +731,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(self.log, "starting publish")
         d.addCallback(self._do_publish1)
         d.addCallback(self._test_runner)
+        return d
         d.addCallback(self._do_publish2)
         # at this point, we have the following filesystem (where "R" denotes
         # self._root_directory_uri):
@@ -1314,14 +1316,14 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):

         # find a share
         for (dirpath, dirnames, filenames) in os.walk(unicode(self.basedir)):
-            if "storage" not in dirpath:
+            if u"storage" not in dirpath:
                 continue
             if not filenames:
                 continue
             pieces = dirpath.split(os.sep)
             if (len(pieces) >= 4
-                and pieces[-4] == "storage"
-                and pieces[-3] == "shares"):
+                and pieces[-4] == u"storage"
+                and pieces[-3] == u"shares"):
                 # we're sitting in .../storage/shares/$START/$SINDEX , and there
                 # are sharefiles here
                 filename = os.path.join(dirpath, filenames[0])
@@ -1343,7 +1345,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):

         # we only upload a single file, so we can assert some things about
         # its size and shares.
-        self.failUnlessIn("share filename: %s" % quote_output(abspath_expanduser_unicode(filename)), output)
+        self.failUnlessIn("share filename: %s" % quote_local_unicode_path(abspath_expanduser_unicode(filename)), output)
         self.failUnlessIn("size: %d\n" % len(self.data), output)
         self.failUnlessIn("num_segments: 1\n", output)
         # segment_size is always a multiple of needed_shares
@@ -1377,11 +1379,12 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         out,err = StringIO(), StringIO()
         nodedirs = [self.getdir("client%d" % i) for i in range(self.numclients)]
         cmd = ["debug", "catalog-shares"] + nodedirs
+        #import pdb; pdb.set_trace()
         rc = runner.runner(cmd, stdout=out, stderr=err)
         self.failUnlessEqual(rc, 0)
         out.seek(0)
         descriptions = [sfn.strip() for sfn in out.readlines()]
-        self.failUnlessEqual(len(descriptions), 30)
+        self.failUnlessEqual(len(descriptions), 30, descriptions)
         matching = [line
                     for line in descriptions
                     if line.startswith("CHK %s " % storage_index_s)]

@@ -175,6 +175,7 @@ class Math(unittest.TestCase):
         f = mathutil.round_sigfigs
         self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002)

+
 class Statistics(unittest.TestCase):
     def should_assert(self, msg, func, *args, **kwargs):
         try:
@@ -372,6 +373,7 @@ class Asserts(unittest.TestCase):
         m = self.should_assert(f, False, othermsg="message2")
         self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)

+
 class FileUtil(ReallyEqualMixin, unittest.TestCase):
     def mkdir(self, basedir, path, mode=0777):
         fn = os.path.join(basedir, path)
@@ -554,6 +556,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
         disk = fileutil.get_disk_stats(u".", 2**128)
         self.failUnlessEqual(disk['avail'], 0)

+
 class PollMixinTests(unittest.TestCase):
     def setUp(self):
         self.pm = pollmixin.PollMixin()
@@ -581,6 +584,7 @@ class PollMixinTests(unittest.TestCase):
         d.addCallbacks(_suc, _err)
         return d

+
 class DeferredUtilTests(unittest.TestCase):
     def test_gather_results(self):
         d1 = defer.Deferred()
@@ -621,8 +625,8 @@ class DeferredUtilTests(unittest.TestCase):
         self.failUnless(isinstance(f, Failure))
         self.failUnless(f.check(ValueError))

-class HashUtilTests(unittest.TestCase):

+class HashUtilTests(unittest.TestCase):
     def test_random_key(self):
         k = hashutil.random_key()
         self.failUnlessEqual(len(k), hashutil.KEYLEN)
@@ -815,6 +819,7 @@ class Abbreviate(unittest.TestCase):
         e = self.failUnlessRaises(ValueError, p, "fhtagn")
         self.failUnlessIn("fhtagn", str(e))

+
 class Limiter(unittest.TestCase):
     timeout = 480 # This takes longer than 240 seconds on Francois's arm box.

@@ -890,6 +895,7 @@ class Limiter(unittest.TestCase):
         d.addCallback(_all_done)
         return d

+
 class TimeFormat(unittest.TestCase):
     def test_epoch(self):
         return self._help_test_epoch()
@@ -978,6 +984,7 @@ class TimeFormat(unittest.TestCase):
     def test_parse_date(self):
         self.failUnlessEqual(time_format.parse_date("2010-02-21"), 1266710400)

+
 class CacheDir(unittest.TestCase):
     def test_basic(self):
         basedir = "test_util/CacheDir/test_basic"
@@ -1042,6 +1049,7 @@ class CacheDir(unittest.TestCase):
         _failUnlessExists("c")
         del b2

+
 ctr = [0]
 class EqButNotIs:
     def __init__(self, x):
@@ -1065,6 +1073,7 @@ class EqButNotIs:
     def __eq__(self, other):
         return self.x == other

+
 class DictUtil(unittest.TestCase):
     def _help_test_empty_dict(self, klass):
         d1 = klass()
@@ -1440,6 +1449,7 @@ class DictUtil(unittest.TestCase):
         self.failUnlessEqual(d["one"], 1)
         self.failUnlessEqual(d.get_aux("one"), None)

+
 class Pipeline(unittest.TestCase):
     def pause(self, *args, **kwargs):
         d = defer.Deferred()
@@ -1614,9 +1624,11 @@ class Pipeline(unittest.TestCase):

         del d1,d2,d3,d4

+
 class SampleError(Exception):
     pass

+
 class Log(unittest.TestCase):
     def test_err(self):
         if not hasattr(self, "flushLoggedErrors"):
@@ -1716,6 +1728,7 @@ class SimpleSpans:
             return False
         return True

+
 class ByteSpans(unittest.TestCase):
     def test_basic(self):
         s = Spans()
@@ -1950,6 +1963,7 @@ class ByteSpans(unittest.TestCase):
             out.write(" ")
         out.write("\n")

+
 def extend(s, start, length, fill):
     if len(s) >= start+length:
         return s