Overhaul IFilesystemNode handling, to simplify tests and use POLA (the Principle of Least Authority) internally.

* stop using IURI as an adapter
* pass cap strings around instead of URI instances (see the dispatch sketch below)
* move filenode/dirnode creation duties from Client to new NodeMaker class
* move other Client duties to KeyGenerator, SecretHolder, History classes
* stop passing Client reference to dirnode/filenode constructors
  - pass less-powerful references instead, like StorageBroker or Uploader
* always create DirectoryNodes by wrapping a filenode (mutable for now; see the sketch after the change list)
* remove some specialized mock classes from unit tests
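
To make the first two bullets concrete: cap dispatch now switches on string prefixes instead of adapting through IURI. Below is a minimal sketch in the spirit of the new NodeMaker.create_from_cap(). The SSK prefixes appear verbatim in the mutable/filenode.py hunk further down; the other prefixes are standard Tahoe cap prefixes, the _make_* helper names are illustrative only, and the real method also caches nodes.

    def create_from_cap(self, writecap, readcap=None):
        # caps are plain strings like "URI:CHK:...", never IURI instances
        cap = writecap or readcap
        if not cap:
            # a read-only context may hide the writecap and offer no readcap
            return UnknownNode(writecap, readcap)
        assert isinstance(cap, str)
        if cap.startswith("URI:LIT:"):
            return LiteralFileNode(cap)
        if cap.startswith("URI:CHK:"):
            return self._make_chk_filenode(cap)       # illustrative helper
        if cap.startswith("URI:SSK:") or cap.startswith("URI:SSK-RO:"):
            return self._make_mutable_filenode(cap)   # illustrative helper
        if cap.startswith("URI:DIR2:") or cap.startswith("URI:DIR2-RO:"):
            return self._make_dirnode(cap)            # illustrative helper
        return UnknownNode(writecap, readcap)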

Detailed list of changes (done one at a time, then merged together):

always pass a string to create_node_from_uri(), not an IURI instance
always pass a string to IFilesystemNode constructors, not an IURI instance
stop using IURI() as an adapter, switch on cap prefix in create_node_from_uri()
client.py: move SecretHolder code out to a separate class
test_web.py: hush pyflakes
client.py: move NodeMaker functionality out into a separate object
LiteralFileNode: stop storing a Client reference
immutable Checker: remove Client reference, it only needs a SecretHolder
immutable Upload: remove Client reference, leave SecretHolder and StorageBroker
immutable Repairer: replace Client reference with StorageBroker and SecretHolder
immutable FileNode: remove Client reference
mutable.Publish: stop passing Client
mutable.ServermapUpdater: get StorageBroker in constructor, not by peeking into Client reference
MutableChecker: reference StorageBroker and History directly, not through Client
mutable.FileNode: removed unused indirection to checker classes
mutable.FileNode: remove Client reference
client.py: move RSA key generation into a separate class, so it can be passed to the nodemaker
move create_mutable_file() into NodeMaker
test_dirnode.py: stop using FakeClient mockups, use NoNetworkGrid instead. This simplifies the code, but takes longer to run (17s instead of 6s). This should come down later when other cleanups make it possible to use simpler (non-RSA) fake mutable files for dirnode tests.
test_mutable.py: clean up basedir names
client.py: move create_empty_dirnode() into NodeMaker
dirnode.py: get rid of DirectoryNode.create
remove DirectoryNode.init_from_uri, refactor NodeMaker for customization, simplify test_web's mock Client to match
stop passing Client to DirectoryNode, make DirectoryNode.create_with_mutablefile the normal DirectoryNode constructor, start removing client from NodeMaker
remove Client from NodeMaker
move helper status into History, pass History to web.Status instead of Client
test_mutable.py: fix minor typo
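
The dirnode change in particular inverts the old construction order: a DirectoryNode no longer builds its own mutable file, it merely wraps one, per the new constructor in the dirnode.py hunk below. A minimal sketch (it assumes a mutable filenode and the two collaborators already exist):

    # The dirnode derives its own DIR2/DIR2-RO URI from the wrapped
    # filenode's URI, so only the filenode and two less-powerful
    # collaborators are passed in; no Client reference is needed.
    dirnode = DirectoryNode(mutable_filenode, nodemaker, uploader)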
Brian Warner, 2009-08-15 04:02:56 -07:00
commit 0d5dc51617 (parent ffd5135fed)
29 changed files with 974 additions and 946 deletions

src/allmydata/client.py

@@ -1,9 +1,9 @@
-import os, stat, time, weakref
+import os, stat, time
 from allmydata.interfaces import RIStorageServer
 from allmydata import node
 from zope.interface import implements
-from twisted.internet import reactor
+from twisted.internet import reactor, defer
 from twisted.application.internet import TimerService
 from foolscap.api import Referenceable
 from pycryptopp.publickey import rsa
@@ -13,22 +13,17 @@ from allmydata.storage.server import StorageServer
 from allmydata import storage_client
 from allmydata.immutable.upload import Uploader
 from allmydata.immutable.download import Downloader
-from allmydata.immutable.filenode import FileNode, LiteralFileNode
 from allmydata.immutable.offloaded import Helper
 from allmydata.control import ControlServer
 from allmydata.introducer.client import IntroducerClient
 from allmydata.util import hashutil, base32, pollmixin, cachedir, log
 from allmydata.util.abbreviate import parse_abbreviated_size
 from allmydata.util.time_format import parse_duration, parse_date
-from allmydata.uri import LiteralFileURI, UnknownURI
-from allmydata.dirnode import DirectoryNode
-from allmydata.mutable.filenode import MutableFileNode
-from allmydata.unknown import UnknownNode
 from allmydata.stats import StatsProvider
 from allmydata.history import History
-from allmydata.interfaces import IURI, IDirectoryURI, IStatsProducer, \
-     IReadonlyDirectoryURI, IFileURI, IMutableFileURI, RIStubClient, \
-     UnhandledCapTypeError
+from allmydata.interfaces import IStatsProducer, RIStubClient
+from allmydata.nodemaker import NodeMaker

 KiB=1024
 MiB=1024*KiB
@@ -42,6 +37,47 @@ class StubClient(Referenceable):
 def _make_secret():
     return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"

+class SecretHolder:
+    def __init__(self, lease_secret):
+        self._lease_secret = lease_secret
+
+    def get_renewal_secret(self):
+        return hashutil.my_renewal_secret_hash(self._lease_secret)
+
+    def get_cancel_secret(self):
+        return hashutil.my_cancel_secret_hash(self._lease_secret)
+
+class KeyGenerator:
+    def __init__(self):
+        self._remote = None
+        self.default_keysize = 2048
+
+    def set_remote_generator(self, keygen):
+        self._remote = keygen
+
+    def set_default_keysize(self, keysize):
+        """Call this to override the size of the RSA keys created for new
+        mutable files. The default of None means to let mutable.filenode
+        choose its own size, which means 2048 bits."""
+        self.default_keysize = keysize
+
+    def generate(self, keysize=None):
+        keysize = keysize or self.default_keysize
+        if self._remote:
+            d = self._remote.callRemote('get_rsa_key_pair', keysize)
+            def make_key_objs((verifying_key, signing_key)):
+                v = rsa.create_verifying_key_from_string(verifying_key)
+                s = rsa.create_signing_key_from_string(signing_key)
+                return v, s
+            d.addCallback(make_key_objs)
+            return d
+        else:
+            # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
+            # secs
+            signer = rsa.generate(keysize)
+            verifier = signer.get_verifying_key()
+            return defer.succeed( (verifier, signer) )
+
 class Client(node.Node, pollmixin.PollMixin):
     implements(IStatsProducer)
@@ -65,11 +101,6 @@ class Client(node.Node, pollmixin.PollMixin):
                                    "max_segment_size": 128*KiB,
                                    }
-    # set this to override the size of the RSA keys created for new mutable
-    # files. The default of None means to let mutable.filenode choose its own
-    # size, which means 2048 bits.
-    DEFAULT_MUTABLE_KEYSIZE = None
-
     def __init__(self, basedir="."):
         node.Node.__init__(self, basedir)
         self.started_timestamp = time.time()
@@ -82,11 +113,11 @@ class Client(node.Node, pollmixin.PollMixin):
         self.init_control()
         if self.get_config("helper", "enabled", False, boolean=True):
             self.init_helper()
-        self.init_client()
-        self._key_generator = None
+        self._key_generator = KeyGenerator()
         key_gen_furl = self.get_config("client", "key_generator.furl", None)
         if key_gen_furl:
             self.init_key_gen(key_gen_furl)
+        self.init_client()
         # ControlServer and Helper are attached after Tub startup
         self.init_ftp_server()
         self.init_sftp_server()
@@ -149,7 +180,8 @@ class Client(node.Node, pollmixin.PollMixin):
     def init_lease_secret(self):
         secret_s = self.get_or_create_private_config("secret", _make_secret)
-        self._lease_secret = base32.a2b(secret_s)
+        lease_secret = base32.a2b(secret_s)
+        self._secret_holder = SecretHolder(lease_secret)

     def init_storage(self):
         # should we run a storage server (and publish it for others to use)?
@@ -224,10 +256,9 @@ class Client(node.Node, pollmixin.PollMixin):
         DEP["happy"] = int(self.get_config("client", "shares.happy", DEP["happy"]))

         convergence_s = self.get_or_create_private_config('convergence', _make_secret)
         self.convergence = base32.a2b(convergence_s)
-        self._node_cache = weakref.WeakValueDictionary() # uri -> node
         self.init_client_storage_broker()
-        self.add_service(History(self.stats_provider))
+        self.history = self.add_service(History(self.stats_provider))
         self.add_service(Uploader(helper_furl, self.stats_provider))
         download_cachedir = os.path.join(self.basedir,
                                          "private", "cache", "download")
@@ -235,6 +266,7 @@ class Client(node.Node, pollmixin.PollMixin):
         self.download_cache_dirman.setServiceParent(self)
         self.add_service(Downloader(self.stats_provider))
         self.init_stub_client()
+        self.init_nodemaker()

     def init_client_storage_broker(self):
         # create a StorageFarmBroker object, for use by Uploader/Downloader
@@ -286,6 +318,16 @@ class Client(node.Node, pollmixin.PollMixin):
         d.addErrback(log.err, facility="tahoe.init",
                      level=log.BAD, umid="OEHq3g")

+    def init_nodemaker(self):
+        self.nodemaker = NodeMaker(self.storage_broker,
+                                   self._secret_holder,
+                                   self.get_history(),
+                                   self.getServiceNamed("uploader"),
+                                   self.getServiceNamed("downloader"),
+                                   self.download_cache_dirman,
+                                   self.get_encoding_parameters(),
+                                   self._key_generator)
+
     def get_history(self):
         return self.getServiceNamed("history")
@@ -303,7 +345,8 @@ class Client(node.Node, pollmixin.PollMixin):
     def init_helper(self):
         d = self.when_tub_ready()
         def _publish(self):
-            h = Helper(os.path.join(self.basedir, "helper"), self.stats_provider)
+            h = Helper(os.path.join(self.basedir, "helper"),
+                       self.stats_provider, self.history)
             h.setServiceParent(self)
             # TODO: this is confusing. BASEDIR/private/helper.furl is created
             # by the helper. BASEDIR/helper.furl is consumed by the client
@@ -326,11 +369,14 @@ class Client(node.Node, pollmixin.PollMixin):
                     level=log.BAD, umid="z9DMzw")

     def _got_key_generator(self, key_generator):
-        self._key_generator = key_generator
+        self._key_generator.set_remote_generator(key_generator)
         key_generator.notifyOnDisconnect(self._lost_key_generator)

     def _lost_key_generator(self):
-        self._key_generator = None
+        self._key_generator.set_remote_generator(None)
+
+    def set_default_mutable_keysize(self, keysize):
+        self._key_generator.set_default_keysize(keysize)

     def init_web(self, webport):
         self.log("init_web(webport=%s)", args=(webport,))
@@ -384,11 +430,11 @@ class Client(node.Node, pollmixin.PollMixin):
             return self.introducer_client.connected_to_introducer()
         return False

-    def get_renewal_secret(self):
-        return hashutil.my_renewal_secret_hash(self._lease_secret)
+    def get_renewal_secret(self): # this will go away
+        return self._secret_holder.get_renewal_secret()

     def get_cancel_secret(self):
-        return hashutil.my_cancel_secret_hash(self._lease_secret)
+        return self._secret_holder.get_cancel_secret()

     def debug_wait_for_client_connections(self, num_clients):
         """Return a Deferred that fires (with None) when we have connections
@@ -408,84 +454,14 @@ class Client(node.Node, pollmixin.PollMixin):
     def create_node_from_uri(self, writecap, readcap=None):
         # this returns synchronously.
-        u = writecap or readcap
-        if not u:
-            # maybe the writecap was hidden because we're in a readonly
-            # directory, and the future cap format doesn't have a readcap, or
-            # something.
-            return UnknownNode(writecap, readcap)
-        u = IURI(u)
-        if isinstance(u, UnknownURI):
-            return UnknownNode(writecap, readcap)
-        u_s = u.to_string()
-        if u_s not in self._node_cache:
-            if IReadonlyDirectoryURI.providedBy(u):
-                # read-only dirnodes
-                node = DirectoryNode(self).init_from_uri(u)
-            elif IDirectoryURI.providedBy(u):
-                # dirnodes
-                node = DirectoryNode(self).init_from_uri(u)
-            elif IFileURI.providedBy(u):
-                if isinstance(u, LiteralFileURI):
-                    node = LiteralFileNode(u, self) # LIT
-                else:
-                    node = FileNode(u, self, self.download_cache_dirman) # CHK
-            elif IMutableFileURI.providedBy(u):
-                node = MutableFileNode(self).init_from_uri(u)
-            else:
-                raise UnhandledCapTypeError("cap is recognized, but has no Node")
-            self._node_cache[u_s] = node # note: WeakValueDictionary
-        return self._node_cache[u_s]
+        return self.nodemaker.create_from_cap(writecap, readcap)

     def create_empty_dirnode(self):
-        d = self.create_mutable_file()
-        d.addCallback(DirectoryNode.create_with_mutablefile, self)
-        return d
+        return self.nodemaker.create_new_mutable_directory()

     def create_mutable_file(self, contents="", keysize=None):
-        keysize = keysize or self.DEFAULT_MUTABLE_KEYSIZE
-        n = MutableFileNode(self)
-        d = n.create(contents, self._generate_pubprivkeys, keysize=keysize)
-        d.addCallback(lambda res: n)
-        return d
-
-    def _generate_pubprivkeys(self, key_size):
-        if self._key_generator:
-            d = self._key_generator.callRemote('get_rsa_key_pair', key_size)
-            def make_key_objs((verifying_key, signing_key)):
-                v = rsa.create_verifying_key_from_string(verifying_key)
-                s = rsa.create_signing_key_from_string(signing_key)
-                return v, s
-            d.addCallback(make_key_objs)
-            return d
-        else:
-            # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
-            # secs
-            signer = rsa.generate(key_size)
-            verifier = signer.get_verifying_key()
-            return verifier, signer
+        return self.nodemaker.create_mutable_file(contents, keysize)

     def upload(self, uploadable):
         uploader = self.getServiceNamed("uploader")
         return uploader.upload(uploadable, history=self.get_history())
-
-    def list_all_upload_statuses(self):
-        return self.get_history().list_all_upload_statuses()
-
-    def list_all_download_statuses(self):
-        return self.get_history().list_all_download_statuses()
-
-    def list_all_mapupdate_statuses(self):
-        return self.get_history().list_all_mapupdate_statuses()
-
-    def list_all_publish_statuses(self):
-        return self.get_history().list_all_publish_statuses()
-
-    def list_all_retrieve_statuses(self):
-        return self.get_history().list_all_retrieve_statuses()
-
-    def list_all_helper_statuses(self):
-        try:
-            helper = self.getServiceNamed("helper")
-        except KeyError:
-            return []
-        return helper.get_all_upload_statuses()

src/allmydata/dirnode.py

@@ -9,7 +9,7 @@ from allmydata.mutable.common import NotMutableError
 from allmydata.mutable.filenode import MutableFileNode
 from allmydata.unknown import UnknownNode
 from allmydata.interfaces import IMutableFileNode, IDirectoryNode,\
-     IURI, IFileNode, IMutableFileURI, IFilesystemNode, \
+     IFileNode, IMutableFileURI, IFilesystemNode, \
      ExistingChildError, NoSuchChildError, ICheckable, IDeepCheckable, \
      CannotPackUnknownNodeError
 from allmydata.check_results import DeepCheckResults, \
@@ -18,7 +18,8 @@ from allmydata.monitor import Monitor
 from allmydata.util import hashutil, mathutil, base32, log
 from allmydata.util.assertutil import _assert, precondition
 from allmydata.util.netstring import netstring, split_netstring
-from allmydata.uri import DirectoryURI, LiteralFileURI, from_string
+from allmydata.uri import DirectoryURI, ReadonlyDirectoryURI, \
+     LiteralFileURI, from_string
 from pycryptopp.cipher.aes import AES

 class CachingDict(dict):
@@ -147,39 +148,19 @@ class DirectoryNode:
     implements(IDirectoryNode, ICheckable, IDeepCheckable)
     filenode_class = MutableFileNode

-    def __init__(self, client):
-        self._client = client
+    def __init__(self, filenode, nodemaker, uploader):
+        self._node = filenode
+        filenode_uri = IMutableFileURI(filenode.get_uri())
+        if filenode_uri.is_readonly():
+            self._uri = ReadonlyDirectoryURI(filenode_uri)
+        else:
+            self._uri = DirectoryURI(filenode_uri)
+        self._nodemaker = nodemaker
+        self._uploader = uploader
         self._most_recent_size = None

     def __repr__(self):
         return "<%s %s %s>" % (self.__class__.__name__, self.is_readonly() and "RO" or "RW", hasattr(self, '_uri') and self._uri.abbrev())

-    def init_from_uri(self, myuri):
-        self._uri = IURI(myuri)
-        self._node = self.filenode_class(self._client)
-        self._node.init_from_uri(self._uri.get_filenode_uri())
-        return self
-
-    @classmethod
-    def create_with_mutablefile(cls, filenode, client):
-        self = cls(client)
-        self._node = filenode
-        return self._filenode_created(filenode)
-
-    def create(self, keypair_generator=None, keysize=None):
-        """
-        Returns a deferred that eventually fires with self once the directory
-        has been created (distributed across a set of storage servers).
-        """
-        # first we create a MutableFileNode with empty_contents, then use its
-        # URI to create our own.
-        self._node = self.filenode_class(self._client)
-        empty_contents = self._pack_contents(CachingDict())
-        d = self._node.create(empty_contents, keypair_generator, keysize=keysize)
-        d.addCallback(self._filenode_created)
-        return d
-
-    def _filenode_created(self, res):
-        self._uri = DirectoryURI(IMutableFileURI(self._node.get_uri()))
-        return self
-
     def get_size(self):
         # return the size of our backing mutable file, in bytes, if we've
@@ -217,7 +198,7 @@ class DirectoryNode:
         return plaintext

     def _create_node(self, rwcap, rocap):
-        return self._client.create_node_from_uri(rwcap, rocap)
+        return self._nodemaker.create_from_cap(rwcap, rocap)

     def _unpack_contents(self, data):
         # the directory is serialized as a list of netstrings, one per child.
@@ -435,6 +416,7 @@ class DirectoryNode:
             assert len(e) == 3
             name, child_uri, metadata = e
             assert isinstance(name, unicode)
+            assert isinstance(child_uri, str)
             child_node = self._create_node(child_uri, None)
             if isinstance(child_node, UnknownNode):
                 msg = "cannot pack unknown node as child %s" % str(name)
@@ -480,9 +462,9 @@ class DirectoryNode:
         assert isinstance(name, unicode)
         if self.is_readonly():
             return defer.fail(NotMutableError())
-        d = self._client.upload(uploadable)
+        d = self._uploader.upload(uploadable)
         d.addCallback(lambda results: results.uri)
-        d.addCallback(self._client.create_node_from_uri)
+        d.addCallback(self._nodemaker.create_from_cap)
         d.addCallback(lambda node:
                       self.set_node(name, node, metadata, overwrite))
         return d
@@ -505,7 +487,7 @@ class DirectoryNode:
         assert isinstance(name, unicode)
         if self.is_readonly():
             return defer.fail(NotMutableError())
-        d = self._client.create_empty_dirnode()
+        d = self._nodemaker.create_new_mutable_directory()
         def _created(child):
             entries = [(name, child, None)]
             a = Adder(self, entries, overwrite=overwrite)

src/allmydata/history.py

@@ -27,6 +27,9 @@ class History(service.Service):
         self.all_retrieve_status = weakref.WeakKeyDictionary()
         self.recent_retrieve_status = []

+        self.all_helper_upload_statuses = weakref.WeakKeyDictionary()
+        self.recent_helper_upload_statuses = []
+
     def add_download(self, download_status):
         self.all_downloads_statuses[download_status] = None
@@ -89,5 +92,13 @@ class History(service.Service):
         for s in self.all_retrieve_status:
             yield s

+    def notify_helper_upload(self, s):
+        self.all_helper_upload_statuses[s] = None
+        self.recent_helper_upload_statuses.append(s)
+        while len(self.recent_helper_upload_statuses) > self.MAX_UPLOAD_STATUSES:
+            self.recent_helper_upload_statuses.pop(0)
+
+    def list_all_helper_statuses(self):
+        for s in self.all_helper_upload_statuses:
+            yield s

src/allmydata/immutable/checker.py

@@ -33,7 +33,8 @@ class Checker(log.PrefixingLogMixin):
    object that was passed into my constructor whether this task has been
    cancelled (by invoking its raise_if_cancelled() method).
    """
-    def __init__(self, client, verifycap, servers, verify, add_lease, monitor):
+    def __init__(self, verifycap, servers, verify, add_lease, secret_holder,
+                 monitor):
        assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
        assert precondition(isinstance(servers, (set, frozenset)), servers)
        for (serverid, serverrref) in servers:
@@ -42,7 +43,6 @@ class Checker(log.PrefixingLogMixin):
        prefix = "%s" % base32.b2a_l(verifycap.storage_index[:8], 60)
        log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)

-       self._client = client
        self._verifycap = verifycap
        self._monitor = monitor
@@ -52,10 +52,10 @@ class Checker(log.PrefixingLogMixin):
        self._share_hash_tree = None

-       frs = file_renewal_secret_hash(client.get_renewal_secret(),
+       frs = file_renewal_secret_hash(secret_holder.get_renewal_secret(),
                                       self._verifycap.storage_index)
        self.file_renewal_secret = frs
-       fcs = file_cancel_secret_hash(client.get_cancel_secret(),
+       fcs = file_cancel_secret_hash(secret_holder.get_cancel_secret(),
                                      self._verifycap.storage_index)
        self.file_cancel_secret = fcs

src/allmydata/immutable/filenode.py

@@ -5,10 +5,9 @@ from twisted.internet import defer
 from twisted.internet.interfaces import IPushProducer, IConsumer
 from twisted.protocols import basic
 from foolscap.api import eventually
-from allmydata.interfaces import IFileNode, IFileURI, ICheckable, \
+from allmydata.interfaces import IFileNode, ICheckable, \
      IDownloadTarget, IUploadResults
 from allmydata.util import dictutil, log, base32
-from allmydata.util.assertutil import precondition
 from allmydata import uri as urimodule
 from allmydata.immutable.checker import Checker
 from allmydata.check_results import CheckResults, CheckAndRepairResults
@@ -18,11 +17,6 @@ from allmydata.immutable import download
 class _ImmutableFileNodeBase(object):
     implements(IFileNode, ICheckable)

-    def __init__(self, uri, client):
-        precondition(urimodule.IImmutableFileURI.providedBy(uri), uri)
-        self.u = IFileURI(uri)
-        self._client = client
-
     def get_readonly_uri(self):
         return self.get_uri()
@@ -68,10 +62,11 @@ class PortionOfFile:
 class DownloadCache:
     implements(IDownloadTarget)

-    def __init__(self, node, cachedirectorymanager):
-        self._downloader = node._client.getServiceNamed("downloader")
-        self._uri = node.get_uri()
-        self._storage_index = node.get_storage_index()
+    def __init__(self, filecap, storage_index, downloader,
+                 cachedirectorymanager):
+        self._downloader = downloader
+        self._uri = filecap
+        self._storage_index = storage_index
         self.milestones = set() # of (offset,size,Deferred)
         self.cachedirectorymanager = cachedirectorymanager
         self.cachefile = None
@@ -173,9 +168,10 @@ class DownloadCache:
         pass
     def finish(self):
         return None
-    # The following methods are just because the target might be a repairer.DownUpConnector,
-    # and just because the current CHKUpload object expects to find the storage index and
-    # encoding parameters in its Uploadable.
+    # The following methods are just because the target might be a
+    # repairer.DownUpConnector, and just because the current CHKUpload object
+    # expects to find the storage index and encoding parameters in its
+    # Uploadable.
     def set_storageindex(self, storageindex):
         pass
     def set_encodingparams(self, encodingparams):
@@ -183,10 +179,18 @@ class DownloadCache:

 class FileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
-    def __init__(self, uri, client, cachedirectorymanager):
-        _ImmutableFileNodeBase.__init__(self, uri, client)
-        self.download_cache = DownloadCache(self, cachedirectorymanager)
-        prefix = uri.get_verify_cap().to_string()
+    def __init__(self, filecap, storage_broker, secret_holder,
+                 downloader, history, cachedirectorymanager):
+        assert isinstance(filecap, str)
+        self.u = urimodule.CHKFileURI.init_from_string(filecap)
+        self._storage_broker = storage_broker
+        self._secret_holder = secret_holder
+        self._downloader = downloader
+        self._history = history
+        storage_index = self.get_storage_index()
+        self.download_cache = DownloadCache(filecap, storage_index, downloader,
+                                            cachedirectorymanager)
+        prefix = self.u.get_verify_cap().to_string()
         log.PrefixingLogMixin.__init__(self, "allmydata.immutable.filenode", prefix=prefix)
         self.log("starting", level=log.OPERATIONAL)
@@ -208,11 +212,13 @@ class FileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
     def check_and_repair(self, monitor, verify=False, add_lease=False):
         verifycap = self.get_verify_cap()
-        sb = self._client.get_storage_broker()
+        sb = self._storage_broker
         servers = sb.get_all_servers()
+        sh = self._secret_holder

-        c = Checker(client=self._client, verifycap=verifycap, servers=servers,
-                    verify=verify, add_lease=add_lease, monitor=monitor)
+        c = Checker(verifycap=verifycap, servers=servers,
+                    verify=verify, add_lease=add_lease, secret_holder=sh,
+                    monitor=monitor)
         d = c.start()
         def _maybe_repair(cr):
             crr = CheckAndRepairResults(self.u.storage_index)
@@ -252,7 +258,8 @@ class FileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
                 crr.repair_successful = False
                 crr.repair_failure = f
                 return f
-            r = Repairer(client=self._client, verifycap=verifycap, monitor=monitor)
+            r = Repairer(storage_broker=sb, secret_holder=sh,
+                         verifycap=verifycap, monitor=monitor)
             d = r.start()
             d.addCallbacks(_gather_repair_results, _repair_error)
             return d
@@ -262,11 +269,13 @@ class FileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
     def check(self, monitor, verify=False, add_lease=False):
         verifycap = self.get_verify_cap()
-        sb = self._client.get_storage_broker()
+        sb = self._storage_broker
         servers = sb.get_all_servers()
+        sh = self._secret_holder

-        v = Checker(client=self._client, verifycap=verifycap, servers=servers,
-                    verify=verify, add_lease=add_lease, monitor=monitor)
+        v = Checker(verifycap=verifycap, servers=servers,
+                    verify=verify, add_lease=add_lease, secret_holder=sh,
+                    monitor=monitor)
         return v.start()

     def read(self, consumer, offset=0, size=None):
@@ -285,15 +294,13 @@ class FileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
         return d

     def download(self, target):
-        downloader = self._client.getServiceNamed("downloader")
-        history = self._client.get_history()
-        return downloader.download(self.get_uri(), target, self._parentmsgid,
-                                   history=history)
+        return self._downloader.download(self.get_uri(), target,
+                                         self._parentmsgid,
+                                         history=self._history)

     def download_to_data(self):
-        downloader = self._client.getServiceNamed("downloader")
-        history = self._client.get_history()
-        return downloader.download_to_data(self.get_uri(), history=history)
+        return self._downloader.download_to_data(self.get_uri(),
+                                                 history=self._history)

 class LiteralProducer:
     implements(IPushProducer)
@@ -305,9 +312,9 @@ class LiteralProducer:

 class LiteralFileNode(_ImmutableFileNodeBase):

-    def __init__(self, uri, client):
-        precondition(urimodule.IImmutableFileURI.providedBy(uri), uri)
-        _ImmutableFileNodeBase.__init__(self, uri, client)
+    def __init__(self, filecap):
+        assert isinstance(filecap, str)
+        self.u = urimodule.LiteralFileURI.init_from_string(filecap)

     def get_uri(self):
         return self.u.to_string()

src/allmydata/immutable/offloaded.py

@@ -153,7 +153,9 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader):
         self._helper.log("CHKUploadHelper starting for SI %s" % self._upload_id,
                          parent=log_number)

-        self._client = helper.parent
+        client = helper.parent
+        self._storage_broker = client.get_storage_broker()
+        self._secret_holder = client._secret_holder
         self._fetcher = CHKCiphertextFetcher(self, incoming_file, encoding_file,
                                              self._log_number)
         self._reader = LocalCiphertextReader(self, storage_index, encoding_file)
@@ -493,7 +495,7 @@ class Helper(Referenceable, service.MultiService):
     chk_upload_helper_class = CHKUploadHelper
     MAX_UPLOAD_STATUSES = 10

-    def __init__(self, basedir, stats_provider=None):
+    def __init__(self, basedir, stats_provider=None, history=None):
         self._basedir = basedir
         self._chk_incoming = os.path.join(basedir, "CHK_incoming")
         self._chk_encoding = os.path.join(basedir, "CHK_encoding")
@@ -501,8 +503,6 @@ class Helper(Referenceable, service.MultiService):
         fileutil.make_dirs(self._chk_encoding)
         self._active_uploads = {}
         self._all_uploads = weakref.WeakKeyDictionary() # for debugging
-        self._all_upload_statuses = weakref.WeakKeyDictionary()
-        self._recent_upload_statuses = []
         self.stats_provider = stats_provider
         if stats_provider:
             stats_provider.register_producer(self)
@@ -513,6 +513,7 @@ class Helper(Referenceable, service.MultiService):
                        "chk_upload_helper.fetched_bytes": 0,
                        "chk_upload_helper.encoded_bytes": 0,
                        }
+        self._history = history
         service.MultiService.__init__(self)

     def setServiceParent(self, parent):
@@ -637,11 +638,9 @@ class Helper(Referenceable, service.MultiService):
     def _add_upload(self, uh):
         self._all_uploads[uh] = None
-        s = uh.get_upload_status()
-        self._all_upload_statuses[s] = None
-        self._recent_upload_statuses.append(s)
-        while len(self._recent_upload_statuses) > self.MAX_UPLOAD_STATUSES:
-            self._recent_upload_statuses.pop(0)
+        if self._history:
+            s = uh.get_upload_status()
+            self._history.notify_helper_upload(s)

     def upload_finished(self, storage_index, size):
         # this is called with size=0 if the upload failed
@@ -650,6 +649,3 @@ class Helper(Referenceable, service.MultiService):
         del self._active_uploads[storage_index]
         s = uh.get_upload_status()
         s.set_active(False)
-
-    def get_all_upload_statuses(self):
-        return self._all_upload_statuses

src/allmydata/immutable/repairer.py

@@ -43,24 +43,25 @@ class Repairer(log.PrefixingLogMixin):
    cancelled (by invoking its raise_if_cancelled() method).
    """

-    def __init__(self, client, verifycap, monitor):
+    def __init__(self, storage_broker, secret_holder, verifycap, monitor):
        assert precondition(isinstance(verifycap, CHKFileVerifierURI))
        logprefix = si_b2a(verifycap.storage_index)[:5]
        log.PrefixingLogMixin.__init__(self, "allmydata.immutable.repairer",
                                       prefix=logprefix)
-       self._client = client
+       self._storage_broker = storage_broker
+       self._secret_holder = secret_holder
        self._verifycap = verifycap
        self._monitor = monitor

    def start(self):
        self.log("starting repair")
        duc = DownUpConnector()
-       sb = self._client.get_storage_broker()
-       dl = download.CiphertextDownloader(sb, self._verifycap, target=duc,
-                                          monitor=self._monitor)
-       ul = upload.CHKUploader(self._client)
+       dl = download.CiphertextDownloader(self._storage_broker,
+                                          self._verifycap, target=duc,
+                                          monitor=self._monitor)
+       ul = upload.CHKUploader(self._storage_broker, self._secret_holder)

        d = defer.Deferred()

src/allmydata/immutable/upload.py

@@ -142,7 +142,7 @@ class Tahoe2PeerSelector:
     def __repr__(self):
         return "<Tahoe2PeerSelector for upload %s>" % self.upload_id

-    def get_shareholders(self, client,
+    def get_shareholders(self, storage_broker, secret_holder,
                          storage_index, share_size, block_size,
                          num_segments, total_shares, shares_of_happiness):
         """
@@ -167,8 +167,7 @@ class Tahoe2PeerSelector:
         self.use_peers = set() # PeerTrackers that have shares assigned to them
         self.preexisting_shares = {} # sharenum -> peerid holding the share

-        sb = client.get_storage_broker()
-        peers = sb.get_servers_for_index(storage_index)
+        peers = storage_broker.get_servers_for_index(storage_index)
         if not peers:
             raise NoServersError("client gave us zero peers")
@@ -199,9 +198,9 @@ class Tahoe2PeerSelector:
             raise NoServersError("no peers could accept an allocated_size of %d" % allocated_size)

         # decide upon the renewal/cancel secrets, to include them in the
-        # allocat_buckets query.
-        client_renewal_secret = client.get_renewal_secret()
-        client_cancel_secret = client.get_cancel_secret()
+        # allocate_buckets query.
+        client_renewal_secret = secret_holder.get_renewal_secret()
+        client_cancel_secret = secret_holder.get_cancel_secret()
         file_renewal_secret = file_renewal_secret_hash(client_renewal_secret,
                                                        storage_index)
@@ -659,9 +658,11 @@ class UploadStatus:
 class CHKUploader:
     peer_selector_class = Tahoe2PeerSelector

-    def __init__(self, client):
-        self._client = client
-        self._log_number = self._client.log("CHKUploader starting")
+    def __init__(self, storage_broker, secret_holder):
+        # peer_selector needs storage_broker and secret_holder
+        self._storage_broker = storage_broker
+        self._secret_holder = secret_holder
+        self._log_number = self.log("CHKUploader starting", parent=None)
         self._encoder = None
         self._results = UploadResults()
         self._storage_index = None
@@ -678,7 +679,7 @@ class CHKUploader:
             kwargs["parent"] = self._log_number
         if "facility" not in kwargs:
             kwargs["facility"] = "tahoe.upload"
-        return self._client.log(*args, **kwargs)
+        return log.msg(*args, **kwargs)

     def start(self, encrypted_uploadable):
         """Start uploading the file.
@@ -724,6 +725,8 @@ class CHKUploader:
     def locate_all_shareholders(self, encoder, started):
         peer_selection_started = now = time.time()
         self._storage_index_elapsed = now - started
+        storage_broker = self._storage_broker
+        secret_holder = self._secret_holder
         storage_index = encoder.get_param("storage_index")
         self._storage_index = storage_index
         upload_id = si_b2a(storage_index)[:5]
@@ -737,7 +740,8 @@ class CHKUploader:
         k,desired,n = encoder.get_param("share_counts")

         self._peer_selection_started = time.time()
-        d = peer_selector.get_shareholders(self._client, storage_index,
+        d = peer_selector.get_shareholders(storage_broker, secret_holder,
+                                           storage_index,
                                            share_size, block_size,
                                            num_segments, n, desired)
         def _done(res):
@@ -809,8 +813,7 @@ def read_this_many_bytes(uploadable, size, prepend_data=[]):
 class LiteralUploader:

-    def __init__(self, client):
-        self._client = client
+    def __init__(self):
         self._results = UploadResults()
         self._status = s = UploadStatus()
         s.set_storage_index(None)
@@ -1263,7 +1266,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
             self.stats_provider.count('uploader.bytes_uploaded', size)

         if size <= self.URI_LIT_SIZE_THRESHOLD:
-            uploader = LiteralUploader(self.parent)
+            uploader = LiteralUploader()
             return uploader.start(uploadable)
         else:
             eu = EncryptAnUploadable(uploadable, self._parentmsgid)
@@ -1273,7 +1276,9 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
                 d2.addCallback(lambda x: eu.get_storage_index())
                 d2.addCallback(lambda si: uploader.start(eu, si))
             else:
-                uploader = CHKUploader(self.parent)
+                storage_broker = self.parent.get_storage_broker()
+                secret_holder = self.parent._secret_holder
+                uploader = CHKUploader(storage_broker, secret_holder)
                 d2.addCallback(lambda x: uploader.start(eu))
             self._all_uploads[uploader] = None

src/allmydata/interfaces.py

@@ -483,22 +483,22 @@ class UnhandledCapTypeError(Exception):
 class IFilesystemNode(Interface):
     def get_uri():
         """
-        Return the URI that can be used by others to get access to this
-        node. If this node is read-only, the URI will only offer read-only
-        access. If this node is read-write, the URI will offer read-write
-        access.
+        Return the URI string that can be used by others to get access to
+        this node. If this node is read-only, the URI will only offer
+        read-only access. If this node is read-write, the URI will offer
+        read-write access.

         If you have read-write access to a node and wish to share merely
         read-only access with others, use get_readonly_uri().
         """

     def get_readonly_uri():
-        """Return the directory URI that can be used by others to get
-        read-only access to this directory node. The result is a read-only
-        URI, regardless of whether this dirnode is read-only or read-write.
+        """Return the URI string that can be used by others to get read-only
+        access to this node. The result is a read-only URI, regardless of
+        whether this node is read-only or read-write.

-        If you have merely read-only access to this dirnode,
-        get_readonly_uri() will return the same thing as get_uri().
+        If you have merely read-only access to this node, get_readonly_uri()
+        will return the same thing as get_uri().
         """

     def get_repair_cap():

src/allmydata/mutable/checker.py

@@ -12,8 +12,10 @@ from layout import unpack_share, SIGNED_PREFIX_LENGTH

 class MutableChecker:

-    def __init__(self, node, monitor):
+    def __init__(self, node, storage_broker, history, monitor):
         self._node = node
+        self._storage_broker = storage_broker
+        self._history = history
         self._monitor = monitor
         self.bad_shares = [] # list of (nodeid,shnum,failure)
         self._storage_index = self._node.get_storage_index()
@@ -23,11 +25,10 @@ class MutableChecker:
     def check(self, verify=False, add_lease=False):
         servermap = ServerMap()
-        u = ServermapUpdater(self._node, self._monitor, servermap, MODE_CHECK,
-                             add_lease=add_lease)
-        history = self._node._client.get_history()
-        if history:
-            history.notify_mapupdate(u.get_status())
+        u = ServermapUpdater(self._node, self._storage_broker, self._monitor,
+                             servermap, MODE_CHECK, add_lease=add_lease)
+        if self._history:
+            self._history.notify_mapupdate(u.get_status())
         d = u.update()
         d.addCallback(self._got_mapupdate_results)
         if verify:
@@ -280,8 +281,8 @@ class MutableChecker:

 class MutableCheckAndRepairer(MutableChecker):
-    def __init__(self, node, monitor):
-        MutableChecker.__init__(self, node, monitor)
+    def __init__(self, node, storage_broker, history, monitor):
+        MutableChecker.__init__(self, node, storage_broker, history, monitor)
         self.cr_results = CheckAndRepairResults(self._storage_index)
         self.cr_results.pre_repair_results = self.results
         self.need_repair = False

src/allmydata/mutable/filenode.py

@@ -8,9 +8,8 @@ from allmydata.interfaces import IMutableFileNode, IMutableFileURI, \
      ICheckable, ICheckResults, NotEnoughSharesError
 from allmydata.util import hashutil, log
 from allmydata.util.assertutil import precondition
-from allmydata.uri import WriteableSSKFileURI
+from allmydata.uri import WriteableSSKFileURI, ReadonlySSKFileURI
 from allmydata.monitor import Monitor
-from pycryptopp.publickey import rsa
 from pycryptopp.cipher.aes import AES

 from publish import Publish
@@ -44,24 +43,24 @@ class BackoffAgent:
         reactor.callLater(self._delay, d.callback, None)
         return d

-# use client.create_mutable_file() to make one of these
+# use nodemaker.create_mutable_file() to make one of these
 class MutableFileNode:
     implements(IMutableFileNode, ICheckable)
-    SIGNATURE_KEY_SIZE = 2048
-    checker_class = MutableChecker
-    check_and_repairer_class = MutableCheckAndRepairer

-    def __init__(self, client):
-        self._client = client
+    def __init__(self, storage_broker, secret_holder,
+                 default_encoding_parameters, history):
+        self._storage_broker = storage_broker
+        self._secret_holder = secret_holder
+        self._default_encoding_parameters = default_encoding_parameters
+        self._history = history
         self._pubkey = None # filled in upon first read
         self._privkey = None # filled in if we're mutable
         # we keep track of the last encoding parameters that we use. These
         # are updated upon retrieve, and used by publish. If we publish
         # without ever reading (i.e. overwrite()), then we use these values.
-        defaults = client.get_encoding_parameters()
-        self._required_shares = defaults["k"]
-        self._total_shares = defaults["n"]
+        self._required_shares = default_encoding_parameters["k"]
+        self._total_shares = default_encoding_parameters["n"]
         self._sharemap = {} # known shares, shnum-to-[nodeids]
         self._cache = ResponseCache()
@@ -77,15 +76,18 @@ class MutableFileNode:
         else:
             return "<%s %x %s %s>" % (self.__class__.__name__, id(self), None, None)

-    def init_from_uri(self, myuri):
+    def init_from_uri(self, filecap):
         # we have the URI, but we have not yet retrieved the public
         # verification key, nor things like 'k' or 'N'. If and when someone
         # wants to get our contents, we'll pull from shares and fill those
         # in.
-        self._uri = IMutableFileURI(myuri)
-        if not self._uri.is_readonly():
+        assert isinstance(filecap, str)
+        if filecap.startswith("URI:SSK:"):
+            self._uri = WriteableSSKFileURI.init_from_string(filecap)
             self._writekey = self._uri.writekey
         else:
+            assert filecap.startswith("URI:SSK-RO:")
+            self._uri = ReadonlySSKFileURI.init_from_string(filecap)
             self._writekey = None
         self._readkey = self._uri.readkey
         self._storage_index = self._uri.storage_index
@@ -100,21 +102,12 @@ class MutableFileNode:
         self._encprivkey = None
         return self

-    def create(self, initial_contents, keypair_generator=None, keysize=None):
-        """Call this when the filenode is first created. This will generate
-        the keys, generate the initial shares, wait until at least numpeers
-        are connected, allocate shares, and upload the initial
-        contents. Returns a Deferred that fires (with the MutableFileNode
-        instance you should use) when it completes.
+    def create_with_keys(self, (pubkey, privkey), initial_contents):
+        """Call this to create a brand-new mutable file. It will create the
+        shares, find homes for them, and upload the initial contents. Returns
+        a Deferred that fires (with the MutableFileNode instance you should
+        use) when it completes.
         """
-        keysize = keysize or self.SIGNATURE_KEY_SIZE
-        d = defer.maybeDeferred(self._generate_pubprivkeys,
-                                keypair_generator, keysize)
-        d.addCallback(self._generated)
-        d.addCallback(lambda res: self._upload(initial_contents, None))
-        return d
-
-    def _generated(self, (pubkey, privkey) ):
         self._pubkey, self._privkey = pubkey, privkey
         pubkey_s = self._pubkey.serialize()
         privkey_s = self._privkey.serialize()
@@ -124,16 +117,7 @@ class MutableFileNode:
         self._uri = WriteableSSKFileURI(self._writekey, self._fingerprint)
         self._readkey = self._uri.readkey
         self._storage_index = self._uri.storage_index
-
-    def _generate_pubprivkeys(self, keypair_generator, keysize):
-        if keypair_generator:
-            return keypair_generator(keysize)
-        else:
-            # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
-            # secs
-            signer = rsa.generate(keysize)
-            verifier = signer.get_verifying_key()
-            return verifier, signer
+        return self._upload(initial_contents, None)

     def _encrypt_privkey(self, writekey, privkey):
         enc = AES(writekey)
@@ -163,12 +147,12 @@ class MutableFileNode:
         return hashutil.ssk_write_enabler_hash(self._writekey, peerid)
     def get_renewal_secret(self, peerid):
         assert len(peerid) == 20
-        crs = self._client.get_renewal_secret()
+        crs = self._secret_holder.get_renewal_secret()
         frs = hashutil.file_renewal_secret_hash(crs, self._storage_index)
         return hashutil.bucket_renewal_secret_hash(frs, peerid)
     def get_cancel_secret(self, peerid):
         assert len(peerid) == 20
-        ccs = self._client.get_cancel_secret()
+        ccs = self._secret_holder.get_cancel_secret()
         fcs = hashutil.file_cancel_secret_hash(ccs, self._storage_index)
         return hashutil.bucket_cancel_secret_hash(fcs, peerid)
@@ -200,8 +184,9 @@ class MutableFileNode:
     def get_readonly(self):
         if self.is_readonly():
             return self
-        ro = MutableFileNode(self._client)
-        ro.init_from_uri(self._uri.get_readonly())
+        ro = MutableFileNode(self._storage_broker, self._secret_holder,
+                             self._default_encoding_parameters, self._history)
+        ro.init_from_uri(self.get_readonly_uri())
         return ro

     def get_readonly_uri(self):
@@ -251,11 +236,13 @@ class MutableFileNode:
     # ICheckable

     def check(self, monitor, verify=False, add_lease=False):
-        checker = self.checker_class(self, monitor)
+        checker = MutableChecker(self, self._storage_broker,
+                                 self._history, monitor)
         return checker.check(verify, add_lease)

     def check_and_repair(self, monitor, verify=False, add_lease=False):
-        checker = self.check_and_repairer_class(self, monitor)
+        checker = MutableCheckAndRepairer(self, self._storage_broker,
+                                          self._history, monitor)
         return checker.check(verify, add_lease)

     #################################
@@ -414,10 +401,10 @@ class MutableFileNode:
         servermap = ServerMap()
         return self._update_servermap(servermap, mode)

     def _update_servermap(self, servermap, mode):
-        u = ServermapUpdater(self, Monitor(), servermap, mode)
-        history = self._client.get_history()
-        if history:
-            history.notify_mapupdate(u.get_status())
+        u = ServermapUpdater(self, self._storage_broker, Monitor(), servermap,
+                             mode)
+        if self._history:
+            self._history.notify_mapupdate(u.get_status())
         return u.update()

     def download_version(self, servermap, version, fetch_privkey=False):
@@ -426,17 +413,15 @@ class MutableFileNode:
     def _try_once_to_download_version(self, servermap, version,
                                       fetch_privkey=False):
         r = Retrieve(self, servermap, version, fetch_privkey)
-        history = self._client.get_history()
-        if history:
-            history.notify_retrieve(r.get_status())
+        if self._history:
+            self._history.notify_retrieve(r.get_status())
         return r.download()

     def upload(self, new_contents, servermap):
         return self._do_serialized(self._upload, new_contents, servermap)
     def _upload(self, new_contents, servermap):
         assert self._pubkey, "update_servermap must be called before publish"
-        p = Publish(self, servermap)
-        history = self._client.get_history()
-        if history:
-            history.notify_publish(p.get_status(), len(new_contents))
+        p = Publish(self, self._storage_broker, servermap)
+        if self._history:
+            self._history.notify_publish(p.get_status(), len(new_contents))
         return p.publish(new_contents)

View File

@@ -90,12 +90,13 @@ class Publish:
     To make the initial publish, set servermap to None.
     """
 
-    def __init__(self, filenode, servermap):
+    def __init__(self, filenode, storage_broker, servermap):
         self._node = filenode
+        self._storage_broker = storage_broker
         self._servermap = servermap
         self._storage_index = self._node.get_storage_index()
         self._log_prefix = prefix = si_b2a(self._storage_index)[:5]
-        num = self._node._client.log("Publish(%s): starting" % prefix)
+        num = self.log("Publish(%s): starting" % prefix, parent=None)
         self._log_number = num
         self._running = True
         self._first_write_error = None
@@ -176,7 +177,7 @@ class Publish:
         assert self._privkey
         self._encprivkey = self._node.get_encprivkey()
 
-        sb = self._node._client.get_storage_broker()
+        sb = self._storage_broker
         full_peerlist = sb.get_servers_for_index(self._storage_index)
         self.full_peerlist = full_peerlist # for use later, immutable
         self.bad_peers = set() # peerids who have errbacked/refused requests
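The POLA shape of these constructor changes, sketched outside the diff: a collaborator now receives exactly the authority it needs, so a test can hand Publish a minimal stand-in instead of faking an entire Client. StubBroker below is hypothetical, and filenode/servermap are assumed to be already in hand:

    class StubBroker:
        # hypothetical stand-in: get_servers_for_index() is the only
        # storage-broker method Publish reaches for when selecting peers
        def get_servers_for_index(self, storage_index):
            return []   # no servers; enough to exercise setup paths

    p = Publish(filenode, StubBroker(), servermap)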

@@ -337,14 +337,15 @@ class ServerMap:
 
 class ServermapUpdater:
-    def __init__(self, filenode, monitor, servermap, mode=MODE_READ,
-                 add_lease=False):
+    def __init__(self, filenode, storage_broker, monitor, servermap,
+                 mode=MODE_READ, add_lease=False):
         """I update a servermap, locating a sufficient number of useful
         shares and remembering where they are located.
         """
         self._node = filenode
+        self._storage_broker = storage_broker
         self._monitor = monitor
         self._servermap = servermap
         self.mode = mode
@@ -421,8 +422,8 @@ class ServermapUpdater:
         self._queries_completed = 0
 
-        sb = self._node._client.get_storage_broker()
-        full_peerlist = sb.get_servers_for_index(self._node._storage_index)
+        sb = self._storage_broker
+        full_peerlist = sb.get_servers_for_index(self._storage_index)
         self.full_peerlist = full_peerlist # for use later, immutable
         self.extra_peers = full_peerlist[:] # peers are removed as we use them
         self._good_peers = set() # peers who had some shares

src/allmydata/nodemaker.py (new executable file, +96 lines)
@@ -0,0 +1,96 @@
+
+import weakref
+from allmydata.immutable.filenode import FileNode, LiteralFileNode
+from allmydata.mutable.filenode import MutableFileNode
+from allmydata.dirnode import DirectoryNode
+from allmydata.unknown import UnknownNode
+from allmydata.uri import DirectoryURI, ReadonlyDirectoryURI
+
+# the "node maker" is a two-argument callable (really a 'create' method on a
+# NodeMaker instance) which accepts a URI string (and an optional readcap
+# string, for use by dirnode.copy) and returns an object which (at the very
+# least) provides IFilesystemNode. That interface has other methods that can
+# be used to determine if the node represents a file or directory, in which
+# case other methods are available (like download() or modify()). Each Tahoe
+# process will typically have a single NodeMaker, but unit tests may create
+# simplified/mocked forms for test purposes.
+
+# any authorities which fsnodes will need (like a reference to the
+# StorageFarmBroker, to access storage servers for publish/retrieve/download)
+# will be retained as attributes inside the NodeMaker and passed to fsnodes
+# as necessary.
+
+class NodeMaker:
+    def __init__(self, storage_broker, secret_holder, history,
+                 uploader, downloader, download_cache_dirman,
+                 default_encoding_parameters, key_generator):
+        self.storage_broker = storage_broker
+        self.secret_holder = secret_holder
+        self.history = history
+        self.uploader = uploader
+        self.downloader = downloader
+        self.download_cache_dirman = download_cache_dirman
+        self.default_encoding_parameters = default_encoding_parameters
+        self.key_generator = key_generator
+
+        self._node_cache = weakref.WeakValueDictionary() # uri -> node
+
+    def _create_lit(self, cap):
+        return LiteralFileNode(cap)
+    def _create_immutable(self, cap):
+        return FileNode(cap, self.storage_broker, self.secret_holder,
+                        self.downloader, self.history,
+                        self.download_cache_dirman)
+    def _create_mutable(self, cap):
+        n = MutableFileNode(self.storage_broker, self.secret_holder,
+                            self.default_encoding_parameters,
+                            self.history)
+        return n.init_from_uri(cap)
+    def _create_dirnode(self, filenode):
+        return DirectoryNode(filenode, self, self.uploader)
+
+    def create_from_cap(self, writecap, readcap=None):
+        # this returns synchronously.
+        assert isinstance(writecap, (str, type(None))), type(writecap)
+        assert isinstance(readcap, (str, type(None))), type(readcap)
+        cap = writecap or readcap
+        if not cap:
+            # maybe the writecap was hidden because we're in a readonly
+            # directory, and the future cap format doesn't have a readcap, or
+            # something.
+            return UnknownNode(writecap, readcap)
+        if cap in self._node_cache:
+            return self._node_cache[cap]
+        elif cap.startswith("URI:LIT:"):
+            node = self._create_lit(cap)
+        elif cap.startswith("URI:CHK:"):
+            node = self._create_immutable(cap)
+        elif cap.startswith("URI:SSK-RO:") or cap.startswith("URI:SSK:"):
+            node = self._create_mutable(cap)
+        elif cap.startswith("URI:DIR2-RO:") or cap.startswith("URI:DIR2:"):
+            if cap.startswith("URI:DIR2-RO:"):
+                dircap = ReadonlyDirectoryURI.init_from_string(cap)
+            elif cap.startswith("URI:DIR2:"):
+                dircap = DirectoryURI.init_from_string(cap)
+            filecap = dircap.get_filenode_uri().to_string()
+            filenode = self.create_from_cap(filecap)
+            node = self._create_dirnode(filenode)
+        else:
+            return UnknownNode(writecap, readcap) # don't cache UnknownNode
+        self._node_cache[cap] = node # note: WeakValueDictionary
+        return node
+
+    def create_mutable_file(self, contents="", keysize=None):
+        n = MutableFileNode(self.storage_broker, self.secret_holder,
+                            self.default_encoding_parameters, self.history)
+        d = self.key_generator.generate(keysize)
+        d.addCallback(n.create_with_keys, contents)
+        d.addCallback(lambda res: n)
+        return d
+
+    def create_new_mutable_directory(self, initial_children={}):
+        if initial_children:
+            raise NotImplementedError("initial_children= not implemented yet")
+        d = self.create_mutable_file()
+        d.addCallback(self._create_dirnode)
+        return d
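A rough usage sketch (not part of the commit): with every authority stubbed to None, as the test suite does for network-free NodeMakers, only caps that need no grid access can be materialized. The LIT cap below is base32 for the string "hello":

    from allmydata.nodemaker import NodeMaker

    nm = NodeMaker(None, None, None,      # storage_broker, secret_holder, history
                   None, None, None,      # uploader, downloader, download_cache_dirman
                   {"k": 3, "n": 10},     # default_encoding_parameters
                   None)                  # key_generator
    lit = nm.create_from_cap("URI:LIT:nbswy3dp")
    # same cap string while 'lit' is alive -> same node, via the weakref cache
    assert lit is nm.create_from_cap("URI:LIT:nbswy3dp")
    # unknown cap formats are tolerated (UnknownNode), and deliberately not cached
    weird = nm.create_from_cap("x-tahoe-crazy://I_am_from_the_future.")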

@@ -15,21 +15,28 @@ class FakeClient(client.Client):
         self.download_cache_dirman = cachedir.CacheDirectoryManager(download_cachedir.name)
     def getServiceNamed(self, name):
         return None
+    def get_storage_broker(self):
+        return None
+    _secret_holder=None
+    def get_history(self):
+        return None
     def get_encoding_parameters(self):
         return {"k": 3, "n": 10}
     def get_writekey(self):
         return os.urandom(16)
+    def create_node_from_uri(self, writecap, readcap):
+        return None
 
 class FakeMutableFileNode(mut_filenode.MutableFileNode):
-    def __init__(self, client):
-        mut_filenode.MutableFileNode.__init__(self, client)
+    def __init__(self, *args, **kwargs):
+        mut_filenode.MutableFileNode.__init__(self, *args, **kwargs)
         self._uri = uri.WriteableSSKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32))
 
 class FakeDirectoryNode(dirnode.DirectoryNode):
     def __init__(self, client):
         dirnode.DirectoryNode.__init__(self, client)
         mutfileuri = uri.WriteableSSKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32))
-        myuri = uri.DirectoryURI(mutfileuri)
+        myuri = uri.DirectoryURI(mutfileuri).to_string()
         self.init_from_uri(myuri)
@@ -37,7 +44,7 @@ children = [] # tuples of (k, v) (suitable for passing to dict())
 packstr = None
 fakeclient = FakeClient()
 testdirnode = dirnode.DirectoryNode(fakeclient)
-testdirnode.init_from_uri(uri.DirectoryURI(uri.WriteableSSKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32))))
+testdirnode.init_from_uri(uri.DirectoryURI(uri.WriteableSSKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32))).to_string())
 
 def random_unicode(l):
     while True:
@@ -49,9 +56,10 @@ def random_unicode(l):
 def random_fsnode():
     coin = random.randrange(0, 3)
     if coin == 0:
-        return immut_filenode.FileNode(uri.CHKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32), random.randrange(1, 5), random.randrange(6, 15), random.randrange(99, 1000000000000)), fakeclient, None)
+        return immut_filenode.FileNode(uri.CHKFileURI(randutil.insecurerandstr(16), randutil.insecurerandstr(32), random.randrange(1, 5), random.randrange(6, 15), random.randrange(99, 1000000000000)).to_string(), None, None, None, None, None)
     elif coin == 1:
-        return FakeMutableFileNode(fakeclient)
+        encoding_parameters = {"k": 3, "n": 10}
+        return FakeMutableFileNode(None, None, encoding_parameters, None)
     else:
         assert coin == 2
         return FakeDirectoryNode(fakeclient)
@@ -91,7 +99,7 @@ PROF_FILE_NAME="bench_dirnode.prof"
 def run_benchmarks(profile=False):
     for (func, initfunc) in [(unpack, init_for_unpack), (pack, init_for_pack), (unpack_and_repack, init_for_unpack)]:
         print "benchmarking %s" % (func,)
-        benchutil.bench(unpack_and_repack, initfunc=init_for_unpack, TOPXP=12, profile=profile, profresults=PROF_FILE_NAME)
+        benchutil.bench(unpack_and_repack, initfunc=init_for_unpack, TOPXP=12)#, profile=profile, profresults=PROF_FILE_NAME)
 
 def print_stats():
     s = hotshot.stats.load(PROF_FILE_NAME)

@@ -8,7 +8,7 @@ from twisted.web.error import Error as WebError
 from foolscap.api import flushEventualQueue, fireEventually
 from allmydata import uri, dirnode, client
 from allmydata.introducer.server import IntroducerNode
-from allmydata.interfaces import IURI, IMutableFileNode, IFileNode, \
+from allmydata.interfaces import IMutableFileNode, IFileNode, \
      FileTooLargeError, NotEnoughSharesError, ICheckable
 from allmydata.check_results import CheckResults, CheckAndRepairResults, \
      DeepCheckResults, DeepCheckAndRepairResults
@@ -38,11 +38,10 @@ class FakeCHKFileNode:
     all_contents = {}
     bad_shares = {}
 
-    def __init__(self, u, thisclient):
-        precondition(IURI.providedBy(u), u)
-        self.client = thisclient
-        self.my_uri = u
-        self.storage_index = u.storage_index
+    def __init__(self, filecap):
+        precondition(isinstance(filecap, str), filecap)
+        self.my_uri = uri.CHKFileURI.init_from_string(filecap)
+        self.storage_index = self.my_uri.storage_index
 
     def get_uri(self):
         return self.my_uri.to_string()
@@ -132,16 +131,17 @@ class FakeCHKFileNode:
         return d
 
 def make_chk_file_uri(size):
-    return uri.CHKFileURI(key=os.urandom(16),
-                          uri_extension_hash=os.urandom(32),
-                          needed_shares=3,
-                          total_shares=10,
-                          size=size)
+    u = uri.CHKFileURI(key=os.urandom(16),
+                       uri_extension_hash=os.urandom(32),
+                       needed_shares=3,
+                       total_shares=10,
+                       size=size)
+    return u.to_string()
 
-def create_chk_filenode(thisclient, contents):
-    u = make_chk_file_uri(len(contents))
-    n = FakeCHKFileNode(u, thisclient)
-    FakeCHKFileNode.all_contents[u.to_string()] = contents
+def create_chk_filenode(contents):
+    filecap = make_chk_file_uri(len(contents))
+    n = FakeCHKFileNode(filecap)
+    FakeCHKFileNode.all_contents[filecap] = contents
     return n
 
@@ -154,10 +154,9 @@ class FakeMutableFileNode:
     all_contents = {}
     bad_shares = {}
 
-    def __init__(self, thisclient):
-        self.client = thisclient
-        self.my_uri = make_mutable_file_uri()
-        self.storage_index = self.my_uri.storage_index
+    def __init__(self, storage_broker, secret_holder,
+                 default_encoding_parameters, history):
+        self.init_from_uri(make_mutable_file_uri())
     def create(self, initial_contents, key_generator=None, keysize=None):
         if len(initial_contents) > self.MUTABLE_SIZELIMIT:
             raise FileTooLargeError("SDMF is limited to one segment, and "
@@ -165,8 +164,13 @@ class FakeMutableFileNode:
                                     self.MUTABLE_SIZELIMIT))
         self.all_contents[self.storage_index] = initial_contents
         return defer.succeed(self)
-    def init_from_uri(self, myuri):
-        self.my_uri = IURI(myuri)
+    def init_from_uri(self, filecap):
+        assert isinstance(filecap, str)
+        if filecap.startswith("URI:SSK:"):
+            self.my_uri = uri.WriteableSSKFileURI.init_from_string(filecap)
+        else:
+            assert filecap.startswith("URI:SSK-RO:")
+            self.my_uri = uri.ReadonlySSKFileURI.init_from_string(filecap)
         self.storage_index = self.my_uri.storage_index
         return self
     def get_uri(self):
@@ -285,10 +289,10 @@ class FakeMutableFileNode:
 
 def make_mutable_file_uri():
     return uri.WriteableSSKFileURI(writekey=os.urandom(16),
-                                   fingerprint=os.urandom(32))
+                                   fingerprint=os.urandom(32)).to_string()
 def make_verifier_uri():
     return uri.SSKVerifierURI(storage_index=os.urandom(16),
-                              fingerprint=os.urandom(32))
+                              fingerprint=os.urandom(32)).to_string()
 
 class FakeDirectoryNode(dirnode.DirectoryNode):
     """This offers IDirectoryNode, but uses a FakeMutableFileNode for the
@@ -444,7 +448,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             # will have registered the helper furl).
             c = self.add_service(client.Client(basedir=basedirs[0]))
             self.clients.append(c)
-            c.DEFAULT_MUTABLE_KEYSIZE = 522
+            c.set_default_mutable_keysize(522)
             d = c.when_tub_ready()
             def _ready(res):
                 f = open(os.path.join(basedirs[0],"private","helper.furl"), "r")
@@ -460,7 +464,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             for i in range(1, self.numclients):
                 c = self.add_service(client.Client(basedir=basedirs[i]))
                 self.clients.append(c)
-                c.DEFAULT_MUTABLE_KEYSIZE = 522
+                c.set_default_mutable_keysize(522)
             log.msg("STARTING")
             return self.wait_for_connections()
         d.addCallback(_ready)
@@ -497,7 +501,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
         def _stopped(res):
             new_c = client.Client(basedir=self.getdir("client%d" % num))
             self.clients[num] = new_c
-            new_c.DEFAULT_MUTABLE_KEYSIZE = 522
+            new_c.set_default_mutable_keysize(522)
             self.add_service(new_c)
            return new_c.when_tub_ready()
        d.addCallback(_stopped)
@@ -527,7 +531,7 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
         c = client.Client(basedir=basedir)
         self.clients.append(c)
-        c.DEFAULT_MUTABLE_KEYSIZE = 522
+        c.set_default_mutable_keysize(522)
         self.numclients += 1
         if add_to_sparent:
             c.setServiceParent(self.sparent)
@@ -904,8 +908,9 @@ class ShareManglingMixin(SystemTestMixin):
             cl0.DEFAULT_ENCODING_PARAMETERS['max_segment_size'] = 12
             d2 = cl0.upload(immutable.upload.Data(TEST_DATA, convergence=""))
             def _after_upload(u):
-                self.uri = IURI(u.uri)
-                return cl0.create_node_from_uri(self.uri)
+                filecap = u.uri
+                self.uri = uri.CHKFileURI.init_from_string(filecap)
+                return cl0.create_node_from_uri(filecap)
             d2.addCallback(_after_upload)
             return d2
         d.addCallback(_upload_a_file)
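A quick illustration of the updated helper contract in these fakes (caps are plain strings end to end; this fragment is illustrative, not from the commit):

    n = create_chk_filenode("some file contents")
    filecap = n.get_uri()               # a cap string now, not an IURI instance
    assert filecap.startswith("URI:CHK:")
    assert FakeCHKFileNode.all_contents[filecap] == "some file contents"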

@@ -82,7 +82,7 @@ class LocalWrapper:
                 return res
             d.addCallback(_return_membrane)
         if self.post_call_notifier:
-            d.addCallback(self.post_call_notifier, methname)
+            d.addCallback(self.post_call_notifier, self, methname)
         return d
 
     def notifyOnDisconnect(self, f, *args, **kwargs):
@@ -112,8 +112,6 @@ class NoNetworkStorageBroker:
         return None
 
 class NoNetworkClient(Client):
-    DEFAULT_MUTABLE_KEYSIZE = 522
-
     def create_tub(self):
         pass
     def init_introducer_client(self):
@@ -174,6 +172,7 @@ class NoNetworkGrid(service.MultiService):
         for i in range(num_servers):
             ss = self.make_server(i)
             self.add_server(i, ss)
+        self.rebuild_serverlist()
 
         for i in range(num_clients):
             clientid = hashutil.tagged_hash("clientid", str(i))[:20]
@@ -194,6 +193,7 @@ class NoNetworkGrid(service.MultiService):
                 c = client_config_hooks[i](clientdir)
             if not c:
                 c = NoNetworkClient(clientdir)
+                c.set_default_mutable_keysize(522)
             c.nodeid = clientid
             c.short_nodeid = b32encode(clientid).lower()[:8]
             c._servers = self.all_servers # can be updated later
@@ -217,10 +217,28 @@ class NoNetworkGrid(service.MultiService):
         serverid = ss.my_nodeid
         self.servers_by_number[i] = ss
         self.servers_by_id[serverid] = wrap_storage_server(ss)
+        self.rebuild_serverlist()
+
+    def rebuild_serverlist(self):
         self.all_servers = frozenset(self.servers_by_id.items())
         for c in self.clients:
             c._servers = self.all_servers
 
+    def remove_server(self, serverid):
+        # it's enough to remove the server from c._servers (we don't actually
+        # have to detach and stopService it)
+        for i,ss in self.servers_by_number.items():
+            if ss.my_nodeid == serverid:
+                del self.servers_by_number[i]
+                break
+        del self.servers_by_id[serverid]
+        self.rebuild_serverlist()
+
+    def break_server(self, serverid):
+        # mark the given server as broken, so it will throw exceptions when
+        # asked to hold a share
+        self.servers_by_id[serverid].broken = True
+
 class GridTestMixin:
     def setUp(self):
         self.s = service.MultiService()
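A hypothetical test fragment (not in this commit) showing how the new grid hooks compose with GridTestMixin; everything other than break_server/remove_server is the mixin's existing vocabulary:

    class ServerLoss(GridTestMixin, unittest.TestCase):
        def test_with_unreliable_servers(self):
            self.basedir = "no_network/ServerLoss/test_with_unreliable_servers"
            self.set_up_grid()
            serverids = sorted(self.g.servers_by_id.keys())
            self.g.break_server(serverids[0])    # now errors when asked to hold a share
            self.g.remove_server(serverids[1])   # dropped from every client's serverlist
            # an upload attempted here must cope with the remaining servers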

@@ -1186,7 +1186,7 @@ class Large(DeepCheckBase, unittest.TestCase):
             self.subdir_node = subdir_node
             kids = []
             for i in range(1, COUNT):
-                litnode = LiteralFileURI("%03d-data" % i)
+                litnode = LiteralFileURI("%03d-data" % i).to_string()
                 kids.append( (u"%03d-small" % i, litnode) )
             return subdir_node.set_children(kids)
         d.addCallback(_add_children)

@@ -1,13 +1,11 @@
 
 import time
-from zope.interface import implements
 from twisted.trial import unittest
 from twisted.internet import defer
 from allmydata import uri, dirnode
 from allmydata.client import Client
 from allmydata.immutable import upload
-from allmydata.interfaces import IURI, IClient, IMutableFileNode, \
-     IDirectoryURI, IReadonlyDirectoryURI, IFileNode, \
+from allmydata.interfaces import IFileNode, \
      ExistingChildError, NoSuchChildError, \
      IDeepCheckResults, IDeepCheckAndRepairResults, CannotPackUnknownNodeError
 from allmydata.mutable.filenode import MutableFileNode
@@ -15,112 +13,34 @@ from allmydata.mutable.common import UncoordinatedWriteError
 from allmydata.util import hashutil, base32
 from allmydata.monitor import Monitor
 from allmydata.test.common import make_chk_file_uri, make_mutable_file_uri, \
-     FakeDirectoryNode, create_chk_filenode, ErrorMixin
+     ErrorMixin
 from allmydata.test.no_network import GridTestMixin
-from allmydata.check_results import CheckResults, CheckAndRepairResults
 from allmydata.unknown import UnknownNode
+from allmydata.nodemaker import NodeMaker
 from base64 import b32decode
 import common_util as testutil
 
-# to test dirnode.py, we want to construct a tree of real DirectoryNodes that
-# contain pointers to fake files. We start with a fake MutableFileNode that
-# stores all of its data in a static table.
-
-class Marker:
-    implements(IFileNode, IMutableFileNode) # sure, why not
-    def __init__(self, nodeuri):
-        if not isinstance(nodeuri, str):
-            nodeuri = nodeuri.to_string()
-        self.nodeuri = nodeuri
-        si = hashutil.tagged_hash("tag1", nodeuri)[:16]
-        self.storage_index = si
-        fp = hashutil.tagged_hash("tag2", nodeuri)
-        self.verifieruri = uri.SSKVerifierURI(storage_index=si, fingerprint=fp)
-    def get_uri(self):
-        return self.nodeuri
-    def get_readonly_uri(self):
-        return self.nodeuri
-    def get_verify_cap(self):
-        return self.verifieruri
-    def get_storage_index(self):
-        return self.storage_index
-
-    def check(self, monitor, verify=False, add_lease=False):
-        r = CheckResults(uri.from_string(self.nodeuri), None)
-        r.set_healthy(True)
-        r.set_recoverable(True)
-        return defer.succeed(r)
-
-    def check_and_repair(self, monitor, verify=False, add_lease=False):
-        d = self.check(verify)
-        def _got(cr):
-            r = CheckAndRepairResults(None)
-            r.pre_repair_results = r.post_repair_results = cr
-            return r
-        d.addCallback(_got)
-        return d
-
-# dirnode requires three methods from the client: upload(),
-# create_node_from_uri(), and create_empty_dirnode(). Of these, upload() is
-# only used by the convenience composite method add_file().
-
-class FakeClient:
-    implements(IClient)
-
-    def upload(self, uploadable):
-        d = uploadable.get_size()
-        d.addCallback(lambda size: uploadable.read(size))
-        def _got_data(datav):
-            data = "".join(datav)
-            n = create_chk_filenode(self, data)
-            results = upload.UploadResults()
-            results.uri = n.get_uri()
-            return results
-        d.addCallback(_got_data)
-        return d
-
-    def create_node_from_uri(self, u, readcap=None):
-        if not u:
-            u = readcap
-        u = IURI(u)
-        if (IDirectoryURI.providedBy(u)
-            or IReadonlyDirectoryURI.providedBy(u)):
-            return FakeDirectoryNode(self).init_from_uri(u)
-        return Marker(u.to_string())
-
-    def create_empty_dirnode(self):
-        n = FakeDirectoryNode(self)
-        d = n.create()
-        d.addCallback(lambda res: n)
-        return d
-
-class Dirnode(unittest.TestCase,
+class Dirnode(GridTestMixin, unittest.TestCase,
               testutil.ShouldFailMixin, testutil.StallMixin, ErrorMixin):
     timeout = 240 # It takes longer than 120 seconds on Francois's arm box.
-    def setUp(self):
-        self.client = FakeClient()
-        # This is a base32-encoded representation of the directory tree
-        #  root/file1
-        #  root/file2
-        #  root/file3
-        # as represented after being fed to _pack_contents.
-        # We have it here so we can decode it, feed it to
-        # _unpack_contents, and verify that _unpack_contents
-        # works correctly.
-        self.known_tree = "GM4TOORVHJTGS3DFGEWDSNJ2KVJESOSDJBFTU33MPB2GS3LZNVYG6N3GGI3WU5TIORTXC3DOMJ2G4NB2MVWXUZDONBVTE5LNGRZWK2LYN55GY23XGNYXQMTOMZUWU5TENN4DG23ZG5UTO2L2NQ2DO6LFMRWDMZJWGRQTUMZ2GEYDUMJQFQYTIMZ22XZKZORX5XS7CAQCSK3URR6QOHISHRCMGER5LRFSZRNAS5ZSALCS6TWFQAE754IVOIKJVK73WZPP3VUUEDTX3WHTBBZ5YX3CEKHCPG3ZWQLYA4QM6LDRCF7TJQYWLIZHKGN5ROA3AUZPXESBNLQQ6JTC2DBJU2D47IZJTLR3PKZ4RVF57XLPWY7FX7SZV3T6IJ3ORFW37FXUPGOE3ROPFNUX5DCGMAQJ3PGGULBRGM3TU6ZCMN2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGI3TKNRWGEWCAITUMFUG6ZJCHIQHWITMNFXGW3LPORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBSG42TMNRRFQQCE3DJNZVWG4TUNFWWKIR2EAYTENBWGY3DGOBZG4XDIMRXGU3DMML5FQQCE3LUNFWWKIR2EAYTENBWGY3DGOBZG4XDIMRXGU3DMML5FQWDGOJRHI2TUZTJNRSTELBZGQ5FKUSJHJBUQSZ2MFYGKZ3SOBSWQ43IO52WO23CNAZWU3DUGVSWSNTIOE5DK33POVTW4ZLNMNWDK6DHPA2GS2THNF2W25DEN5VGY2LQNFRGG5DKNNRHO5TZPFTWI6LNMRYGQ2LCGJTHM4J2GM5DCMB2GQWDCNBSHKVVQBGRYMACKJ27CVQ6O6B4QPR72RFVTGOZUI76XUSWAX73JRV5PYRHMIFYZIA25MXDPGUGML6M2NMRSG4YD4W4K37ZDYSXHMJ3IUVT4F64YTQQVBJFFFOUC7J7LAB2VFCL5UKKGMR2D3F4EPOYC7UYWQZNR5KXHBSNXLCNBX2SNF22DCXJIHSMEKWEWOG5XCJEVVZ7UW5IB6I64XXQSJ34B5CAYZGZIIMR6LBRGMZTU6ZCMN2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMYDEMJYFQQCE5DBNBXWKIR2EB5SE3DJNZVW233UNFWWKIR2EAYTENBWGY3DGOBZG4XDIMZQGIYTQLBAEJWGS3TLMNZHI2LNMURDUIBRGI2DMNRWGM4DSNZOGQZTAMRRHB6SYIBCNV2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMYDEMJYPUWCYMZZGU5DKOTGNFWGKMZMHE2DUVKSJE5EGSCLHJRW25DDPBYTO2DXPB3GM6DBNYZTI6LJMV3DM2LWNB4TU4LWMNSWW3LKORXWK5DEMN3TI23NNE3WEM3SORRGY5THPA3TKNBUMNZG453BOF2GSZLXMVWWI3DJOFZW623RHIZTUMJQHI2SYMJUGI5BOSHWDPG3WKPAVXCF3XMKA7QVIWPRMWJHDTQHD27AHDCPJWDQENQ5H5ZZILTXQNIXXCIW4LKQABU2GCFRG5FHQN7CHD7HF4EKNRZFIV2ZYQIBM7IQU7F4RGB3XCX3FREPBKQ7UCICHVWPCYFGA6OLH3J45LXQ6GWWICJ3PGWJNLZ7PCRNLAPNYUGU6BENS7OXMBEOOFRIZV3PF2FFWZ5WHDPKXERYP7GNHKRMGEZTOOT3EJRXI2LNMURDUIBRGI2DMNRWGM4DSNZOGQZTGNRSGY4SYIBCORQWQ33FEI5CA6ZCNRUW423NN52GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMZTMMRWHEWCAITMNFXGWY3SORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBTGM3DENRZPUWCAITNORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBTGM3DENRZPUWCY==="
 
     def test_basic(self):
-        d = self.client.create_empty_dirnode()
+        self.basedir = "dirnode/Dirnode/test_basic"
+        self.set_up_grid()
+        c = self.g.clients[0]
+        d = c.create_empty_dirnode()
         def _done(res):
-            self.failUnless(isinstance(res, FakeDirectoryNode))
+            self.failUnless(isinstance(res, dirnode.DirectoryNode))
             rep = str(res)
             self.failUnless("RW" in rep)
         d.addCallback(_done)
         return d
 
     def test_check(self):
-        d = self.client.create_empty_dirnode()
+        self.basedir = "dirnode/Dirnode/test_check"
+        self.set_up_grid()
+        c = self.g.clients[0]
+        d = c.create_empty_dirnode()
         d.addCallback(lambda dn: dn.check(Monitor()))
         def _done(res):
             self.failUnless(res.is_healthy())
@@ -134,16 +54,17 @@ class Dirnode(unittest.TestCase,
         #  root/subdir/file1
         #  root/subdir/link -> root
         #  root/rodir
-        d = self.client.create_empty_dirnode()
+        c = self.g.clients[0]
+        d = c.create_empty_dirnode()
         def _created_root(rootnode):
             self._rootnode = rootnode
             return rootnode.create_empty_directory(u"subdir")
         d.addCallback(_created_root)
         def _created_subdir(subdir):
             self._subdir = subdir
-            d = subdir.add_file(u"file1", upload.Data("data", None))
+            d = subdir.add_file(u"file1", upload.Data("data"*100, None))
             d.addCallback(lambda res: subdir.set_node(u"link", self._rootnode))
-            d.addCallback(lambda res: self.client.create_empty_dirnode())
+            d.addCallback(lambda res: c.create_empty_dirnode())
             d.addCallback(lambda dn:
                           self._rootnode.set_uri(u"rodir",
                                                  dn.get_readonly_uri()))
@@ -155,6 +76,8 @@ class Dirnode(unittest.TestCase,
         return d
 
     def test_deepcheck(self):
+        self.basedir = "dirnode/Dirnode/test_deepcheck"
+        self.set_up_grid()
         d = self._test_deepcheck_create()
         d.addCallback(lambda rootnode: rootnode.start_deep_check().when_done())
         def _check_results(r):
@@ -173,6 +96,8 @@ class Dirnode(unittest.TestCase,
         return d
 
     def test_deepcheck_and_repair(self):
+        self.basedir = "dirnode/Dirnode/test_deepcheck_and_repair"
+        self.set_up_grid()
         d = self._test_deepcheck_create()
         d.addCallback(lambda rootnode:
                       rootnode.start_deep_check_and_repair().when_done())
@@ -200,11 +125,13 @@ class Dirnode(unittest.TestCase,
         return d
 
     def _mark_file_bad(self, rootnode):
-        si = IURI(rootnode.get_uri())._filenode_uri.storage_index
-        rootnode._node.bad_shares[si] = "unhealthy"
+        si = rootnode.get_storage_index()
+        self.delete_shares_numbered(rootnode.get_uri(), [0])
         return rootnode
 
     def test_deepcheck_problems(self):
+        self.basedir = "dirnode/Dirnode/test_deepcheck_problems"
+        self.set_up_grid()
         d = self._test_deepcheck_create()
         d.addCallback(lambda rootnode: self._mark_file_bad(rootnode))
         d.addCallback(lambda rootnode: rootnode.start_deep_check().when_done())
@@ -222,25 +149,29 @@ class Dirnode(unittest.TestCase,
         return d
 
     def test_readonly(self):
-        fileuri = make_chk_file_uri(1234)
-        filenode = self.client.create_node_from_uri(fileuri)
+        self.basedir = "dirnode/Dirnode/test_readonly"
+        self.set_up_grid()
+        c = self.g.clients[0]
+        nm = c.nodemaker
+        filecap = make_chk_file_uri(1234)
+        filenode = nm.create_from_cap(filecap)
         uploadable = upload.Data("some data", convergence="some convergence string")
 
-        d = self.client.create_empty_dirnode()
+        d = c.create_empty_dirnode()
         def _created(rw_dn):
-            d2 = rw_dn.set_uri(u"child", fileuri.to_string())
+            d2 = rw_dn.set_uri(u"child", filecap)
             d2.addCallback(lambda res: rw_dn)
             return d2
         d.addCallback(_created)
 
         def _ready(rw_dn):
             ro_uri = rw_dn.get_readonly_uri()
-            ro_dn = self.client.create_node_from_uri(ro_uri)
+            ro_dn = c.create_node_from_uri(ro_uri)
             self.failUnless(ro_dn.is_readonly())
             self.failUnless(ro_dn.is_mutable())
 
             self.shouldFail(dirnode.NotMutableError, "set_uri ro", None,
-                            ro_dn.set_uri, u"newchild", fileuri.to_string())
+                            ro_dn.set_uri, u"newchild", filecap)
             self.shouldFail(dirnode.NotMutableError, "set_uri ro", None,
                             ro_dn.set_node, u"newchild", filenode)
             self.shouldFail(dirnode.NotMutableError, "set_nodes ro", None,
@@ -271,13 +202,18 @@ class Dirnode(unittest.TestCase,
         self.failUnless(a >= b, "%r should be >= %r" % (a, b))
 
     def test_create(self):
+        self.basedir = "dirnode/Dirnode/test_create"
+        self.set_up_grid()
+        c = self.g.clients[0]
+
         self.expected_manifest = []
         self.expected_verifycaps = set()
         self.expected_storage_indexes = set()
 
-        d = self.client.create_empty_dirnode()
+        d = c.create_empty_dirnode()
         def _then(n):
             # /
+            self.rootnode = n
             self.failUnless(n.is_mutable())
             u = n.get_uri()
             self.failUnless(u)
@@ -299,18 +235,19 @@ class Dirnode(unittest.TestCase,
         d.addCallback(lambda res: self.failUnlessEqual(res, {}))
         d.addCallback(lambda res: n.has_child(u"missing"))
         d.addCallback(lambda res: self.failIf(res))
+
         fake_file_uri = make_mutable_file_uri()
         other_file_uri = make_mutable_file_uri()
-        m = Marker(fake_file_uri)
+        m = c.nodemaker.create_from_cap(fake_file_uri)
         ffu_v = m.get_verify_cap().to_string()
         self.expected_manifest.append( ((u"child",) , m.get_uri()) )
         self.expected_verifycaps.add(ffu_v)
         self.expected_storage_indexes.add(base32.b2a(m.get_storage_index()))
-        d.addCallback(lambda res: n.set_uri(u"child", fake_file_uri.to_string()))
+        d.addCallback(lambda res: n.set_uri(u"child", fake_file_uri))
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "set_uri-no",
                                       "child 'child' already exists",
-                                      n.set_uri, u"child", other_file_uri.to_string(),
+                                      n.set_uri, u"child", other_file_uri,
                                       overwrite=False))
         # /
         # /child = mutable
@@ -321,7 +258,7 @@ class Dirnode(unittest.TestCase,
         # /child = mutable
         # /subdir = directory
         def _created(subdir):
-            self.failUnless(isinstance(subdir, FakeDirectoryNode))
+            self.failUnless(isinstance(subdir, dirnode.DirectoryNode))
             self.subdir = subdir
             new_v = subdir.get_verify_cap().to_string()
             assert isinstance(new_v, str)
@@ -391,7 +328,7 @@ class Dirnode(unittest.TestCase,
         d.addCallback(lambda res: n.get_child_at_path(u"subdir/subsubdir"))
         d.addCallback(lambda subsubdir:
                       self.failUnless(isinstance(subsubdir,
-                                                 FakeDirectoryNode)))
+                                                 dirnode.DirectoryNode)))
         d.addCallback(lambda res: n.get_child_at_path(u""))
         d.addCallback(lambda res: self.failUnlessEqual(res.get_uri(),
                                                        n.get_uri()))
@@ -410,7 +347,7 @@ class Dirnode(unittest.TestCase,
                       n.get_child_and_metadata_at_path(u""))
         def _check_child_and_metadata1(res):
             child, metadata = res
-            self.failUnless(isinstance(child, FakeDirectoryNode))
+            self.failUnless(isinstance(child, dirnode.DirectoryNode))
             # edge-metadata needs at least one path segment
             self.failUnlessEqual(sorted(metadata.keys()), [])
         d.addCallback(_check_child_and_metadata1)
@@ -420,7 +357,7 @@ class Dirnode(unittest.TestCase,
         def _check_child_and_metadata2(res):
             child, metadata = res
             self.failUnlessEqual(child.get_uri(),
-                                 fake_file_uri.to_string())
+                                 fake_file_uri)
             self.failUnlessEqual(set(metadata.keys()),
                                  set(["tahoe", "ctime", "mtime"]))
         d.addCallback(_check_child_and_metadata2)
@@ -429,19 +366,19 @@ class Dirnode(unittest.TestCase,
                       n.get_child_and_metadata_at_path(u"subdir/subsubdir"))
         def _check_child_and_metadata3(res):
             child, metadata = res
-            self.failUnless(isinstance(child, FakeDirectoryNode))
+            self.failUnless(isinstance(child, dirnode.DirectoryNode))
             self.failUnlessEqual(set(metadata.keys()),
                                  set(["tahoe", "ctime", "mtime"]))
         d.addCallback(_check_child_and_metadata3)
 
         # set_uri + metadata
         # it should be possible to add a child without any metadata
-        d.addCallback(lambda res: n.set_uri(u"c2", fake_file_uri.to_string(), {}))
+        d.addCallback(lambda res: n.set_uri(u"c2", fake_file_uri, {}))
         d.addCallback(lambda res: n.get_metadata_for(u"c2"))
         d.addCallback(lambda metadata: self.failUnlessEqual(metadata.keys(), ['tahoe']))
 
         # You can't override the link timestamps.
-        d.addCallback(lambda res: n.set_uri(u"c2", fake_file_uri.to_string(), { 'tahoe': {'linkcrtime': "bogus"}}))
+        d.addCallback(lambda res: n.set_uri(u"c2", fake_file_uri, { 'tahoe': {'linkcrtime': "bogus"}}))
         d.addCallback(lambda res: n.get_metadata_for(u"c2"))
         def _has_good_linkcrtime(metadata):
             self.failUnless(metadata.has_key('tahoe'))
@@ -450,7 +387,7 @@ class Dirnode(unittest.TestCase,
         d.addCallback(_has_good_linkcrtime)
 
         # if we don't set any defaults, the child should get timestamps
-        d.addCallback(lambda res: n.set_uri(u"c3", fake_file_uri.to_string()))
+        d.addCallback(lambda res: n.set_uri(u"c3", fake_file_uri))
         d.addCallback(lambda res: n.get_metadata_for(u"c3"))
         d.addCallback(lambda metadata:
                       self.failUnlessEqual(set(metadata.keys()),
@@ -458,7 +395,7 @@ class Dirnode(unittest.TestCase,
 
         # or we can add specific metadata at set_uri() time, which
         # overrides the timestamps
-        d.addCallback(lambda res: n.set_uri(u"c4", fake_file_uri.to_string(),
+        d.addCallback(lambda res: n.set_uri(u"c4", fake_file_uri,
                                             {"key": "value"}))
         d.addCallback(lambda res: n.get_metadata_for(u"c4"))
         d.addCallback(lambda metadata:
@@ -472,7 +409,7 @@ class Dirnode(unittest.TestCase,
         # set_node + metadata
         # it should be possible to add a child without any metadata
         d.addCallback(lambda res: n.set_node(u"d2", n, {}))
-        d.addCallback(lambda res: self.client.create_empty_dirnode())
+        d.addCallback(lambda res: c.create_empty_dirnode())
         d.addCallback(lambda n2:
                       self.shouldFail(ExistingChildError, "set_node-no",
                                       "child 'd2' already exists",
@@ -502,9 +439,9 @@ class Dirnode(unittest.TestCase,
         d.addCallback(lambda res: n.delete(u"d4"))
 
         # metadata through set_children()
-        d.addCallback(lambda res: n.set_children([ (u"e1", fake_file_uri.to_string()),
-                                                   (u"e2", fake_file_uri.to_string(), {}),
-                                                   (u"e3", fake_file_uri.to_string(),
+        d.addCallback(lambda res: n.set_children([ (u"e1", fake_file_uri),
+                                                   (u"e2", fake_file_uri, {}),
+                                                   (u"e3", fake_file_uri,
                                                     {"key": "value"}),
                                                    ]))
         d.addCallback(lambda res:
@@ -639,16 +576,16 @@ class Dirnode(unittest.TestCase,
                       self.failUnlessEqual(sorted(children.keys()),
                                            sorted([u"child"])))
 
-        uploadable = upload.Data("some data", convergence="some convergence string")
-        d.addCallback(lambda res: n.add_file(u"newfile", uploadable))
+        uploadable1 = upload.Data("some data", convergence="converge")
+        d.addCallback(lambda res: n.add_file(u"newfile", uploadable1))
         d.addCallback(lambda newnode:
                       self.failUnless(IFileNode.providedBy(newnode)))
-        other_uploadable = upload.Data("some data", convergence="stuff")
+        uploadable2 = upload.Data("some data", convergence="stuff")
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "add_file-no",
                                       "child 'newfile' already exists",
                                       n.add_file, u"newfile",
-                                      other_uploadable,
+                                      uploadable2,
                                       overwrite=False))
         d.addCallback(lambda res: n.list())
         d.addCallback(lambda children:
@@ -659,8 +596,9 @@ class Dirnode(unittest.TestCase,
                       self.failUnlessEqual(set(metadata.keys()),
                                            set(["tahoe", "ctime", "mtime"])))
 
+        uploadable3 = upload.Data("some data", convergence="converge")
         d.addCallback(lambda res: n.add_file(u"newfile-metadata",
-                                             uploadable,
+                                             uploadable3,
                                              {"key": "value"}))
         d.addCallback(lambda newnode:
                       self.failUnless(IFileNode.providedBy(newnode)))
@@ -691,7 +629,7 @@ class Dirnode(unittest.TestCase,
         d.addCallback(lambda res: self.subdir2.get(u"child"))
         d.addCallback(lambda child:
                       self.failUnlessEqual(child.get_uri(),
-                                           fake_file_uri.to_string()))
+                                           fake_file_uri))
 
         # move it back, using new_child_name=
         d.addCallback(lambda res:
@@ -707,7 +645,7 @@ class Dirnode(unittest.TestCase,
 
         # now make sure that we honor overwrite=False
         d.addCallback(lambda res:
-                      self.subdir2.set_uri(u"newchild", other_file_uri.to_string()))
+                      self.subdir2.set_uri(u"newchild", other_file_uri))
 
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "move_child_to-no",
d.addCallback(lambda res: self.subdir2.get(u"newchild")) d.addCallback(lambda res: self.subdir2.get(u"newchild"))
d.addCallback(lambda child: d.addCallback(lambda child:
self.failUnlessEqual(child.get_uri(), self.failUnlessEqual(child.get_uri(),
other_file_uri.to_string())) other_file_uri))
return d return d
@@ -727,59 +665,68 @@ class Dirnode(unittest.TestCase,
         d.addErrback(self.explain_error)
         return d
 
+class Packing(unittest.TestCase):
+    # This is a base32-encoded representation of the directory tree
+    #  root/file1
+    #  root/file2
+    #  root/file3
+    # as represented after being fed to _pack_contents.
+    # We have it here so we can decode it, feed it to
+    # _unpack_contents, and verify that _unpack_contents
+    # works correctly.
+    known_tree = "GM4TOORVHJTGS3DFGEWDSNJ2KVJESOSDJBFTU33MPB2GS3LZNVYG6N3GGI3WU5TIORTXC3DOMJ2G4NB2MVWXUZDONBVTE5LNGRZWK2LYN55GY23XGNYXQMTOMZUWU5TENN4DG23ZG5UTO2L2NQ2DO6LFMRWDMZJWGRQTUMZ2GEYDUMJQFQYTIMZ22XZKZORX5XS7CAQCSK3URR6QOHISHRCMGER5LRFSZRNAS5ZSALCS6TWFQAE754IVOIKJVK73WZPP3VUUEDTX3WHTBBZ5YX3CEKHCPG3ZWQLYA4QM6LDRCF7TJQYWLIZHKGN5ROA3AUZPXESBNLQQ6JTC2DBJU2D47IZJTLR3PKZ4RVF57XLPWY7FX7SZV3T6IJ3ORFW37FXUPGOE3ROPFNUX5DCGMAQJ3PGGULBRGM3TU6ZCMN2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGI3TKNRWGEWCAITUMFUG6ZJCHIQHWITMNFXGW3LPORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBSG42TMNRRFQQCE3DJNZVWG4TUNFWWKIR2EAYTENBWGY3DGOBZG4XDIMRXGU3DMML5FQQCE3LUNFWWKIR2EAYTENBWGY3DGOBZG4XDIMRXGU3DMML5FQWDGOJRHI2TUZTJNRSTELBZGQ5FKUSJHJBUQSZ2MFYGKZ3SOBSWQ43IO52WO23CNAZWU3DUGVSWSNTIOE5DK33POVTW4ZLNMNWDK6DHPA2GS2THNF2W25DEN5VGY2LQNFRGG5DKNNRHO5TZPFTWI6LNMRYGQ2LCGJTHM4J2GM5DCMB2GQWDCNBSHKVVQBGRYMACKJ27CVQ6O6B4QPR72RFVTGOZUI76XUSWAX73JRV5PYRHMIFYZIA25MXDPGUGML6M2NMRSG4YD4W4K37ZDYSXHMJ3IUVT4F64YTQQVBJFFFOUC7J7LAB2VFCL5UKKGMR2D3F4EPOYC7UYWQZNR5KXHBSNXLCNBX2SNF22DCXJIHSMEKWEWOG5XCJEVVZ7UW5IB6I64XXQSJ34B5CAYZGZIIMR6LBRGMZTU6ZCMN2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMYDEMJYFQQCE5DBNBXWKIR2EB5SE3DJNZVW233UNFWWKIR2EAYTENBWGY3DGOBZG4XDIMZQGIYTQLBAEJWGS3TLMNZHI2LNMURDUIBRGI2DMNRWGM4DSNZOGQZTAMRRHB6SYIBCNV2GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMYDEMJYPUWCYMZZGU5DKOTGNFWGKMZMHE2DUVKSJE5EGSCLHJRW25DDPBYTO2DXPB3GM6DBNYZTI6LJMV3DM2LWNB4TU4LWMNSWW3LKORXWK5DEMN3TI23NNE3WEM3SORRGY5THPA3TKNBUMNZG453BOF2GSZLXMVWWI3DJOFZW623RHIZTUMJQHI2SYMJUGI5BOSHWDPG3WKPAVXCF3XMKA7QVIWPRMWJHDTQHD27AHDCPJWDQENQ5H5ZZILTXQNIXXCIW4LKQABU2GCFRG5FHQN7CHD7HF4EKNRZFIV2ZYQIBM7IQU7F4RGB3XCX3FREPBKQ7UCICHVWPCYFGA6OLH3J45LXQ6GWWICJ3PGWJNLZ7PCRNLAPNYUGU6BENS7OXMBEOOFRIZV3PF2FFWZ5WHDPKXERYP7GNHKRMGEZTOOT3EJRXI2LNMURDUIBRGI2DMNRWGM4DSNZOGQZTGNRSGY4SYIBCORQWQ33FEI5CA6ZCNRUW423NN52GS3LFEI5CAMJSGQ3DMNRTHA4TOLRUGMZTMMRWHEWCAITMNFXGWY3SORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBTGM3DENRZPUWCAITNORUW2ZJCHIQDCMRUGY3DMMZYHE3S4NBTGM3DENRZPUWCY==="
 
     def test_unpack_and_pack_behavior(self):
         known_tree = b32decode(self.known_tree)
-        d = self.client.create_empty_dirnode()
-
-        def _check_tree(node):
-            def check_children(children):
-                # Are all the expected child nodes there?
-                self.failUnless(children.has_key(u'file1'))
-                self.failUnless(children.has_key(u'file2'))
-                self.failUnless(children.has_key(u'file3'))
-
-                # Are the metadata for child 3 right?
-                file3_rocap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5"
-                file3_rwcap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5"
-                file3_metadata = {'ctime': 1246663897.4336269, 'tahoe': {'linkmotime': 1246663897.4336269, 'linkcrtime': 1246663897.4336269}, 'mtime': 1246663897.4336269}
-                self.failUnlessEqual(file3_metadata, children[u'file3'][1])
-                self.failUnlessEqual(file3_rocap,
-                                     children[u'file3'][0].get_readonly_uri())
-                self.failUnlessEqual(file3_rwcap,
-                                     children[u'file3'][0].get_uri())
-
-                # Are the metadata for child 2 right?
-                file2_rocap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4"
-                file2_rwcap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4"
-                file2_metadata = {'ctime': 1246663897.430218, 'tahoe': {'linkmotime': 1246663897.430218, 'linkcrtime': 1246663897.430218}, 'mtime': 1246663897.430218}
-                self.failUnlessEqual(file2_metadata, children[u'file2'][1])
-                self.failUnlessEqual(file2_rocap,
-                                     children[u'file2'][0].get_readonly_uri())
-                self.failUnlessEqual(file2_rwcap,
-                                     children[u'file2'][0].get_uri())
-
-                # Are the metadata for child 1 right?
-                file1_rocap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10"
-                file1_rwcap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10"
-                file1_metadata = {'ctime': 1246663897.4275661, 'tahoe': {'linkmotime': 1246663897.4275661, 'linkcrtime': 1246663897.4275661}, 'mtime': 1246663897.4275661}
-                self.failUnlessEqual(file1_metadata, children[u'file1'][1])
-                self.failUnlessEqual(file1_rocap,
-                                     children[u'file1'][0].get_readonly_uri())
-                self.failUnlessEqual(file1_rwcap,
-                                     children[u'file1'][0].get_uri())
-
-            children = node._unpack_contents(known_tree)
-            check_children(children)
-            packed_children = node._pack_contents(children)
-            children = node._unpack_contents(packed_children)
-            check_children(children)
-        d.addCallback(_check_tree)
-        return d
+        nodemaker = NodeMaker(None, None, None,
+                              None, None, None,
+                              {"k": 3, "n": 10}, None)
+        writecap = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
+        filenode = nodemaker.create_from_cap(writecap)
+        node = dirnode.DirectoryNode(filenode, nodemaker, None)
+        children = node._unpack_contents(known_tree)
+        self._check_children(children)
+        packed_children = node._pack_contents(children)
+        children = node._unpack_contents(packed_children)
+        self._check_children(children)
+
+    def _check_children(self, children):
+        # Are all the expected child nodes there?
+        self.failUnless(children.has_key(u'file1'))
+        self.failUnless(children.has_key(u'file2'))
+        self.failUnless(children.has_key(u'file3'))
+
+        # Are the metadata for child 3 right?
+        file3_rocap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5"
+        file3_rwcap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5"
+        file3_metadata = {'ctime': 1246663897.4336269, 'tahoe': {'linkmotime': 1246663897.4336269, 'linkcrtime': 1246663897.4336269}, 'mtime': 1246663897.4336269}
+        self.failUnlessEqual(file3_metadata, children[u'file3'][1])
+        self.failUnlessEqual(file3_rocap,
+                             children[u'file3'][0].get_readonly_uri())
+        self.failUnlessEqual(file3_rwcap,
+                             children[u'file3'][0].get_uri())
+
+        # Are the metadata for child 2 right?
+        file2_rocap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4"
+        file2_rwcap = "URI:CHK:apegrpehshwugkbh3jlt5ei6hq:5oougnemcl5xgx4ijgiumtdojlipibctjkbwvyygdymdphib2fvq:3:10:4"
+        file2_metadata = {'ctime': 1246663897.430218, 'tahoe': {'linkmotime': 1246663897.430218, 'linkcrtime': 1246663897.430218}, 'mtime': 1246663897.430218}
+        self.failUnlessEqual(file2_metadata, children[u'file2'][1])
+        self.failUnlessEqual(file2_rocap,
+                             children[u'file2'][0].get_readonly_uri())
+        self.failUnlessEqual(file2_rwcap,
+                             children[u'file2'][0].get_uri())
+
+        # Are the metadata for child 1 right?
+        file1_rocap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10"
+        file1_rwcap = "URI:CHK:olxtimympo7f27jvhtgqlnbtn4:emzdnhk2um4seixozlkw3qx2nfijvdkx3ky7i7izl47yedl6e64a:3:10:10"
+        file1_metadata = {'ctime': 1246663897.4275661, 'tahoe': {'linkmotime': 1246663897.4275661, 'linkcrtime': 1246663897.4275661}, 'mtime': 1246663897.4275661}
+        self.failUnlessEqual(file1_metadata, children[u'file1'][1])
+        self.failUnlessEqual(file1_rocap,
+                             children[u'file1'][0].get_readonly_uri())
+        self.failUnlessEqual(file1_rwcap,
+                             children[u'file1'][0].get_uri())
 
     def test_caching_dict(self):
         d = dirnode.CachingDict()
@@ -819,21 +766,28 @@ class FakeMutableFile:
             self.data = modifier(self.data, None, True)
         return defer.succeed(None)
 
+class FakeNodeMaker(NodeMaker):
+    def create_mutable_file(self, contents="", keysize=None):
+        return defer.succeed(FakeMutableFile(contents))
+
 class FakeClient2(Client):
     def __init__(self):
-        pass
-    def create_mutable_file(self, initial_contents=""):
-        return defer.succeed(FakeMutableFile(initial_contents))
+        self.nodemaker = FakeNodeMaker(None, None, None,
+                                       None, None, None,
+                                       {"k":3,"n":10}, None)
+
+    def create_node_from_uri(self, rwcap, rocap):
+        return self.nodemaker.create_from_cap(rwcap, rocap)
 
 class Dirnode2(unittest.TestCase, testutil.ShouldFailMixin):
     def setUp(self):
         self.client = FakeClient2()
+        self.nodemaker = self.client.nodemaker
 
     def test_from_future(self):
         # create a dirnode that contains unknown URI types, and make sure we
         # tolerate them properly. Since dirnodes aren't allowed to add
         # unknown node types, we have to be tricky.
-        d = self.client.create_empty_dirnode()
+        d = self.nodemaker.create_new_mutable_directory()
         future_writecap = "x-tahoe-crazy://I_am_from_the_future."
         future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
         future_node = UnknownNode(future_writecap, future_readcap)
@@ -929,8 +883,13 @@ class UCWEingMutableFileNode(MutableFileNode):
             return res
         d.addCallback(_ucwe)
         return d
-class UCWEingDirectoryNode(dirnode.DirectoryNode):
-    filenode_class = UCWEingMutableFileNode
+
+class UCWEingNodeMaker(NodeMaker):
+    def _create_mutable(self, cap):
+        n = UCWEingMutableFileNode(self.storage_broker, self.secret_holder,
+                                   self.default_encoding_parameters,
+                                   self.history)
+        return n.init_from_uri(cap)
 
 class Deleter(GridTestMixin, unittest.TestCase):
@@ -957,7 +916,13 @@ class Deleter(GridTestMixin, unittest.TestCase):
             return dn.add_file(u"file", small)
         d.addCallback(_created_dir)
         def _do_delete(ignored):
-            n = UCWEingDirectoryNode(c0).init_from_uri(self.root_uri)
+            nm = UCWEingNodeMaker(c0.storage_broker, c0._secret_holder,
+                                  c0.get_history(), c0.getServiceNamed("uploader"),
+                                  c0.getServiceNamed("downloader"),
+                                  c0.download_cache_dirman,
+                                  c0.get_encoding_parameters(),
+                                  c0._key_generator)
+            n = nm.create_from_cap(self.root_uri)
             assert n._node.please_ucwe_after_next_upload == False
             n._node.please_ucwe_after_next_upload = True
             # This should succeed, not raise an exception
@ -966,16 +931,20 @@ class Deleter(GridTestMixin, unittest.TestCase):
return d return d
class Adder(unittest.TestCase, class Adder(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
testutil.ShouldFailMixin, testutil.StallMixin, ErrorMixin):
def setUp(self):
self.client = FakeClient()
def test_overwrite(self): def test_overwrite(self):
# note: This functionality could be tested without actually creating
# several RSA keys. It would be faster without the GridTestMixin: use
# dn.set_node(nodemaker.create_from_cap(make_chk_file_uri())) instead
# of dn.add_file, and use a special NodeMaker that creates fake
# mutable files.
self.basedir = "dirnode/Adder/test_overwrite"
self.set_up_grid()
c = self.g.clients[0]
fileuri = make_chk_file_uri(1234) fileuri = make_chk_file_uri(1234)
filenode = self.client.create_node_from_uri(fileuri) filenode = c.nodemaker.create_from_cap(fileuri)
d = self.client.create_empty_dirnode() d = c.create_empty_dirnode()
def _create_directory_tree(root_node): def _create_directory_tree(root_node):
# Build # Build
@@ -1001,7 +970,7 @@ class Adder(unittest.TestCase,
         d.addCallback(lambda res:
                       root_node.set_node(u'dir2', filenode))
         # We try overwriting a file with a child while also specifying
         # overwrite=False. We should receive an ExistingChildError
         # when we do this.
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "set_node",
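
The note at the top of test_overwrite sketches a faster variant that skips
RSA key generation entirely. Under the assumption of a NodeMaker whose
_create_mutable returns an in-memory fake, it would look roughly like:

    # hypothetical fast setup: no grid, no real keys
    fileuri = make_chk_file_uri(1234)
    filenode = nodemaker.create_from_cap(fileuri)  # just wraps the cap string
    d = nodemaker.create_new_mutable_directory()
    d.addCallback(lambda dn: dn.set_node(u"file", filenode))
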

--- a/src/allmydata/test/test_filenode.py
+++ b/src/allmydata/test/test_filenode.py

@@ -1,6 +1,6 @@
 from twisted.trial import unittest
-from allmydata import uri
+from allmydata import uri, client
 from allmydata.monitor import Monitor
 from allmydata.immutable import filenode, download
 from allmydata.mutable.filenode import MutableFileNode
@@ -17,6 +17,11 @@ class FakeClient:
         return None
     def get_encoding_parameters(self):
         return {"k": 3, "n": 10}
+    def get_storage_broker(self):
+        return None
+    def get_history(self):
+        return None
+    _secret_holder = client.SecretHolder("lease secret")

 class Node(unittest.TestCase):
     def test_chk_filenode(self):
@@ -27,8 +32,8 @@ class Node(unittest.TestCase):
                            size=1000)
         c = FakeClient()
         cf = cachedir.CacheFile("none")
-        fn1 = filenode.FileNode(u, c, cf)
-        fn2 = filenode.FileNode(u, c, cf)
+        fn1 = filenode.FileNode(u.to_string(), None, None, None, None, cf)
+        fn2 = filenode.FileNode(u.to_string(), None, None, None, None, cf)
         self.failUnlessEqual(fn1, fn2)
         self.failIfEqual(fn1, "I am not a filenode")
         self.failIfEqual(fn1, NotANode())
@@ -49,8 +54,8 @@ class Node(unittest.TestCase):
         DATA = "I am a short file."
         u = uri.LiteralFileURI(data=DATA)
         c = None
-        fn1 = filenode.LiteralFileNode(u, c)
-        fn2 = filenode.LiteralFileNode(u, c)
+        fn1 = filenode.LiteralFileNode(u.to_string())
+        fn2 = filenode.LiteralFileNode(u.to_string())
         self.failUnlessEqual(fn1, fn2)
         self.failIfEqual(fn1, "I am not a filenode")
         self.failIfEqual(fn1, NotANode())
@@ -93,7 +98,8 @@ class Node(unittest.TestCase):
         si = hashutil.ssk_storage_index_hash(rk)
         u = uri.WriteableSSKFileURI("\x00"*16, "\x00"*32)

-        n = MutableFileNode(client).init_from_uri(u)
+        n = MutableFileNode(None, None, client.get_encoding_parameters(),
+                            None).init_from_uri(u.to_string())
         self.failUnlessEqual(n.get_writekey(), wk)
         self.failUnlessEqual(n.get_readkey(), rk)
@@ -109,7 +115,8 @@ class Node(unittest.TestCase):
         self.failUnlessEqual(n.is_mutable(), True)
         self.failUnlessEqual(n.is_readonly(), False)

-        n2 = MutableFileNode(client).init_from_uri(u)
+        n2 = MutableFileNode(None, None, client.get_encoding_parameters(),
+                             None).init_from_uri(u.to_string())
         self.failUnlessEqual(n, n2)
         self.failIfEqual(n, "not even the right type")
         self.failIfEqual(n, u) # not the right class
@@ -136,7 +143,7 @@ class LiteralChecker(unittest.TestCase):
     def test_literal_filenode(self):
         DATA = "I am a short file."
         u = uri.LiteralFileURI(data=DATA)
-        fn1 = filenode.LiteralFileNode(u, None)
+        fn1 = filenode.LiteralFileNode(u.to_string())
         d = fn1.check(Monitor())
         def _check_checker_results(cr):
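
The pattern throughout this file is that nodes now receive cap strings and
re-parse them internally. A small illustration of the round trip these tests
rely on, using the calls shown above plus uri.from_string (treating the .data
attribute of the re-parsed LiteralFileURI as an assumption):

    DATA = "I am a short file."
    u = uri.LiteralFileURI(data=DATA)
    cap_s = u.to_string()        # the string handed to node constructors
    u2 = uri.from_string(cap_s)  # re-parsed only where it is needed
    assert u2.data == DATA
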

--- a/src/allmydata/test/test_helper.py
+++ b/src/allmydata/test/test_helper.py

@@ -8,7 +8,7 @@ from foolscap.logging import log
 from allmydata.storage.server import si_b2a
 from allmydata.storage_client import StorageFarmBroker
 from allmydata.immutable import offloaded, upload
-from allmydata import uri
+from allmydata import uri, client
 from allmydata.util import hashutil, fileutil, mathutil
 from pycryptopp.cipher.aes import AES
@@ -64,6 +64,7 @@ class FakeClient(service.MultiService):
         }
     stats_provider = None
     storage_broker = StorageFarmBroker(None, True)
+    _secret_holder = client.SecretHolder("lease secret")
     def log(self, *args, **kwargs):
         return log.msg(*args, **kwargs)
     def get_encoding_parameters(self):
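
SecretHolder appears here as the one-argument replacement for the client's
get_renewal_secret/get_cancel_secret pair (see the test_upload.py hunk
further down). A rough sketch of the contract these fakes assume, with the
hashutil helpers being an assumption about how the secrets are derived:

    # sketch only: the real class lives in client.py
    from allmydata.util import hashutil

    class SecretHolderSketch:
        def __init__(self, lease_secret):
            self._lease_secret = lease_secret
        def get_renewal_secret(self):
            return hashutil.my_renewal_secret_hash(self._lease_secret)
        def get_cancel_secret(self):
            return hashutil.my_cancel_secret_hash(self._lease_secret)
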

--- a/src/allmydata/test/test_mutable.py
+++ b/src/allmydata/test/test_mutable.py

@@ -1,20 +1,20 @@
-import os, struct
+import struct
 from cStringIO import StringIO
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
-from twisted.python import failure
-from allmydata import uri
-from allmydata.storage.server import StorageServer
+from allmydata import uri, client
+from allmydata.nodemaker import NodeMaker
 from allmydata.immutable import download
-from allmydata.util import base32, idlib
+from allmydata.util import base32
 from allmydata.util.idlib import shortnodeid_b2a
-from allmydata.util.hashutil import tagged_hash
-from allmydata.util.fileutil import make_dirs
-from allmydata.interfaces import IURI, IMutableFileURI, IUploadable, \
-     NotEnoughSharesError, IRepairResults, ICheckAndRepairResults
+from allmydata.util.hashutil import tagged_hash, ssk_writekey_hash, \
+     ssk_pubkey_fingerprint_hash
+from allmydata.interfaces import IRepairResults, ICheckAndRepairResults, \
+     NotEnoughSharesError
 from allmydata.monitor import Monitor
 from allmydata.test.common import ShouldFailMixin
+from allmydata.test.no_network import GridTestMixin
 from foolscap.api import eventually, fireEventually
 from foolscap.logging import log
 from allmydata.storage_client import StorageFarmBroker
@@ -32,13 +32,6 @@ from allmydata.mutable.repairer import MustForceRepairError
 import common_util as testutil

-# this "FastMutableFileNode" exists solely to speed up tests by using smaller
-# public/private keys. Once we switch to fast DSA-based keys, we can get rid
-# of this.
-class FastMutableFileNode(MutableFileNode):
-    SIGNATURE_KEY_SIZE = 522
-
 # this "FakeStorage" exists to put the share data in RAM and avoid using real
 # network connections, both to speed up the tests and to reduce the amount of
 # non-mutable.py code being exercised.
@@ -64,18 +57,9 @@ class FakeStorage:
         self._sequence = None
         self._pending = {}
         self._pending_timer = None
-        self._special_answers = {}

     def read(self, peerid, storage_index):
         shares = self._peers.get(peerid, {})
-        if self._special_answers.get(peerid, []):
-            mode = self._special_answers[peerid].pop(0)
-            if mode == "fail":
-                shares = failure.Failure(IntentionalError())
-            elif mode == "none":
-                shares = {}
-            elif mode == "normal":
-                pass
         if self._sequence is None:
             return defer.succeed(shares)
         d = defer.Deferred()
@@ -162,73 +146,6 @@ class FakeStorageServer:
         return fireEventually(answer)

-# our "FakeClient" has just enough functionality of the real Client to let
-# the tests run.
-class FakeClient:
-    mutable_file_node_class = FastMutableFileNode
-    def __init__(self, num_peers=10):
-        self._storage = FakeStorage()
-        self._num_peers = num_peers
-        peerids = [tagged_hash("peerid", "%d" % i)[:20]
-                   for i in range(self._num_peers)]
-        self.nodeid = "fakenodeid"
-        self.storage_broker = StorageFarmBroker(None, True)
-        for peerid in peerids:
-            fss = FakeStorageServer(peerid, self._storage)
-            self.storage_broker.test_add_server(peerid, fss)
-
-    def get_storage_broker(self):
-        return self.storage_broker
-    def debug_break_connection(self, peerid):
-        self.storage_broker.test_servers[peerid].broken = True
-    def debug_remove_connection(self, peerid):
-        self.storage_broker.test_servers.pop(peerid)
-    def debug_get_connection(self, peerid):
-        return self.storage_broker.test_servers[peerid]
-
-    def get_encoding_parameters(self):
-        return {"k": 3, "n": 10}
-
-    def log(self, msg, **kw):
-        return log.msg(msg, **kw)
-
-    def get_renewal_secret(self):
-        return "I hereby permit you to renew my files"
-    def get_cancel_secret(self):
-        return "I hereby permit you to cancel my leases"
-
-    def create_mutable_file(self, contents=""):
-        n = self.mutable_file_node_class(self)
-        d = n.create(contents)
-        d.addCallback(lambda res: n)
-        return d
-
-    def get_history(self):
-        return None
-
-    def create_node_from_uri(self, u, readcap=None):
-        if not u:
-            u = readcap
-        u = IURI(u)
-        assert IMutableFileURI.providedBy(u), u
-        res = self.mutable_file_node_class(self).init_from_uri(u)
-        return res
-
-    def upload(self, uploadable):
-        assert IUploadable.providedBy(uploadable)
-        d = uploadable.get_size()
-        d.addCallback(lambda length: uploadable.read(length))
-        #d.addCallback(self.create_mutable_file)
-        def _got_data(datav):
-            data = "".join(datav)
-            #newnode = FastMutableFileNode(self)
-            return uri.LiteralFileURI(data)
-        d.addCallback(_got_data)
-        return d
-
 def flip_bit(original, byte_offset):
     return (original[:byte_offset] +
             chr(ord(original[byte_offset]) ^ 0x01) +
@@ -266,28 +183,50 @@ def corrupt(res, s, offset, shnums_to_corrupt=None, offset_offset=0):
             shares[shnum] = flip_bit(data, real_offset)
     return res

+def make_storagebroker(s=None, num_peers=10):
+    if not s:
+        s = FakeStorage()
+    peerids = [tagged_hash("peerid", "%d" % i)[:20]
+               for i in range(num_peers)]
+    storage_broker = StorageFarmBroker(None, True)
+    for peerid in peerids:
+        fss = FakeStorageServer(peerid, s)
+        storage_broker.test_add_server(peerid, fss)
+    return storage_broker
+
+def make_nodemaker(s=None, num_peers=10):
+    storage_broker = make_storagebroker(s, num_peers)
+    sh = client.SecretHolder("lease secret")
+    keygen = client.KeyGenerator()
+    keygen.set_default_keysize(522)
+    nodemaker = NodeMaker(storage_broker, sh, None,
+                          None, None, None,
+                          {"k": 3, "n": 10}, keygen)
+    return nodemaker
+
 class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
     # this used to be in Publish, but we removed the limit. Some of
     # these tests test whether the new code correctly allows files
     # larger than the limit.
     OLD_MAX_SEGMENT_SIZE = 3500000
     def setUp(self):
-        self.client = FakeClient()
+        self._storage = s = FakeStorage()
+        self.nodemaker = make_nodemaker(s)

     def test_create(self):
-        d = self.client.create_mutable_file()
+        d = self.nodemaker.create_mutable_file()
         def _created(n):
-            self.failUnless(isinstance(n, FastMutableFileNode))
+            self.failUnless(isinstance(n, MutableFileNode))
             self.failUnlessEqual(n.get_storage_index(), n._storage_index)
-            sb = self.client.get_storage_broker()
+            sb = self.nodemaker.storage_broker
             peer0 = sorted(sb.get_all_serverids())[0]
-            shnums = self.client._storage._peers[peer0].keys()
+            shnums = self._storage._peers[peer0].keys()
             self.failUnlessEqual(len(shnums), 1)
         d.addCallback(_created)
         return d

     def test_serialize(self):
-        n = MutableFileNode(self.client)
+        n = MutableFileNode(None, None, {"k": 3, "n": 10}, None)
         calls = []
         def _callback(*args, **kwargs):
             self.failUnlessEqual(args, (4,) )
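
make_nodemaker is the new per-test entry point: fake storage in RAM, a
522-bit key generator for speed, and None for the collaborators (uploader,
downloader, cache manager, history) that these mutable-file tests never
touch. Typical use, mirroring the setUp above:

    s = FakeStorage()
    nm = make_nodemaker(s)
    d = nm.create_mutable_file("initial contents")
    def _created(n):
        # shares for n now live in s._peers, not on any real server
        return n.download_best_version()
    d.addCallback(_created)
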
@@ -308,7 +247,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         return d

     def test_upload_and_download(self):
-        d = self.client.create_mutable_file()
+        d = self.nodemaker.create_mutable_file()
         def _created(n):
             d = defer.succeed(None)
             d.addCallback(lambda res: n.get_servermap(MODE_READ))
@@ -349,7 +288,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         return d

     def test_create_with_initial_contents(self):
-        d = self.client.create_mutable_file("contents 1")
+        d = self.nodemaker.create_mutable_file("contents 1")
         def _created(n):
             d = n.download_best_version()
             d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
@@ -362,7 +301,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):

     def test_create_with_too_large_contents(self):
         BIG = "a" * (self.OLD_MAX_SEGMENT_SIZE + 1)
-        d = self.client.create_mutable_file(BIG)
+        d = self.nodemaker.create_mutable_file(BIG)
         def _created(n):
             d = n.overwrite(BIG)
             return d
@@ -402,7 +341,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                 raise UncoordinatedWriteError("simulated")
             return old_contents

-        d = self.client.create_mutable_file("line1")
+        d = self.nodemaker.create_mutable_file("line1")
         def _created(n):
             d = n.modify(_modifier)
             d.addCallback(lambda res: n.download_best_version())
@@ -484,7 +423,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         giveuper._delay = 0.1
         giveuper.factor = 1

-        d = self.client.create_mutable_file("line1")
+        d = self.nodemaker.create_mutable_file("line1")
         def _created(n):
             d = n.modify(_modifier)
             d.addCallback(lambda res: n.download_best_version())
@@ -526,8 +465,8 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         return d

     def test_upload_and_download_full_size_keys(self):
-        self.client.mutable_file_node_class = MutableFileNode
-        d = self.client.create_mutable_file()
+        self.nodemaker.key_generator = client.KeyGenerator()
+        d = self.nodemaker.create_mutable_file()
         def _created(n):
             d = defer.succeed(None)
             d.addCallback(lambda res: n.get_servermap(MODE_READ))
@@ -559,12 +498,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):

 class MakeShares(unittest.TestCase):
     def test_encrypt(self):
-        c = FakeClient()
-        fn = FastMutableFileNode(c)
+        nm = make_nodemaker()
         CONTENTS = "some initial contents"
-        d = fn.create(CONTENTS)
-        def _created(res):
-            p = Publish(fn, None)
+        d = nm.create_mutable_file(CONTENTS)
+        def _created(fn):
+            p = Publish(fn, nm.storage_broker, None)
             p.salt = "SALT" * 4
             p.readkey = "\x00" * 16
             p.newdata = CONTENTS
@@ -584,12 +522,12 @@ class MakeShares(unittest.TestCase):
         return d

     def test_generate(self):
-        c = FakeClient()
-        fn = FastMutableFileNode(c)
+        nm = make_nodemaker()
         CONTENTS = "some initial contents"
-        d = fn.create(CONTENTS)
-        def _created(res):
-            p = Publish(fn, None)
+        d = nm.create_mutable_file(CONTENTS)
+        def _created(fn):
+            self._fn = fn
+            p = Publish(fn, nm.storage_broker, None)
             self._p = p
             p.newdata = CONTENTS
             p.required_shares = 3
@@ -641,7 +579,7 @@ class MakeShares(unittest.TestCase):
             self.failUnlessEqual(len(block_hash_tree), 1) # very small tree
             self.failUnlessEqual(IV, "SALT"*4)
             self.failUnlessEqual(len(share_data), len("%07d" % 1))
-            self.failUnlessEqual(enc_privkey, fn.get_encprivkey())
+            self.failUnlessEqual(enc_privkey, self._fn.get_encprivkey())
         d.addCallback(_generated)
         return d
@@ -655,14 +593,16 @@ class PublishMixin:
         # later.
         self.CONTENTS = "New contents go here" * 1000
         num_peers = 20
-        self._client = FakeClient(num_peers)
-        self._storage = self._client._storage
-        d = self._client.create_mutable_file(self.CONTENTS)
+        self._storage = FakeStorage()
+        self._nodemaker = make_nodemaker(self._storage)
+        self._storage_broker = self._nodemaker.storage_broker
+        d = self._nodemaker.create_mutable_file(self.CONTENTS)
         def _created(node):
             self._fn = node
-            self._fn2 = self._client.create_node_from_uri(node.get_uri())
+            self._fn2 = self._nodemaker.create_from_cap(node.get_uri())
         d.addCallback(_created)
         return d

     def publish_multiple(self):
         self.CONTENTS = ["Contents 0",
                          "Contents 1",
@@ -671,9 +611,9 @@ class PublishMixin:
                          "Contents 3b"]
         self._copied_shares = {}
         num_peers = 20
-        self._client = FakeClient(num_peers)
-        self._storage = self._client._storage
-        d = self._client.create_mutable_file(self.CONTENTS[0]) # seqnum=1
+        self._storage = FakeStorage()
+        self._nodemaker = make_nodemaker(self._storage)
+        d = self._nodemaker.create_mutable_file(self.CONTENTS[0]) # seqnum=1
         def _created(node):
             self._fn = node
             # now create multiple versions of the same file, and accumulate
@@ -698,7 +638,7 @@ class PublishMixin:
         return d

     def _copy_shares(self, ignored, index):
-        shares = self._client._storage._peers
+        shares = self._storage._peers
         # we need a deep copy
         new_shares = {}
         for peerid in shares:
@@ -711,7 +651,7 @@ class PublishMixin:
         # versionmap maps shnums to which version (0,1,2,3,4) we want the
         # share to be at. Any shnum which is left out of the map will stay at
         # its current version.
-        shares = self._client._storage._peers
+        shares = self._storage._peers
         oldshares = self._copied_shares
         for peerid in shares:
             for shnum in shares[peerid]:
@@ -724,15 +664,19 @@ class Servermap(unittest.TestCase, PublishMixin):
     def setUp(self):
         return self.publish_one()

-    def make_servermap(self, mode=MODE_CHECK, fn=None):
+    def make_servermap(self, mode=MODE_CHECK, fn=None, sb=None):
         if fn is None:
             fn = self._fn
-        smu = ServermapUpdater(fn, Monitor(), ServerMap(), mode)
+        if sb is None:
+            sb = self._storage_broker
+        smu = ServermapUpdater(fn, sb, Monitor(),
+                               ServerMap(), mode)
         d = smu.update()
         return d

     def update_servermap(self, oldmap, mode=MODE_CHECK):
-        smu = ServermapUpdater(self._fn, Monitor(), oldmap, mode)
+        smu = ServermapUpdater(self._fn, self._storage_broker, Monitor(),
+                               oldmap, mode)
         d = smu.update()
         return d
@@ -792,9 +736,9 @@ class Servermap(unittest.TestCase, PublishMixin):
         # create a new file, which is large enough to knock the privkey out
         # of the early part of the file
         LARGE = "These are Larger contents" * 200 # about 5KB
-        d.addCallback(lambda res: self._client.create_mutable_file(LARGE))
+        d.addCallback(lambda res: self._nodemaker.create_mutable_file(LARGE))
         def _created(large_fn):
-            large_fn2 = self._client.create_node_from_uri(large_fn.get_uri())
+            large_fn2 = self._nodemaker.create_from_cap(large_fn.get_uri())
             return self.make_servermap(MODE_WRITE, large_fn2)
         d.addCallback(_created)
         d.addCallback(lambda sm: self.failUnlessOneRecoverable(sm, 10))
@@ -843,7 +787,7 @@ class Servermap(unittest.TestCase, PublishMixin):
         self.failUnlessEqual(len(sm.shares_available()), 0)

     def test_no_shares(self):
-        self._client._storage._peers = {} # delete all shares
+        self._storage._peers = {} # delete all shares
         ms = self.make_servermap
         d = defer.succeed(None)
@@ -871,7 +815,7 @@ class Servermap(unittest.TestCase, PublishMixin):
         return sm

     def test_not_quite_enough_shares(self):
-        s = self._client._storage
+        s = self._storage
         ms = self.make_servermap
         num_shares = len(s._peers)
         for peerid in s._peers:
@@ -903,10 +847,12 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
     def setUp(self):
         return self.publish_one()

-    def make_servermap(self, mode=MODE_READ, oldmap=None):
+    def make_servermap(self, mode=MODE_READ, oldmap=None, sb=None):
         if oldmap is None:
             oldmap = ServerMap()
-        smu = ServermapUpdater(self._fn, Monitor(), oldmap, mode)
+        if sb is None:
+            sb = self._storage_broker
+        smu = ServermapUpdater(self._fn, sb, Monitor(), oldmap, mode)
         d = smu.update()
         return d
@@ -979,11 +925,10 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
         return d

     def test_no_servers(self):
-        c2 = FakeClient(0)
-        self._fn._client = c2
+        sb2 = make_storagebroker(num_peers=0)
         # if there are no servers, then a MODE_READ servermap should come
         # back empty
-        d = self.make_servermap()
+        d = self.make_servermap(sb=sb2)
         def _check_servermap(servermap):
             self.failUnlessEqual(servermap.best_recoverable_version(), None)
             self.failIf(servermap.recoverable_versions())
@@ -994,8 +939,8 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
     test_no_servers.timeout = 15

     def test_no_servers_download(self):
-        c2 = FakeClient(0)
-        self._fn._client = c2
+        sb2 = make_storagebroker(num_peers=0)
+        self._fn._storage_broker = sb2
         d = self.shouldFail(UnrecoverableFileError,
                             "test_no_servers_download",
                             "no recoverable versions",
@@ -1005,7 +950,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
             # anybody should not prevent a subsequent download from working.
             # This isn't quite the webapi-driven test that #463 wants, but it
             # should be close enough.
-            self._fn._client = self._client
+            self._fn._storage_broker = self._storage_broker
             return self._fn.download_best_version()
         def _retrieved(new_contents):
             self.failUnlessEqual(new_contents, self.CONTENTS)
@@ -1489,13 +1434,17 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
         d.addCallback(_check_results)
         return d

+class DevNullDictionary(dict):
+    def __setitem__(self, key, value):
+        return
+
 class MultipleEncodings(unittest.TestCase):
     def setUp(self):
         self.CONTENTS = "New contents go here"
-        num_peers = 20
-        self._client = FakeClient(num_peers)
-        self._storage = self._client._storage
-        d = self._client.create_mutable_file(self.CONTENTS)
+        self._storage = FakeStorage()
+        self._nodemaker = make_nodemaker(self._storage, num_peers=20)
+        self._storage_broker = self._nodemaker.storage_broker
+        d = self._nodemaker.create_mutable_file(self.CONTENTS)
         def _created(node):
             self._fn = node
         d.addCallback(_created)
@@ -1504,11 +1453,11 @@ class MultipleEncodings(unittest.TestCase):

     def _encode(self, k, n, data):
         # encode 'data' into a peerid->shares dict.
-        fn2 = FastMutableFileNode(self._client)
-        # init_from_uri populates _uri, _writekey, _readkey, _storage_index,
-        # and _fingerprint
         fn = self._fn
-        fn2.init_from_uri(fn.get_uri())
+        # disable the nodecache, since for these tests we explicitly need
+        # multiple nodes pointing at the same file
+        self._nodemaker._node_cache = DevNullDictionary()
+        fn2 = self._nodemaker.create_from_cap(fn.get_uri())
         # then we copy over other fields that are normally fetched from the
         # existing shares
         fn2._pubkey = fn._pubkey
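
DevNullDictionary is worth a note: NodeMaker evidently caches nodes by cap in
_node_cache, so asking for the same cap twice normally returns the same
instance. A dict whose __setitem__ discards everything defeats that cache:

    cache = DevNullDictionary()
    cache["key"] = "value"   # silently dropped
    assert len(cache) == 0   # every lookup misses, so a new node is built
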
@@ -1518,9 +1467,9 @@ class MultipleEncodings(unittest.TestCase):
         fn2._required_shares = k
         fn2._total_shares = n

-        s = self._client._storage
+        s = self._storage
         s._peers = {} # clear existing storage
-        p2 = Publish(fn2, None)
+        p2 = Publish(fn2, self._storage_broker, None)
         d = p2.publish(data)
         def _published(res):
             shares = s._peers
@@ -1532,7 +1481,8 @@ class MultipleEncodings(unittest.TestCase):
     def make_servermap(self, mode=MODE_READ, oldmap=None):
         if oldmap is None:
             oldmap = ServerMap()
-        smu = ServermapUpdater(self._fn, Monitor(), oldmap, mode)
+        smu = ServermapUpdater(self._fn, self._storage_broker, Monitor(),
+                               oldmap, mode)
         d = smu.update()
         return d
@@ -1547,8 +1497,7 @@ class MultipleEncodings(unittest.TestCase):
         # we make a retrieval object that doesn't know what encoding
         # parameters to use
-        fn3 = FastMutableFileNode(self._client)
-        fn3.init_from_uri(self._fn.get_uri())
+        fn3 = self._nodemaker.create_from_cap(self._fn.get_uri())

         # now we upload a file through fn1, and grab its shares
         d = self._encode(3, 10, contents1)
@@ -1591,7 +1540,7 @@ class MultipleEncodings(unittest.TestCase):
         places = [2, 2, 3, 2, 1, 1, 1, 2]

         sharemap = {}
-        sb = self._client.get_storage_broker()
+        sb = self._storage_broker

         for peerid in sorted(sb.get_all_serverids()):
             peerid_s = shortnodeid_b2a(peerid)
@@ -1600,7 +1549,7 @@ class MultipleEncodings(unittest.TestCase):
                 which = places[shnum]
             else:
                 which = "x"
-            self._client._storage._peers[peerid] = peers = {}
+            self._storage._peers[peerid] = peers = {}
             in_1 = shnum in self._shares1[peerid]
             in_2 = shnum in self._shares2.get(peerid, {})
             in_3 = shnum in self._shares3.get(peerid, {})
@@ -1622,7 +1571,7 @@ class MultipleEncodings(unittest.TestCase):
         # now sort the sequence so that share 0 is returned first
         new_sequence = [sharemap[shnum]
                         for shnum in sorted(sharemap.keys())]
-        self._client._storage._sequence = new_sequence
+        self._storage._sequence = new_sequence
         log.msg("merge done")
         d.addCallback(_merge)
         d.addCallback(lambda res: fn3.download_best_version())
@@ -1789,50 +1738,43 @@ class Exceptions(unittest.TestCase):
         ucwe = UncoordinatedWriteError()
         self.failUnless("UncoordinatedWriteError" in repr(ucwe), repr(ucwe))

-# we can't do this test with a FakeClient, since it uses FakeStorageServer
-# instances which always succeed. So we need a less-fake one.
+class SameKeyGenerator:
+    def __init__(self, pubkey, privkey):
+        self.pubkey = pubkey
+        self.privkey = privkey
+    def generate(self, keysize=None):
+        return defer.succeed( (self.pubkey, self.privkey) )

-class IntentionalError(Exception):
-    pass
-
-class LocalWrapper:
-    def __init__(self, original):
-        self.original = original
-        self.broken = False
-        self.post_call_notifier = None
-    def callRemote(self, methname, *args, **kwargs):
-        def _call():
-            if self.broken:
-                raise IntentionalError("I was asked to break")
-            meth = getattr(self.original, "remote_" + methname)
-            return meth(*args, **kwargs)
-        d = fireEventually()
-        d.addCallback(lambda res: _call())
-        if self.post_call_notifier:
-            d.addCallback(self.post_call_notifier, methname)
-        return d
-
-class LessFakeClient(FakeClient):
-    def __init__(self, basedir, num_peers=10):
-        self._num_peers = num_peers
-        peerids = [tagged_hash("peerid", "%d" % i)[:20]
-                   for i in range(self._num_peers)]
-        self.storage_broker = StorageFarmBroker(None, True)
-        for peerid in peerids:
-            peerdir = os.path.join(basedir, idlib.shortnodeid_b2a(peerid))
-            make_dirs(peerdir)
-            ss = StorageServer(peerdir, peerid)
-            lw = LocalWrapper(ss)
-            self.storage_broker.test_add_server(peerid, lw)
-        self.nodeid = "fakenodeid"
+class FirstServerGetsKilled:
+    done = False
+    def notify(self, retval, wrapper, methname):
+        if not self.done:
+            wrapper.broken = True
+            self.done = True
+        return retval

-class Problems(unittest.TestCase, testutil.ShouldFailMixin):
+class FirstServerGetsDeleted:
+    def __init__(self):
+        self.done = False
+        self.silenced = None
+    def notify(self, retval, wrapper, methname):
+        if not self.done:
+            # this query will work, but later queries should think the share
+            # has been deleted
+            self.done = True
+            self.silenced = wrapper
+            return retval
+        if wrapper == self.silenced:
+            assert methname == "slot_testv_and_readv_and_writev"
+            return (True, {})
+        return retval
+
+class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
     def test_publish_surprise(self):
-        basedir = os.path.join("mutable/CollidingWrites/test_surprise")
-        self.client = LessFakeClient(basedir)
-        d = self.client.create_mutable_file("contents 1")
+        self.basedir = "mutable/Problems/test_publish_surprise"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker
+        d = nm.create_mutable_file("contents 1")
         def _created(n):
             d = defer.succeed(None)
             d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
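
FirstServerGetsKilled and FirstServerGetsDeleted both plug into the
post_call_notifier hook that the no-network grid's server wrappers appear to
expose: after each remote call the wrapper invokes notify(retval, wrapper,
methname) and hands the return value back to the caller. A sketch of another
notifier in the same style (CountingNotifier is hypothetical):

    class CountingNotifier:
        # tallies remote calls per method name without altering any results
        def __init__(self):
            self.counts = {}
        def notify(self, retval, wrapper, methname):
            self.counts[methname] = self.counts.get(methname, 0) + 1
            return retval

    counter = CountingNotifier()
    for (serverid, ss) in nm.storage_broker.get_all_servers():
        ss.post_call_notifier = counter.notify
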
@@ -1857,9 +1799,10 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
         return d

     def test_retrieve_surprise(self):
-        basedir = os.path.join("mutable/CollidingWrites/test_retrieve")
-        self.client = LessFakeClient(basedir)
-        d = self.client.create_mutable_file("contents 1")
+        self.basedir = "mutable/Problems/test_retrieve_surprise"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker
+        d = nm.create_mutable_file("contents 1")
         def _created(n):
             d = defer.succeed(None)
             d.addCallback(lambda res: n.get_servermap(MODE_READ))
@@ -1893,9 +1836,10 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
         # upload using the old servermap. The last upload should fail with an
         # UncoordinatedWriteError, because of the shares that didn't appear
         # in the servermap.
-        basedir = os.path.join("mutable/CollidingWrites/test_unexpexted_shares")
-        self.client = LessFakeClient(basedir)
-        d = self.client.create_mutable_file("contents 1")
+        self.basedir = "mutable/Problems/test_unexpected_shares"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker
+        d = nm.create_mutable_file("contents 1")
         def _created(n):
             d = defer.succeed(None)
             d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
@@ -1904,7 +1848,7 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
                 self.old_map = smap
                 # now shut down one of the servers
                 peer0 = list(smap.make_sharemap()[0])[0]
-                self.client.debug_remove_connection(peer0)
+                self.g.remove_server(peer0)
                 # then modify the file, leaving the old map untouched
                 log.msg("starting winning write")
                 return n.overwrite("contents 2")
@@ -1926,43 +1870,57 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
         # Break one server, then create the file: the initial publish should
         # complete with an alternate server. Breaking a second server should
         # not prevent an update from succeeding either.
-        basedir = os.path.join("mutable/CollidingWrites/test_bad_server")
-        self.client = LessFakeClient(basedir, 20)
+        self.basedir = "mutable/Problems/test_bad_server"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker

         # to make sure that one of the initial peers is broken, we have to
-        # get creative. We create the keys, so we can figure out the storage
-        # index, but we hold off on doing the initial publish until we've
-        # broken the server on which the first share wants to be stored.
-        n = FastMutableFileNode(self.client)
-        d = defer.succeed(None)
-        d.addCallback(n._generate_pubprivkeys, keysize=522)
-        d.addCallback(n._generated)
+        # get creative. We create an RSA key and compute its storage-index.
+        # Then we make a KeyGenerator that always returns that one key, and
+        # use it to create the mutable file. This will get easier when we can
+        # use #467 static-server-selection to disable permutation and force
+        # the choice of server for share[0].
+
+        d = nm.key_generator.generate(522)
+        def _got_key( (pubkey, privkey) ):
+            nm.key_generator = SameKeyGenerator(pubkey, privkey)
+            pubkey_s = pubkey.serialize()
+            privkey_s = privkey.serialize()
+            u = uri.WriteableSSKFileURI(ssk_writekey_hash(privkey_s),
+                                        ssk_pubkey_fingerprint_hash(pubkey_s))
+            self._storage_index = u.storage_index
+        d.addCallback(_got_key)
         def _break_peer0(res):
-            si = n.get_storage_index()
-            peerlist = self.client.storage_broker.get_servers_for_index(si)
+            si = self._storage_index
+            peerlist = nm.storage_broker.get_servers_for_index(si)
             peerid0, connection0 = peerlist[0]
             peerid1, connection1 = peerlist[1]
             connection0.broken = True
             self.connection1 = connection1
         d.addCallback(_break_peer0)
-        # now let the initial publish finally happen
-        d.addCallback(lambda res: n._upload("contents 1", None))
-        # that ought to work
-        d.addCallback(lambda res: n.download_best_version())
-        d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
-        # now break the second peer
-        def _break_peer1(res):
-            self.connection1.broken = True
-        d.addCallback(_break_peer1)
-        d.addCallback(lambda res: n.overwrite("contents 2"))
-        # that ought to work too
-        d.addCallback(lambda res: n.download_best_version())
-        d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
-        def _explain_error(f):
-            print f
-            if f.check(NotEnoughServersError):
-                print "first_error:", f.value.first_error
-            return f
-        d.addErrback(_explain_error)
+        # now "create" the file, using the pre-established key, and let the
+        # initial publish finally happen
+        d.addCallback(lambda res: nm.create_mutable_file("contents 1"))
+        # that ought to work
+        def _got_node(n):
+            d = n.download_best_version()
+            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
+            # now break the second peer
+            def _break_peer1(res):
+                self.connection1.broken = True
+            d.addCallback(_break_peer1)
+            d.addCallback(lambda res: n.overwrite("contents 2"))
+            # that ought to work too
+            d.addCallback(lambda res: n.download_best_version())
+            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
+            def _explain_error(f):
+                print f
+                if f.check(NotEnoughServersError):
+                    print "first_error:", f.value.first_error
+                return f
+            d.addErrback(_explain_error)
+            return d
+        d.addCallback(_got_node)
         return d

     def test_bad_server_overlap(self):
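
The _got_key callback above leans on the SSK cap derivation: the writekey is
hashed from the serialized private key, the fingerprint from the public key,
and the storage index falls out of the resulting URI. Restated with named
intermediates (the same calls as above, nothing new assumed):

    writekey = ssk_writekey_hash(privkey.serialize())
    fingerprint = ssk_pubkey_fingerprint_hash(pubkey.serialize())
    u = uri.WriteableSSKFileURI(writekey, fingerprint)
    si = u.storage_index   # what get_servers_for_index() permutes over
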
@@ -1975,20 +1933,21 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
         # Break one server, then create the file: the initial publish should
         # complete with an alternate server. Breaking a second server should
         # not prevent an update from succeeding either.
-        basedir = os.path.join("mutable/CollidingWrites/test_bad_server")
-        self.client = LessFakeClient(basedir, 10)
-        sb = self.client.get_storage_broker()
+        self.basedir = "mutable/Problems/test_bad_server_overlap"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker
+        sb = nm.storage_broker

-        peerids = list(sb.get_all_serverids())
-        self.client.debug_break_connection(peerids[0])
+        peerids = [serverid for (serverid,ss) in sb.get_all_servers()]
+        self.g.break_server(peerids[0])

-        d = self.client.create_mutable_file("contents 1")
+        d = nm.create_mutable_file("contents 1")
         def _created(n):
             d = n.download_best_version()
             d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
             # now break one of the remaining servers
             def _break_second_server(res):
-                self.client.debug_break_connection(peerids[1])
+                self.g.break_server(peerids[1])
             d.addCallback(_break_second_server)
             d.addCallback(lambda res: n.overwrite("contents 2"))
             # that ought to work too
@@ -2000,25 +1959,28 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):

     def test_publish_all_servers_bad(self):
         # Break all servers: the publish should fail
-        basedir = os.path.join("mutable/CollidingWrites/publish_all_servers_bad")
-        self.client = LessFakeClient(basedir, 20)
-        sb = self.client.get_storage_broker()
-        for peerid in sb.get_all_serverids():
-            self.client.debug_break_connection(peerid)
+        self.basedir = "mutable/Problems/test_publish_all_servers_bad"
+        self.set_up_grid()
+        nm = self.g.clients[0].nodemaker
+        for (serverid,ss) in nm.storage_broker.get_all_servers():
+            ss.broken = True

         d = self.shouldFail(NotEnoughServersError,
                             "test_publish_all_servers_bad",
                             "Ran out of non-bad servers",
-                            self.client.create_mutable_file, "contents")
+                            nm.create_mutable_file, "contents")
         return d

     def test_publish_no_servers(self):
         # no servers at all: the publish should fail
-        basedir = os.path.join("mutable/CollidingWrites/publish_no_servers")
-        self.client = LessFakeClient(basedir, 0)
+        self.basedir = "mutable/Problems/test_publish_no_servers"
+        self.set_up_grid(num_servers=0)
+        nm = self.g.clients[0].nodemaker
         d = self.shouldFail(NotEnoughServersError,
                             "test_publish_no_servers",
                             "Ran out of non-bad servers",
-                            self.client.create_mutable_file, "contents")
+                            nm.create_mutable_file, "contents")
         return d
     test_publish_no_servers.timeout = 30
@@ -2026,68 +1988,61 @@ class Problems(unittest.TestCase, testutil.ShouldFailMixin):
     def test_privkey_query_error(self):
         # when a servermap is updated with MODE_WRITE, it tries to get the
         # privkey. Something might go wrong during this query attempt.
-        self.client = FakeClient(20)
+        # Exercise the code in _privkey_query_failed which tries to handle
+        # such an error.
+        self.basedir = "mutable/Problems/test_privkey_query_error"
+        self.set_up_grid(num_servers=20)
+        nm = self.g.clients[0].nodemaker
+        nm._node_cache = DevNullDictionary() # disable the nodecache

         # we need some contents that are large enough to push the privkey out
         # of the early part of the file
-        LARGE = "These are Larger contents" * 200 # about 5KB
-        d = self.client.create_mutable_file(LARGE)
+        LARGE = "These are Larger contents" * 2000 # about 50KB
+        d = nm.create_mutable_file(LARGE)
         def _created(n):
             self.uri = n.get_uri()
-            self.n2 = self.client.create_node_from_uri(self.uri)
-            # we start by doing a map update to figure out which is the first
-            # server.
-            return n.get_servermap(MODE_WRITE)
+            self.n2 = nm.create_from_cap(self.uri)
+
+            # When a mapupdate is performed on a node that doesn't yet know
+            # the privkey, a short read is sent to a batch of servers, to get
+            # the verinfo and (hopefully, if the file is short enough) the
+            # encprivkey. Our file is too large to let this first read
+            # contain the encprivkey. Each non-encprivkey-bearing response
+            # that arrives (until the node gets the encprivkey) will trigger
+            # a second read to specifically read the encprivkey.
+            #
+            # So, to exercise this case:
+            #  1. notice which server gets a read() call first
+            #  2. tell that server to start throwing errors
+            killer = FirstServerGetsKilled()
+            for (serverid,ss) in nm.storage_broker.get_all_servers():
+                ss.post_call_notifier = killer.notify
         d.addCallback(_created)
-        d.addCallback(lambda res: fireEventually(res))
-        def _got_smap1(smap):
-            peer0 = list(smap.make_sharemap()[0])[0]
-            # we tell the server to respond to this peer first, so that it
-            # will be asked for the privkey first
-            self.client._storage._sequence = [peer0]
-            # now we make the peer fail their second query
-            self.client._storage._special_answers[peer0] = ["normal", "fail"]
-        d.addCallback(_got_smap1)
         # now we update a servermap from a new node (which doesn't have the
-        # privkey yet, forcing it to use a separate privkey query). Each
-        # query response will trigger a privkey query, and since we're using
-        # _sequence to make the peer0 response come back first, we'll send it
-        # a privkey query first, and _sequence will again ensure that the
-        # peer0 query will also come back before the others, and then
-        # _special_answers will make sure that the query raises an exception.
-        # The whole point of these hijinks is to exercise the code in
-        # _privkey_query_failed. Note that the map-update will succeed, since
-        # we'll just get a copy from one of the other shares.
+        # privkey yet, forcing it to use a separate privkey query). Note that
+        # the map-update will succeed, since we'll just get a copy from one
+        # of the other shares.
         d.addCallback(lambda res: self.n2.get_servermap(MODE_WRITE))
-        # Using FakeStorage._sequence means there will be read requests still
-        # floating around.. wait for them to retire
-        def _cancel_timer(res):
-            if self.client._storage._pending_timer:
-                self.client._storage._pending_timer.cancel()
-            return res
-        d.addBoth(_cancel_timer)

         return d

     def test_privkey_query_missing(self):
         # like test_privkey_query_error, but the shares are deleted by the
         # second query, instead of raising an exception.
-        self.client = FakeClient(20)
-        LARGE = "These are Larger contents" * 200 # about 5KB
-        d = self.client.create_mutable_file(LARGE)
+        self.basedir = "mutable/Problems/test_privkey_query_missing"
+        self.set_up_grid(num_servers=20)
+        nm = self.g.clients[0].nodemaker
+        LARGE = "These are Larger contents" * 2000 # about 50KB
+        nm._node_cache = DevNullDictionary() # disable the nodecache

+        d = nm.create_mutable_file(LARGE)
         def _created(n):
             self.uri = n.get_uri()
-            self.n2 = self.client.create_node_from_uri(self.uri)
-            return n.get_servermap(MODE_WRITE)
+            self.n2 = nm.create_from_cap(self.uri)
+            deleter = FirstServerGetsDeleted()
+            for (serverid,ss) in nm.storage_broker.get_all_servers():
+                ss.post_call_notifier = deleter.notify
         d.addCallback(_created)
-        d.addCallback(lambda res: fireEventually(res))
-        def _got_smap1(smap):
-            peer0 = list(smap.make_sharemap()[0])[0]
-            self.client._storage._sequence = [peer0]
-            self.client._storage._special_answers[peer0] = ["normal", "none"]
-        d.addCallback(_got_smap1)
         d.addCallback(lambda res: self.n2.get_servermap(MODE_WRITE))
-        def _cancel_timer(res):
-            if self.client._storage._pending_timer:
-                self.client._storage._pending_timer.cancel()
-            return res
-        d.addBoth(_cancel_timer)
         return d

--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py

@@ -1179,17 +1179,18 @@ class SystemTest(SystemTestMixin, unittest.TestCase):
         def _got_status(res):
             # find an interesting upload and download to look at. LIT files
             # are not interesting.
-            for ds in self.clients[0].list_all_download_statuses():
+            h = self.clients[0].get_history()
+            for ds in h.list_all_download_statuses():
                 if ds.get_size() > 200:
                     self._down_status = ds.get_counter()
-            for us in self.clients[0].list_all_upload_statuses():
+            for us in h.list_all_upload_statuses():
                 if us.get_size() > 200:
                     self._up_status = us.get_counter()
-            rs = list(self.clients[0].list_all_retrieve_statuses())[0]
+            rs = list(h.list_all_retrieve_statuses())[0]
             self._retrieve_status = rs.get_counter()
-            ps = list(self.clients[0].list_all_publish_statuses())[0]
+            ps = list(h.list_all_publish_statuses())[0]
             self._publish_status = ps.get_counter()
-            us = list(self.clients[0].list_all_mapupdate_statuses())[0]
+            us = list(h.list_all_mapupdate_statuses())[0]
             self._update_status = us.get_counter()

             # and that there are some upload- and download- status pages
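
get_history() now fronts every per-operation status list that used to hang
off the client. The aggregation this test assumes (method names taken from
the FakeHistory in test_web.py below):

    h = self.clients[0].get_history()
    statuses = {
        "download":  list(h.list_all_download_statuses()),
        "upload":    list(h.list_all_upload_statuses()),
        "mapupdate": list(h.list_all_mapupdate_statuses()),
        "publish":   list(h.list_all_publish_statuses()),
        "retrieve":  list(h.list_all_retrieve_statuses()),
    }
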

--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py

@@ -8,7 +8,7 @@ from twisted.internet import defer
 from foolscap.api import fireEventually
 import allmydata # for __full_version__
-from allmydata import uri, monitor
+from allmydata import uri, monitor, client
 from allmydata.immutable import upload
 from allmydata.interfaces import IFileURI, FileTooLargeError, NoSharesError, \
      NotEnoughSharesError
@@ -187,11 +187,7 @@ class FakeClient:
         return self.DEFAULT_ENCODING_PARAMETERS
     def get_storage_broker(self):
         return self.storage_broker
-
-    def get_renewal_secret(self):
-        return ""
-    def get_cancel_secret(self):
-        return ""
+    _secret_holder = client.SecretHolder("lease secret")

 class GotTooFarError(Exception):
     pass

--- a/src/allmydata/test/test_web.py
+++ b/src/allmydata/test/test_web.py

@@ -11,19 +11,17 @@ from allmydata import interfaces, uri, webish
 from allmydata.storage.shares import get_share_file
 from allmydata.storage_client import StorageFarmBroker
 from allmydata.immutable import upload, download
+from allmydata.nodemaker import NodeMaker
 from allmydata.unknown import UnknownNode
 from allmydata.web import status, common
 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
 from allmydata.util import fileutil, base32
-from allmydata.util.assertutil import precondition
-from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
-     FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
-from allmydata.interfaces import IURI, IDirectoryURI, IReadonlyDirectoryURI, \
-     IFileURI, IMutableFileURI, IMutableFileNode, UnhandledCapTypeError
+from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
+     create_chk_filenode, WebErrorMixin, ShouldFailMixin
+from allmydata.interfaces import IMutableFileNode
 from allmydata.mutable import servermap, publish, retrieve
 import common_util as testutil
 from allmydata.test.no_network import GridTestMixin
 from allmydata.test.common_web import HTTPClientGETFactory, \
      HTTPClientHEADFactory
@@ -37,69 +35,37 @@ class FakeStatsProvider:
         stats = {'stats': {}, 'counters': {}}
         return stats

-class FakeClient(service.MultiService):
-    nodeid = "fake_nodeid"
-    nickname = "fake_nickname"
-    basedir = "fake_basedir"
-    def get_versions(self):
-        return {'allmydata': "fake",
-                'foolscap': "fake",
-                'twisted': "fake",
-                'zfec': "fake",
-                }
-    introducer_furl = "None"
-    _all_upload_status = [upload.UploadStatus()]
-    _all_download_status = [download.DownloadStatus()]
-    _all_mapupdate_statuses = [servermap.UpdateStatus()]
-    _all_publish_statuses = [publish.PublishStatus()]
-    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
-    convergence = "some random string"
-    stats_provider = FakeStatsProvider()
-    def connected_to_introducer(self):
-        return False
-    storage_broker = StorageFarmBroker(None, permute_peers=True)
-    def get_storage_broker(self):
-        return self.storage_broker
-
-    def create_node_from_uri(self, auri, readcap=None):
-        if not auri:
-            auri = readcap
-        precondition(isinstance(auri, str), auri)
-        u = uri.from_string(auri)
-        if (IDirectoryURI.providedBy(u) or IReadonlyDirectoryURI.providedBy(u)):
-            return FakeDirectoryNode(self).init_from_uri(u)
-        if IFileURI.providedBy(u):
-            return FakeCHKFileNode(u, self)
-        if IMutableFileURI.providedBy(u):
-            return FakeMutableFileNode(self).init_from_uri(u)
-        raise UnhandledCapTypeError("cap '%s' is recognized, but has no Node" % auri)
-
-    def create_empty_dirnode(self):
-        n = FakeDirectoryNode(self)
-        d = n.create()
-        d.addCallback(lambda res: n)
-        return d
-
-    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
-    def create_mutable_file(self, contents=""):
-        n = FakeMutableFileNode(self)
+class FakeNodeMaker(NodeMaker):
+    def _create_lit(self, cap):
+        return FakeCHKFileNode(cap)
+    def _create_immutable(self, cap):
+        return FakeCHKFileNode(cap)
+    def _create_mutable(self, cap):
+        return FakeMutableFileNode(None, None, None, None).init_from_uri(cap)
+    def create_mutable_file(self, contents="", keysize=None):
+        n = FakeMutableFileNode(None, None, None, None)
         return n.create(contents)

+class FakeUploader:
     def upload(self, uploadable):
         d = uploadable.get_size()
         d.addCallback(lambda size: uploadable.read(size))
         def _got_data(datav):
             data = "".join(datav)
-            n = create_chk_filenode(self, data)
+            n = create_chk_filenode(data)
             results = upload.UploadResults()
             results.uri = n.get_uri()
             return results
         d.addCallback(_got_data)
         return d

+class FakeHistory:
+    _all_upload_status = [upload.UploadStatus()]
+    _all_download_status = [download.DownloadStatus()]
+    _all_mapupdate_statuses = [servermap.UpdateStatus()]
+    _all_publish_statuses = [publish.PublishStatus()]
+    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
+
     def list_all_upload_statuses(self):
         return self._all_upload_status
     def list_all_download_statuses(self):
@@ -113,6 +79,53 @@ class FakeClient(service.MultiService):
     def list_all_helper_statuses(self):
         return []

+class FakeClient(service.MultiService):
+    def __init__(self):
+        service.MultiService.__init__(self)
+        self.uploader = FakeUploader()
+        self.nodemaker = FakeNodeMaker(None, None, None,
+                                       self.uploader, None, None,
+                                       None, None)
+
+    nodeid = "fake_nodeid"
+    nickname = "fake_nickname"
+    basedir = "fake_basedir"
+    def get_versions(self):
+        return {'allmydata': "fake",
+                'foolscap': "fake",
+                'twisted': "fake",
+                'zfec': "fake",
+                }
+    introducer_furl = "None"
+    convergence = "some random string"
+    stats_provider = FakeStatsProvider()
+    def connected_to_introducer(self):
+        return False
+    storage_broker = StorageFarmBroker(None, permute_peers=True)
+    def get_storage_broker(self):
+        return self.storage_broker
+    _secret_holder = None
+    def get_encoding_parameters(self):
+        return {"k": 3, "n": 10}
+    def get_history(self):
+        return FakeHistory()
+
+    def create_node_from_uri(self, writecap, readcap=None):
+        return self.nodemaker.create_from_cap(writecap, readcap)
+
+    def create_empty_dirnode(self):
+        return self.nodemaker.create_new_mutable_directory()
+
+    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
+    def create_mutable_file(self, contents=""):
+        return self.nodemaker.create_mutable_file(contents)
+
+    def upload(self, uploadable):
+        return self.uploader.upload(uploadable)
+
 class WebMixin(object):
     def setUp(self):
         self.s = FakeClient()
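
The fake stack now composes the same way the real one does: FakeClient owns
a FakeUploader and a FakeNodeMaker and merely delegates. A sketch of the
flow the web tests exercise, assuming upload.Data takes (data, convergence)
as in the real uploadable API:

    c = FakeClient()
    d = c.upload(upload.Data("some bytes", convergence="some random string"))
    def _uploaded(results):
        # results.uri is a cap string; turn it back into a node
        return c.create_node_from_uri(results.uri)
    d.addCallback(_uploaded)
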
@@ -190,7 +203,7 @@ class WebMixin(object):

     def makefile(self, number):
         contents = "contents of file %s\n" % number
-        n = create_chk_filenode(self.s, contents)
+        n = create_chk_filenode(contents)
         return contents, n, n.get_uri()

     def tearDown(self):
@@ -456,11 +469,12 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
         return d

     def test_status(self):
-        dl_num = self.s.list_all_download_statuses()[0].get_counter()
-        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
-        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
-        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
-        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
+        h = self.s.get_history()
+        dl_num = h.list_all_download_statuses()[0].get_counter()
+        ul_num = h.list_all_upload_statuses()[0].get_counter()
+        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
+        pub_num = h.list_all_publish_statuses()[0].get_counter()
+        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
         d = self.GET("/status", followRedirect=True)
         def _check(res):
             self.failUnless('Upload and Download Status' in res, res)
@@ -652,8 +666,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
         base = "/file/%s" % urllib.quote(verifier_cap)
         # client.create_node_from_uri() can't handle verify-caps
         d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
-                             "400 Bad Request",
-                             "is not a valid file- or directory- cap",
+                             "400 Bad Request", "is not a file-cap",
                              self.GET, base)
         return d
@ -664,7 +677,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
# client.create_node_from_uri() can't handle verify-caps # client.create_node_from_uri() can't handle verify-caps
d = self.shouldFail2(error.Error, "test_GET_unhandled_URI", d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
"400 Bad Request", "400 Bad Request",
"is not a valid file- or directory- cap", "GET unknown URI type: can only do t=info",
self.GET, base) self.GET, base)
return d return d
@@ -704,7 +717,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
     def test_PUT_overwrite_only_files(self):
         # create a directory, put a file in that directory.
-        contents, n, uri = self.makefile(8)
+        contents, n, filecap = self.makefile(8)
         d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
         d.addCallback(lambda res:
                       self.PUT(self.public_url + "/foo/dir/file1.txt",
@@ -713,13 +726,13 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
         # (this should work)
         d.addCallback(lambda res:
                       self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
-                               uri))
+                               filecap))
         d.addCallback(lambda res:
                       self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                        "There was already a child by that name, and you asked me "
                                        "to not replace it",
                                        self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
-                                       uri))
+                                       filecap))
         return d

     def test_PUT_NEWFILEURL(self):
@@ -1351,22 +1364,22 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
     def test_POST_upload_no_link_mutable(self):
         d = self.POST("/uri", t="upload", mutable="true",
                       file=("new.txt", self.NEWFILE_CONTENTS))
-        def _check(new_uri):
-            new_uri = new_uri.strip()
-            self.new_uri = new_uri
-            u = IURI(new_uri)
-            self.failUnless(IMutableFileURI.providedBy(u))
+        def _check(filecap):
+            filecap = filecap.strip()
+            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
+            self.filecap = filecap
+            u = uri.WriteableSSKFileURI.init_from_string(filecap)
             self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
-            n = self.s.create_node_from_uri(new_uri)
+            n = self.s.create_node_from_uri(filecap)
             return n.download_best_version()
         d.addCallback(_check)
         def _check2(data):
             self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
-            return self.GET("/uri/%s" % urllib.quote(self.new_uri))
+            return self.GET("/uri/%s" % urllib.quote(self.filecap))
         d.addCallback(_check2)
         def _check3(data):
             self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
-            return self.GET("/file/%s" % urllib.quote(self.new_uri))
+            return self.GET("/file/%s" % urllib.quote(self.filecap))
         d.addCallback(_check3)
         def _check4(data):
             self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
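
Here and in test_PUT_NEWFILE_URI_mutable below, the returned cap is parsed explicitly rather than through the IURI adapter. For reference, a sketch of that explicit parse (RESPONSE_BODY is a hypothetical stand-in for the write-cap string the request returned):

from allmydata import uri

filecap = RESPONSE_BODY.strip()   # e.g. "URI:SSK:<writekey>:<fingerprint>"
u = uri.WriteableSSKFileURI.init_from_string(filecap)
si = u.storage_index              # storage index, used above to look up fake shares
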
@@ -1526,7 +1539,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
     def test_POST_upload_mutable_toobig(self):
         d = self.shouldFail2(error.Error,
-                             "test_POST_upload_no_link_mutable_toobig",
+                             "test_POST_upload_mutable_toobig",
                              "413 Request Entity Too Large",
                              "SDMF is limited to one segment, and 10001 > 10000",
                              self.POST,
@@ -2361,28 +2374,22 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
     def test_PUT_NEWFILE_URI_mutable(self):
         file_contents = "New file contents here\n"
         d = self.PUT("/uri?mutable=true", file_contents)
-        def _check_mutable(uri):
-            uri = uri.strip()
-            u = IURI(uri)
-            self.failUnless(IMutableFileURI.providedBy(u))
+        def _check1(filecap):
+            filecap = filecap.strip()
+            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
+            self.filecap = filecap
+            u = uri.WriteableSSKFileURI.init_from_string(filecap)
             self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
-            n = self.s.create_node_from_uri(uri)
+            n = self.s.create_node_from_uri(filecap)
             return n.download_best_version()
-        d.addCallback(_check_mutable)
-        def _check2_mutable(data):
+        d.addCallback(_check1)
+        def _check2(data):
             self.failUnlessEqual(data, file_contents)
-        d.addCallback(_check2_mutable)
-        return d
-
-        def _check(uri):
-            self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
-            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
-                                 file_contents)
-            return self.GET("/uri/%s" % uri)
-        d.addCallback(_check)
-        def _check2(res):
-            self.failUnlessEqual(res, file_contents)
+            return self.GET("/uri/%s" % urllib.quote(self.filecap))
         d.addCallback(_check2)
+        def _check3(res):
+            self.failUnlessEqual(res, file_contents)
+        d.addCallback(_check3)
         return d

     def test_PUT_mkdir(self):


@@ -467,8 +467,6 @@ def from_string(s):
         return DirectoryURIVerifier.init_from_string(s)
     return UnknownURI(s)

-registerAdapter(from_string, str, IURI)
-
 def is_uri(s):
     try:
         uri = from_string(s)
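
With the registerAdapter call gone, IURI(capstring) no longer implicitly parses strings; callers must ask for the parse explicitly, and unrecognized caps come back as UnknownURI objects instead of failing inside the adapter machinery. Roughly:

# before: implicit zope.interface adaptation from str to IURI
u = IURI(capstring)
# after: explicit parse; unrecognized caps yield an UnknownURI
u = uri.from_string(capstring)
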


@@ -1084,6 +1084,6 @@ class UnknownNodeHandler(RenderMixin, rend.Page):
         t = get_arg(req, "t", "").strip()
         if t == "info":
             return MoreInfo(self.node)
-        raise WebError("GET unknown: can only do t=info, not t=%s" % t)
+        raise WebError("GET unknown URI type: can only do t=info, not t=%s" % t)


@@ -99,6 +99,7 @@ class FileHandler(rend.Page):
         try:
             node = self.client.create_node_from_uri(name)
         except (TypeError, UnhandledCapTypeError, AssertionError):
+            # I think this can no longer be reached
             raise WebError("'%s' is not a valid file- or directory- cap"
                            % name)
         if not IFileNode.providedBy(node):
@@ -153,7 +154,7 @@ class Root(rend.Page):
         self.child_file = FileHandler(client)
         self.child_named = FileHandler(client)
-        self.child_status = status.Status(client) # TODO: use client.history
+        self.child_status = status.Status(client.get_history())
         self.child_statistics = status.Statistics(client.stats_provider)

     def child_helper_status(self, ctx):


@@ -764,9 +764,9 @@ class Status(rend.Page):
     docFactory = getxmlfile("status.xhtml")
     addSlash = True

-    def __init__(self, client):
-        rend.Page.__init__(self, client)
-        self.client = client
+    def __init__(self, history):
+        rend.Page.__init__(self, history)
+        self.history = history

     def renderHTTP(self, ctx):
         req = inevow.IRequest(ctx)
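
Status now takes only the History object it actually reads, per POLA, so it can be driven without a full Client. A construction sketch using the FakeHistory from test_web.py above:

from allmydata.web import status

s = status.Status(FakeHistory())    # no Client reference needed
ops = list(s._get_all_statuses())   # chains all five status lists plus helper statuses
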
@@ -804,13 +804,13 @@ class Status(rend.Page):
         return simplejson.dumps(data, indent=1) + "\n"

     def _get_all_statuses(self):
-        c = self.client
-        return itertools.chain(c.list_all_upload_statuses(),
-                               c.list_all_download_statuses(),
-                               c.list_all_mapupdate_statuses(),
-                               c.list_all_publish_statuses(),
-                               c.list_all_retrieve_statuses(),
-                               c.list_all_helper_statuses(),
+        h = self.history
+        return itertools.chain(h.list_all_upload_statuses(),
+                               h.list_all_download_statuses(),
+                               h.list_all_mapupdate_statuses(),
+                               h.list_all_publish_statuses(),
+                               h.list_all_retrieve_statuses(),
+                               h.list_all_helper_statuses(),
                                )

     def data_active_operations(self, ctx, data):
@@ -887,30 +887,30 @@ class Status(rend.Page):
         return ctx.tag

     def childFactory(self, ctx, name):
-        client = self.client
+        h = self.history
         stype,count_s = name.split("-")
         count = int(count_s)
         if stype == "up":
-            for s in itertools.chain(client.list_all_upload_statuses(),
-                                     client.list_all_helper_statuses()):
+            for s in itertools.chain(h.list_all_upload_statuses(),
+                                     h.list_all_helper_statuses()):
                 # immutable-upload helpers use the same status object as a
                 # regular immutable-upload
                 if s.get_counter() == count:
                     return UploadStatusPage(s)
         if stype == "down":
-            for s in client.list_all_download_statuses():
+            for s in h.list_all_download_statuses():
                 if s.get_counter() == count:
                     return DownloadStatusPage(s)
         if stype == "mapupdate":
-            for s in client.list_all_mapupdate_statuses():
+            for s in h.list_all_mapupdate_statuses():
                 if s.get_counter() == count:
                     return MapupdateStatusPage(s)
         if stype == "publish":
-            for s in client.list_all_publish_statuses():
+            for s in h.list_all_publish_statuses():
                 if s.get_counter() == count:
                     return PublishStatusPage(s)
         if stype == "retrieve":
-            for s in client.list_all_retrieve_statuses():
+            for s in h.list_all_retrieve_statuses():
                 if s.get_counter() == count:
                     return RetrieveStatusPage(s)
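
For reference, childFactory's lookup can be summarized as follows (a simplified sketch: the real code also chains helper statuses into the "up" list and wraps each match in the appropriate *StatusPage):

def find_status(h, name):
    # "up-17" -> the UploadStatus whose get_counter() == 17
    stype, count_s = name.split("-")
    count = int(count_s)
    lists = {"up": h.list_all_upload_statuses,
             "down": h.list_all_download_statuses,
             "mapupdate": h.list_all_mapupdate_statuses,
             "publish": h.list_all_publish_statuses,
             "retrieve": h.list_all_retrieve_statuses}
    for s in lists[stype]():
        if s.get_counter() == count:
            return s
    return None
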