Test coverage for NodeStorages; Mocked LocalFile and S3 Backends

pull/476/head
Kieran Prasch 2018-10-09 19:01:09 -07:00
parent ffc96dcc09
commit 1edc9773d7
6 changed files with 210 additions and 40 deletions

@@ -1,27 +1,26 @@
import os
import random
import time
from collections import defaultdict
from collections import deque
from contextlib import suppress
from logging import Logger
from logging import getLogger
from typing import Dict, ClassVar, Set
from typing import Tuple
from typing import Union, List
from tempfile import TemporaryDirectory
import maya
import requests
import time
from constant_sorrow import constants, default_constant_splitter
from eth_keys import KeyAPI as EthKeyAPI
from eth_utils import to_checksum_address, to_canonical_address
from requests.exceptions import SSLError
from twisted.internet import reactor
from twisted.internet import task
from typing import Dict, ClassVar, Set
from typing import Tuple
from typing import Union, List
from umbral.keys import UmbralPublicKey
from umbral.signing import Signature
from nucypher.config.node import NodeConfiguration
from nucypher.config.storages import InMemoryNodeStorage
from nucypher.crypto.api import encrypt_and_sign
from nucypher.crypto.kits import UmbralMessageKit
@@ -79,14 +78,13 @@ class Learner:
self._learning_listeners = defaultdict(list)
self._node_ids_to_learn_about_immediately = set()
self.known_certificates_dir = known_certificates_dir
self.known_certificates_dir = known_certificates_dir or TemporaryDirectory("nucypher-tmp-certs-").name
self.__known_nodes = dict()
# Read
if node_storage is None:
node_storage = self.__DEFAULT_NODE_STORAGE(federated_only=self.federated_only, #TODO: remove federated_only
serializer=NodeConfiguration.NODE_SERIALIZER,
deserializer=NodeConfiguration.NODE_DESERIALIZER)
character_class=self.__class__)
self.node_storage = node_storage
if save_metadata and node_storage is constants.NO_STORAGE_AVAILIBLE:

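The Learner hunk above changes two defaults: a missing known_certificates_dir now falls back to a temporary directory, and a missing node_storage is built from the class-level default, which is now handed the character class instead of explicit serializer/deserializer callables. A minimal sketch of that wiring follows; FakeStorage and FakeLearner are made-up stand-ins, not nucypher classes.

# Minimal sketch of the default-storage wiring above (illustrative names only).
from tempfile import TemporaryDirectory


class FakeStorage:
    """Stand-in for a NodeStorage backend bound to a character class."""
    def __init__(self, character_class, federated_only):
        self.character_class = character_class
        self.federated_only = federated_only


class FakeLearner:
    __DEFAULT_NODE_STORAGE = FakeStorage

    def __init__(self, known_certificates_dir=None, node_storage=None, federated_only=True):
        self.federated_only = federated_only
        # No certificates directory supplied: fall back to a throwaway one.
        self.known_certificates_dir = (known_certificates_dir
                                       or TemporaryDirectory("nucypher-tmp-certs-").name)
        if node_storage is None:
            # The default backend now receives the character class itself
            # rather than serializer/deserializer callables.
            node_storage = self.__DEFAULT_NODE_STORAGE(character_class=self.__class__,
                                                       federated_only=self.federated_only)
        self.node_storage = node_storage


learner = FakeLearner()
assert learner.node_storage.character_class is FakeLearner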
@@ -6,6 +6,7 @@ from appdirs import AppDirs
import nucypher
# Base Filepaths
BASE_DIR = abspath(dirname(dirname(nucypher.__file__)))
PROJECT_ROOT = abspath(dirname(nucypher.__file__))
APP_DIR = AppDirs("nucypher", "NuCypher")

@@ -10,7 +10,7 @@ from constant_sorrow import constants
from nucypher.config.constants import DEFAULT_CONFIG_ROOT, BASE_DIR
from nucypher.config.keyring import NucypherKeyring
from nucypher.config.storages import NodeStorage, InMemoryNodeStorage, FileBasedNodeStorage
from nucypher.config.storages import NodeStorage, InMemoryNodeStorage, LocalFileBasedNodeStorage
from nucypher.crypto.powers import CryptoPowerUp
from nucypher.network.middleware import RestMiddleware
@@ -28,7 +28,7 @@ class NodeConfiguration:
__CONFIG_FILE_DESERIALIZER = json.loads
__TEMP_CONFIGURATION_DIR_PREFIX = "nucypher-tmp-"
__DEFAULT_NETWORK_MIDDLEWARE_CLASS = RestMiddleware
__DEFAULT_NODE_STORAGE = FileBasedNodeStorage
__DEFAULT_NODE_STORAGE = LocalFileBasedNodeStorage
__REGISTRY_NAME = 'contract_registry.json'
REGISTRY_SOURCE = os.path.join(BASE_DIR, __REGISTRY_NAME) # TODO: #461 Where will this be hosted?
@@ -96,8 +96,7 @@ class NodeConfiguration:
if self.__temp:
self.__temp_dir = constants.UNINITIALIZED_CONFIGURATION
self.node_storage = InMemoryNodeStorage(federated_only=federated_only,
serializer=self.NODE_SERIALIZER,
deserializer=self.NODE_DESERIALIZER)
character_class=self.__class__)
else:
self.config_root = config_root
self.__temp_dir = constants.LIVE_CONFIGURATION

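For NodeConfiguration the same constructor change applies: storage backends are handed the character class, and (de)serialization defaults live inside the storage class itself (see the storages.py hunks below). Roughly, construction before and after this commit looks like the sketch below, assuming this branch of nucypher is importable; Ursula stands in for the configured character class, exactly as in the new tests.

# Sketch: constructing a node storage backend before vs. after this change.
from nucypher.characters.lawful import Ursula
from nucypher.config.storages import InMemoryNodeStorage

# Old call shape: the caller supplied serializer/deserializer callables, e.g.
# InMemoryNodeStorage(federated_only=True,
#                     serializer=NodeConfiguration.NODE_SERIALIZER,
#                     deserializer=NodeConfiguration.NODE_DESERIALIZER)

# New call shape: the caller supplies the character class; (de)serializers
# default inside NodeStorage.
storage = InMemoryNodeStorage(character_class=Ursula, federated_only=True)
storage.initialize()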
@@ -1,9 +1,14 @@
import binascii
import os
import tempfile
from abc import abstractmethod, ABC
from logging import getLogger
import boto3 as boto3
from typing import Set, Callable
import shutil
from botocore.errorfactory import ClientError
from constant_sorrow import constants
from typing import Callable
from nucypher.config.constants import DEFAULT_CONFIG_ROOT
@@ -12,23 +17,27 @@ class NodeStorage(ABC):
_name = NotImplemented
_TYPE_LABEL = 'storage_type'
NODE_SERIALIZER = binascii.hexlify
NODE_DESERIALIZER = binascii.unhexlify
class NodeStorageError(Exception):
pass
class NoNodeMetadataFound(NodeStorageError):
class UnknownNode(NodeStorageError):
pass
def __init__(self,
serializer: Callable,
deserializer: Callable,
federated_only: bool, # TODO
character_class,
federated_only: bool, # TODO# 466
serializer: Callable = NODE_SERIALIZER,
deserializer: Callable = NODE_DESERIALIZER,
) -> None:
self.log = getLogger(self.__class__.__name__)
self.serializer = serializer
self.deserializer = deserializer
self.federated_only = federated_only
self.character_class = character_class
def __getitem__(self, item):
return self.get(checksum_address=item, federated_only=self.federated_only)
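
The base-class changes in this hunk are easiest to see in isolation: (de)serializers become class-level defaults, the old NoNodeMetadataFound becomes UnknownNode, and dictionary-style lookups delegate to get(), so every backend raises the same error for a missing node. The following is a toy reduction of that shape, not the nucypher API.

# Toy reduction of the NodeStorage changes above. Illustrative only.
import binascii
from abc import ABC, abstractmethod


class ToyNodeStorage(ABC):
    NODE_SERIALIZER = binascii.hexlify
    NODE_DESERIALIZER = binascii.unhexlify

    class NodeStorageError(Exception):
        pass

    class UnknownNode(NodeStorageError):
        pass

    def __init__(self, character_class, federated_only: bool,
                 serializer=NODE_SERIALIZER, deserializer=NODE_DESERIALIZER):
        self.character_class = character_class
        self.federated_only = federated_only
        self.serializer = serializer
        self.deserializer = deserializer

    def __getitem__(self, checksum_address):
        # Dict-style lookup reuses get(), so every backend shares UnknownNode.
        return self.get(checksum_address=checksum_address,
                        federated_only=self.federated_only)

    @abstractmethod
    def get(self, checksum_address: str, federated_only: bool):
        raise NotImplementedError


class ToyMemoryStorage(ToyNodeStorage):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._nodes = {}

    def get(self, checksum_address: str, federated_only: bool):
        try:
            return self._nodes[checksum_address]
        except KeyError:
            raise self.UnknownNode


storage = ToyMemoryStorage(character_class=object, federated_only=True)
try:
    storage['0xdeadbeef']
except ToyNodeStorage.UnknownNode:
    pass  # missing nodes surface as UnknownNode, whatever the backend
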
@@ -84,7 +93,10 @@ class InMemoryNodeStorage(NodeStorage):
return set(self.__known_nodes.values())
def get(self, checksum_address: str, federated_only: bool):
return self.__known_nodes[checksum_address]
try:
return self.__known_nodes[checksum_address]
except KeyError:
raise self.UnknownNode
def save(self, node):
self.__known_nodes[node.checksum_public_address] = node
@@ -108,13 +120,13 @@ class InMemoryNodeStorage(NodeStorage):
self.__known_nodes = dict()
class FileBasedNodeStorage(NodeStorage):
class LocalFileBasedNodeStorage(NodeStorage):
_name = 'local'
__FILENAME_TEMPLATE = '{}.node'
__DEFAULT_DIR = os.path.join(DEFAULT_CONFIG_ROOT, 'known_nodes', 'metadata')
class NoNodeMetadataFound(FileNotFoundError, NodeStorage.NoNodeMetadataFound):
class NoNodeMetadataFileFound(FileNotFoundError, NodeStorage.UnknownNode):
pass
def __init__(self,
@@ -132,23 +144,27 @@ class FileBasedNodeStorage(NodeStorage):
def __read(self, filepath: str, federated_only: bool):
from nucypher.characters.lawful import Ursula
with open(filepath, "r") as seed_file:
seed_file.seek(0)
node_bytes = self.deserializer(seed_file.read())
node = Ursula.from_bytes(node_bytes, federated_only=federated_only)
try:
with open(filepath, "rb") as seed_file:
seed_file.seek(0)
node_bytes = self.deserializer(seed_file.read())
node = Ursula.from_bytes(node_bytes, federated_only=federated_only)
except FileNotFoundError:
raise self.UnknownNode
return node
def __write(self, filepath: str, node):
with open(filepath, "w") as f:
f.write(self.serializer(node).hex())
with open(filepath, "wb") as f:
f.write(self.serializer(self.character_class.__bytes__(node)))
self.log.info("Wrote new node metadata to filesystem {}".format(filepath))
return filepath
def all(self, federated_only: bool) -> set:
metadata_paths = sorted(os.listdir(self.known_metadata_dir), key=os.path.getctime)
self.log.info("Found {} known node metadata files at {}".format(len(metadata_paths), self.known_metadata_dir))
filenames = os.listdir(self.known_metadata_dir)
self.log.info("Found {} known node metadata files at {}".format(len(filenames), self.known_metadata_dir))
known_nodes = set()
for metadata_path in metadata_paths:
for filename in filenames:
metadata_path = os.path.join(self.known_metadata_dir, filename)
node = self.__read(filepath=metadata_path, federated_only=federated_only) # TODO: 466
known_nodes.add(node)
return known_nodes
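
The all() change in this hunk is a real fix, not a refactor: os.listdir() returns bare file names, so the previous sorted(..., key=os.path.getctime) call resolved paths against the working directory rather than the metadata directory. A minimal sketch of the corrected listing pattern, using a hypothetical helper name:

# Hypothetical helper mirroring the corrected all() loop above: join each bare
# filename from os.listdir() onto the metadata directory before reading it.
import os
from typing import List


def list_metadata_paths(known_metadata_dir: str) -> List[str]:
    filenames = os.listdir(known_metadata_dir)
    return [os.path.join(known_metadata_dir, filename) for filename in filenames]
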
@@ -178,7 +194,7 @@ class FileBasedNodeStorage(NodeStorage):
return payload
@classmethod
def from_payload(cls, payload: str, *args, **kwargs) -> 'FileBasedNodeStorage':
def from_payload(cls, payload: str, *args, **kwargs) -> 'LocalFileBasedNodeStorage':
storage_type = payload[cls._TYPE_LABEL]
if not storage_type == cls._name:
raise cls.NodeStorageError("Wrong storage type. got {}".format(storage_type))
@@ -194,16 +210,42 @@ class FileBasedNodeStorage(NodeStorage):
raise self.NodeStorageError("There is no existing configuration at {}".format(self.known_metadata_dir))
class TemporaryFileBasedNodeStorage(LocalFileBasedNodeStorage):
_name = 'tmp'
def __init__(self, *args, **kwargs):
self.__temp_dir = constants.NO_STORAGE_AVAILIBLE
super().__init__(known_metadata_dir=self.__temp_dir, *args, **kwargs)
def __del__(self):
if not self.__temp_dir is constants.NO_STORAGE_AVAILIBLE:
shutil.rmtree(self.__temp_dir, ignore_errors=True)
def initialize(self):
self.__temp_dir = tempfile.mkdtemp(prefix="nucypher-tmp-nodes-")
self.known_metadata_dir = self.__temp_dir
class S3NodeStorage(NodeStorage):
def __init__(self,
bucket_name: str,
s3_resource=None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.__bucket_name = bucket_name
self.__s3client = boto3.client('s3')
self.__s3resource = boto3.resource('s3')
self.bucket = self.__s3resource.Bucket(bucket_name)
self.__s3resource = s3_resource or boto3.resource('s3')
self.bucket = constants.NO_STORAGE_AVAILIBLE
def __read(self, node_obj: str):
try:
node_object_metadata = node_obj.get()
except ClientError:
raise self.UnknownNode
node_bytes = self.deserializer(node_object_metadata['Body'].read())
node = self.character_class.from_bytes(node_bytes)
return node
def generate_presigned_url(self, checksum_address: str) -> str:
payload = {'Bucket': self.__bucket_name, 'Key': checksum_address}
@@ -211,21 +253,29 @@ class S3NodeStorage(NodeStorage):
return url
def all(self, federated_only: bool) -> set:
raise NotImplementedError # TODO
node_objs = self.bucket.objects.all()
nodes = set()
for node_obj in node_objs:
node = self.__read(node_obj=node_obj)
nodes.add(node)
return nodes
def get(self, checksum_address: str, federated_only: bool):
node_obj = self.bucket.Object(checksum_address)
node = self.deserializer(node_obj)
node = self.__read(node_obj=node_obj)
return node
def save(self, node):
self.__s3client.put_object(Bucket=self.__bucket_name,
Key=node.checksum_public_address,
Body=self.serializer(node))
Body=self.serializer(bytes(node)))
def remove(self, checksum_address: str) -> bool:
_node_obj = self.get(checksum_address=checksum_address, federated_only=self.federated_only)
return _node_obj()
node_obj = self.bucket.Object(checksum_address)
response = node_obj.delete()
if response['ResponseMetadata']['HTTPStatusCode'] != 204:
raise self.NodeStorageError("S3 Storage failed to delete node {}".format(checksum_address))
return True
def payload(self) -> str:
payload = {
@@ -239,5 +289,8 @@ class S3NodeStorage(NodeStorage):
return cls(bucket_name=payload['bucket_name'], *args, **kwargs)
def initialize(self):
return self.__s3client.create_bucket(Bucket=self.__bucket_name)
self.bucket = self.__s3resource.Bucket(self.__bucket_name)
### Node Storage Registry ###
NODE_STORAGES = {storage_class._name: storage_class for storage_class in NodeStorage.__subclasses__()}

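The new NODE_STORAGES dict at the bottom of storages.py is a small subclass registry: each backend advertises a short _name, and a dict comprehension over __subclasses__() maps names to classes. A stripped-down illustration with toy classes is below; note that __subclasses__() only reports direct subclasses, so a backend that derives from LocalFileBasedNodeStorage (such as TemporaryFileBasedNodeStorage) will not appear in the registry under its own name.

# Stripped-down illustration of the registry pattern; toy classes only.
class ToyStorage:
    _name = NotImplemented


class ToyLocalStorage(ToyStorage):
    _name = 'local'


class ToyS3Storage(ToyStorage):
    _name = 's3'


TOY_STORAGES = {cls._name: cls for cls in ToyStorage.__subclasses__()}
assert TOY_STORAGES['local'] is ToyLocalStorage
assert TOY_STORAGES['s3'] is ToyS3Storage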
@@ -132,7 +132,7 @@ class UrsulaProcessProtocol(protocol.ProcessProtocol):
def outReceived(self, data):
print(data)
if b'passphrase' in data:
self.transport.write(bytes(TEST_URSULA_INSECURE_DEVELOPMENT_PASSWORD, encoding='ascii'))
self.transport.__write(bytes(TEST_URSULA_INSECURE_DEVELOPMENT_PASSWORD, encoding='ascii'))
self.transport.closeStdin() # tell them we're done
def errReceived(self, data):

@@ -0,0 +1,119 @@
import boto3
import pytest
from moto import mock_s3
from nucypher.characters.lawful import Ursula
from nucypher.config.storages import S3NodeStorage, InMemoryNodeStorage, LocalFileBasedNodeStorage, NODE_STORAGES, \
TemporaryFileBasedNodeStorage, NodeStorage
MOCK_S3_BUCKET_NAME = 'mock-bootnodes'
@pytest.fixture(scope='module')
def light_ursula(temp_dir_path):
node = Ursula(rest_host='127.0.0.1',
rest_port=10151,
federated_only=True)
return node
#
# Factories
#
def memory_node_storage():
_node_storage = InMemoryNodeStorage(character_class=Ursula, federated_only=True)
_node_storage.initialize()
return _node_storage
def local_node_storage():
_node_storage = TemporaryFileBasedNodeStorage(character_class=Ursula, federated_only=True)
_node_storage.initialize()
return _node_storage
def s3_node_storage():
@mock_s3
def __mock_s3():
conn = boto3.resource('s3')
# We need to create the bucket since this is all in Moto's 'virtual' AWS account
conn.create_bucket(Bucket=MOCK_S3_BUCKET_NAME)
_mock_storage = S3NodeStorage(bucket_name=MOCK_S3_BUCKET_NAME,
s3_resource=conn,
character_class=Ursula,
federated_only=True)
return _mock_storage
_node_storage = __mock_s3()
_node_storage.initialize()
return _node_storage
#
# Test Helpers
#
def _read_and_write_to_storage(ursula, node_storage):
# Write Node
node_storage.save(node=ursula)
# Read Node
node_from_storage = node_storage.get(checksum_address=ursula.checksum_public_address,
federated_only=True)
assert ursula == node_from_storage, "Node storage {} failed".format(node_storage)
# Save more nodes
all_known_nodes = set()
for port in range(10152, 10155):
node = Ursula(rest_host='127.0.0.1', rest_port=port, federated_only=True)
node_storage.save(node=node)
all_known_nodes.add(node)
# Read all nodes from storage
all_stored_nodes = node_storage.all(federated_only=True)
all_known_nodes.add(ursula)
assert all_stored_nodes == all_known_nodes
return True
def _write_and_delete_nodes_in_storage(ursula, node_storage):
# Write Node
node_storage.save(node=ursula)
# Delete Node
node_storage.remove(checksum_address=ursula.checksum_public_address)
# Read Node
with pytest.raises(NodeStorage.UnknownNode):
_node_from_storage = node_storage.get(checksum_address=ursula.checksum_public_address,
federated_only=True)
# Read all nodes from storage
all_stored_nodes = node_storage.all(federated_only=True)
assert all_stored_nodes == set()
return True
#
# Storage Backed Tests
#
@pytest.mark.parametrize("storage_factory", [
memory_node_storage,
local_node_storage,
s3_node_storage
])
@mock_s3
def test_delete_node_in_storage(light_ursula, storage_factory):
assert _write_and_delete_nodes_in_storage(ursula=light_ursula, node_storage=storage_factory())
@pytest.mark.parametrize("storage_factory", [
memory_node_storage,
local_node_storage,
s3_node_storage
])
@mock_s3
def test_read_and_write_to_storage(light_ursula, storage_factory):
assert _read_and_write_to_storage(ursula=light_ursula, node_storage=storage_factory())
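
The s3_node_storage() factory above relies on moto's @mock_s3 decorator to stand up an in-memory AWS account, so the S3 backend can be exercised without real buckets. A self-contained round-trip of the same idea is sketched below; it assumes boto3 and a moto release that still exposes mock_s3 (as imported in this test module), and the bucket/key names and fake credentials are arbitrary.

# Standalone sketch of the moto pattern used in s3_node_storage(): inside
# @mock_s3, boto3 calls hit an in-memory fake, so buckets and objects never
# leave the test process.
import os

import boto3
from moto import mock_s3

# Fake credentials and region keep botocore happy without touching real AWS.
os.environ.setdefault('AWS_ACCESS_KEY_ID', 'testing')
os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'testing')


@mock_s3
def s3_roundtrip() -> bytes:
    s3 = boto3.resource('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='mock-bootnodes')
    s3.Object('mock-bootnodes', 'some-checksum-address').put(Body=b'node-bytes')
    return s3.Object('mock-bootnodes', 'some-checksum-address').get()['Body'].read()


assert s3_roundtrip() == b'node-bytes'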