S3NodeStorage methods, expose bucket and name; fetch objects

pull/476/head
Kieran Prasch 2018-10-09 22:44:44 -07:00
parent 1edc9773d7
commit 1ebf5781e7
2 changed files with 20 additions and 8 deletions

View File

@@ -18,6 +18,7 @@ from nucypher.network.middleware import RestMiddleware
class NodeConfiguration:
_name = 'node'
_Character = NotImplemented
DEFAULT_CONFIG_FILE_LOCATION = os.path.join(DEFAULT_CONFIG_ROOT, '{}.config'.format(_name))
DEFAULT_OPERATING_MODE = 'decentralized'
@@ -100,9 +101,9 @@ class NodeConfiguration:
else:
self.config_root = config_root
self.__temp_dir = constants.LIVE_CONFIGURATION
from nucypher.characters.lawful import Ursula # TODO : Needs cleanup
self.node_storage = node_storage or self.__DEFAULT_NODE_STORAGE(federated_only=federated_only,
serializer=self.NODE_SERIALIZER,
deserializer=self.NODE_DESERIALIZER)
character_class=Ursula)
self.__cache_runtime_filepaths()
self.config_file_location = config_file_location

View File

@@ -227,6 +227,8 @@ class TemporaryFileBasedNodeStorage(LocalFileBasedNodeStorage):
class S3NodeStorage(NodeStorage):
S3_ACL = 'private' # Canned S3 Permissions
def __init__(self,
bucket_name: str,
s3_resource=None,
@@ -236,7 +238,15 @@ class S3NodeStorage(NodeStorage):
self.__bucket_name = bucket_name
self.__s3client = boto3.client('s3')
self.__s3resource = s3_resource or boto3.resource('s3')
self.bucket = constants.NO_STORAGE_AVAILIBLE
self.__bucket = constants.NO_STORAGE_AVAILIBLE
@property
def bucket(self):
    """Read-only handle on the bound S3 bucket resource.

    Holds NO_STORAGE_AVAILIBLE (sic, project constant) until initialize()
    binds the real bucket — presumably callers check for that sentinel.
    """
    return self.__bucket
@property
def bucket_name(self):
    """Read-only access to the configured S3 bucket name string."""
    return self.__bucket_name
def __read(self, node_obj: str):
try:
@@ -249,11 +259,11 @@ class S3NodeStorage(NodeStorage):
def generate_presigned_url(self, checksum_address: str) -> str:
    """Return a time-limited presigned GET URL for a stored node object.

    The diff residue carried both the old call and the committed
    replacement; this keeps the committed form, which caps the URL's
    validity at 900 seconds (15 minutes) via ExpiresIn.

    :param checksum_address: S3 object key — the node's checksum address.
    :return: a presigned URL string for 'get_object' on this bucket/key.
    """
    payload = {'Bucket': self.__bucket_name, 'Key': checksum_address}
    url = self.__s3client.generate_presigned_url('get_object', payload, ExpiresIn=900)
    return url
def all(self, federated_only: bool) -> set:
node_objs = self.bucket.objects.all()
node_objs = self.__bucket.objects.all()
nodes = set()
for node_obj in node_objs:
node = self.__read(node_obj=node_obj)
@@ -261,17 +271,18 @@ class S3NodeStorage(NodeStorage):
return nodes
def get(self, checksum_address: str, federated_only: bool):
    """Fetch a single stored node by its checksum address.

    The diff residue contained both the pre-change line (self.bucket) and
    the committed line (self.__bucket); this keeps the committed, private
    attribute form.

    :param checksum_address: S3 object key identifying the node.
    :param federated_only: accepted for interface parity with NodeStorage;
        not consulted in the visible body.
    :return: whatever self.__read produces for the object — presumably a
        deserialized node (TODO confirm: __read's body is not visible here).
    """
    node_obj = self.__bucket.Object(checksum_address)
    node = self.__read(node_obj=node_obj)
    return node
def save(self, node):
    """Serialize *node* and upload it to the bucket, keyed by its
    checksum public address, with the class's canned ACL applied."""
    serialized_node = self.serializer(bytes(node))
    self.__s3client.put_object(Bucket=self.__bucket_name,
                               ACL=self.S3_ACL,
                               Key=node.checksum_public_address,
                               Body=serialized_node)
def remove(self, checksum_address: str) -> bool:
node_obj = self.bucket.Object(checksum_address)
node_obj = self.__bucket.Object(checksum_address)
response = node_obj.delete()
if response['ResponseMetadata']['HTTPStatusCode'] != 204:
raise self.NodeStorageError("S3 Storage failed to delete node {}".format(checksum_address))
@@ -289,7 +300,7 @@ class S3NodeStorage(NodeStorage):
return cls(bucket_name=payload['bucket_name'], *args, **kwargs)
def initialize(self):
    """Bind the configured bucket via the boto3 S3 resource.

    The diff residue carried both the old assignment (self.bucket) and the
    committed one (self.__bucket); this keeps the committed, private
    attribute, which the `bucket` property then exposes read-only.
    """
    self.__bucket = self.__s3resource.Bucket(self.__bucket_name)
### Node Storage Registry ###