mirror of https://github.com/nucypher/nucypher.git
Draft of ursula process handler
parent 629b45ac59
commit 544ed794d3
@@ -0,0 +1,90 @@
"""
!! WARNING !!
!! This is not an actual mining script: Don't use this to mine. !!

"""

import asyncio
import binascii
import os
from typing import Set

from nucypher.characters import Ursula


DEFAULT_SEED_NODE_DIR = '.'
DEFAULT_KNOWN_NODE_DIR = '.'

def read_node_metadata(filepath) -> Ursula:
    """Read hex-encoded node metadata from a file and deserialize it into an Ursula."""
    with open(filepath, "r") as seed_file:
        seed_file.seek(0)
        seed_node_bytes = binascii.unhexlify(seed_file.read())

    node = Ursula.from_bytes(seed_node_bytes, federated_only=True)
    return node
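

# Example (not part of the original script): a minimal, standard-library-only
# sketch of the hex round trip the metadata files rely on. The payload is a
# placeholder, not real Ursula metadata -- write_node_metadata() below stores
# bytes(node).hex(), and read_node_metadata() reverses it with unhexlify().
def _example_hex_round_trip():
    payload = b"placeholder node bytes"    # stand-in for bytes(ursula)
    encoded = payload.hex()                # the string written to a metadata file
    decoded = binascii.unhexlify(encoded)  # the bytes recovered when reading it back
    assert decoded == payload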


def write_node_metadata(node, node_metadata_dir: str) -> str:
    """Serialize a node to hex and write it to a metadata file, returning the filepath."""
    filename = "node-metadata-{}".format(node.rest_port)
    metadata_filepath = os.path.join(node_metadata_dir, filename)

    with open(metadata_filepath, "w") as f:
        f.write(bytes(node).hex())

    return metadata_filepath
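

# Example (not part of the original script): a hedged sketch of the on-disk
# format produced by write_node_metadata(), using a hypothetical stand-in
# object instead of a real Ursula. It assumes only the attributes the function
# actually touches: rest_port and __bytes__.
def _example_metadata_file(node_metadata_dir: str = ".") -> str:
    class _FakeNode:
        rest_port = 9151  # hypothetical REST port, used only to name the file

        def __bytes__(self):
            return b"placeholder node bytes"  # stand-in for real serialized metadata

    metadata_filepath = write_node_metadata(node=_FakeNode(), node_metadata_dir=node_metadata_dir)

    # The file (here "node-metadata-9151") holds the hex of the node's bytes.
    with open(metadata_filepath, "r") as f:
        assert binascii.unhexlify(f.read()) == b"placeholder node bytes"

    return metadata_filepath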


def read_metadata_dir(node_metadata_dir: str) -> Set[Ursula]:
    """Read every node metadata file in a directory and return the nodes as a set."""
    try:
        seed_node_files = os.listdir(node_metadata_dir)
    except FileNotFoundError:
        raise RuntimeError("No seed node metadata found at {}".format(node_metadata_dir))

    nodes = set()
    for seed_node_file in seed_node_files:
        # os.listdir yields bare filenames; join with the directory to get a readable path.
        node = read_node_metadata(filepath=os.path.join(node_metadata_dir, seed_node_file))
        nodes.add(node)

    return nodes
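

# Example (not part of the original script): read_metadata_dir() above tries to
# deserialize every file in the directory. If the directory may contain
# unrelated files, filtering on the "node-metadata-" prefix that
# write_node_metadata() uses is one way to narrow the scan; this is an
# assumption about intent, not behaviour of the original.
def _example_list_metadata_files(node_metadata_dir: str = DEFAULT_SEED_NODE_DIR) -> list:
    return [os.path.join(node_metadata_dir, filename)
            for filename in os.listdir(node_metadata_dir)
            if filename.startswith("node-metadata-")]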


def collect_stored_nodes(seed_node_dir=DEFAULT_SEED_NODE_DIR) -> tuple:
    """Collect stored node data from multiple sources and aggregate them into known node sets."""
    seed_nodes = read_metadata_dir(node_metadata_dir=seed_node_dir)
    other_nodes = set()
    return seed_nodes, other_nodes
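

# Example (not part of the original script): a speculative sketch of how a
# second metadata directory (the otherwise-unused DEFAULT_KNOWN_NODE_DIR) might
# feed the "other_nodes" set that collect_stored_nodes() currently leaves
# empty. This is an assumption about the intended aggregation, not existing
# behaviour.
def _example_collect_from_two_dirs(seed_node_dir: str = DEFAULT_SEED_NODE_DIR,
                                   known_node_dir: str = DEFAULT_KNOWN_NODE_DIR) -> tuple:
    seed_nodes = read_metadata_dir(node_metadata_dir=seed_node_dir)
    try:
        other_nodes = read_metadata_dir(node_metadata_dir=known_node_dir)
    except RuntimeError:  # raised by read_metadata_dir when the directory is missing
        other_nodes = set()
    return seed_nodes, other_nodes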


def spin_up_ursula(seed_node_dir=DEFAULT_SEED_NODE_DIR, cleanup=True):

    # Collect nodes from the filesystem
    seed_nodes, other_nodes = collect_stored_nodes(seed_node_dir=seed_node_dir)

    # Start DHT loop
    asyncio.set_event_loop(asyncio.new_event_loop())

    # Initialize Ursula
    URSULA = Ursula.from_config(known_nodes=seed_nodes)
    URSULA.dht_listen()

    try:
        # Save node metadata
        metadata_filepath = write_node_metadata(node=URSULA, node_metadata_dir=seed_node_dir)

        # Enter learning loop
        URSULA.start_learning_loop()
        URSULA.get_deployer().run()

    finally:
        if cleanup is True:
            os.remove(URSULA.db_name)
            os.remove(metadata_filepath)
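

# Example (not part of the original script): a hedged sketch of launching the
# node against a non-default metadata directory and keeping its files after it
# stops. The directory name is a placeholder; spin_up_ursula() blocks until the
# deployer exits.
def _example_launch(seed_node_dir: str = "./seed-metadata"):
    spin_up_ursula(seed_node_dir=seed_node_dir, cleanup=False)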


if __name__ == "__main__":
    spin_up_ursula()