mirror of https://github.com/nucypher/nucypher.git
Extract NodeMetadata into the core
parent 7c7e6dc12c
commit e5e598952a
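
For orientation: this commit moves node serialization from `Ursula.__bytes__` / `Ursula.from_bytes` to an explicit `NodeMetadata` object in `nucypher.core`, wrapped by a lightweight `NodeSprout` on the receiving side. A toy stand-in (not nucypher code) illustrating the call-pattern change that recurs throughout the hunks below:

class ToyMetadata:
    """Stand-in for nucypher.core.NodeMetadata: owns the wire representation."""
    def __init__(self, payload: bytes):
        self.payload = payload

    def __bytes__(self) -> bytes:
        return self.payload


class ToyNode:
    """Stand-in for Ursula: it no longer serializes itself, it hands out metadata."""
    def __init__(self, payload: bytes):
        self._metadata = ToyMetadata(payload)

    def metadata(self) -> ToyMetadata:
        return self._metadata


node = ToyNode(b"example")
# Old pattern throughout this diff: bytes(node); new pattern: bytes(node.metadata())
assert bytes(node.metadata()) == b"example"
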
@@ -132,7 +132,7 @@ class FleetState:
        if self._this_node_ref is not None and not skip_this_node:
            this_node = self._this_node_ref()
            this_node_metadata = bytes(this_node)
            this_node_metadata = bytes(this_node.metadata())
            this_node_updated = self._this_node_metadata != this_node_metadata
            this_node_list = [this_node]
        else:
@@ -156,7 +156,7 @@ class FleetState:
            all_nodes_sorted = sorted(itertools.chain(this_node_list, nodes.values()),
                                      key=lambda node: node.checksum_address)
            joined_metadata = b"".join(bytes(node) for node in all_nodes_sorted)
            joined_metadata = b"".join(bytes(node.metadata()) for node in all_nodes_sorted)
            checksum = keccak_digest(joined_metadata).hex()
        else:
            nodes = self._nodes
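
A rough sketch of the fleet-state checksum flow shown above, with `sha3_256` standing in for nucypher's `keccak_digest` helper (not the same digest, just an illustration of the flow); `this_node_list` and `nodes` are assumed to hold objects exposing `checksum_address` and `metadata()`:

import itertools
from hashlib import sha3_256  # stand-in only; the real code uses keccak_digest

def fleet_state_checksum(this_node_list, nodes):
    # Deterministic ordering first, then hash the concatenated metadata bytes.
    all_nodes_sorted = sorted(itertools.chain(this_node_list, nodes.values()),
                              key=lambda node: node.checksum_address)
    joined_metadata = b"".join(bytes(node.metadata()) for node in all_nodes_sorted)
    return sha3_256(joined_metadata).hexdigest()
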
@@ -27,27 +27,17 @@ from queue import Queue
from typing import Dict, Iterable, List, NamedTuple, Tuple, Union, Optional, Sequence, Set, Any

import maya
from bytestring_splitter import (
    BytestringKwargifier,
    BytestringSplitter,
    BytestringSplittingError,
    VariableLengthBytestring
)
from constant_sorrow import constants
from constant_sorrow.constants import (
    INCLUDED_IN_BYTESTRING,
    PUBLIC_ONLY,
    STRANGER_ALICE,
    UNKNOWN_VERSION,
    READY,
    INVALIDATED,
    NOT_SIGNED
)
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509 import Certificate, NameOID, load_pem_x509_certificate
from cryptography.x509 import Certificate, NameOID
from eth_typing.evm import ChecksumAddress
from eth_utils import to_checksum_address
from flask import Response, request
from twisted.internet import reactor, stdio, threads
from twisted.internet.defer import Deferred
@@ -63,6 +53,7 @@ from nucypher.core import (
    TreasureMap,
    EncryptedTreasureMap,
    ReencryptionResponse,
    NodeMetadata
)

import nucypher
@@ -70,7 +61,6 @@ from nucypher.acumen.nicknames import Nickname
from nucypher.acumen.perception import FleetSensor, ArchivedFleetState, RemoteUrsulaStatus
from nucypher.blockchain.eth.actors import BlockchainPolicyAuthor, Worker
from nucypher.blockchain.eth.agents import ContractAgency, StakingEscrowAgent
from nucypher.blockchain.eth.constants import ETH_ADDRESS_BYTE_LENGTH
from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
from nucypher.blockchain.eth.registry import BaseContractRegistry
from nucypher.blockchain.eth.signers.software import Web3Signer
@@ -91,11 +81,8 @@ from nucypher.crypto.powers import (
    TransactingPower,
    TLSHostingPower,
)
from nucypher.crypto.signing import InvalidSignature
from nucypher.crypto.splitters import key_splitter, signature_splitter
from nucypher.crypto.umbral_adapter import (
    PublicKey,
    VerificationError,
    reencrypt,
    VerifiedKeyFrag,
)
@@ -104,7 +91,7 @@ from nucypher.datastore.queries import find_expired_policies
from nucypher.network.exceptions import NodeSeemsToBeDown
from nucypher.network.middleware import RestMiddleware
from nucypher.network.nodes import NodeSprout, TEACHER_NODES, Teacher
from nucypher.network.protocols import InterfaceInfo, parse_node_uri
from nucypher.network.protocols import parse_node_uri
from nucypher.network.retrieval import RetrievalClient
from nucypher.network.server import ProxyRESTServer, make_rest_app
from nucypher.network.trackers import AvailabilityTracker
@@ -1016,28 +1003,33 @@ class Ursula(Teacher, Character, Worker):
        deployer = self._crypto_power.power_ups(TLSHostingPower).get_deployer(rest_app=self.rest_app, port=port)
        return deployer

    def __bytes__(self):

        version = self.TEACHER_VERSION.to_bytes(2, "big")
        interface_info = VariableLengthBytestring(bytes(self.rest_interface))
        certificate_vbytes = VariableLengthBytestring(self.certificate.public_bytes(Encoding.PEM))
        as_bytes = bytes().join((version,
                                 self.canonical_public_address,
                                 bytes(VariableLengthBytestring(self.domain.encode('utf-8'))),
                                 self.timestamp_bytes(),
                                 bytes(self._interface_signature),
                                 bytes(VariableLengthBytestring(self.decentralized_identity_evidence)),  # FIXME: Fixed length doesn't work with federated
                                 bytes(self.public_keys(SigningPower)),
                                 bytes(self.public_keys(DecryptingPower)),
                                 bytes(certificate_vbytes),  # TLSHostingPower
                                 bytes(interface_info))
                                )
        return as_bytes

    def metadata(self) -> NodeMetadata:
        # TODO: sometimes during cleanup in tests the learner is still running and can call this method,
        # but `._finalize()` is already called, so `rest_interface` is unavailable.
        # That doesn't lead to test fails, but produces some tracebacks in stderr.
        # The whole cleanup situation in tests is messed up and needs to be fixed.
        return NodeMetadata(public_address=self.canonical_public_address,
                            domain=self.domain,
                            timestamp_epoch=self.timestamp.epoch,
                            interface_signature=self._interface_signature,
                            decentralized_identity_evidence=self.decentralized_identity_evidence,
                            verifying_key=self.public_keys(SigningPower),
                            encrypting_key=self.public_keys(DecryptingPower),
                            certificate_bytes=self.certificate.public_bytes(Encoding.PEM),
                            host=self.rest_interface.host,
                            port=self.rest_interface.port,
                            )

    #
    # Alternate Constructors
    #

    @classmethod
    def from_metadata_bytes(cls, metadata_bytes):
        # TODO: should be a method of `NodeSprout`, or maybe `NodeMetadata` *is* `NodeSprout`.
        # Fix when we get rid of inplace maturation.
        return NodeSprout(NodeMetadata.from_bytes(metadata_bytes))

    @classmethod
    def from_rest_url(cls,
                      network_middleware: RestMiddleware,
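
A hedged sketch of the round trip these two methods enable, assuming `ursula` is a running Ursula instance (for example from the test fixtures used later in this diff):

metadata = ursula.metadata()                     # NodeMetadata carrying keys, certificate, host/port, signature
wire_bytes = bytes(metadata)                     # what /public_information now serves
sprout = Ursula.from_metadata_bytes(wire_bytes)  # cheap NodeSprout wrapping the parsed NodeMetadata
assert sprout.checksum_address == ursula.checksum_address
sprout.mature()                                  # lazily builds the full Ursula via NodeSprout.finish()
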
@@ -1049,8 +1041,7 @@ class Ursula(Teacher, Character, Worker):
        response_data = network_middleware.client.node_information(host, port,
                                                                    certificate_filepath=certificate_filepath)

        stranger_ursula_from_public_keys = cls.from_bytes(response_data,
                                                          *args, **kwargs)
        stranger_ursula_from_public_keys = cls.from_metadata_bytes(response_data)

        return stranger_ursula_from_public_keys
@@ -1161,130 +1152,6 @@ class Ursula(Teacher, Character, Worker):
        temp_node_storage.forget()
        return potential_seed_node

    @classmethod
    def payload_splitter(cls, splittable, partial: bool = False):
        splitter = BytestringKwargifier(
            _receiver=cls.from_processed_bytes,
            _partial_receiver=NodeSprout,
            public_address=ETH_ADDRESS_BYTE_LENGTH,
            domain=VariableLengthBytestring,
            timestamp=(int, 4, {'byteorder': 'big'}),
            interface_signature=signature_splitter,

            # FIXME: Fixed length doesn't work with federated. It was LENGTH_ECDSA_SIGNATURE_WITH_RECOVERY,
            decentralized_identity_evidence=VariableLengthBytestring,

            verifying_key=key_splitter,
            encrypting_key=key_splitter,
            certificate=(load_pem_x509_certificate, VariableLengthBytestring, {"backend": default_backend()}),
            rest_interface=InterfaceInfo,
        )
        result = splitter(splittable, partial=partial)
        return result

    @classmethod
    def is_compatible_version(cls, version: int) -> bool:
        return cls.LOWEST_COMPATIBLE_VERSION <= version <= cls.LEARNER_VERSION

    @classmethod
    def from_bytes(cls,
                   ursula_as_bytes: bytes,
                   version: int = INCLUDED_IN_BYTESTRING,
                   fail_fast=False,
                   ) -> 'Ursula':

        if version is INCLUDED_IN_BYTESTRING:
            version, payload = cls.version_splitter(ursula_as_bytes, return_remainder=True)
        else:
            payload = ursula_as_bytes

        # Check version is compatible and prepare to handle potential failures otherwise
        if not cls.is_compatible_version(version):
            version_exception_class = cls.IsFromTheFuture if version > cls.LEARNER_VERSION else cls.AreYouFromThePast

            # Try to handle failure, even during failure, graceful degradation
            # TODO: #154 - Some auto-updater logic?

            try:
                canonical_address, _ = BytestringSplitter(ETH_ADDRESS_BYTE_LENGTH)(payload, return_remainder=True)
                checksum_address = to_checksum_address(canonical_address)
                nickname = Nickname.from_seed(checksum_address)
                display_name = cls._display_name_template.format(cls.__name__, nickname, checksum_address)
                message = cls.unknown_version_message.format(display_name, version, cls.LEARNER_VERSION)
                if version > cls.LEARNER_VERSION:
                    message += " Is there a newer version of NuCypher?"
            except BytestringSplittingError:
                message = cls.really_unknown_version_message.format(version, cls.LEARNER_VERSION)

            if fail_fast:
                raise version_exception_class(message)
            else:
                cls.log.warn(message)
                return UNKNOWN_VERSION
        else:
            # Version stuff checked out. Moving on.
            node_sprout = cls.payload_splitter(payload, partial=True)
            return node_sprout

    @classmethod
    def from_processed_bytes(cls, **processed_objects):
        """
        A convenience method for completing the maturation of a NodeSprout.
        TODO: Either deprecate or consolidate this logic; it's mostly just workarounds. NRN
        """
        #### This is kind of a ridiculous workaround and repeated logic from Ursula.from_bytes
        interface_info = processed_objects.pop("rest_interface")
        rest_host = interface_info.host
        rest_port = interface_info.port
        checksum_address = to_checksum_address(processed_objects.pop('public_address'))

        domain = processed_objects.pop('domain').decode('utf-8')

        timestamp = maya.MayaDT(processed_objects.pop('timestamp'))

        ursula = cls.from_public_keys(rest_host=rest_host,
                                      rest_port=rest_port,
                                      checksum_address=checksum_address,
                                      domain=domain,
                                      timestamp=timestamp,
                                      **processed_objects)
        return ursula

    @classmethod
    def batch_from_bytes(cls,
                         ursulas_as_bytes: Iterable[bytes],
                         fail_fast: bool = False,
                         ) -> List['Ursula']:

        node_splitter = BytestringSplitter(VariableLengthBytestring)
        nodes_vbytes = node_splitter.repeat(ursulas_as_bytes)
        version_splitter = BytestringSplitter((int, 2, {"byteorder": "big"}))
        versions_and_node_bytes = [version_splitter(n, return_remainder=True) for n in nodes_vbytes]

        sprouts = []
        for version, node_bytes in versions_and_node_bytes:
            try:
                sprout = cls.from_bytes(node_bytes,
                                        version=version)
                if sprout is UNKNOWN_VERSION:
                    continue
            except BytestringSplittingError:
                message = cls.really_unknown_version_message.format(version, cls.LEARNER_VERSION)
                if fail_fast:
                    raise cls.IsFromTheFuture(message)
                else:
                    cls.log.warn(message)
                    continue
            except Ursula.IsFromTheFuture as e:
                if fail_fast:
                    raise
                else:
                    cls.log.warn(e.args[0])
                    continue
            else:
                sprouts.append(sprout)
        return sprouts

    @classmethod
    def from_storage(cls,
                     node_storage: NodeStorage,
@@ -22,7 +22,6 @@ from pathlib import Path
from typing import Any, Optional, Set, Union

import OpenSSL
from bytestring_splitter import BytestringSplittingError
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding
@@ -353,18 +352,18 @@ class LocalFileBasedNodeStorage(NodeStorage):
            with open(filepath, "rb") as seed_file:
                seed_file.seek(0)
                node_bytes = self.decode_node_bytes(seed_file.read())
                node = Ursula.from_bytes(node_bytes, fail_fast=True)
                node = Ursula.from_metadata_bytes(node_bytes)
        except FileNotFoundError:
            raise self.NoNodeMetadataFileFound
        except (BytestringSplittingError, Ursula.UnexpectedVersion):
            raise self.InvalidNodeMetadata
        except Exception as e:
            raise self.InvalidNodeMetadata from e

        return node

    def __write_metadata(self, filepath: Path, node):
        filepath.parent.mkdir(parents=True, exist_ok=True)
        with open(filepath, "wb") as f:
            f.write(self.encode_node_bytes(bytes(node)))
            f.write(self.encode_node_bytes(bytes(node.metadata())))
        self.log.info("Wrote new node metadata to filesystem {}".format(filepath))
        return filepath
@@ -20,6 +20,7 @@ from typing import Optional, Sequence, Callable, Dict, Tuple, List, Iterable
from bytestring_splitter import (
    BytestringSplitter,
    VariableLengthBytestring,
    BytestringKwargifier,
    BytestringSplittingError,
)
from eth_typing.evm import ChecksumAddress
@@ -841,3 +842,96 @@ class RevocationOrder(Versioned):
        return cls(ursula_address=ursula_address,
                   encrypted_kfrag=ekfrag,
                   signature=signature)


class NodeMetadata(Versioned):

    def __init__(self,
                 public_address: bytes,
                 domain: str,
                 timestamp_epoch: int,
                 interface_signature: Signature,  # sign(timestamp + canonical_public_address + host + port)
                 decentralized_identity_evidence: bytes,  # TODO: make its own type?
                 verifying_key: PublicKey,
                 encrypting_key: PublicKey,
                 certificate_bytes: bytes,  # serialized `cryptography.x509.Certificate`
                 host: str,
                 port: int,
                 ):
        self.public_address = public_address
        self.domain = domain
        self.timestamp_epoch = timestamp_epoch
        self.interface_signature = interface_signature
        self.decentralized_identity_evidence = decentralized_identity_evidence
        self.verifying_key = verifying_key
        self.encrypting_key = encrypting_key
        self.certificate_bytes = certificate_bytes
        self.host = host
        self.port = port

    @classmethod
    def _brand(cls) -> bytes:
        return b'NdMd'

    @classmethod
    def _version(cls) -> Tuple[int, int]:
        return 1, 0

    @classmethod
    def _old_version_handlers(cls) -> Dict:
        return {}

    def _payload(self):
        as_bytes = bytes().join((self.public_address,
                                 bytes(VariableLengthBytestring(self.domain.encode('utf-8'))),
                                 self.timestamp_epoch.to_bytes(4, 'big'),
                                 bytes(self.interface_signature),
                                 bytes(VariableLengthBytestring(self.decentralized_identity_evidence)),  # FIXME: Fixed length doesn't work with federated
                                 bytes(self.verifying_key),
                                 bytes(self.encrypting_key),
                                 bytes(VariableLengthBytestring(self.certificate_bytes)),
                                 bytes(VariableLengthBytestring(self.host.encode('utf-8'))),
                                 self.port.to_bytes(2, 'big'),
                                 ))
        return as_bytes

    @classmethod
    def _from_bytes_current(cls, data: bytes):
        splitter = BytestringKwargifier(
            dict,
            public_address=ETH_ADDRESS_BYTE_LENGTH,
            domain_bytes=VariableLengthBytestring,
            timestamp_epoch=(int, 4, {'byteorder': 'big'}),
            interface_signature=signature_splitter,

            # FIXME: Fixed length doesn't work with federated. It was LENGTH_ECDSA_SIGNATURE_WITH_RECOVERY,
            decentralized_identity_evidence=VariableLengthBytestring,

            verifying_key=key_splitter,
            encrypting_key=key_splitter,
            certificate_bytes=VariableLengthBytestring,
            host_bytes=VariableLengthBytestring,
            port=(int, 2, {'byteorder': 'big'}),
        )

        result = splitter(data)

        return cls(public_address=result['public_address'],
                   domain=result['domain_bytes'].decode('utf-8'),
                   timestamp_epoch=result['timestamp_epoch'],
                   interface_signature=result['interface_signature'],
                   decentralized_identity_evidence=result['decentralized_identity_evidence'],
                   verifying_key=result['verifying_key'],
                   encrypting_key=result['encrypting_key'],
                   certificate_bytes=result['certificate_bytes'],
                   host=result['host_bytes'].decode('utf-8'),
                   port=result['port'],
                   )

    @classmethod
    def batch_from_bytes(cls, data: bytes):

        node_splitter = BytestringSplitter(VariableLengthBytestring)
        nodes_vbytes = node_splitter.repeat(data)

        return [cls.from_bytes(node_data) for node_data in nodes_vbytes]
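
For context on `batch_from_bytes`: each serialized metadata blob is wrapped in a `VariableLengthBytestring` and concatenated, which is also how `bytestring_of_known_nodes` and the REST middleware build the `node_metadata` payload later in this diff. A minimal sketch, assuming `BytestringSplitter(...).repeat()` yields the unwrapped payloads as the code above relies on:

from bytestring_splitter import BytestringSplitter, VariableLengthBytestring

# Writer side: wrap each serialized NodeMetadata (stand-in bytes here) and concatenate.
metadatas = [b"node-metadata-1", b"node-metadata-2"]
batch = b"".join(bytes(VariableLengthBytestring(m)) for m in metadatas)

# Reader side, mirroring NodeMetadata.batch_from_bytes: split the batch back apart.
node_splitter = BytestringSplitter(VariableLengthBytestring)
recovered = node_splitter.repeat(batch)
assert len(recovered) == len(metadatas)  # each item is one payload, ready for NodeMetadata.from_bytes
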
@@ -228,7 +228,7 @@ class RestMiddleware:

    def check_rest_availability(self, initiator, responder):
        response = self.client.post(node_or_sprout=responder,
                                    data=bytes(initiator),
                                    data=bytes(initiator.metadata()),
                                    path="ping",
                                    timeout=6,  # Two round trips are expected
                                    )
@@ -251,7 +251,7 @@ class RestMiddleware:
        params = {}

        if announce_nodes:
            payload = bytes().join(bytes(VariableLengthBytestring(n)) for n in announce_nodes)
            payload = bytes().join(bytes(VariableLengthBytestring(bytes(n.metadata()))) for n in announce_nodes)
            response = self.client.post(node_or_sprout=node,
                                        path="node_metadata",
                                        params=params,
@@ -26,9 +26,7 @@ from typing import Callable, Iterable, List, Optional, Set, Tuple, Union
import maya
import requests
from bytestring_splitter import (
    BytestringSplitter,
    BytestringSplittingError,
    PartiallyKwargifiedBytes,
    VariableLengthBytestring
)
from constant_sorrow import constant_or_bytes
@@ -41,12 +39,15 @@ from constant_sorrow.constants import (
    RELAX,
    UNKNOWN_VERSION
)
from cryptography.x509 import Certificate
from cryptography.x509 import Certificate, load_pem_x509_certificate
from cryptography.hazmat.backends import default_backend
from eth_utils import to_checksum_address
from requests.exceptions import SSLError
from twisted.internet import reactor, task
from twisted.internet.defer import Deferred

from nucypher.core import NodeMetadata

from nucypher.acumen.nicknames import Nickname
from nucypher.acumen.perception import FleetSensor
from nucypher.blockchain.economics import EconomicsFactory
@@ -64,7 +65,7 @@ from nucypher.crypto.utils import recover_address_eip_191, verify_eip_191
from nucypher.network import LEARNING_LOOP_VERSION
from nucypher.network.exceptions import NodeSeemsToBeDown
from nucypher.network.middleware import RestMiddleware
from nucypher.network.protocols import SuspiciousActivity
from nucypher.network.protocols import SuspiciousActivity, InterfaceInfo
from nucypher.utilities.logging import Logger

TEACHER_NODES = {
@@ -78,20 +79,22 @@ TEACHER_NODES = {
}


class NodeSprout(PartiallyKwargifiedBytes):
class NodeSprout:
    """
    An abridged node class designed for optimization of instantiation of > 100 nodes simultaneously.
    """
    verified_node = False

    def __init__(self, node_metadata):
        super().__init__(node_metadata)
        self._metadata = node_metadata

        # cached properties
        self._checksum_address = None
        self._nickname = None
        self._hash = None
        self.timestamp = maya.MayaDT(
            self.timestamp)  # Weird for this to be in init. maybe this belongs in the splitter also.
        self._repr = None
        self._rest_interface = None

        self._is_finishing = False
        self._finishing_mutex = Queue()
@@ -103,35 +106,19 @@ class NodeSprout(PartiallyKwargifiedBytes):
        return bytes(self.stamp) == bytes(other_stamp)

    def __hash__(self):
        return int.from_bytes(bytes(self.stamp), byteorder="big")
        if not self._hash:
            self._hash = int.from_bytes(bytes(self.stamp), byteorder="big")
        return self._hash

    def __repr__(self):
        if not self._repr:
            self._repr = f"({self.__class__.__name__})⇀{self.nickname}↽ ({self.checksum_address})"
        return self._repr

    def __bytes__(self):
        b = super().__bytes__()

        # We assume that the TEACHER_VERSION of this codebase is the version for this NodeSprout.
        # This is probably true, right? Might need to be re-examined someday if we have
        # different node types of different versions.
        version = Teacher.TEACHER_VERSION.to_bytes(2, "big")
        return version + b

    @property
    def stamp(self) -> SignatureStamp:
        return SignatureStamp(self.verifying_key)

    @property
    def domain(self) -> str:
        domain_bytes = PartiallyKwargifiedBytes.__getattr__(self, "domain")
        return domain_bytes.decode("utf-8")

    @property
    def checksum_address(self):
        if not self._checksum_address:
            self._checksum_address = to_checksum_address(self.public_address)
            self._checksum_address = to_checksum_address(self._metadata.public_address)
        return self._checksum_address

    @property
@@ -140,9 +127,60 @@ class NodeSprout(PartiallyKwargifiedBytes):
            self._nickname = Nickname.from_seed(self.checksum_address)
        return self._nickname

    @property
    def rest_interface(self):
        if not self._rest_interface:
            self._rest_interface = InterfaceInfo(self._metadata.host, self._metadata.port)
        return self._rest_interface

    def rest_url(self):
        return self.rest_interface.uri

    def metadata(self):
        return self._metadata

    @property
    def verifying_key(self):
        return self._metadata.verifying_key

    @property
    def encrypting_key(self):
        return self._metadata.encrypting_key

    @property
    def decentralized_identity_evidence(self):
        return self._metadata.decentralized_identity_evidence

    @property
    def public_address(self):
        return self._metadata.public_address

    @property
    def timestamp(self):
        return maya.MayaDT(self._metadata.timestamp_epoch)

    @property
    def stamp(self) -> SignatureStamp:
        return SignatureStamp(self._metadata.verifying_key)

    @property
    def domain(self) -> str:
        return self._metadata.domain

    def finish(self):
        from nucypher.characters.lawful import Ursula
        return Ursula.from_public_keys(rest_host=self._metadata.host,
                                       rest_port=self._metadata.port,
                                       checksum_address=self.checksum_address,
                                       domain=self._metadata.domain,
                                       timestamp=self.timestamp,
                                       interface_signature=self._metadata.interface_signature,
                                       decentralized_identity_evidence=self._metadata.decentralized_identity_evidence,
                                       verifying_key=self._metadata.verifying_key,
                                       encrypting_key=self._metadata.encrypting_key,
                                       certificate=load_pem_x509_certificate(self._metadata.certificate_bytes, backend=default_backend())
                                       )

    def mature(self):
        if self._is_finishing:
            return self._finishing_mutex.get()
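
A hedged usage sketch of the rewritten NodeSprout, assuming `metadata_bytes` holds a serialized NodeMetadata received over the wire (for example from /public_information):

from nucypher.core import NodeMetadata
from nucypher.network.nodes import NodeSprout

# metadata_bytes: assumed input; a serialized NodeMetadata as produced by bytes(node.metadata())
sprout = NodeSprout(NodeMetadata.from_bytes(metadata_bytes))
print(sprout.checksum_address, sprout.rest_interface.uri)  # cheap accessors; no certificate parsed yet
sprout.mature()  # builds the full Ursula via finish() on first real use
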
@@ -196,8 +234,6 @@ class Learner:
    LEARNER_VERSION = LEARNING_LOOP_VERSION
    LOWEST_COMPATIBLE_VERSION = 2  # Disallow versions lower than this

    node_splitter = BytestringSplitter(VariableLengthBytestring)
    version_splitter = BytestringSplitter((int, 2, {"byteorder": "big"}))
    tracker_class = FleetSensor

    invalid_metadata_message = "{} has invalid metadata. The node's stake may have ended, or it is transitioning to a new interface. Ignoring."
@@ -791,7 +827,7 @@ class Learner:
            # TODO: Bucket separately and report.
            unresponsive_nodes.add(current_teacher)  # This does nothing.
            self.known_nodes.mark_as(current_teacher.InvalidNode, current_teacher)
            self.log.warn(f"Teacher {str(current_teacher)} is invalid (hex={bytes(current_teacher).hex()}):{e}.")
            self.log.warn(f"Teacher {str(current_teacher)} is invalid (hex={bytes(current_teacher.metadata()).hex()}):{e}.")
            self.suspicious_activities_witnessed['vladimirs'].append(current_teacher)
            return
        except RuntimeError as e:
@@ -802,12 +838,12 @@ class Learner:
            else:
                self.log.warn(
                    f"Unhandled error while learning from {str(current_teacher)} "
                    f"(hex={bytes(current_teacher).hex()}):{e}.")
                    f"(hex={bytes(current_teacher.metadata()).hex()}):{e}.")
                raise
        except Exception as e:
            self.log.warn(
                f"Unhandled error while learning from {str(current_teacher)} "
                f"(hex={bytes(current_teacher).hex()}):{e}.")  # To track down 2345 / 1698
                f"(hex={bytes(current_teacher.metadata()).hex()}):{e}.")  # To track down 2345 / 1698
            raise
        finally:
            # Is cycling happening in the right order?
@@ -865,10 +901,10 @@ class Learner:
        # so it has been removed. When we create a new Ursula bytestring version, let's put the check
        # somewhere more performant, like mature() or verify_node().

        sprouts = self.node_class.batch_from_bytes(node_payload)
        nodes = NodeMetadata.batch_from_bytes(node_payload)
        sprouts = [NodeSprout(node) for node in nodes]

        for sprout in sprouts:
            fail_fast = True  # TODO NRN
            try:
                node_or_false = self.remember_node(sprout,
                                                   record_fleet_state=False,
@@ -987,15 +1023,6 @@ class Teacher:
    class WrongMode(TypeError):
        """Raised when a Character tries to use another Character as decentralized when the latter is federated_only."""

    class UnexpectedVersion(TypeError):
        """Raised when deserializing a Character from a unexpected and incompatible version."""

    class IsFromTheFuture(UnexpectedVersion):
        """Raised when deserializing a Character from a future version."""

    class AreYouFromThePast(UnexpectedVersion):
        """Raised when deserializing a Character from a previous, now unsupported version."""

    unknown_version_message = "{} purported to be of version {}, but we're version {}."
    really_unknown_version_message = "Unable to glean address from node that purported to be version {}. " \
                                     "We're version {}."
@@ -1032,9 +1059,9 @@ class Teacher:

    def bytestring_of_known_nodes(self):
        payload = self.known_nodes.snapshot()
        ursulas_as_vbytes = (VariableLengthBytestring(n) for n in self.known_nodes)
        ursulas_as_vbytes = (VariableLengthBytestring(bytes(n.metadata())) for n in self.known_nodes)
        ursulas_as_bytes = bytes().join(bytes(u) for u in ursulas_as_vbytes)
        ursulas_as_bytes += VariableLengthBytestring(bytes(self))
        ursulas_as_bytes += VariableLengthBytestring(bytes(self.metadata()))

        payload += ursulas_as_bytes
        return payload
@@ -1192,9 +1219,7 @@ class Teacher:
                                                                     port=self.rest_interface.port,
                                                                     certificate_filepath=certificate_filepath)

        version, node_bytes = self.version_splitter(response_data, return_remainder=True)

        sprout = self.payload_splitter(node_bytes, partial=True)
        sprout = NodeSprout(NodeMetadata.from_bytes(response_data))

        verifying_keys_match = sprout.verifying_key == self.public_keys(SigningPower)
        encrypting_keys_match = sprout.encrypting_key == self.public_keys(DecryptingPower)
@@ -1236,6 +1261,11 @@ class Teacher:
        """
        Checks that the interface info is valid for this node's canonical address.
        """

        # TODO: move to NodeMetadata
        # Also: all this info we're verifying came in the same package as the verifying key itself
        # (in NodeMetadata). So what's the point? Of course it's going to be verified successfully.

        interface_info_message = self._signable_interface_info_message()  # Contains canonical address.
        message = self.timestamp_bytes() + interface_info_message
        interface_is_valid = self._interface_signature.verify(self.public_keys(SigningPower), message)
@@ -1246,7 +1276,11 @@ class Teacher:
            raise self.InvalidNode("Interface is not valid")

    def _signable_interface_info_message(self):
        message = self.canonical_public_address + self.rest_interface
        message = (
            self.canonical_public_address +
            self.rest_interface.host.encode('utf-8') +
            self.rest_interface.port.to_bytes(2, 'big')
        )
        return message

    def _sign_and_date_interface_info(self):
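
A sketch of the message the interface signature now covers, combining `_validate_interface` and the new `_signable_interface_info_message` above; the 4-byte big-endian timestamp matches how `timestamp_bytes()` is parsed elsewhere in this diff:

def signable_interface_message(timestamp_epoch: int,
                               canonical_public_address: bytes,
                               host: str,
                               port: int) -> bytes:
    # sign(timestamp + canonical_public_address + host + port), per the NodeMetadata field comment above.
    return (timestamp_epoch.to_bytes(4, 'big') +
            canonical_public_address +
            host.encode('utf-8') +
            port.to_bytes(2, 'big'))
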
@@ -62,7 +62,6 @@ def parse_node_uri(uri: str):


class InterfaceInfo:
    expected_bytes_length = lambda: VariableLengthBytestring

    def __init__(self, host, port) -> None:
        loopback, localhost = LOOPBACK_ADDRESS, 'localhost'
@@ -73,13 +72,6 @@ class InterfaceInfo:
        yield self.host
        yield self.port

    @classmethod
    def from_bytes(cls, url_string):
        host_bytes, port_bytes = url_string.split(b':', 1)
        port = int.from_bytes(port_bytes, "big")
        host = host_bytes.decode("utf-8")
        return cls(host=host, port=port)

    @property
    def uri(self):
        return u"{}:{}".format(self.host, self.port)
@@ -88,14 +80,5 @@ class InterfaceInfo:
    def formal_uri(self):
        return u"{}://{}".format('https', self.uri)

    def __bytes__(self):
        return bytes(self.host, encoding="utf-8") + b":" + self.port.to_bytes(4, "big")

    def __add__(self, other):
        return bytes(self) + bytes(other)

    def __radd__(self, other):
        return bytes(other) + bytes(self)

    def __repr__(self):
        return self.uri
@@ -27,7 +27,7 @@ from flask import Flask, Response, jsonify, request
from mako import exceptions as mako_exceptions
from mako.template import Template

from nucypher.core import AuthorizedKeyFrag, ReencryptionRequest, Arrangement, ArrangementResponse, RevocationOrder
from nucypher.core import AuthorizedKeyFrag, ReencryptionRequest, Arrangement, ArrangementResponse, RevocationOrder, NodeMetadata

from nucypher.blockchain.eth.utils import period_to_epoch
from nucypher.config.constants import MAX_UPLOAD_CONTENT_LENGTH
@@ -39,6 +39,7 @@ from nucypher.datastore.models import ReencryptionRequest as ReencryptionRequest
from nucypher.network import LEARNING_LOOP_VERSION
from nucypher.network.exceptions import NodeSeemsToBeDown
from nucypher.network.protocols import InterfaceInfo
from nucypher.network.nodes import NodeSprout
from nucypher.utilities.logging import Logger

HERE = BASE_DIR = Path(__file__).parent
@@ -113,7 +114,7 @@ def _make_rest_app(datastore: Datastore, this_node, domain: str, log: Logger) ->
    @rest_app.route("/public_information")
    def public_information():
        """REST endpoint for public keys and address."""
        response = Response(response=bytes(this_node), mimetype='application/octet-stream')
        response = Response(response=bytes(this_node.metadata()), mimetype='application/octet-stream')
        return response

    @rest_app.route('/node_metadata', methods=["GET"])
@@ -142,10 +143,12 @@ def _make_rest_app(datastore: Datastore, this_node, domain: str, log: Logger) ->
            signature = this_node.stamp(payload)
            return Response(bytes(signature) + payload, headers=headers)

        sprouts = _node_class.batch_from_bytes(request.data)
        sprouts = NodeMetadata.batch_from_bytes(request.data)

        for node in sprouts:
            this_node.remember_node(node)
            this_node.remember_node(NodeSprout(node))

        # TODO: generate a new fleet state here?

        # TODO: What's the right status code here? 202? Different if we already knew about the node(s)?
        return all_known_nodes()
@@ -286,7 +289,8 @@ def _make_rest_app(datastore: Datastore, this_node, domain: str, log: Logger) ->

        elif request.method == 'POST':
            try:
                requesting_ursula = Ursula.from_bytes(request.data)
                requester_metadata = NodeMetadata.from_bytes(request.data)
                requesting_ursula = NodeSprout(requester_metadata)
                requesting_ursula.mature()
            except ValueError:
                return Response({'error': 'Invalid Ursula'}, status=400)
@@ -307,14 +311,14 @@ def _make_rest_app(datastore: Datastore, this_node, domain: str, log: Logger) ->
                # Fetch and store initiator's teacher certificate.
                certificate = this_node.network_middleware.get_certificate(host=initiator_address, port=initiator_port)
                certificate_filepath = this_node.node_storage.store_node_certificate(certificate=certificate)
                requesting_ursula_bytes = this_node.network_middleware.client.node_information(host=initiator_address,
                                                                                               port=initiator_port,
                                                                                               certificate_filepath=certificate_filepath)
                metadata_bytes = this_node.network_middleware.client.node_information(host=initiator_address,
                                                                                      port=initiator_port,
                                                                                      certificate_filepath=certificate_filepath)
                visible_metadata = NodeMetadata.from_bytes(metadata_bytes)
            except NodeSeemsToBeDown:
                return Response({'error': 'Unreachable node'}, status=400)  # ... toasted

            # Compare the results of the outer POST with the inner GET... yum
            if requesting_ursula_bytes == request.data:
            if requester_metadata == visible_metadata:
                return Response(status=200)
            else:
                return Response({'error': 'Suspicious node'}, status=400)
@@ -112,7 +112,7 @@ def test_availability_tracker_integration(blockchain_ursulas, monkeypatch):
        if ursula_were_looking_for:
            raise RestMiddleware.NotFound("Fake Reason")  # Make this node unreachable
        else:
            response = Response(response=bytes(ursula), mimetype='application/octet-stream')
            response = Response(response=bytes(ursula.metadata()), mimetype='application/octet-stream')
            return response

    # apply the monkeypatch for requests.get to mock_get
@@ -76,7 +76,7 @@ def test_vladimir_illegal_interface_key_does_not_propagate(blockchain_ursulas):
    # This Ursula is totally legit...
    ursula_whom_vladimir_will_imitate.verify_node(MockRestMiddleware())

    vladimir.network_middleware.propagate_shitty_interface_id(other_ursula, bytes(vladimir))
    vladimir.network_middleware.propagate_shitty_interface_id(other_ursula, bytes(vladimir.metadata()))

    # So far, Ursula hasn't noticed any Vladimirs.
    assert other_ursula.suspicious_activities_witnessed['vladimirs'] == []
@@ -84,6 +84,7 @@ def test_alice_can_learn_about_a_whole_bunch_of_ursulas(highperf_mocked_alice):
    ended = time.time()
    elapsed = ended - started

    # TODO: probably can be brought down a lot when the core is moved to Rust
    assert elapsed < 6  # 6 seconds is still a little long to discover 4000 out of 5000 nodes, but before starting the optimization that went with this test, this operation took about 18 minutes on jMyles' laptop.
    assert VerificationTracker.node_verifications == 1  # We have only verified the first Ursula.
    assert sum(
@@ -18,6 +18,8 @@
import os
from collections import namedtuple

import pytest

from eth_utils.address import to_checksum_address
from twisted.logger import LogLevel, globalLogPublisher
@@ -27,6 +29,7 @@ from nucypher.characters.base import Character
from tests.utils.middleware import MockRestMiddleware


@pytest.mark.skip("Unmark when the versioning scheme is stabilized")
def test_emit_warning_upon_new_version(lonely_ursula_maker, caplog):
    seed_node, teacher, new_node = lonely_ursula_maker(quantity=3,
                                                       domain="no hardcodes",
@@ -108,6 +111,7 @@ def test_emit_warning_upon_new_version(lonely_ursula_maker, caplog):
        globalLogPublisher.removeObserver(warning_trapper)


@pytest.mark.skip("Unmark when the versioning scheme is stabilized")
def test_node_posts_future_version(federated_ursulas):
    ursula = list(federated_ursulas)[0]
    middleware = MockRestMiddleware()
@@ -59,6 +59,7 @@ versioned_ursulas = {
}


@pytest.mark.skip("Unmark when the versioning scheme is stabilized")
def test_deserialize_ursulas_version_1():
    """
    DON'T 'FIX' THIS TEST IF FAILING, UNLESS YOU KNOW WHAT YOU'RE DOING.
@@ -78,11 +79,12 @@ def test_deserialize_ursulas_version_1():

    with pytest.raises(Teacher.AreYouFromThePast, match=f"purported to be of version 1, "
                                                        f"but we're version {Ursula.LEARNER_VERSION}"):
        _resurrected_ursula = Ursula.from_bytes(fossilized_ursula, fail_fast=True)
        _resurrected_ursula = Ursula.from_metadata_bytes(fossilized_ursula, fail_fast=True)

    assert UNKNOWN_VERSION == Ursula.from_bytes(fossilized_ursula, fail_fast=False)
    assert UNKNOWN_VERSION == Ursula.from_metadata_bytes(fossilized_ursula, fail_fast=False)


@pytest.mark.skip("Unmark when the versioning scheme is stabilized")
def test_deserialize_ursulas_version_2():
    """
    DON'T 'FIX' THIS TEST IF FAILING, UNLESS YOU KNOW WHAT YOU'RE DOING.
@@ -100,5 +102,5 @@ def test_deserialize_ursulas_version_2():
    assert version == expected_version
    assert version == Ursula.LEARNER_VERSION

    resurrected_ursula = Ursula.from_bytes(fossilized_ursula, fail_fast=True)
    resurrected_ursula = Ursula.from_metadata_bytes(fossilized_ursula, fail_fast=True)
    assert TEMPORARY_DOMAIN == resurrected_ursula.domain
@@ -40,7 +40,7 @@ def test_federated_nodes_connect_via_tls_and_verify(lonely_ursula_maker):

    def check_node_with_cert(node, cert_file):
        response = requests.get("https://{}/public_information".format(node.rest_url()), verify=cert_file)
        ursula = Ursula.from_bytes(response.content)
        ursula = Ursula.from_metadata_bytes(response.content)
        assert ursula == node

    try:
|
@ -36,7 +36,7 @@ def test_alice_creates_policy_with_correct_hrac(federated_alice, federated_bob,
|
|||
|
||||
def test_alice_does_not_update_with_old_ursula_info(federated_alice, federated_ursulas):
|
||||
ursula = list(federated_ursulas)[0]
|
||||
old_metadata = bytes(ursula)
|
||||
old_metadata = bytes(ursula.metadata())
|
||||
|
||||
# Alice has remembered Ursula.
|
||||
assert federated_alice.known_nodes[ursula.checksum_address] == ursula
|
||||
|
@@ -45,9 +45,9 @@ def test_alice_does_not_update_with_old_ursula_info(federated_alice, federated_u
    ursula._sign_and_date_interface_info()

    # Indeed, her metadata is not the same now.
    assert bytes(ursula) != old_metadata
    assert bytes(ursula.metadata()) != old_metadata

    old_ursula = Ursula.from_bytes(old_metadata)
    old_ursula = Ursula.from_metadata_bytes(old_metadata)

    # Once Alice learns about Ursula's updated info...
    federated_alice.remember_node(ursula)
@@ -55,5 +55,5 @@ def test_alice_does_not_update_with_old_ursula_info(federated_alice, federated_u
    # ...she can't learn about old ursula anymore.
    federated_alice.remember_node(old_ursula)

    new_metadata = bytes(federated_alice.known_nodes[ursula.checksum_address])
    new_metadata = bytes(federated_alice.known_nodes[ursula.checksum_address].metadata())
    assert new_metadata != old_metadata
@@ -127,7 +127,7 @@ class NotACert:
        return NotAPublicKey()


mock_cert_loading = patch("nucypher.characters.lawful.load_pem_x509_certificate",
mock_cert_loading = patch("nucypher.network.nodes.load_pem_x509_certificate",
                          new=lambda *args, **kwargs: NotACert())
@@ -20,7 +20,7 @@ from nucypher.characters.lawful import Ursula

def test_serialize_ursula(federated_ursulas):
    ursula = federated_ursulas.pop()
    ursula_as_bytes = bytes(ursula)
    ursula_object = Ursula.from_bytes(ursula_as_bytes)
    ursula_as_bytes = bytes(ursula.metadata())
    ursula_object = Ursula.from_metadata_bytes(ursula_as_bytes)
    assert ursula == ursula_object
    ursula.stop()
@@ -60,7 +60,7 @@ class Dummy:  # Teacher
    def rest_url(self):
        return MOCK_IP_ADDRESS

    def __bytes__(self):
    def metadata(self):
        return self.checksum_address.encode()
@@ -29,5 +29,5 @@ def test_print_ursulas_bytes(blockchain_ursulas):

    print(f"\nursulas_v{Learner.LEARNER_VERSION} = (")
    for ursula in blockchain_ursulas:
        print(f" '{bytes(ursula).hex()}',")
        print(f" '{bytes(ursula.metadata()).hex()}',")
    print(")")