mirror of https://github.com/nucypher/nucypher.git
Merge pull request #3117 from derekpierre/coordinator-2
Update `Coordinator` usage and `ferveo==v0.1.8`pull/3088/head
commit
20d750a1c6
2
Pipfile
2
Pipfile
|
@ -14,7 +14,7 @@ hendrix = ">=4.0"
|
|||
nucypher-core = ">=0.7.0"
|
||||
# Cryptography
|
||||
cryptography = ">=3.2"
|
||||
ferveo = ">=0.1.7"
|
||||
ferveo = ">=0.1.8"
|
||||
mnemonic = "*"
|
||||
pynacl= ">=1.4.0"
|
||||
pyopenssl = "*"
|
||||
|
|
|
@ -4,7 +4,7 @@ from typing import List, Optional, Tuple, Union
|
|||
|
||||
import maya
|
||||
from eth_typing import ChecksumAddress
|
||||
from ferveo_py import AggregatedTranscript, Ciphertext, ExternalValidator, PublicKey
|
||||
from ferveo_py import AggregatedTranscript, Ciphertext, PublicKey, Validator
|
||||
from hexbytes import HexBytes
|
||||
from web3 import Web3
|
||||
from web3.types import TxReceipt
|
||||
|
@ -308,7 +308,7 @@ class Ritualist(BaseActor):
|
|||
self,
|
||||
ritual: CoordinatorAgent.Ritual,
|
||||
timeout: int = 60
|
||||
) -> List[Tuple[ExternalValidator, Transcript]]:
|
||||
) -> List[Tuple[Validator, Transcript]]:
|
||||
|
||||
validators = [n[0] for n in ritual.transcripts]
|
||||
if timeout > 0:
|
||||
|
@ -323,7 +323,7 @@ class Ritualist(BaseActor):
|
|||
for staking_provider_address, transcript_bytes in ritual.transcripts:
|
||||
if self.checksum_address == staking_provider_address:
|
||||
# Local
|
||||
external_validator = ExternalValidator(
|
||||
external_validator = Validator(
|
||||
address=self.checksum_address,
|
||||
public_key=self.ritual_power.public_key()
|
||||
)
|
||||
|
@ -335,21 +335,24 @@ class Ritualist(BaseActor):
|
|||
raise self.ActorError(f"Unknown node {staking_provider_address}")
|
||||
remote_ritualist.mature()
|
||||
public_key = remote_ritualist.public_keys(RitualisticPower)
|
||||
self.log.debug(f"Ferveo public key for {staking_provider_address} is {bytes(public_key).hex()[:-8:-1]}")
|
||||
external_validator = ExternalValidator(address=staking_provider_address, public_key=public_key)
|
||||
self.log.debug(
|
||||
f"Ferveo public key for {staking_provider_address} is {bytes(public_key).hex()[:-8:-1]}"
|
||||
)
|
||||
external_validator = Validator(
|
||||
address=staking_provider_address, public_key=public_key
|
||||
)
|
||||
|
||||
transcript = Transcript.from_bytes(transcript_bytes) if transcript_bytes else None
|
||||
result.append((external_validator, transcript))
|
||||
|
||||
result = sorted(result, key=lambda x: x[0].address)
|
||||
return result
|
||||
|
||||
def publish_transcript(self, ritual_id: int, transcript: Transcript) -> TxReceipt:
|
||||
"""Publish a transcript to publicly available storage."""
|
||||
# look up the node index for this node on the blockchain
|
||||
index = self.coordinator_agent.get_node_index(ritual_id=ritual_id, node=self.checksum_address)
|
||||
receipt = self.coordinator_agent.post_transcript(
|
||||
ritual_id=ritual_id,
|
||||
node_index=index,
|
||||
transcript=bytes(transcript),
|
||||
transacting_power=self.transacting_power
|
||||
)
|
||||
|
@ -363,10 +366,8 @@ class Ritualist(BaseActor):
|
|||
) -> TxReceipt:
|
||||
"""Publish an aggregated transcript to publicly available storage."""
|
||||
# look up the node index for this node on the blockchain
|
||||
index = self.coordinator_agent.get_node_index(ritual_id=ritual_id, node=self.checksum_address)
|
||||
receipt = self.coordinator_agent.post_aggregation(
|
||||
ritual_id=ritual_id,
|
||||
node_index=index,
|
||||
aggregated_transcript=bytes(aggregated_transcript),
|
||||
public_key=public_key,
|
||||
transacting_power=self.transacting_power
|
||||
|
@ -377,12 +378,12 @@ class Ritualist(BaseActor):
|
|||
self,
|
||||
ritual_id: int,
|
||||
initiator: ChecksumAddress,
|
||||
nodes: List[ChecksumAddress],
|
||||
participants: List[ChecksumAddress],
|
||||
timestamp: int,
|
||||
):
|
||||
"""Perform round 1 of the DKG protocol for a given ritual ID on this node."""
|
||||
|
||||
if self.checksum_address not in nodes:
|
||||
if self.checksum_address not in participants:
|
||||
# should never get here
|
||||
self.log.error(
|
||||
f"Not part of ritual {ritual_id}; no need to submit transcripts"
|
||||
|
@ -398,12 +399,14 @@ class Ritualist(BaseActor):
|
|||
# validate the status
|
||||
if status != CoordinatorAgent.Ritual.Status.AWAITING_TRANSCRIPTS:
|
||||
raise self.RitualError(
|
||||
f"ritual #{ritual.id} is not waiting for transcripts; status={status}."
|
||||
f"ritual #{ritual_id} is not waiting for transcripts; status={status}."
|
||||
)
|
||||
|
||||
# validate the active ritual tracker state
|
||||
node_index = self.coordinator_agent.get_node_index(ritual_id=ritual_id, node=self.checksum_address)
|
||||
if ritual.participants[node_index].transcript:
|
||||
participant = self.coordinator_agent.get_participant_from_provider(
|
||||
ritual_id=ritual_id, provider=self.checksum_address
|
||||
)
|
||||
if participant.transcript:
|
||||
raise self.RitualError(
|
||||
f"Node {self.transacting_power.account} has already posted a transcript for ritual {ritual_id}"
|
||||
)
|
||||
|
@ -411,8 +414,11 @@ class Ritualist(BaseActor):
|
|||
|
||||
# gather the cohort
|
||||
nodes, transcripts = list(zip(*self._resolve_validators(ritual)))
|
||||
nodes = sorted(nodes, key=lambda n: n.address)
|
||||
if any(transcripts):
|
||||
self.log.debug(f"ritual #{ritual_id} is in progress {ritual.total_transcripts + 1}/{len(ritual.nodes)}.")
|
||||
self.log.debug(
|
||||
f"ritual #{ritual_id} is in progress {ritual.total_transcripts + 1}/{len(ritual.providers)}."
|
||||
)
|
||||
self.ritual_tracker.refresh(fetch_rituals=[ritual_id])
|
||||
|
||||
# generate a transcript
|
||||
|
@ -437,8 +443,10 @@ class Ritualist(BaseActor):
|
|||
self.dkg_storage.store_transcript_receipt(ritual_id=ritual_id, receipt=receipt)
|
||||
|
||||
arrival = ritual.total_transcripts + 1
|
||||
self.log.debug(f"{self.transacting_power.account[:8]} submitted a transcript for "
|
||||
f"DKG ritual #{ritual_id} ({arrival}/{len(ritual.nodes)})")
|
||||
self.log.debug(
|
||||
f"{self.transacting_power.account[:8]} submitted a transcript for "
|
||||
f"DKG ritual #{ritual_id} ({arrival}/{len(ritual.providers)})"
|
||||
)
|
||||
return receipt
|
||||
|
||||
def perform_round_2(self, ritual_id: int, timestamp: int):
|
||||
|
@ -447,16 +455,15 @@ class Ritualist(BaseActor):
|
|||
# Get the ritual and check the status from the blockchain
|
||||
# TODO Optimize local cache of ritual participants (#3052)
|
||||
ritual = self.coordinator_agent.get_ritual(ritual_id, with_participants=True)
|
||||
if self.checksum_address not in [p.node for p in ritual.participants]:
|
||||
if self.checksum_address not in [p.provider for p in ritual.participants]:
|
||||
raise self.RitualError(
|
||||
f"Node is not part of {ritual.id}; no need to submit aggregated transcript"
|
||||
f"Node is not part of {ritual_id}; no need to submit aggregated transcript"
|
||||
)
|
||||
|
||||
status = self.coordinator_agent.get_ritual_status(ritual_id=ritual_id)
|
||||
|
||||
if status != CoordinatorAgent.Ritual.Status.AWAITING_AGGREGATIONS:
|
||||
raise self.ActorError(
|
||||
f"ritual #{ritual.id} is not waiting for aggregations; status={status}."
|
||||
f"ritual #{ritual_id} is not waiting for aggregations; status={status}."
|
||||
)
|
||||
self.log.debug(
|
||||
f"{self.transacting_power.account[:8]} performing round 2 of DKG ritual #{ritual_id} from blocktime {timestamp}"
|
||||
|
@ -479,7 +486,7 @@ class Ritualist(BaseActor):
|
|||
)
|
||||
except Exception as e:
|
||||
self.log.debug(f"Failed to aggregate transcripts for ritual #{ritual_id}: {str(e)}")
|
||||
raise self.ActorError(f"Failed to aggregate transcripts: {str(e)}")
|
||||
raise e
|
||||
else:
|
||||
aggregated_transcript, dkg_public_key, params = result
|
||||
|
||||
|
@ -500,9 +507,11 @@ class Ritualist(BaseActor):
|
|||
)
|
||||
|
||||
# logging
|
||||
self.log.debug(f"{self.transacting_power.account[:8]} aggregated a transcript for "
|
||||
f"DKG ritual #{ritual_id} ({total}/{len(ritual.nodes)})")
|
||||
if total >= len(ritual.nodes):
|
||||
self.log.debug(
|
||||
f"{self.transacting_power.account[:8]} aggregated a transcript for "
|
||||
f"DKG ritual #{ritual_id} ({total}/{len(ritual.providers)})"
|
||||
)
|
||||
if total >= len(ritual.providers):
|
||||
self.log.debug(f"DKG ritual #{ritual_id} should now be finalized")
|
||||
|
||||
return receipt
|
||||
|
@ -517,7 +526,7 @@ class Ritualist(BaseActor):
|
|||
ritual = self.coordinator_agent.get_ritual(ritual_id)
|
||||
status = self.coordinator_agent.get_ritual_status(ritual_id=ritual_id)
|
||||
if status != CoordinatorAgent.Ritual.Status.FINALIZED:
|
||||
raise self.ActorError(f"ritual #{ritual.id} is not finalized.")
|
||||
raise self.ActorError(f"ritual #{ritual_id} is not finalized.")
|
||||
|
||||
nodes, transcripts = list(zip(*self._resolve_validators(ritual)))
|
||||
if not all(transcripts):
|
||||
|
|
|
@ -10,7 +10,7 @@ from constant_sorrow.constants import CONTRACT_ATTRIBUTE # type: ignore
|
|||
from constant_sorrow.constants import CONTRACT_CALL, TRANSACTION
|
||||
from eth_typing.evm import ChecksumAddress
|
||||
from eth_utils.address import to_checksum_address
|
||||
from ferveo_py import PublicKey
|
||||
from ferveo_py.ferveo_py import DkgPublicKey
|
||||
from web3.contract.contract import Contract, ContractFunction
|
||||
from web3.types import Timestamp, TxParams, TxReceipt, Wei
|
||||
|
||||
|
@ -552,37 +552,53 @@ class CoordinatorAgent(EthereumContractAgent):
|
|||
|
||||
@dataclass
|
||||
class Participant:
|
||||
node: ChecksumAddress
|
||||
provider: ChecksumAddress
|
||||
aggregated: bool = False
|
||||
transcript: bytes = bytes()
|
||||
|
||||
id: int
|
||||
class G1Point(NamedTuple):
|
||||
# TODO validation of these if used directly
|
||||
word0: bytes # 32 bytes
|
||||
word1: bytes # 16 bytes
|
||||
|
||||
@classmethod
|
||||
def from_dkg_public_key(cls, public_key: DkgPublicKey):
|
||||
return cls.from_bytes(bytes(public_key))
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, data: bytes):
|
||||
# TODO uncomment once ferveo version used in updated
|
||||
# if len(data) != 48:
|
||||
# raise ValueError(f"Invalid bytes ({len(data)}) for G1Point")
|
||||
return cls(word0=data[:32], word1=data[32:48])
|
||||
|
||||
def __bytes__(self):
|
||||
return self.word0 + self.word1
|
||||
|
||||
initiator: ChecksumAddress
|
||||
dkg_size: int
|
||||
init_timestamp: int
|
||||
total_transcripts: int = 0
|
||||
total_aggregations: int = 0
|
||||
aggregated_transcript_hash: bytes = bytes()
|
||||
public_key: G1Point = None
|
||||
aggregation_mismatch: bool = False
|
||||
aggregated_transcript: bytes = bytes()
|
||||
public_key: bytes = bytes()
|
||||
public_key_hash: bytes = bytes()
|
||||
participants: List = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def nodes(self):
|
||||
return [p.node for p in self.participants]
|
||||
def providers(self):
|
||||
return [p.provider for p in self.participants]
|
||||
|
||||
@property
|
||||
def transcripts(self) -> List[Tuple[ChecksumAddress, bytes]]:
|
||||
transcripts = list()
|
||||
for p in self.participants:
|
||||
transcripts.append((p.node, p.transcript))
|
||||
transcripts.append((p.provider, p.transcript))
|
||||
return transcripts
|
||||
|
||||
@property
|
||||
def shares(self) -> int:
|
||||
return len(self.nodes)
|
||||
return len(self.providers)
|
||||
|
||||
@contract_api(CONTRACT_CALL)
|
||||
def get_timeout(self) -> int:
|
||||
|
@ -591,23 +607,21 @@ class CoordinatorAgent(EthereumContractAgent):
|
|||
@contract_api(CONTRACT_CALL)
|
||||
def get_ritual(self, ritual_id: int, with_participants: bool = True) -> Ritual:
|
||||
result = self.contract.functions.rituals(int(ritual_id)).call()
|
||||
if result[0] != ritual_id:
|
||||
raise RuntimeError(f"Ritual {ritual_id} not found")
|
||||
ritual = self.Ritual(
|
||||
id=result[0],
|
||||
initiator=ChecksumAddress(result[1]),
|
||||
dkg_size=result[2],
|
||||
init_timestamp=result[3],
|
||||
total_transcripts=result[4],
|
||||
total_aggregations=result[5],
|
||||
aggregated_transcript_hash=bytes(result[6]),
|
||||
aggregation_mismatch=result[7],
|
||||
aggregated_transcript=bytes(result[8]),
|
||||
public_key=bytes(result[9]),
|
||||
public_key_hash=bytes(result[10]),
|
||||
participants=[]
|
||||
initiator=ChecksumAddress(result[0]),
|
||||
dkg_size=result[1],
|
||||
init_timestamp=result[2],
|
||||
total_transcripts=result[3],
|
||||
total_aggregations=result[4],
|
||||
aggregation_mismatch=result[6],
|
||||
aggregated_transcript=bytes(result[7]),
|
||||
participants=[], # solidity does not return sub-structs
|
||||
)
|
||||
|
||||
# public key
|
||||
ritual.public_key = self.Ritual.G1Point(result[5][0], result[5][1])
|
||||
|
||||
# participants
|
||||
if with_participants:
|
||||
participants = self.get_participants(ritual_id)
|
||||
ritual.participants = participants
|
||||
|
@ -623,10 +637,10 @@ class CoordinatorAgent(EthereumContractAgent):
|
|||
result = self.contract.functions.getParticipants(ritual_id).call()
|
||||
participants = list()
|
||||
for r in result:
|
||||
performance = self.Ritual.Participant(
|
||||
node=ChecksumAddress(r[0]), aggregated=r[1], transcript=bytes(r[2])
|
||||
participant = self.Ritual.Participant(
|
||||
provider=ChecksumAddress(r[0]), aggregated=r[1], transcript=bytes(r[2])
|
||||
)
|
||||
participants.append(performance)
|
||||
participants.append(participant)
|
||||
return participants
|
||||
|
||||
@contract_api(CONTRACT_CALL)
|
||||
|
@ -635,28 +649,40 @@ class CoordinatorAgent(EthereumContractAgent):
|
|||
return result
|
||||
|
||||
@contract_api(CONTRACT_CALL)
|
||||
def get_node_index(self, ritual_id: int, node: ChecksumAddress) -> int:
|
||||
result = self.contract.functions.getNodeIndex(ritual_id, node).call()
|
||||
return result
|
||||
def get_participant_from_provider(
|
||||
self, ritual_id: int, provider: ChecksumAddress
|
||||
) -> Ritual.Participant:
|
||||
result = self.contract.functions.getParticipantFromProvider(
|
||||
ritual_id, provider
|
||||
).call()
|
||||
participant = self.Ritual.Participant(
|
||||
provider=ChecksumAddress(result[0]),
|
||||
aggregated=result[1],
|
||||
transcript=bytes(result[2]),
|
||||
)
|
||||
return participant
|
||||
|
||||
@contract_api(TRANSACTION)
|
||||
def initiate_ritual(self, nodes: List[ChecksumAddress], transacting_power: TransactingPower) -> TxReceipt:
|
||||
contract_function: ContractFunction = self.contract.functions.initiateRitual(nodes=nodes)
|
||||
receipt = self.blockchain.send_transaction(contract_function=contract_function,
|
||||
transacting_power=transacting_power)
|
||||
def initiate_ritual(
|
||||
self, providers: List[ChecksumAddress], transacting_power: TransactingPower
|
||||
) -> TxReceipt:
|
||||
contract_function: ContractFunction = self.contract.functions.initiateRitual(
|
||||
providers
|
||||
)
|
||||
receipt = self.blockchain.send_transaction(
|
||||
contract_function=contract_function, transacting_power=transacting_power
|
||||
)
|
||||
return receipt
|
||||
|
||||
@contract_api(TRANSACTION)
|
||||
def post_transcript(
|
||||
self,
|
||||
ritual_id: int,
|
||||
node_index: int,
|
||||
transcript: bytes,
|
||||
transacting_power: TransactingPower,
|
||||
) -> TxReceipt:
|
||||
contract_function: ContractFunction = self.contract.functions.postTranscript(
|
||||
ritualId=ritual_id,
|
||||
nodeIndex=node_index,
|
||||
transcript=transcript
|
||||
)
|
||||
receipt = self.blockchain.send_transaction(contract_function=contract_function,
|
||||
|
@ -667,16 +693,14 @@ class CoordinatorAgent(EthereumContractAgent):
|
|||
def post_aggregation(
|
||||
self,
|
||||
ritual_id: int,
|
||||
node_index: int,
|
||||
aggregated_transcript: bytes,
|
||||
public_key: PublicKey,
|
||||
public_key: DkgPublicKey,
|
||||
transacting_power: TransactingPower,
|
||||
) -> TxReceipt:
|
||||
contract_function: ContractFunction = self.contract.functions.postAggregation(
|
||||
ritualId=ritual_id,
|
||||
nodeIndex=node_index,
|
||||
aggregatedTranscript=aggregated_transcript,
|
||||
publicKey=bytes(public_key),
|
||||
publicKey=self.Ritual.G1Point.from_dkg_public_key(public_key),
|
||||
)
|
||||
receipt = self.blockchain.send_transaction(
|
||||
contract_function=contract_function,
|
||||
|
|
|
@ -134,7 +134,7 @@ class ActiveRitualTracker:
|
|||
)
|
||||
|
||||
expected_start_block = w3.eth.get_block(
|
||||
latest_block.number - number_of_blocks_in_the_past
|
||||
max(0, latest_block.number - number_of_blocks_in_the_past)
|
||||
)
|
||||
while (
|
||||
expected_start_block.number > 0
|
||||
|
@ -188,9 +188,9 @@ class ActiveRitualTracker:
|
|||
"""Secondary filtration of events."""
|
||||
name, args = event.event, event.args
|
||||
event_type = getattr(self.contract.events, event.event)
|
||||
if hasattr(args, "nodes"):
|
||||
if hasattr(args, "participants"):
|
||||
# Filter out events that are not for me
|
||||
if self.ritualist.checksum_address not in args.nodes:
|
||||
if self.ritualist.checksum_address not in args.participants:
|
||||
self.log.debug(f"Event {name} is not for me, skipping")
|
||||
return None, event_type
|
||||
if not self.__action_required(event_type, event.blockNumber, args.ritualId):
|
||||
|
@ -221,9 +221,10 @@ class ActiveRitualTracker:
|
|||
# do not use abbreviations in event names (e.g. "DKG" -> "d_k_g")
|
||||
formatted_kwargs = {camel_case_to_snake(k): v for k, v in event.args.items()}
|
||||
timestamp = int(get_block_when(event.blockNumber).timestamp())
|
||||
ritual = self.get_ritual(ritual_id=event.args.ritualId)
|
||||
self.add_ritual(ritual=ritual)
|
||||
self.active_tasks.add((event_type, ritual.id))
|
||||
ritual_id = event.args.ritualId
|
||||
ritual = self.get_ritual(ritual_id=ritual_id)
|
||||
self.add_ritual(ritual_id=ritual_id, ritual=ritual)
|
||||
self.active_tasks.add((event_type, ritual_id))
|
||||
d = self.__execute_round(event_type=event_type, timestamp=timestamp, **formatted_kwargs)
|
||||
return d
|
||||
|
||||
|
@ -256,11 +257,8 @@ class ActiveRitualTracker:
|
|||
end_block = self.scanner.get_suggested_scan_end_block()
|
||||
self.__scan(start_block, end_block, self.ritualist.transacting_power.account)
|
||||
|
||||
# def get_node_index(self, ritual_id: int, node: ChecksumAddress) -> int:
|
||||
# return self.rituals[ritual_id].nodes.index(node)
|
||||
|
||||
def add_ritual(self, ritual):
|
||||
self.rituals[ritual.id] = ritual
|
||||
def add_ritual(self, ritual_id, ritual):
|
||||
self.rituals[ritual_id] = ritual
|
||||
return ritual
|
||||
|
||||
def track_ritual(self, ritual_id: int, ritual=None, transcript=None, confirmations=None, checkin_timestamp=None):
|
||||
|
@ -269,7 +267,7 @@ class ActiveRitualTracker:
|
|||
except KeyError:
|
||||
if not ritual:
|
||||
raise ValueError("Ritual not found and no new ritual provided")
|
||||
_ritual = self.add_ritual(ritual=ritual)
|
||||
_ritual = self.add_ritual(ritual_id=ritual_id, ritual=ritual)
|
||||
if ritual_id and ritual:
|
||||
# replace the whole ritual
|
||||
self.rituals[ritual_id] = ritual
|
||||
|
|
|
@ -1,57 +1,8 @@
|
|||
import contextlib
|
||||
import ferveo_py
|
||||
import json
|
||||
import maya
|
||||
import time
|
||||
from constant_sorrow import constants
|
||||
from constant_sorrow.constants import (
|
||||
INVALIDATED,
|
||||
NOT_SIGNED,
|
||||
PUBLIC_ONLY,
|
||||
READY,
|
||||
STRANGER_ALICE,
|
||||
)
|
||||
from cryptography.hazmat.primitives.serialization import Encoding
|
||||
from cryptography.x509 import Certificate, NameOID
|
||||
from eth_typing.evm import ChecksumAddress
|
||||
from eth_utils import to_checksum_address
|
||||
from ferveo_py import (
|
||||
Ciphertext,
|
||||
DecryptionShareSimple,
|
||||
combine_decryption_shares_simple,
|
||||
decrypt_with_shared_secret,
|
||||
ExternalValidator,
|
||||
Transcript,
|
||||
DkgPublicParameters,
|
||||
DecryptionSharePrecomputed
|
||||
)
|
||||
from nucypher_core import (
|
||||
Context,
|
||||
ThresholdDecryptionRequest,
|
||||
ThresholdDecryptionResponse,
|
||||
)
|
||||
from nucypher_core import (
|
||||
HRAC,
|
||||
Address,
|
||||
Conditions,
|
||||
EncryptedKeyFrag,
|
||||
EncryptedTreasureMap,
|
||||
MessageKit,
|
||||
NodeMetadata,
|
||||
NodeMetadataPayload,
|
||||
ReencryptionResponse,
|
||||
TreasureMap,
|
||||
)
|
||||
from nucypher_core.umbral import (
|
||||
PublicKey,
|
||||
VerifiedKeyFrag,
|
||||
reencrypt,
|
||||
RecoverableSignature
|
||||
)
|
||||
from pathlib import Path
|
||||
from queue import Queue
|
||||
from twisted.internet import reactor
|
||||
from twisted.logger import Logger
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
|
@ -64,13 +15,66 @@ from typing import (
|
|||
Tuple,
|
||||
Union,
|
||||
)
|
||||
|
||||
import ferveo_py
|
||||
import maya
|
||||
from constant_sorrow import constants
|
||||
from constant_sorrow.constants import (
|
||||
INVALIDATED,
|
||||
NOT_SIGNED,
|
||||
PUBLIC_ONLY,
|
||||
READY,
|
||||
STRANGER_ALICE,
|
||||
)
|
||||
from cryptography.hazmat.primitives.serialization import Encoding
|
||||
from cryptography.x509 import Certificate, NameOID
|
||||
from eth_typing.evm import ChecksumAddress
|
||||
from eth_utils import to_checksum_address
|
||||
from ferveo_py.ferveo_py import (
|
||||
Ciphertext,
|
||||
DecryptionSharePrecomputed,
|
||||
DecryptionShareSimple,
|
||||
DkgPublicParameters,
|
||||
Transcript,
|
||||
Validator,
|
||||
combine_decryption_shares_precomputed,
|
||||
combine_decryption_shares_simple,
|
||||
decrypt_with_shared_secret,
|
||||
)
|
||||
from nucypher_core import (
|
||||
HRAC,
|
||||
Address,
|
||||
Conditions,
|
||||
Context,
|
||||
EncryptedKeyFrag,
|
||||
EncryptedTreasureMap,
|
||||
MessageKit,
|
||||
NodeMetadata,
|
||||
NodeMetadataPayload,
|
||||
ReencryptionResponse,
|
||||
ThresholdDecryptionRequest,
|
||||
ThresholdDecryptionResponse,
|
||||
TreasureMap,
|
||||
)
|
||||
from nucypher_core.umbral import (
|
||||
PublicKey,
|
||||
RecoverableSignature,
|
||||
VerifiedKeyFrag,
|
||||
reencrypt,
|
||||
)
|
||||
from twisted.internet import reactor
|
||||
from twisted.logger import Logger
|
||||
from web3.types import TxReceipt
|
||||
|
||||
import nucypher
|
||||
from nucypher.acumen.nicknames import Nickname
|
||||
from nucypher.acumen.perception import ArchivedFleetState, RemoteUrsulaStatus
|
||||
from nucypher.blockchain.eth.actors import Operator, PolicyAuthor, Ritualist
|
||||
from nucypher.blockchain.eth.agents import ContractAgency, PREApplicationAgent, CoordinatorAgent
|
||||
from nucypher.blockchain.eth.agents import (
|
||||
ContractAgency,
|
||||
CoordinatorAgent,
|
||||
PREApplicationAgent,
|
||||
)
|
||||
from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
|
||||
from nucypher.blockchain.eth.registry import BaseContractRegistry
|
||||
from nucypher.blockchain.eth.signers.software import Web3Signer
|
||||
|
@ -82,16 +86,16 @@ from nucypher.characters.banners import (
|
|||
)
|
||||
from nucypher.characters.base import Character, Learner
|
||||
from nucypher.config.storages import NodeStorage
|
||||
from nucypher.crypto.ferveo.dkg import aggregate_transcripts, FerveoVariant
|
||||
from nucypher.crypto.ferveo.dkg import FerveoVariant, aggregate_transcripts
|
||||
from nucypher.crypto.keypairs import HostingKeypair
|
||||
from nucypher.crypto.powers import (
|
||||
DecryptingPower,
|
||||
DelegatingPower,
|
||||
PowerUpError,
|
||||
RitualisticPower,
|
||||
SigningPower,
|
||||
TLSHostingPower,
|
||||
TransactingPower,
|
||||
RitualisticPower,
|
||||
)
|
||||
from nucypher.network.exceptions import NodeSeemsToBeDown
|
||||
from nucypher.network.middleware import RestMiddleware
|
||||
|
@ -560,13 +564,18 @@ class Bob(Character):
|
|||
def gather_decryption_shares(
|
||||
self,
|
||||
ritual_id: int,
|
||||
cohort: List['Ursula'],
|
||||
cohort: List["Ursula"],
|
||||
ciphertext: Ciphertext,
|
||||
lingo: LingoList,
|
||||
threshold: int,
|
||||
variant: FerveoVariant,
|
||||
context: Optional[dict] = None,
|
||||
) -> List[DecryptionShareSimple]:
|
||||
if variant == FerveoVariant.PRECOMPUTED:
|
||||
share_type = DecryptionSharePrecomputed
|
||||
elif variant == FerveoVariant.SIMPLE:
|
||||
share_type = DecryptionShareSimple
|
||||
|
||||
gathered_shares = list()
|
||||
for ursula in cohort:
|
||||
conditions = Conditions(json.dumps(lingo))
|
||||
|
@ -589,24 +598,22 @@ class Bob(Character):
|
|||
self.log.warn(f"Node {ursula} returned {response.status_code}.")
|
||||
continue
|
||||
|
||||
decryption_response = ThresholdDecryptionResponse.from_bytes(response.content)
|
||||
decryption_share = DecryptionShareSimple.from_bytes(decryption_response.decryption_share)
|
||||
decryption_response = ThresholdDecryptionResponse.from_bytes(
|
||||
response.content
|
||||
)
|
||||
decryption_share = share_type.from_bytes(
|
||||
decryption_response.decryption_share
|
||||
)
|
||||
gathered_shares.append(decryption_share)
|
||||
self.log.debug(f"Got {len(gathered_shares)}/{threshold} shares so far...")
|
||||
|
||||
if len(gathered_shares) >= threshold:
|
||||
self.log.debug(f"Got enough shares to decrypt.")
|
||||
if variant == FerveoVariant.PRECOMPUTED:
|
||||
# If we have enough shares, we can stop.
|
||||
break
|
||||
elif variant == FerveoVariant.SIMPLE:
|
||||
# all shares are needed to decrypt.
|
||||
continue
|
||||
else:
|
||||
raise ValueError(f"Unknown variant {variant}")
|
||||
# TODO: Uncomment these lines to reproduce the bug
|
||||
# if variant == FerveoVariant.SIMPLE and (len(gathered_shares) == threshold):
|
||||
# break
|
||||
|
||||
if len(gathered_shares) < threshold:
|
||||
raise Ursula.NotEnoughUrsulas(f"Not enough Ursulas to decrypt")
|
||||
self.log.debug(f"Got enough shares to decrypt.")
|
||||
|
||||
return gathered_shares
|
||||
|
||||
|
@ -652,21 +659,29 @@ class Bob(Character):
|
|||
)
|
||||
|
||||
if not params:
|
||||
# TODO: Bob can call.verify here instead of aggregating the shares.
|
||||
# if the DKG parameters are not provided, we need to
|
||||
# aggregate the transcripts and derive them.
|
||||
params = self.__derive_dkg_parameters(ritual_id, ursulas, ritual, threshold)
|
||||
# TODO: compare the results with the on-chain records (Coordinator).
|
||||
|
||||
return self.__decrypt(shares, ciphertext, conditions, params)
|
||||
return self.__decrypt(shares, ciphertext, conditions, params, variant)
|
||||
|
||||
@staticmethod
|
||||
def __decrypt(
|
||||
shares: List[Union[DecryptionShareSimple, DecryptionSharePrecomputed]],
|
||||
ciphertext: Ciphertext,
|
||||
conditions: LingoList,
|
||||
params: DkgPublicParameters
|
||||
params: DkgPublicParameters,
|
||||
variant: FerveoVariant,
|
||||
):
|
||||
"""decrypt the ciphertext"""
|
||||
if variant == FerveoVariant.PRECOMPUTED:
|
||||
shared_secret = combine_decryption_shares_precomputed(shares)
|
||||
elif variant == FerveoVariant.SIMPLE:
|
||||
shared_secret = combine_decryption_shares_simple(shares, params)
|
||||
else:
|
||||
raise ValueError(f"Invalid variant: {variant}.")
|
||||
conditions = json.dumps(conditions).encode() # aad
|
||||
cleartext = decrypt_with_shared_secret(
|
||||
ciphertext,
|
||||
|
@ -679,6 +694,7 @@ class Bob(Character):
|
|||
@staticmethod
|
||||
def __derive_dkg_parameters(ritual_id: int, ursulas, ritual, threshold) -> DkgPublicParameters:
|
||||
validators = [u.as_external_validator() for u in ursulas]
|
||||
validators = sorted(validators, key=lambda v: v.address)
|
||||
transcripts = [Transcript.from_bytes(t[1]) for t in ritual.transcripts]
|
||||
data = list(zip(validators, transcripts))
|
||||
pvss_aggregated, final_key, params = aggregate_transcripts(
|
||||
|
@ -1007,6 +1023,7 @@ class Ursula(Teacher, Character, Operator, Ritualist):
|
|||
self.stop_learning_loop()
|
||||
self.work_tracker.stop()
|
||||
self._operator_bonded_tracker.stop()
|
||||
self.ritual_tracker.stop()
|
||||
if halt_reactor:
|
||||
reactor.stop()
|
||||
|
||||
|
@ -1263,9 +1280,9 @@ class Ursula(Teacher, Character, Operator, Ritualist):
|
|||
balance_eth=balance_eth,
|
||||
)
|
||||
|
||||
def as_external_validator(self) -> ExternalValidator:
|
||||
"""Returns an ExternalValidator instance for this Ursula for use in DKG operations."""
|
||||
validator = ExternalValidator(
|
||||
def as_external_validator(self) -> Validator:
|
||||
"""Returns an Validator instance for this Ursula for use in DKG operations."""
|
||||
validator = Validator(
|
||||
address=self.checksum_address,
|
||||
public_key=self.public_keys(RitualisticPower)
|
||||
)
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
from enum import Enum
|
||||
from eth_utils import keccak
|
||||
from ferveo_py import *
|
||||
from typing import List, Tuple, Union
|
||||
|
||||
from eth_utils import keccak
|
||||
from ferveo_py.ferveo_py import *
|
||||
|
||||
from nucypher.utilities.logging import Logger
|
||||
|
||||
LOGGER = Logger('ferveo-dkg')
|
||||
|
@ -20,11 +21,11 @@ _VARIANTS = {
|
|||
|
||||
|
||||
def _make_dkg(
|
||||
me: ExternalValidator,
|
||||
me: Validator,
|
||||
ritual_id: int,
|
||||
shares: int,
|
||||
threshold: int,
|
||||
nodes: List[ExternalValidator],
|
||||
nodes: List[Validator],
|
||||
) -> Dkg:
|
||||
dkg = Dkg(
|
||||
tau=ritual_id,
|
||||
|
@ -48,27 +49,26 @@ def derive_public_key(*args, **kwargs):
|
|||
return dkg.final_key
|
||||
|
||||
|
||||
def _validate_pvss_aggregated(pvss_aggregated: AggregatedTranscript, dkg) -> bool:
|
||||
valid = pvss_aggregated.validate(dkg)
|
||||
if not valid:
|
||||
raise Exception("validation failed") # TODO: #3096 better exception handling
|
||||
return valid
|
||||
|
||||
|
||||
def aggregate_transcripts(
|
||||
transcripts: List[Tuple[ExternalValidator, Transcript]],
|
||||
*args, **kwargs
|
||||
transcripts: List[Tuple[Validator, Transcript]], shares: int, *args, **kwargs
|
||||
) -> Tuple[AggregatedTranscript, PublicKey, DkgPublicParameters]:
|
||||
validators = [t[0] for t in transcripts]
|
||||
_dkg = _make_dkg(nodes=validators, *args, **kwargs)
|
||||
_dkg = _make_dkg(nodes=validators, shares=shares, *args, **kwargs)
|
||||
pvss_aggregated = _dkg.aggregate_transcripts(transcripts)
|
||||
pvss_aggregated.validate(_dkg)
|
||||
verify_aggregate(pvss_aggregated, shares, transcripts)
|
||||
LOGGER.debug(f"derived final DKG key {bytes(_dkg.final_key).hex()[:10]} and {keccak(bytes(_dkg.public_params)).hex()[:10]}")
|
||||
return pvss_aggregated, _dkg.final_key, _dkg.public_params
|
||||
|
||||
|
||||
def verify_aggregate(
|
||||
pvss_aggregated: AggregatedTranscript,
|
||||
shares: int,
|
||||
transcripts: List[Tuple[Validator, Transcript]],
|
||||
):
|
||||
pvss_aggregated.verify(shares, transcripts)
|
||||
|
||||
def derive_decryption_share(
|
||||
nodes: List[ExternalValidator],
|
||||
nodes: List[Validator],
|
||||
aggregated_transcript: AggregatedTranscript,
|
||||
keypair: Keypair,
|
||||
ciphertext: Ciphertext,
|
||||
|
|
|
@ -2,31 +2,35 @@ from pathlib import Path
|
|||
from typing import Optional, Union
|
||||
|
||||
import ferveo_py
|
||||
from OpenSSL.SSL import TLSv1_2_METHOD
|
||||
from OpenSSL.crypto import X509
|
||||
from constant_sorrow import constants
|
||||
from cryptography.hazmat.primitives.asymmetric import ec
|
||||
from ferveo_py import Keypair as FerveoKeypair
|
||||
from ferveo_py.ferveo_py import Keypair as FerveoKeypair
|
||||
from hendrix.deploy.tls import HendrixDeployTLS
|
||||
from hendrix.facilities.services import ExistingKeyTLSContextFactory
|
||||
from nucypher_core import (
|
||||
MessageKit,
|
||||
EncryptedTreasureMap,
|
||||
EncryptedKeyFrag,
|
||||
HRAC,
|
||||
EncryptedKeyFrag,
|
||||
EncryptedTreasureMap,
|
||||
MessageKit,
|
||||
TreasureMap,
|
||||
)
|
||||
from nucypher_core.umbral import (
|
||||
SecretKey,
|
||||
PublicKey,
|
||||
SecretKey,
|
||||
Signature,
|
||||
Signer,
|
||||
VerifiedKeyFrag,
|
||||
)
|
||||
from OpenSSL.crypto import X509
|
||||
from OpenSSL.SSL import TLSv1_2_METHOD
|
||||
|
||||
from nucypher.config.constants import MAX_UPLOAD_CONTENT_LENGTH
|
||||
from nucypher.crypto.signing import SignatureStamp, StrangerStamp
|
||||
from nucypher.crypto.tls import _read_tls_certificate, _TLS_CURVE, generate_self_signed_certificate
|
||||
from nucypher.crypto.tls import (
|
||||
_TLS_CURVE,
|
||||
_read_tls_certificate,
|
||||
generate_self_signed_certificate,
|
||||
)
|
||||
from nucypher.crypto.utils import keccak_digest
|
||||
from nucypher.network.resources import get_static_resources
|
||||
|
||||
|
|
|
@ -1,24 +1,30 @@
|
|||
import ferveo_py
|
||||
import inspect
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
import ferveo_py
|
||||
from eth_account._utils.signing import to_standard_signature_bytes
|
||||
from eth_typing.evm import ChecksumAddress
|
||||
from ferveo_py import (
|
||||
Transcript,
|
||||
AggregatedTranscript,
|
||||
ExternalValidator,
|
||||
DecryptionShareSimple,
|
||||
Ciphertext,
|
||||
DecryptionShareSimple,
|
||||
Transcript,
|
||||
Validator,
|
||||
)
|
||||
from hexbytes import HexBytes
|
||||
from nucypher_core.umbral import generate_kfrags, SecretKeyFactory, SecretKey, PublicKey
|
||||
from typing import List, Optional, Tuple, Any
|
||||
from nucypher_core.umbral import PublicKey, SecretKey, SecretKeyFactory, generate_kfrags
|
||||
|
||||
from nucypher.blockchain.eth.decorators import validate_checksum_address
|
||||
from nucypher.blockchain.eth.signers.base import Signer
|
||||
from nucypher.crypto import keypairs
|
||||
from nucypher.crypto.ferveo import dkg
|
||||
from nucypher.crypto.ferveo.dkg import FerveoVariant
|
||||
from nucypher.crypto.keypairs import DecryptingKeypair, SigningKeypair, HostingKeypair, RitualisticKeypair
|
||||
from nucypher.crypto.keypairs import (
|
||||
DecryptingKeypair,
|
||||
HostingKeypair,
|
||||
RitualisticKeypair,
|
||||
SigningKeypair,
|
||||
)
|
||||
|
||||
|
||||
class PowerUpError(TypeError):
|
||||
|
@ -258,7 +264,7 @@ class RitualisticPower(KeyPairBasedPower):
|
|||
) -> DecryptionShareSimple:
|
||||
decryption_share = dkg.derive_decryption_share(
|
||||
ritual_id=ritual_id,
|
||||
me=ExternalValidator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
me=Validator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
shares=shares,
|
||||
threshold=threshold,
|
||||
nodes=nodes,
|
||||
|
@ -280,7 +286,7 @@ class RitualisticPower(KeyPairBasedPower):
|
|||
) -> Transcript:
|
||||
transcript = dkg.generate_transcript(
|
||||
ritual_id=ritual_id,
|
||||
me=ExternalValidator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
me=Validator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
shares=shares,
|
||||
threshold=threshold,
|
||||
nodes=nodes
|
||||
|
@ -297,7 +303,7 @@ class RitualisticPower(KeyPairBasedPower):
|
|||
) -> Tuple[AggregatedTranscript, PublicKey, Any]:
|
||||
aggregated_transcript, public_key, params = dkg.aggregate_transcripts(
|
||||
ritual_id=ritual_id,
|
||||
me=ExternalValidator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
me=Validator(address=checksum_address, public_key=self.keypair.pubkey),
|
||||
shares=shares,
|
||||
threshold=threshold,
|
||||
transcripts=transcripts
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from collections import defaultdict
|
||||
|
||||
from ferveo_py import Transcript, AggregatedTranscript
|
||||
from ferveo_py.ferveo_py import AggregatedTranscript, Transcript
|
||||
from web3.types import TxReceipt
|
||||
|
||||
|
||||
|
|
|
@ -5,7 +5,6 @@ from contextlib import suppress
|
|||
from pathlib import Path
|
||||
from queue import Queue
|
||||
|
||||
from ferveo_py import PublicKey
|
||||
from typing import Callable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import maya
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
from http import HTTPStatus
|
||||
|
||||
import json
|
||||
import weakref
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
|
||||
from constant_sorrow import constants
|
||||
from constant_sorrow.constants import RELAX
|
||||
from ferveo_py import Ciphertext
|
||||
from ferveo_py.ferveo_py import Ciphertext
|
||||
from flask import Flask, Response, jsonify, request
|
||||
from mako import exceptions as mako_exceptions
|
||||
from mako.template import Template
|
||||
|
@ -15,9 +16,8 @@ from nucypher_core import (
|
|||
ReencryptionRequest,
|
||||
RevocationOrder,
|
||||
ThresholdDecryptionRequest,
|
||||
ThresholdDecryptionResponse
|
||||
ThresholdDecryptionResponse,
|
||||
)
|
||||
from pathlib import Path
|
||||
|
||||
from nucypher.config.constants import MAX_UPLOAD_CONTENT_LENGTH
|
||||
from nucypher.crypto.ferveo.dkg import FerveoVariant
|
||||
|
@ -170,7 +170,7 @@ def _make_rest_app(this_node, log: Logger) -> Flask:
|
|||
# TODO: #3052 consider using the DKGStorage cache instead of the coordinator agent
|
||||
# dkg_public_key = this_node.dkg_storage.get_public_key(decryption_request.ritual_id)
|
||||
ritual = this_node.coordinator_agent.get_ritual(decryption_request.id, with_participants=True)
|
||||
participants = [p.node for p in ritual.participants]
|
||||
participants = [p.provider for p in ritual.participants]
|
||||
|
||||
# enforces that the node is part of the ritual
|
||||
if this_node.checksum_address not in participants:
|
||||
|
|
|
@ -31,7 +31,7 @@ eth-rlp==0.3.0 ; python_version >= '3.7' and python_version < '4'
|
|||
eth-tester==0.8.0b3
|
||||
eth-typing==3.3.0 ; python_version < '4' and python_full_version >= '3.7.2'
|
||||
eth-utils==2.1.0
|
||||
ferveo @ git+https://github.com/KPrasch/ferveo.git@7a5c5fc8be49894c0affd0da32b80e29e6e1ee8e#subdirectory=ferveo-python
|
||||
ferveo==0.1.8
|
||||
flask==2.2.5
|
||||
frozenlist==1.3.3 ; python_version >= '3.7'
|
||||
hendrix==4.0.0
|
||||
|
|
|
@ -17,7 +17,7 @@ import time
|
|||
|
||||
import click
|
||||
from constant_sorrow.constants import NO_BLOCKCHAIN_CONNECTION
|
||||
from ferveo_py import DkgPublicKey
|
||||
from ferveo_py.ferveo_py import DkgPublicKey
|
||||
from web3 import Web3
|
||||
|
||||
from nucypher.blockchain.eth.agents import (
|
||||
|
|
|
@ -44,7 +44,7 @@ def test_ursula_ritualist(testerchain, coordinator_agent, cohort, alice, bob):
|
|||
print("==================== INITIALIZING ====================")
|
||||
cohort_staking_provider_addresses = list(u.checksum_address for u in cohort)
|
||||
receipt = coordinator_agent.initiate_ritual(
|
||||
nodes=cohort_staking_provider_addresses,
|
||||
providers=cohort_staking_provider_addresses,
|
||||
transacting_power=alice.transacting_power
|
||||
)
|
||||
return receipt
|
||||
|
@ -91,6 +91,7 @@ def test_ursula_ritualist(testerchain, coordinator_agent, cohort, alice, bob):
|
|||
|
||||
# side channel fake-out by using the datastore from the last node in the cohort
|
||||
# alternatively, we could use the coordinator datastore
|
||||
# TODO get from Coordinator contract (when Ferveo version updated)
|
||||
last_node = cohort[-1]
|
||||
encrypting_key = last_node.dkg_storage.get_public_key(RITUAL_ID)
|
||||
|
||||
|
|
|
@ -3,7 +3,11 @@ import os
|
|||
import pytest
|
||||
from eth_utils import keccak
|
||||
|
||||
from nucypher.blockchain.eth.agents import CoordinatorAgent, ContractAgency, PREApplicationAgent
|
||||
from nucypher.blockchain.eth.agents import (
|
||||
ContractAgency,
|
||||
CoordinatorAgent,
|
||||
PREApplicationAgent,
|
||||
)
|
||||
from nucypher.blockchain.eth.signers.software import Web3Signer
|
||||
from nucypher.crypto.powers import TransactingPower
|
||||
|
||||
|
@ -17,38 +21,47 @@ def agent(testerchain, test_registry) -> CoordinatorAgent:
|
|||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def transcript():
|
||||
return os.urandom(32)
|
||||
def transcripts():
|
||||
return [os.urandom(32), os.urandom(32)]
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def aggregated_transcript():
|
||||
return os.urandom(32)
|
||||
def aggregated_transcripts():
|
||||
return [os.urandom(32), os.urandom(32)]
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def public_key():
|
||||
return os.urandom(104)
|
||||
def public_keys():
|
||||
return [os.urandom(48), os.urandom(48)] # BLS G1Point
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def cohort(testerchain, staking_providers):
|
||||
deployer, someone, *everybody_else = staking_providers
|
||||
return [someone]
|
||||
deployer, cohort_provider_1, cohort_provider_2, *everybody_else = staking_providers
|
||||
cohort_providers = [cohort_provider_1, cohort_provider_2]
|
||||
cohort_providers.sort() # providers must be sorted
|
||||
return cohort_providers
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def ursula(cohort, test_registry):
|
||||
staking_provider = cohort[0]
|
||||
def ursulas(cohort, test_registry):
|
||||
ursulas_for_cohort = []
|
||||
application_agent = ContractAgency.get_agent(
|
||||
PREApplicationAgent, registry=test_registry
|
||||
)
|
||||
return application_agent.get_operator_from_staking_provider(staking_provider)
|
||||
for provider in cohort:
|
||||
operator = application_agent.get_operator_from_staking_provider(provider)
|
||||
ursulas_for_cohort.append(operator)
|
||||
|
||||
return ursulas_for_cohort
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def transacting_power(testerchain, ursula):
|
||||
return TransactingPower(account=ursula, signer=Web3Signer(testerchain.client))
|
||||
def transacting_powers(testerchain, ursulas):
|
||||
return [
|
||||
TransactingPower(account=ursula, signer=Web3Signer(testerchain.client))
|
||||
for ursula in ursulas
|
||||
]
|
||||
|
||||
|
||||
def test_coordinator_properties(agent):
|
||||
|
@ -58,69 +71,71 @@ def test_coordinator_properties(agent):
|
|||
assert not agent._proxy_name # not upgradeable
|
||||
|
||||
|
||||
def test_initiate_ritual(agent, cohort, transacting_power):
|
||||
def test_initiate_ritual(agent, cohort, transacting_powers):
|
||||
number_of_rituals = agent.number_of_rituals()
|
||||
assert number_of_rituals == 0
|
||||
|
||||
receipt = agent.initiate_ritual(
|
||||
nodes=cohort,
|
||||
transacting_power=transacting_power
|
||||
providers=cohort, transacting_power=transacting_powers[0]
|
||||
)
|
||||
assert receipt['status'] == 1
|
||||
start_ritual_event = agent.contract.events.StartRitual().process_receipt(receipt)
|
||||
assert start_ritual_event[0]['args']['nodes'] == cohort
|
||||
assert start_ritual_event[0]["args"]["participants"] == cohort
|
||||
|
||||
number_of_rituals = agent.number_of_rituals()
|
||||
assert number_of_rituals == 1
|
||||
ritual_id = number_of_rituals - 1
|
||||
|
||||
ritual = agent.get_ritual(ritual_id)
|
||||
assert ritual.initiator == transacting_power.account
|
||||
assert ritual.initiator == transacting_powers[0].account
|
||||
|
||||
participants = agent.get_participants(ritual_id)
|
||||
assert [p.node for p in participants] == cohort
|
||||
assert [p.provider for p in participants] == cohort
|
||||
|
||||
|
||||
def test_post_transcript(agent, transcript, transacting_power):
|
||||
def test_post_transcript(agent, transcripts, transacting_powers):
|
||||
ritual_id = agent.number_of_rituals() - 1
|
||||
for i, transacting_power in enumerate(transacting_powers):
|
||||
receipt = agent.post_transcript(
|
||||
ritual_id=ritual_id,
|
||||
node_index=0,
|
||||
transcript=transcript,
|
||||
transacting_power=transacting_power
|
||||
transcript=transcripts[i],
|
||||
transacting_power=transacting_power,
|
||||
)
|
||||
assert receipt["status"] == 1
|
||||
post_transcript_events = agent.contract.events.TranscriptPosted().process_receipt(
|
||||
receipt
|
||||
post_transcript_events = (
|
||||
agent.contract.events.TranscriptPosted().process_receipt(receipt)
|
||||
)
|
||||
assert len(post_transcript_events) == 1
|
||||
# assert len(post_transcript_events) == 1
|
||||
event = post_transcript_events[0]
|
||||
assert event['args']['ritualId'] == ritual_id
|
||||
assert event['args']['transcriptDigest'] == keccak(transcript)
|
||||
assert event["args"]["ritualId"] == ritual_id
|
||||
assert event["args"]["transcriptDigest"] == keccak(transcripts[i])
|
||||
|
||||
participants = agent.get_participants(ritual_id)
|
||||
assert [p.transcript for p in participants] == [transcript]
|
||||
assert [p.transcript for p in participants] == transcripts
|
||||
|
||||
|
||||
|
||||
def test_post_aggregation(agent, aggregated_transcript, public_key, transacting_power):
|
||||
def test_post_aggregation(
|
||||
agent, aggregated_transcripts, public_keys, transacting_powers
|
||||
):
|
||||
ritual_id = agent.number_of_rituals() - 1
|
||||
for i, transacting_power in enumerate(transacting_powers):
|
||||
receipt = agent.post_aggregation(
|
||||
ritual_id=ritual_id,
|
||||
node_index=0,
|
||||
aggregated_transcript=aggregated_transcript,
|
||||
public_key=public_key,
|
||||
transacting_power=transacting_power
|
||||
aggregated_transcript=aggregated_transcripts[i],
|
||||
public_key=public_keys[i],
|
||||
transacting_power=transacting_power,
|
||||
)
|
||||
assert receipt['status'] == 1
|
||||
assert receipt["status"] == 1
|
||||
|
||||
post_aggregation_events = agent.contract.events.AggregationPosted().process_receipt(
|
||||
receipt
|
||||
post_aggregation_events = (
|
||||
agent.contract.events.AggregationPosted().process_receipt(receipt)
|
||||
)
|
||||
assert len(post_aggregation_events) == 1
|
||||
# assert len(post_aggregation_events) == 1
|
||||
event = post_aggregation_events[0]
|
||||
assert event['args']['ritualId'] == ritual_id
|
||||
assert event['args']['aggregatedTranscriptDigest'] == keccak(aggregated_transcript)
|
||||
assert event["args"]["ritualId"] == ritual_id
|
||||
assert event["args"]["aggregatedTranscriptDigest"] == keccak(
|
||||
aggregated_transcripts[i]
|
||||
)
|
||||
|
||||
participants = agent.get_participants(ritual_id)
|
||||
assert all([p.aggregated for p in participants])
|
||||
|
|
|
@ -29,9 +29,10 @@ deployments:
|
|||
pre_min_operator_seconds: 86400 # one day in seconds
|
||||
- contract_type: Coordinator
|
||||
address: 0 # account index
|
||||
app: '::SimplePREApplication.address::'
|
||||
ritual_timeout: 3600
|
||||
max_dkg_size: 8
|
||||
pre_application: '::SimplePREApplication.address::'
|
||||
|
||||
|
||||
|
||||
test:
|
||||
|
|
|
@ -12,18 +12,9 @@ from tests.utils.middleware import NodeIsDownMiddleware
|
|||
from tests.utils.ursula import make_ursulas
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("ursulas")
|
||||
def test_stakers_bond_to_ursulas(
|
||||
testerchain, test_registry, staking_providers, ursula_test_config
|
||||
):
|
||||
nodes = make_ursulas(
|
||||
ursula_config=ursula_test_config,
|
||||
staking_provider_addresses=testerchain.stake_providers_accounts,
|
||||
operator_addresses=testerchain.ursulas_accounts,
|
||||
)
|
||||
|
||||
assert len(nodes) == len(staking_providers)
|
||||
for ursula in nodes:
|
||||
def test_stakers_bond_to_ursulas(ursulas, test_registry, staking_providers):
|
||||
assert len(ursulas) == len(staking_providers)
|
||||
for ursula in ursulas:
|
||||
ursula.validate_operator(registry=test_registry)
|
||||
assert ursula.verified_operator
|
||||
|
||||
|
|
|
@ -156,3 +156,6 @@ RPC_SUCCESSFUL_RESPONSE = {
|
|||
"id": 1,
|
||||
"result": "Geth/v1.9.20-stable-979fc968/linux-amd64/go1.15"
|
||||
}
|
||||
|
||||
|
||||
FAKE_TRANSCRIPT = b'\x98\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\xae\xdb_-\xeaj\x9bz\xdd\xd6\x98\xf8\xf91A\xc1\x8f;\x13@\x89\xcb\xcf>\x86\xc4T\xfb\x0c\x1ety\x8b\xd8mSkk\xbb\xcaU\xe5]v}E\xfa\xbc\xae\xb6\xa1\xf4e\x19\x86\xf2L\xcaZj\x03]h:\xbfP\x03Q\x8c\x95e\xe0c\xaa\xc2\xb4\xbby}\xecW%\xdet\xc8\xfc\xe7ky\xe5\xf6\xe9\xf5\x05\xe5\xdf\x81\x9bx\x18\xa4\x15\x85\xdeA9\x9f\x99\xceQ\xb0\xd0&\x9a\xa7\xaed&\x99\xdc\xa7\xfeLM\x01\x02\x87\xc8\x14$\x89"kA\x0b\x91\t\x1e\x1c/f\x00N,\x88\x01\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\xab\x0f\tFA\xdcB\xd4\xb3\x08\xd7IVkmw6za\xb6)\x13\x014]f.\xa1\xcd\xe27\xee\xc0\x95\xf6\xa4\x12\xa9\x19\x94\xed\x05\xffF\x81\xb2\xb2\xcb\x06\xaf-\xe4\xb5\x98\xbd\x81\x0f\xb8\xb7\xa1<\xf6/\xe5\xa4\x11\x83}\xfaH\x15\x80h\n\xe7\xc6\xc2\xb3\xd5{dH\xeb\x1e]v\xb4\x88v\x88\xb7N1\xff\x80\xd0\x88\x04.\x00\x82K\x1e\x96\xa0\xbd}X\xbb{?6\xeb\xe7\rg\x03\xeeG\x01\x10^\xee\x9cH\x94[\x9d8s\xa3\xb6\x8f\xfc\xf1\xdf\x01m\xf9\x08_N\xb5-\x16O\x89n\x95\xf3\x8b[\x1f&Yk?*\x07\x8fQ\x98\x85\xd5\xc1YL\xe0CB\xb2"!\x8d,\x90Q7\xca\x9c\x0e\xb2\x7f\xb0\xe1\xc8\xdd\xe7\xe1\xe4\x14\xb3\xa6\xb4\x8e\x8b\xed\xacM\xc3\x9d\xc4|U\x93k\x17\xac\x14\x86\x16\xd7\xebk\xbd{\xad}\x87\x13Y\x83\x9d\x88\x1e\x1b4\xa7r\xa6\x80\xbf\xf0\x15\x99\x11Q\xdb\xeb\xdf\x15ns\xc6\x85\xb3\x1d\xf5j\xc5\x87`=OD\x86\x86\x08\x8d\xb6\x0b\xec\x1d\x15\xc9\x93\x9a\xed\xa3\xe2\x96\xa4\xa2b\xa6\xa5h\xb0\xbb4\xb3\x0c\xa5\xdcu\x1f{\xb9\xaf\xd0W\xe1\xa3&\xa8\xb5\xea\xe5c\xfd\xc7?\xbdLg\xb3\xae\xb9\xb8*\xfc\xd5\xa6\xeeI\x15v\xdc\xa2`1VZ\xb5\x1c_`\x86\xbe{\xef\xae\t\xf2\xa9N\x00\x9a\xa1F\x84\xb2\xe3\xbc\xfa\xf7I\xee\xe8[~\x99;i\xfc%\xa8\x80\x80\x8e%\'\x9c+\x9c\xa9\x13R!\x80w\xc0\xda[\x84\xf6X\xfe\xc2\xe3\x0f\x94-\xbb`\x00\x00\x00\x00\x00\x00\x00\x93\xff\x1e\x1b\x15;e\xfe}\x83v K\xf9\r\xc9\xad\x9d\xddN\xcd\xcaWq\xfa\x8e\x98sn\x9b~t\x01 =p\xe5\xb1\x7f"!\xb4\xb9\xc9W\x90\x86\x80\x17\nm\xa0\x8dD\xb5\xaf\xfc\xa5\xf5%V]\xb9\x89a@\xe5\x0c@#%x\xecW\xed\xb0a\x98\x1a!C\x80B@{\xf0\xffJ{\xa3\xeayDP\'u'
|
||||
|
|
|
@ -1,16 +1,19 @@
|
|||
import contextlib
|
||||
import json
|
||||
import maya
|
||||
import os
|
||||
import pytest
|
||||
import shutil
|
||||
import tempfile
|
||||
from click.testing import CliRunner
|
||||
from datetime import timedelta
|
||||
from eth_account import Account
|
||||
from eth_utils import to_checksum_address
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
|
||||
import maya
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
from eth_account import Account
|
||||
from eth_utils import to_checksum_address
|
||||
from ferveo_py.ferveo_py import Keypair as FerveoKeyPair
|
||||
from ferveo_py.ferveo_py import Validator
|
||||
from twisted.internet.task import Clock
|
||||
from web3 import Web3
|
||||
|
||||
|
@ -19,9 +22,7 @@ import tests
|
|||
from nucypher.blockchain.economics import Economics
|
||||
from nucypher.blockchain.eth.clients import EthereumClient
|
||||
from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
|
||||
from nucypher.blockchain.eth.registry import (
|
||||
LocalContractRegistry,
|
||||
)
|
||||
from nucypher.blockchain.eth.registry import LocalContractRegistry
|
||||
from nucypher.blockchain.eth.signers.software import KeystoreSigner
|
||||
from nucypher.blockchain.eth.trackers.dkg import EventScannerTask
|
||||
from nucypher.characters.lawful import Enrico, Ursula
|
||||
|
@ -32,6 +33,7 @@ from nucypher.config.characters import (
|
|||
UrsulaConfiguration,
|
||||
)
|
||||
from nucypher.config.constants import TEMPORARY_DOMAIN
|
||||
from nucypher.crypto.ferveo import dkg
|
||||
from nucypher.crypto.keystore import Keystore
|
||||
from nucypher.network.nodes import TEACHER_NODES
|
||||
from nucypher.policy.conditions.context import USER_ADDRESS_CONTEXT
|
||||
|
@ -43,13 +45,13 @@ from nucypher.utilities.emitters import StdoutEmitter
|
|||
from nucypher.utilities.logging import GlobalLoggerSettings, Logger
|
||||
from nucypher.utilities.networking import LOOPBACK_ADDRESS
|
||||
from tests.constants import (
|
||||
INSECURE_DEVELOPMENT_PASSWORD,
|
||||
MOCK_CUSTOM_INSTALLATION_PATH,
|
||||
MOCK_CUSTOM_INSTALLATION_PATH_2,
|
||||
MOCK_ETH_PROVIDER_URI,
|
||||
MOCK_REGISTRY_FILEPATH,
|
||||
TEST_ETH_PROVIDER_URI,
|
||||
TESTERCHAIN_CHAIN_ID, )
|
||||
TESTERCHAIN_CHAIN_ID,
|
||||
)
|
||||
from tests.mock.interfaces import MockBlockchain, mock_registry_source_manager
|
||||
from tests.mock.performance_mocks import (
|
||||
mock_cert_generation,
|
||||
|
@ -694,3 +696,41 @@ def ursulas(testerchain, staking_providers, ursula_test_config):
|
|||
# Pytest will hold on to this object, need to clear it manually.
|
||||
# See https://github.com/pytest-dev/pytest/issues/5642
|
||||
_ursulas.clear()
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def dkg_public_key(get_random_checksum_address):
|
||||
ritual_id = 0
|
||||
num_shares = 4
|
||||
threshold = 3
|
||||
validators = []
|
||||
for i in range(0, num_shares):
|
||||
validators.append(
|
||||
Validator(
|
||||
address=get_random_checksum_address(),
|
||||
public_key=FerveoKeyPair.random().public_key(),
|
||||
)
|
||||
)
|
||||
|
||||
validators.sort(key=lambda x: x.address) # must be sorted
|
||||
|
||||
transcripts = []
|
||||
for validator in validators:
|
||||
transcript = dkg.generate_transcript(
|
||||
ritual_id=ritual_id,
|
||||
me=validator,
|
||||
shares=num_shares,
|
||||
threshold=threshold,
|
||||
nodes=validators,
|
||||
)
|
||||
transcripts.append(transcript)
|
||||
|
||||
_, public_key, _ = dkg.aggregate_transcripts(
|
||||
ritual_id=ritual_id,
|
||||
me=validators[0],
|
||||
shares=num_shares,
|
||||
threshold=threshold,
|
||||
transcripts=list(zip(validators, transcripts)),
|
||||
)
|
||||
|
||||
return public_key
|
||||
|
|
|
@ -21,19 +21,12 @@ ROUND_1_EVENT_NAME = "StartRitual"
|
|||
ROUND_2_EVENT_NAME = "StartAggregationRound"
|
||||
|
||||
PARAMS = [ # dkg_size, ritual_id, variant
|
||||
|
||||
(1, 0, 'simple'),
|
||||
(4, 1, 'simple'),
|
||||
(8, 2, 'simple'),
|
||||
# TODO: enable these tests
|
||||
# (32, 3, 'simple'),
|
||||
|
||||
# TODO: enable these tests
|
||||
# (1, 3, 'precomputed'),
|
||||
# (4, 5, 'precomputed'),
|
||||
# (8, 6, 'precomputed'),
|
||||
# (32, 7, 'precomputed'),
|
||||
|
||||
(2, 0, "precomputed"),
|
||||
(4, 1, "precomputed"),
|
||||
(8, 2, "precomputed"),
|
||||
(2, 3, "simple"),
|
||||
(4, 4, "simple"),
|
||||
(8, 5, "simple"),
|
||||
]
|
||||
|
||||
BLOCKS = list(reversed(range(1, 100)))
|
||||
|
@ -43,15 +36,21 @@ COORDINATOR = MockCoordinatorAgent(MockBlockchain())
|
|||
@pytest.fixture(scope="function", autouse=True)
|
||||
def mock_coordinator_agent(testerchain, application_economics, mock_contract_agency):
|
||||
mock_contract_agency._MockContractAgency__agents[CoordinatorAgent] = COORDINATOR
|
||||
yield COORDINATOR
|
||||
|
||||
yield COORDINATOR
|
||||
COORDINATOR.reset()
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def cohort(ursulas, mock_coordinator_agent):
|
||||
"""Creates a cohort of Ursulas"""
|
||||
for u in ursulas:
|
||||
# set mapping in coordinator agent
|
||||
mock_coordinator_agent._add_operator_to_staking_provider_mapping(
|
||||
{u.operator_address: u.checksum_address}
|
||||
)
|
||||
u.coordinator_agent = mock_coordinator_agent
|
||||
u.ritual_tracker.coordinator_agent = mock_coordinator_agent
|
||||
|
||||
return ursulas
|
||||
|
||||
|
||||
|
@ -70,7 +69,7 @@ def execute_round_1(ritual_id: int, initiator: ChecksumAddress, cohort: List[Urs
|
|||
{
|
||||
"ritualId": ritual_id,
|
||||
"initiator": initiator,
|
||||
"nodes": [u.checksum_address for u in cohort],
|
||||
"participants": [u.checksum_address for u in cohort],
|
||||
}
|
||||
),
|
||||
)
|
||||
|
@ -110,7 +109,7 @@ def test_ursula_ritualist(testerchain, mock_coordinator_agent, cohort, alice, bo
|
|||
print("==================== INITIALIZING ====================")
|
||||
cohort_staking_provider_addresses = list(u.checksum_address for u in cohort)
|
||||
mock_coordinator_agent.initiate_ritual(
|
||||
nodes=cohort_staking_provider_addresses,
|
||||
providers=cohort_staking_provider_addresses,
|
||||
transacting_power=alice.transacting_power
|
||||
)
|
||||
assert mock_coordinator_agent.number_of_rituals() == ritual_id + 1
|
||||
|
|
|
@ -1,83 +0,0 @@
|
|||
import pytest
|
||||
|
||||
from nucypher.blockchain.eth.agents import CoordinatorAgent
|
||||
from nucypher.blockchain.eth.signers.software import Web3Signer
|
||||
from nucypher.crypto.powers import TransactingPower
|
||||
|
||||
FAKE_TRANSCRIPT = b'(\x01\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x0br8\xa5\xc0\x1e55\xea\xac\'?r\xd5\xa6\x16\x11\xec\xca^\x0c,\x999\xc9\x82\x1f\xb8\xe5^\xdb\x11O\xb9\xbd1\xae\x02p"\x04\xe7\xab\x04K\x1fv9\x0e\xea"\xa0\xc5_/#\xf1\x1e{Qc\xfb\xed\x18\\H73.\xcd2q3d%\xaa,b\x19\x85}\xa9\xf5\xbe\xd6\x01\xc96\xcf\x0b\xaaji\x1e\xaf!\x0e\x1e;\x07K\xbd\x81\x8f\xbd\xae\x8b\xa0Li}\xd6oU\xa1d\xf2\x02\x8bT\x86X\x8b\x05\xf5\x1e\xf9\x9f~\xcc\x88+L!h\x15\x87\xfa\xbfvy3\xb6J\x0e\x13I\xad\xc5\xdczG\xfbQ\x8f4\x1aU#\xa1]\xc0sbJ!n\x1b\x8e\xa0\xce\xc8\xba\x08\x15xr\xd2\xcd:i\xb2\xab\x01\x9a\xe5wk\x845\xe6,\x11f\x19\x06w\xf1\xd5a\x83\x82\xdf\x96k\xben\xc9\xa2\x81i\x81\xd3Y\xb2\x91\x19]\xd3\x976\xdb\x19\x08%\xc0\x8f4\xf1c6\xd4\xc4)\x9a\xf7 [\xd6\x88\x13IA%@r=\xf7s\xebt\xd5\xdd\xec\x84\xcb!\xc6] \xfe7g\x8e\x00\x12\x0b\nO\t\x95\xb3\xe0tdI\xc0d\x8b\'\xd3|\xc5@\xac]\xabk\x08\x03\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00n\xcc\xab\t7\xbf\x1e\xc9\xeb\xbd\x0cvEY\x0cf\'\tta\x93\x91\xc9\xce\x83H\xa4\xef\xbb\x16\xc4tb%V\x80\xd0\xc6\x02\x1ca]\xc1\xa2\xe3\xb89\x16\xac\x9d>\xa3\xbe\x86R\xf4\xefj\xdf&\xbcw\xe2\xda\x93t/\xf9\xacK\xb1d\xc0w\xee\x07\xe36\x9a\x0c\xa5\xc8\x9c\xb7\xe4\xd5\x91\xed\xccI\xdc\xf4NU\x7f\r*\xad\x07\xe38\x0b[N\x96\xff\xb4\'\xe1[o\xc4\x0f\xd1\xe3]\x05y\x89\xa7\xe8\xa8Zat6M\xa8\xbe\x9d\x15\xaa\xdch\xa3&\xd6\xe3l\xaa\xf0\xb3>\x0c\x06\xa8\xc6U\xe4\x08\x17?\x8cd\xaa\xcf\xcas\xd7\xdb\x98\xc62\xae\x93:}:wm0\xc9\x98L\xed4\xeau+=B\x13\xe2#bho\n1"\xbd\x012\n\xceH\x18n\xce\x1c_"k\xeaH\xd0\xc3\xfa\x98\xf1 \x7fJ\xd0x\xf9M~\xd0JE\x8c\xfc\x9e\xe6\xff\xdd<\xcb\x10\xf6c5/@zzyO\t\x9e\x1dU\xcc\x18\xba\xd6\xb3y\xa8\nN\x1e(\x9f\x00\xbd\x87f\xfeG\xd8\xe6\xd9\xee\x06\x1bFp\xbd\xec\xca\xd9J\xd5-\xed\xce\xfc\xec\x07u\x86\xc3\xdc\xd3\xc6\x03\xf7\x9f6>\xb6"5J\x1ds\xf2\x10\x1df\xe2\xbf\xa8>\x10\x07\n\x15\xd2\xf41\xea\xaf\xbb+bk,iCd\xef\x9e\x04q\xedA\x9aOB\x8c\x81\x82\x02\xd9E\x8dv\xac\xcb\x02\xad0-\x9er\x8d<\x92\xf2\xdc\xaf\xb5:/\xd6\xad\xa7\xe7N\xb8> M\xb3\x9f\n\xeeH\x01r\x9b\xca\xf2\x96\'\xcd@\x12\xe9\n\x05\xcb\x8a\x94\xc3\xe6\x16wo]\xb4\xac\xeb\xa5S\xc08,\xa9(06)\x9d\x84\x85_\xe0\xb4\xef\x18\xa0\xf2eO\xfc7\x18\xa6h\x94\xefA\xd91\xeeW\xcd\x15\x96\x02\xa0\x11\x01\xb9j\xa8\xcf|h\x9c\\\xe8r\x9e\xa6*;\x02\xf3\xb7qS\\FX\xc7\xd6\xfew\xf0\xf4\x12w\xfb\xc6\xcc\xba8\x90\xa1\x96JckV\x01\xdc7\x972\x9bo_\xaf{\x13Jlg\x0b\x87\xbe\xbb\rW\x89?\x1c^b5\x12\xc3\x98\xecv!\x0c\xa9\x98w\x9c\xb6\xb3\x00p]\'.\xdcp\xae\x1e\x0e\x0579B\x9a\xab\xf2k\x1e\x07\x1c7\xe7\xafp\x82\x90XTX\xfe\x14\xeab|&\xebT\xd8\x000\x19\xeb\xb6a\x93K\xbd\xa0\xa0\x1a\x8d\'\x0e\x816Y\x14\x8e3\x1a\xfaS:}?m\\\xbc\xb0\xfb\xe7+\xe2\x87\x07\xec\x9e\x02\x1en/\xeb\x14\x1e\xf2\x03\x18=\xb2\x9c\xb4XP\x11\xee\xbe"M\x0e\xbd8&Zd\x0c\xfe\xb3e%\x80\x95\xc2b\xa0<\x16&\xc6\xcf#ItX\xe2\\H\x06\xf9q\xd5\x97\xdeD\xfb\xa2\x12W?\x03\x84\xa9\xbe#\xf2\xa0\xb8\x8c$\xc3\x130\x94\x18?\x1fL\x9b\x99|\xad\xc69+\xe4ma\x00\xbc\xf5Ta \xfc\x17\x8c5\xc6\xbd\xf0HA\xe7\x11\xd7\x85\xdf{WO5N{#\xe1\xd4\xd7\x84\xad\xc6N\t\x05\x99.)\x1fB\xce\xd4O\xdf;\xfcZ\xe8\r\xc0\xaf\xc6\xf8$\xf4rH\xa1d\xba\xf8\r+\xd5nk\x029N\x1bD\xdfz8\'M\x13\xe7v\xd3\xef\xc0\x00\x00\x00\x00\x00\x00\x00\x05\xc8\x14\x1d\xc6\x90\xc5\xc9\xf2\xcfKK&%\x0c\xe0\xb4[:\x8e\x0bc7\x97\x92\xb2\xb6^c}\x1d\x98\x1e%\xfdO4\xf2\xc9\xe4*Y\x05\xb7F\xfd\xd7+\x18T\x04\xf8+\xab\x94\xfe\xe4\xcc\xc1\xed\xe5e\x8c\x95\x89\x88\x90z\xe1Z!\xb3\x8c\x98\xb5*\xee\xce\xda\xf4\xd2\x04\xa0\x16\x9a\x1e\xe3b?\x1b\xb4\x90\xf5\xa0\x19\xa8\x13d 
\xd78\x90\xf0\xc1JTu?\xad\xb5\x81\x1a\x1a\xb7\x07$\xf3\x03\xdc\x11O_\xc0Oz\xd5\xbbiD4\xf1\xf4\xbe\x9d}\xa3\x99\xff\xb3\xf4,\xccf\xce\x13\x90\x12\x1e\x1a\xe5\xc3_\xf2\x11e"\x15`\x1fz\x8dS\xb8\xd7\x0c\x8dD`\x7f\xed\xe09\xef\xa2`\xb2Q\x15GE\xabI4$\x9a\x0c\xa5\xec1\xac\xc3\xe5'


@pytest.fixture(scope='module')
def agent(mock_contract_agency) -> CoordinatorAgent:
    coordinator_agent: CoordinatorAgent = mock_contract_agency.get_agent(CoordinatorAgent, registry=None)
    return coordinator_agent


@pytest.fixture(scope='module')
def cohort(ursulas):
    return [u.checksum_address for u in ursulas[:4]]


@pytest.fixture(scope='module')
def ursula(ursulas):
    return ursulas[1]


@pytest.fixture(scope='module')
def transacting_power(testerchain, alice):
    return TransactingPower(account=alice.transacting_power.account, signer=Web3Signer(testerchain.client))


def test_initiate_ritual(agent: CoordinatorAgent, cohort, transacting_power):
    receipt = agent.initiate_ritual(
        nodes=cohort,
        transacting_power=transacting_power
    )

    participants = [CoordinatorAgent.Ritual.Participant(
        node=c,
    ) for c in cohort]

    ritual = CoordinatorAgent.Ritual(
        id=0,
        initiator=transacting_power.account,
        dkg_size=4,
        init_timestamp=123456,
        participants=participants,
    )
    agent.get_ritual = lambda *args, **kwargs: ritual
    agent.get_participants = lambda *args, **kwargs: participants

    assert receipt['transactionHash']
    number_of_rituals = agent.number_of_rituals()
    ritual_id = number_of_rituals - 1
    return ritual_id


def test_perform_round_1(ursula, random_address, cohort):
    ursula.ritual_tracker.refresh(fetch_rituals=[0])
    ursula.perform_round_1(
        ritual_id=0, initiator=random_address, nodes=cohort, timestamp=0
    )


def test_perform_round_2(ursula, cohort, transacting_power, agent, mocker):
    mocker.patch('nucypher.crypto.ferveo.dkg._validate_pvss_aggregated', return_value=True)
    participants = [CoordinatorAgent.Ritual.Participant(
        node=c,
        aggregated=False,
        transcript=FAKE_TRANSCRIPT
    ) for c in cohort]
    ritual = CoordinatorAgent.Ritual(
        id=0,
        initiator=transacting_power.account,
        dkg_size=4,
        init_timestamp=123456,
        total_transcripts=4,
        participants=participants,
    )
    agent.get_ritual = lambda *args, **kwargs: ritual
    agent.get_participants = lambda *args, **kwargs: participants
    agent.get_ritual_status = lambda *args, **kwargs: 2

    ursula.perform_round_2(ritual_id=0, timestamp=0)

@@ -1,9 +1,10 @@
import time
from enum import Enum
from typing import Dict, List

from eth_typing import ChecksumAddress
from eth_utils import keccak
from ferveo_py import PublicKey
from typing import List
from ferveo_py.ferveo_py import DkgPublicKey
from web3.types import TxReceipt

from nucypher.blockchain.eth.agents import CoordinatorAgent

@@ -17,8 +18,8 @@ class MockCoordinatorAgent(MockContractAgent):
    Participant = CoordinatorAgent.Ritual.Participant
    Ritual = CoordinatorAgent.Ritual
    RitualStatus = CoordinatorAgent.Ritual.Status
    G1Point = CoordinatorAgent.Ritual.G1Point

    PUBLIC_KEY_SIZE = 104 # TODO get from contract / ferveo (ferveo #99)
    EVENTS = {}

    class Events(Enum):

@@ -27,83 +28,109 @@
    def __init__(self, blockchain: MockBlockchain, max_dkg_size: int = 64, timeout: int = 600):
        self.blockchain = blockchain
        self.rituals = {}
        self.rituals = []
        self.timeout = timeout
        self.max_dkg_size = max_dkg_size
        # Note that the call to super() is not necessary here

        self._operator_to_staking_provider = {}

    def _add_operator_to_staking_provider_mapping(
        self, mapping: Dict[ChecksumAddress, ChecksumAddress]
    ):
        self._operator_to_staking_provider.update(mapping)

    def _get_staking_provider_from_operator(self, operator: ChecksumAddress):
        try:
            return self._operator_to_staking_provider[operator]
        except KeyError:
            return None

    def emit_event(self, ritual_id: int, signal: Events, **kwargs) -> None:
        self.EVENTS[(int(time.time_ns()), ritual_id)] = (signal, {**kwargs, 'ritual_id': ritual_id})

    def reset(self, **kwargs) -> None:
        # self.rituals = {}
        # self.rituals = []
        # self.EVENTS = {}
        pass
        self._operator_to_staking_provider = {}

    #
    # Transactions
    #

    def initiate_ritual(
        self, nodes: List[ChecksumAddress], transacting_power: TransactingPower
        self, providers: List[ChecksumAddress], transacting_power: TransactingPower
    ) -> TxReceipt:
        ritual_id = len(self.rituals)
        ritual = self.Ritual(
            id=len(self.rituals),
            init_timestamp=int(time.time_ns()),
            participants=[self.Participant(node=node) for node in nodes],
            dkg_size=len(nodes),
            participants=[
                self.Participant(provider=provider) for provider in providers
            ],
            dkg_size=len(providers),
            initiator=transacting_power.account,
        )
        self.rituals[ritual.id] = ritual
        self.rituals.append(ritual)
        self.emit_event(
            signal=self.Events.START_RITUAL,
            ritual_id=ritual.id,
            ritual_id=ritual_id,
            initiator=transacting_power.account,
            nodes=nodes,
            participants=providers,
        )
        return self.blockchain.FAKE_RECEIPT

    def post_transcript(
        self,
        ritual_id: int,
        node_index: int,
        transcript: bytes,
        transacting_power: TransactingPower
    ) -> TxReceipt:
        ritual = self.rituals[ritual_id]
        ritual.participants[node_index].transcript = transcript
        operator_address = transacting_power.account
        # either mapping is populated or just assume provider same as operator for testing
        provider = (
            self._get_staking_provider_from_operator(operator=operator_address)
            or transacting_power.account
        )
        participant = self.get_participant_from_provider(ritual_id, provider)
        participant.transcript = transcript
        ritual.total_transcripts += 1
        if ritual.total_transcripts >= ritual.dkg_size:
        if ritual.total_transcripts == ritual.dkg_size:
            ritual.status = self.RitualStatus.AWAITING_AGGREGATIONS
            self.emit_event(
                signal=self.Events.START_AGGREGATION_ROUND,
                ritual_id=ritual_id,
                nodes=[p.node for p in ritual.participants],
                participants=[
                    p.provider for p in ritual.participants
                ], # TODO This should not be
            )
        return self.blockchain.FAKE_RECEIPT

    def post_aggregation(
        self,
        ritual_id: int,
        node_index: int,
        aggregated_transcript: bytes,
        public_key: PublicKey,
        public_key: DkgPublicKey,
        transacting_power: TransactingPower,
    ) -> TxReceipt:
        ritual = self.rituals[ritual_id]
        operator_address = transacting_power.account
        # either mapping is populated or just assume provider same as operator for testing
        provider = (
            self._get_staking_provider_from_operator(operator=operator_address)
            or transacting_power.account
        )
        participant = self.get_participant_from_provider(ritual_id, provider)
        participant.aggregated = True

        aggregated_transcript_hash = keccak(aggregated_transcript)
        public_key_hash = keccak(bytes(public_key))

        if len(ritual.aggregated_transcript) == 0 and len(ritual.public_key) == 0:
        # TODO the dkg public key bytes are padded - remove using subarray when fixed
        g1_point = self.Ritual.G1Point.from_bytes(bytes(public_key)[8:])
        if len(ritual.aggregated_transcript) == 0:
            ritual.aggregated_transcript = aggregated_transcript
            ritual.aggregated_transcript_hash = aggregated_transcript_hash
            ritual.public_key = public_key
            ritual.public_key_hash = public_key_hash
        elif (
            ritual.aggregated_transcript_hash != aggregated_transcript_hash
            or ritual.public_key_hash != public_key_hash
        ):
            ritual.public_key = g1_point
        elif bytes(ritual.public_key) != bytes(g1_point) or keccak(
            ritual.aggregated_transcript
        ) != keccak(aggregated_transcript):
            ritual.aggregation_mismatch = True
            # don't increment aggregations
            # TODO Emit EndRitual here?

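A minimal sketch (not part of this diff) of how a test might drive the provider-keyed mock end to end, assuming the updated signatures above; the names `mock_testerchain`, `operators`, and `providers` stand in for existing test fixtures:

# Illustrative only: participants are keyed by staking provider address,
# and operator accounts are resolved through the optional mapping.
coordinator = MockCoordinatorAgent(blockchain=mock_testerchain)
coordinator._add_operator_to_staking_provider_mapping(
    {power.account: provider for power, provider in zip(operators, providers)}
)

# Initiation registers one Participant per provider.
coordinator.initiate_ritual(providers=providers, transacting_power=operators[0])

# Each operator posts a transcript; the mock looks up the matching
# participant by provider before recording it.
for power in operators:
    coordinator.post_transcript(
        ritual_id=0, transcript=b"fake transcript", transacting_power=power
    )

assert (
    coordinator.get_ritual_status(0)
    == MockCoordinatorAgent.RitualStatus.AWAITING_AGGREGATIONS
)
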
@@ -130,12 +157,14 @@ class MockCoordinatorAgent(MockContractAgent):
    def get_participants(self, ritual_id: int) -> List[Ritual.Participant]:
        return self.rituals[ritual_id].participants

    def get_node_index(self, ritual_id: int, node: ChecksumAddress) -> int:
        for i, p in enumerate(self.get_participants(ritual_id)):
            if p.node == node:
                return i
    def get_participant_from_provider(
        self, ritual_id: int, provider: ChecksumAddress
    ) -> Ritual.Participant:
        for p in self.rituals[ritual_id].participants:
            if p.provider == provider:
                return p

        raise ValueError(f"{node} not in list")
        raise ValueError(f"Provider {provider} not found for ritual #{ritual_id}")

    def get_ritual_status(self, ritual_id: int) -> int:
        ritual = self.rituals[ritual_id]

@@ -1,11 +1,13 @@
import pytest

from nucypher.blockchain.economics import EconomicsFactory
from nucypher.blockchain.eth.actors import Operator
from nucypher.blockchain.eth.agents import ContractAgency
from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
from nucypher.blockchain.eth.registry import InMemoryContractRegistry
from nucypher.crypto.powers import TransactingPower
from nucypher.network.nodes import Teacher
from tests.mock.interfaces import MockEthereumClient
from tests.mock.interfaces import MockBlockchain, MockEthereumClient


def pytest_addhooks(pluginmanager):

@@ -54,3 +56,35 @@ def mock_contract_agency(module_mocker, application_economics):
@pytest.fixture(scope='session', autouse=True)
def mock_operator_bonding(session_mocker):
    session_mocker.patch.object(Teacher, '_operator_is_bonded', autospec=True)


@pytest.fixture(scope="module")
def testerchain(mock_testerchain, module_mocker) -> MockBlockchain:
    def always_use_mock(*a, **k):
        return mock_testerchain

    module_mocker.patch.object(
        BlockchainInterfaceFactory, "get_interface", always_use_mock
    )
    return mock_testerchain


@pytest.fixture(scope="module", autouse=True)
def staking_providers(testerchain, test_registry, monkeymodule):
    def faked(self, *args, **kwargs):
        return testerchain.stake_providers_accounts[
            testerchain.ursulas_accounts.index(self.transacting_power.account)
        ]

    Operator.get_staking_provider_address = faked
    return testerchain.stake_providers_accounts


@pytest.fixture(scope="module", autouse=True)
def mock_substantiate_stamp(module_mocker, monkeymodule):
    fake_signature = b"\xb1W5?\x9b\xbaix>'\xfe`\x1b\x9f\xeb*9l\xc0\xa7\xb9V\x9a\x83\x84\x04\x97\x0c\xad\x99\x86\x81W\x93l\xc3\xbde\x03\xcd\"Y\xce\xcb\xf7\x02z\xf6\x9c\xac\x84\x05R\x9a\x9f\x97\xf7\xa02\xb2\xda\xa1Gv\x01"
    from nucypher.characters.lawful import Ursula

    module_mocker.patch.object(Ursula, "_substantiate_stamp", autospec=True)
    module_mocker.patch.object(Ursula, "operator_signature", fake_signature)
    module_mocker.patch.object(Teacher, "validate_operator")

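As a reading aid only (not part of this diff): the autouse `staking_providers` fixture above resolves an Operator's staking provider by index against the mock testerchain's account lists, so a hypothetical check could assert the mapping directly; `ursulas` is assumed to be the existing integration fixture of Operator-backed Ursulas:

def test_operator_maps_to_provider(ursulas, staking_providers, testerchain):
    # Mirrors the faked lookup: the provider account sits at the same index
    # as the ursula's operator account in the mock testerchain.
    ursula = ursulas[0]
    expected = testerchain.stake_providers_accounts[
        testerchain.ursulas_accounts.index(ursula.transacting_power.account)
    ]
    assert ursula.get_staking_provider_address() == expected
    assert expected in staking_providers
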
@@ -4,10 +4,8 @@ from unittest.mock import Mock

import pytest
from eth_account import Account
from eth_utils import keccak
from ferveo_py import Keypair as FerveoKeypair

from tests.integration.blockchain.test_ritualist import FAKE_TRANSCRIPT
from tests.constants import FAKE_TRANSCRIPT
from tests.mock.coordinator import MockCoordinatorAgent
from tests.mock.interfaces import MockBlockchain

@@ -38,7 +36,10 @@ def test_mock_coordinator_initiation(mocker, nodes_transacting_powers, coordinat
    assert len(coordinator.rituals) == 0
    mock_transacting_power = mocker.Mock()
    mock_transacting_power.account = random_address
    coordinator.initiate_ritual(nodes=list(nodes_transacting_powers.keys()), transacting_power=mock_transacting_power)
    coordinator.initiate_ritual(
        providers=list(nodes_transacting_powers.keys()),
        transacting_power=mock_transacting_power,
    )
    assert len(coordinator.rituals) == 1

    assert coordinator.number_of_rituals() == 1

@@ -55,12 +56,15 @@ def test_mock_coordinator_initiation(mocker, nodes_transacting_powers, coordinat
    assert signal_type == MockCoordinatorAgent.Events.START_RITUAL
    assert signal_data["ritual_id"] == 0
    assert signal_data["initiator"] == mock_transacting_power.account
    assert set(signal_data["nodes"]) == nodes_transacting_powers.keys()
    assert set(signal_data["participants"]) == nodes_transacting_powers.keys()


def test_mock_coordinator_round_1(nodes_transacting_powers, coordinator):
    ritual = coordinator.rituals[0]
    assert coordinator.get_ritual_status(ritual.id) == MockCoordinatorAgent.RitualStatus.AWAITING_TRANSCRIPTS
    assert (
        coordinator.get_ritual_status(0)
        == MockCoordinatorAgent.RitualStatus.AWAITING_TRANSCRIPTS
    )

    for p in ritual.participants:
        assert p.transcript == bytes()

@@ -70,7 +74,6 @@ def test_mock_coordinator_round_1(nodes_transacting_powers, coordinator):

        coordinator.post_transcript(
            ritual_id=0,
            node_index=index,
            transcript=transcript,
            transacting_power=nodes_transacting_powers[node_address]
        )

@@ -84,41 +87,42 @@ def test_mock_coordinator_round_1(nodes_transacting_powers, coordinator):
    timestamp, signal = list(coordinator.EVENTS.items())[1]
    signal_type, signal_data = signal
    assert signal_type == MockCoordinatorAgent.Events.START_AGGREGATION_ROUND
    assert signal_data['ritual_id'] == ritual.id
    assert set(signal_data['nodes']) == nodes_transacting_powers.keys()
    assert signal_data["ritual_id"] == 0


def test_mock_coordinator_round_2(nodes_transacting_powers, coordinator):
def test_mock_coordinator_round_2(
    nodes_transacting_powers, coordinator, dkg_public_key
):
    ritual = coordinator.rituals[0]
    assert coordinator.get_ritual_status(ritual.id) == MockCoordinatorAgent.RitualStatus.AWAITING_AGGREGATIONS
    assert (
        coordinator.get_ritual_status(0)
        == MockCoordinatorAgent.RitualStatus.AWAITING_AGGREGATIONS
    )

    for p in ritual.participants:
        assert p.transcript == FAKE_TRANSCRIPT

    aggregated_transcript = os.urandom(len(FAKE_TRANSCRIPT))
    aggregated_transcript_hash = keccak(aggregated_transcript)
    public_key = FerveoKeypair.random().public_key()
    public_key_hash = keccak(bytes(public_key))

    for index, node_address in enumerate(nodes_transacting_powers):
        coordinator.post_aggregation(
            ritual_id=0,
            node_index=index,
            aggregated_transcript=aggregated_transcript,
            public_key=public_key,
            public_key=dkg_public_key,
            transacting_power=nodes_transacting_powers[node_address]
        )
        if index == len(nodes_transacting_powers) - 1:
            assert len(coordinator.EVENTS) == 2

    assert ritual.aggregated_transcript == aggregated_transcript
    assert ritual.aggregated_transcript_hash == aggregated_transcript_hash
    assert ritual.public_key == public_key
    assert ritual.public_key_hash == public_key_hash

    # TODO this key is currently incorrect padded with 8 bytes (56 bytes instead of 48) - remove when fixed.
    assert bytes(ritual.public_key) == bytes(dkg_public_key)[8:]
    for p in ritual.participants:
        # unchanged
        assert p.transcript == FAKE_TRANSCRIPT
        assert p.transcript != aggregated_transcript

    assert len(coordinator.EVENTS) == 2 # no additional event emitted here?
    assert coordinator.get_ritual_status(ritual.id) == MockCoordinatorAgent.RitualStatus.FINALIZED
    assert (
        coordinator.get_ritual_status(0) == MockCoordinatorAgent.RitualStatus.FINALIZED
    )

@@ -1,9 +1,10 @@
from pathlib import Path

import pytest
from eth_utils import to_checksum_address
from ferveo_py import Keypair
from ferveo_py.ferveo_py import Keypair
from nucypher_core import Address, NodeMetadata, NodeMetadataPayload
from nucypher_core.umbral import SecretKey, Signer, RecoverableSignature
from pathlib import Path
from nucypher_core.umbral import RecoverableSignature, SecretKey, Signer

from nucypher.acumen.perception import FleetSensor
from nucypher.characters.lawful import Ursula

@@ -0,0 +1,106 @@
import pytest

from nucypher.blockchain.eth.agents import CoordinatorAgent
from nucypher.blockchain.eth.signers.software import Web3Signer
from nucypher.crypto.powers import TransactingPower
from tests.constants import FAKE_TRANSCRIPT
from tests.mock.coordinator import MockCoordinatorAgent


@pytest.fixture(scope="module")
def agent(mock_contract_agency) -> MockCoordinatorAgent:
    coordinator_agent: CoordinatorAgent = mock_contract_agency.get_agent(
        CoordinatorAgent, registry=None
    )
    return coordinator_agent


@pytest.fixture(scope="module")
def ursula(ursulas):
    return ursulas[1]


@pytest.fixture(scope="module")
def cohort(ursulas):
    return [u.staking_provider_address for u in ursulas[:4]]


@pytest.fixture(scope="module")
def transacting_power(testerchain, alice):
    return TransactingPower(
        account=alice.transacting_power.account, signer=Web3Signer(testerchain.client)
    )


def test_initiate_ritual(agent: CoordinatorAgent, cohort, transacting_power):
    receipt = agent.initiate_ritual(
        providers=cohort, transacting_power=transacting_power
    )

    participants = [
        CoordinatorAgent.Ritual.Participant(
            provider=c,
        )
        for c in cohort
    ]

    ritual = CoordinatorAgent.Ritual(
        initiator=transacting_power.account,
        dkg_size=4,
        init_timestamp=123456,
        participants=participants,
    )
    agent.get_ritual = lambda *args, **kwargs: ritual
    agent.get_participants = lambda *args, **kwargs: participants

    assert receipt["transactionHash"]
    number_of_rituals = agent.number_of_rituals()
    ritual_id = number_of_rituals - 1
    return ritual_id


def test_perform_round_1(ursula, random_address, cohort, agent):
    participants = [
        CoordinatorAgent.Ritual.Participant(
            provider=c,
        )
        for c in cohort
    ]
    ritual = CoordinatorAgent.Ritual(
        initiator=random_address,
        dkg_size=4,
        init_timestamp=123456,
        total_transcripts=4,
        participants=participants,
    )
    agent.get_ritual = lambda *args, **kwargs: ritual
    agent.get_participants = lambda *args, **kwargs: participants

    agent.get_participant_from_provider = lambda *args, **kwargs: participants[0]

    ursula.ritual_tracker.refresh(fetch_rituals=[0])
    ursula.perform_round_1(
        ritual_id=0, initiator=random_address, participants=cohort, timestamp=0
    )


def test_perform_round_2(ursula, cohort, transacting_power, agent, mocker):
    participants = [
        CoordinatorAgent.Ritual.Participant(
            provider=c, aggregated=False, transcript=FAKE_TRANSCRIPT
        )
        for c in cohort
    ]
    ritual = CoordinatorAgent.Ritual(
        initiator=transacting_power.account,
        dkg_size=4,
        init_timestamp=123456,
        total_transcripts=4,
        participants=participants,
    )
    agent.get_ritual = lambda *args, **kwargs: ritual
    agent.get_participants = lambda *args, **kwargs: participants
    agent.get_ritual_status = lambda *args, **kwargs: 2

    mocker.patch("nucypher.crypto.ferveo.dkg.verify_aggregate")
    ursula.perform_round_2(ritual_id=0, timestamp=0)