mirror of https://github.com/nucypher/nucypher.git
commit 4c90487e9f
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 7.0.4
+current_version = 7.1.0
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<stage>[^.]*)\.(?P<devnum>\d+))?
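The parse pattern above accepts both plain releases and staged pre-releases. A minimal sanity check with Python's re module (illustrative only, not part of the commit):

import re

PATTERN = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<stage>[^.]*)\.(?P<devnum>\d+))?"
)
assert PATTERN.fullmatch("7.1.0")           # the new release version
assert PATTERN.fullmatch("7.1.0-beta.1")    # a staged pre-release also matches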
@@ -17,7 +17,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@v4
         with:
-          python-version: '3.8'
+          python-version: '3.12'
       - uses: akaihola/darker@1.7.2
         with:
          version: "1.7.2"
@@ -11,7 +11,7 @@ on:
   workflow_dispatch:

 env: # TODO: Use variables when GH supports it for forks. See https://github.com/orgs/community/discussions/44322
-  DEMO_L1_PROVIDER_URI: "https://goerli.infura.io/v3/3747007a284045d483c342fb39889a30"
+  DEMO_L1_PROVIDER_URI: "https://sepolia.infura.io/v3/3747007a284045d483c342fb39889a30"
   DEMO_L2_PROVIDER_URI: "https://polygon-mumbai.infura.io/v3/3747007a284045d483c342fb39889a30"
   COLLECT_PROFILER_STATS: "" # any value is fine

@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [ "3.8", "3.11" ]
+        python-version: [ "3.8", "3.12" ]

     steps:
       - name: Checkout repo
@@ -54,94 +54,94 @@ jobs:

       # Unit tests
       - name: Unit Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         run: |
           coverage run --data-file=unit_data -m pytest tests/unit
           coverage xml -i --data-file=unit_data -o unit-coverage.xml

       - name: Unit Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         run: python -m pytest tests/unit

       # Integration tests
       - name: Integration Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         run: |
           coverage run --data-file=integration_data -m pytest tests/integration
           coverage xml -i --data-file=integration_data -o integration-coverage.xml

       - name: Integration Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         run: python -m pytest tests/integration

       # Acceptance tests
       - name: Agents Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         working-directory: tests/acceptance
         run: |
           coverage run --data-file=acceptance_agent_data -m pytest agents
           coverage xml -i --data-file=acceptance_agent_data -o acceptance-agents-coverage.xml

       - name: Agents Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         working-directory: tests/acceptance
         run: python -m pytest agents

       - name: Actors Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         working-directory: tests/acceptance
         run: |
           coverage run --data-file=acceptance_actors_data -m pytest actors
           coverage xml -i --data-file=acceptance_actors_data -o acceptance-actors-coverage.xml

       - name: Actors Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         working-directory: tests/acceptance
         run: python -m pytest actors


       - name: Conditions Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         working-directory: tests/acceptance
         run: |
           coverage run --data-file=acceptance_conditions_data -m pytest conditions
           coverage xml -i --data-file=acceptance_conditions_data -o acceptance-conditions-coverage.xml

       - name: Conditions Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         working-directory: tests/acceptance
         run: python -m pytest conditions


       - name: Characters Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         working-directory: tests/acceptance
         run: |
           coverage run --data-file=acceptance_characters_data -m pytest characters
           coverage xml -i --data-file=acceptance_characters_data -o acceptance-characters-coverage.xml

       - name: Characters Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         working-directory: tests/acceptance
         run: python -m pytest characters


       - name: CLI Tests (Coverage)
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         working-directory: tests/acceptance
         run: |
           coverage run --data-file=acceptance_cli_data -m pytest cli
           coverage xml -i --data-file=acceptance_cli_data -o acceptance-cli-coverage.xml

       - name: CLI Tests
-        if: matrix.python-version != '3.11'
+        if: matrix.python-version != '3.12'
         working-directory: tests/acceptance
         run: python -m pytest cli


       # Only upload coverage files after all tests have passed
       - name: Upload unit tests coverage to Codecov
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         uses: codecov/codecov-action@v3.1.1
         with:
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -151,7 +151,7 @@ jobs:
           verbose: true

       - name: Upload integration tests coverage to Codecov
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         uses: codecov/codecov-action@v3.1.1
         with:
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -161,7 +161,7 @@ jobs:
           verbose: true

       - name: Upload acceptance tests coverage to Codecov
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12'
         uses: codecov/codecov-action@v3.1.1
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -18,7 +18,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.8"
+          python-version: "3.12"

       - name: Install dependencies
         run: |
Pipfile

@@ -7,53 +7,54 @@ name = "pypi"
 python_version = "3"

 [packages]
-# NuCypher
-constant-sorrow = ">=0.1.0a9"
-bytestring-splitter = ">=2.4.0"
-hendrix = ">=4.0"
 nucypher-core = "==0.13.0"
 # Cryptography
 cryptography = ">=3.2"
 mnemonic = "*"
 pynacl = ">=1.4.0"
 pyopenssl = "*"
-# Utilities
-mako = "*"
-marshmallow = "*"
-maya = "*"
-msgpack = "*"
-# Web
-aiohttp = "==3.8.2"
-flask = "*"
-requests = "*"
-# Third-Party Ethereum
-eth-tester = "*" # providers.py still uses this
-py-evm = "<0.8"
-web3 = ">=6.0.0"
+# Ethereum
+eth-abi = "<5.0.0" # eth-ape restriction
+eth-tester = "<0.10.0,>0.9.0" # web3[tester]
+eth-account = "<0.9,>=0.8.0"
+eth-utils = "*"
-# CLI / Configuration
-appdirs = "*"
+web3 = ">=6.0.0"
+# Web
+flask = "*"
+hendrix = ">=4.0"
+requests = "*"
+mako = "*"
+# CLI
 click = ">=7.0"
 colorama = "*"
 tabulate = "*"
-# Tools
-eth-account = ">=0.8.0,<0.9" # because of eth-ape (eip712 dep)
+# Serialization
+bytestring-splitter = ">=2.4.0"
+marshmallow = "*"
+msgpack = "*"
+# Utilities
+aiohttp = "*"
+appdirs = "*"
+constant-sorrow = ">=0.1.0a9"
+maya = "*"
+pendulum = ">=3.0.0b1"
+prometheus-client = "*"
+setuptools = "*" # for distutils
+urllib3 = "<2,>=1.26.16" # eth-ape


 [dev-packages]
 # Pytest
-# See https://github.com/pytest-dev/pytest/issues/9703
-pytest = "<7"
-pytest-twisted = "*"
+pytest = "<7" # See https://github.com/pytest-dev/pytest/issues/9703
 pytest-cov = "*"
 pytest-mock = "*"
 pytest-timeout = "*"
+pytest-twisted = "*"
 # Tools
-eth-ape = "<=0.6.19" # eth-ape 0.6.22 depends on urllib3<2 and >=1.26.16
-eth-account = ">=0.8.0,<0.9" # because of eth-ape (eip712 dep)
 ape-solidity = ">=0.6.5"
 hypothesis = "*"
-pre-commit = "2.12.1"
-coverage = "<=6.5.0"
+coverage = ">=7.3.2"
+eth-ape = ">=0.6.23"
+pre-commit = ">=2.12.1"

 [scripts]
 nucypher = "python3 nucypher/cli/main.py"
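A quick spot-check that an installed environment matches the reorganized pins — a standard-library sketch; the expected versions are taken from the Pipfile entries above:

from importlib.metadata import version

assert version("web3").startswith("6.")             # web3 = ">=6.0.0"
assert int(version("coverage").split(".")[0]) >= 7  # coverage = ">=7.3.2" (dev)
assert version("nucypher-core") == "0.13.0"         # nucypher-core = "==0.13.0"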
File diff suppressed because it is too large

@@ -1,4 +1,4 @@
-FROM nucypher/rust-python:3.8.16
+FROM nucypher/rust-python:3.12.0

 # set default user
 USER $USER
@@ -1,76 +1,74 @@
 -i https://pypi.python.org/simple
-aiohttp==3.8.2; python_version >= '3.6'
+aiohttp==3.9.1; python_version >= '3.8'
 aiosignal==1.3.1; python_version >= '3.7'
-ape-solidity==0.6.9; python_version >= '3.8' and python_version < '4'
-asttokens==2.4.0
-async-timeout==4.0.3; python_version >= '3.7'
-attrs==23.1.0; python_version >= '3.7'
-backcall==0.2.0
+annotated-types==0.6.0; python_version >= '3.8'
+ape-solidity==0.6.11; python_version >= '3.8' and python_version < '4'
+asttokens==2.4.1
+attrs==23.2.0; python_version >= '3.7'
 base58==1.0.3
-bitarray==2.8.2
+bitarray==2.9.2
 cached-property==1.5.2
-certifi==2023.7.22; python_version >= '3.6'
+certifi==2023.11.17; python_version >= '3.6'
 cffi==1.16.0; python_version >= '3.8'
 cfgv==3.4.0; python_version >= '3.8'
-charset-normalizer==2.1.1; python_full_version >= '3.6.0'
+charset-normalizer==3.3.2; python_full_version >= '3.7.0'
 click==8.1.7; python_version >= '3.7'
 commonmark==0.9.1
-coverage[toml]==6.5.0; python_version >= '3.7'
-cryptography==41.0.5; python_version >= '3.7'
+coverage[toml]==7.4.0; python_version >= '3.8'
+cryptography==41.0.7; python_version >= '3.7'
 cytoolz==0.12.2; implementation_name == 'cpython'
 dataclassy==0.11.1; python_version >= '3.6'
 decorator==5.1.1; python_version >= '3.5'
 deprecated==1.2.14; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
-distlib==0.3.7
-eip712==0.2.1; python_version >= '3.8' and python_version < '4'
+distlib==0.3.8
+eip712==0.2.2; python_version >= '3.8' and python_version < '4'
 eth-abi==4.2.1; python_full_version >= '3.7.2' and python_version < '4'
 eth-account==0.8.0; python_version >= '3.6' and python_version < '4'
-eth-ape==0.6.19; python_version >= '3.8' and python_version < '4'
-eth-bloom==2.0.0; python_version >= '3.7' and python_version < '4'
-eth-hash[pycryptodome]==0.5.2; python_version >= '3.7' and python_version < '4'
+eth-ape==0.6.27; python_version >= '3.8' and python_version < '4'
+eth-bloom==3.0.0; python_version >= '3.8' and python_version < '4'
+eth-hash[pycryptodome]==0.6.0; python_version >= '3.8' and python_version < '4'
 eth-keyfile==0.6.1
 eth-keys==0.4.0
+eth-pydantic-types==0.1.0a5; python_version >= '3.8' and python_version < '4'
 eth-rlp==0.3.0; python_version >= '3.7' and python_version < '4'
 eth-tester==0.9.1b1; python_full_version >= '3.6.8' and python_version < '4'
-eth-typing==3.5.1; python_full_version >= '3.7.2' and python_version < '4'
-eth-utils==2.3.0; python_version >= '3.7' and python_version < '4'
-ethpm-types==0.5.8; python_version >= '3.8' and python_version < '4'
-evm-trace==0.1.0a25; python_version >= '3.8' and python_version < '4'
-executing==2.0.0
-filelock==3.12.4; python_version >= '3.8'
-frozenlist==1.4.0; python_version >= '3.8'
-greenlet==3.0.0; python_version >= '3.7'
+eth-typing==3.5.2; python_full_version >= '3.7.2' and python_version < '4'
+eth-utils==2.3.1; python_version >= '3.7' and python_version < '4'
+ethpm-types==0.5.11; python_version >= '3.8' and python_version < '4'
+evm-trace==0.1.2; python_version >= '3.8' and python_version < '4'
+executing==2.0.1; python_version >= '3.5'
+filelock==3.13.1; python_version >= '3.8'
+frozenlist==1.4.1; python_version >= '3.8'
+greenlet==3.0.3; python_version >= '3.7'
 hexbytes==0.3.1; python_version >= '3.7' and python_version < '4'
 hypothesis==6.88.1; python_version >= '3.8'
-identify==2.5.30; python_version >= '3.8'
-idna==3.4; python_version >= '3.5'
+identify==2.5.33; python_version >= '3.8'
+idna==3.6; python_version >= '3.5'
 ijson==3.2.3
-importlib-metadata==6.8.0; python_version >= '3.8'
+importlib-metadata==7.0.1; python_version >= '3.8'
 iniconfig==2.0.0; python_version >= '3.7'
-ipython==8.16.1; python_version >= '3.9'
+ipython==8.20.0; python_version >= '3.10'
 jedi==0.19.1; python_version >= '3.6'
-jsonschema==4.19.1; python_version >= '3.8'
-jsonschema-specifications==2023.7.1; python_version >= '3.8'
+jsonschema==4.20.0; python_version >= '3.8'
+jsonschema-specifications==2023.12.1; python_version >= '3.8'
 lazyasd==0.1.4
 lru-dict==1.2.0
 matplotlib-inline==0.1.6; python_version >= '3.5'
 morphys==1.0
-msgspec==0.18.4; python_version >= '3.8'
-multidict==5.2.0; python_version >= '3.6'
+msgspec==0.18.5; python_version >= '3.8'
+multidict==6.0.4; python_version >= '3.7'
 mypy-extensions==1.0.0; python_version >= '3.5'
 nodeenv==1.8.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'
-numpy==1.26.1; python_version >= '3.10'
+numpy==1.26.3; python_version >= '3.10'
 packaging==23.2; python_version >= '3.7'
 pandas==1.5.3; python_version >= '3.8'
 parsimonious==0.9.0
 parso==0.8.3; python_version >= '3.6'
-pexpect==4.8.0; sys_platform != 'win32'
-pickleshare==0.7.5
-platformdirs==3.11.0; python_version >= '3.7'
+pexpect==4.9.0; sys_platform != 'win32'
+platformdirs==4.1.0; python_version >= '3.8'
 pluggy==1.3.0; python_version >= '3.8'
-pre-commit==2.12.1; python_full_version >= '3.6.1'
-prompt-toolkit==3.0.39; python_full_version >= '3.7.0'
-protobuf==4.25.0rc2; python_version >= '3.8'
+pre-commit==3.6.0; python_version >= '3.9'
+prompt-toolkit==3.0.43; python_full_version >= '3.7.0'
+protobuf==4.25.2; python_version >= '3.8'
 ptyprocess==0.7.0
 pure-eval==0.2.2
 py==1.11.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'

@@ -81,13 +79,14 @@ py-geth==3.13.0; python_version >= '3.7' and python_version < '4'
 py-multibase==1.0.3
 py-multicodec==0.2.1
 py-multihash==0.2.3
-py-solc-x==1.1.1; python_version >= '3.6' and python_version < '4'
+py-solc-x==2.0.2; python_version >= '3.8' and python_version < '4'
 pycparser==2.21
-pycryptodome==3.19.0
-pydantic==1.10.13; python_version >= '3.7'
+pycryptodome==3.20.0
+pydantic==2.5.3; python_version >= '3.7'
+pydantic-core==2.14.6; python_version >= '3.7'
 pyethash==0.1.27
 pygithub==1.59.1; python_version >= '3.7'
-pygments==2.16.1; python_version >= '3.7'
+pygments==2.17.2; python_version >= '3.7'
 pyjwt[crypto]==2.8.0; python_version >= '3.7'
 pynacl==1.5.0; python_version >= '3.6'
 pytest==6.2.5; python_version >= '3.6'

@@ -98,34 +97,34 @@ pytest-twisted==1.14.0; python_version >= '2.7' and python_version not in '3.0,
 python-baseconv==1.2.2
 python-dateutil==2.8.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'
 pytz==2023.3.post1
-pyunormalize==15.0.0; python_version >= '3.6'
+pyunormalize==15.1.0; python_version >= '3.6'
 pyyaml==6.0.1; python_version >= '3.6'
-referencing==0.30.2; python_version >= '3.8'
-regex==2023.10.3; python_version >= '3.7'
+referencing==0.32.1; python_version >= '3.8'
+regex==2023.12.25; python_version >= '3.7'
 requests==2.31.0; python_version >= '3.7'
 rich==12.6.0; python_full_version >= '3.6.3' and python_full_version < '4.0.0'
 rlp==3.0.0
-rpds-py==0.10.6; python_version >= '3.8'
+rpds-py==0.16.2; python_version >= '3.8'
 safe-pysha3==1.0.4
 semantic-version==2.10.0; python_version >= '2.7'
-setuptools==68.2.2; python_version >= '3.8'
+setuptools==69.0.3; python_version >= '3.8'
 six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'
 sortedcontainers==2.4.0
-sqlalchemy==2.0.22; python_version >= '3.7'
+sqlalchemy==2.0.25; python_version >= '3.7'
 stack-data==0.6.3
 toml==0.10.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'
 toolz==0.12.0; python_version >= '3.5'
 tqdm==4.66.1; python_version >= '3.7'
-traitlets==5.11.2; python_version >= '3.8'
-trie==2.1.1; python_version >= '3.7' and python_version < '4'
-typing-extensions==4.8.0; python_version >= '3.8'
-urllib3==2.0.7; python_version >= '3.7'
+traitlets==5.14.1; python_version >= '3.8'
+trie==2.2.0; python_version >= '3.7' and python_version < '4'
+typing-extensions==4.9.0; python_version >= '3.8'
+urllib3==1.26.18; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
 varint==1.0.2
-virtualenv==20.24.6; python_version >= '3.7'
+virtualenv==20.25.0; python_version >= '3.7'
 watchdog==3.0.0; python_version >= '3.7'
-wcwidth==0.2.8
-web3==6.11.1; python_full_version >= '3.7.2'
+wcwidth==0.2.13
+web3==6.14.0; python_full_version >= '3.7.2'
 websockets==12.0; python_version >= '3.8'
-wrapt==1.16.0rc1; python_version >= '3.6'
-yarl==1.9.2; python_version >= '3.7'
+wrapt==1.16.0; python_version >= '3.6'
+yarl==1.9.4; python_version >= '3.7'
 zipp==3.17.0; python_version >= '3.8'
@@ -1,9 +1,10 @@
 import datetime
-import maya
 import os
 from getpass import getpass
 from pathlib import Path

+import maya
+
 from nucypher.blockchain.eth import domains
 from nucypher.blockchain.eth.signers.base import Signer
 from nucypher.characters.lawful import Alice, Bob
@@ -17,12 +17,13 @@
 import base64
 import datetime
 import json
-import maya
 import os
 import shutil
 from getpass import getpass
 from pathlib import Path

+import maya
+
 from nucypher.blockchain.eth import domains
 from nucypher.blockchain.eth.signers import Signer
 from nucypher.characters.lawful import Alice, Bob
@@ -1,12 +1,13 @@
 import base64
 import json
-import maya
-import msgpack
 import os
 import shutil
+from timeit import default_timer as timer

+import maya
+import msgpack
 from nucypher_core import EncryptedTreasureMap, MessageKit
 from nucypher_core.umbral import PublicKey
-from timeit import default_timer as timer

 from nucypher.blockchain.eth import domains
 from nucypher.characters.lawful import Bob
@@ -38,7 +38,7 @@ coordinator_agent = CoordinatorAgent(
     blockchain_endpoint=polygon_endpoint,
     registry=registry,
 )
-ritual_id = 0 # got this from a side channel
+ritual_id = 5 # got this from a side channel
 ritual = coordinator_agent.get_ritual(ritual_id)

 # known authorized encryptor for ritual 3
@@ -75,7 +75,7 @@ conditions = {
     },
     {
         "conditionType": ConditionType.RPC.value,
-        "chain": 5,
+        "chain": 11155111,
         "method": "eth_getBalance",
         "parameters": ["0x210eeAC07542F815ebB6FD6689637D8cA2689392", "latest"],
         "returnValueTest": {"comparator": ">", "value": 1},
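The "chain" value is an EIP-155 chain ID, so this change moves the example RPC condition from Goerli (5) to Sepolia (11155111). For reference, the IDs touched across this commit (well-known public values, mirrored by the _CONDITION_CHAINS table later in the diff):

CHAIN_IDS = {
    "ethereum/mainnet": 1,
    "ethereum/goerli": 5,          # being retired in this commit
    "ethereum/sepolia": 11155111,
    "polygon/mainnet": 137,
    "polygon/mumbai": 80001,
}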
@@ -39,7 +39,7 @@ coordinator_agent = CoordinatorAgent(
     blockchain_endpoint=polygon_endpoint,
     registry=registry,
 )
-ritual_id = 0 # got this from a side channel
+ritual_id = 5 # got this from a side channel
 ritual = coordinator_agent.get_ritual(ritual_id)

 # known authorized encryptor for ritual 3
@@ -0,0 +1 @@
+Make Prometheus exporter always run for Ursula

@@ -0,0 +1 @@
+Add Prometheus metrics endpoint to running logs

@@ -0,0 +1 @@
+Add metrics for root and child networks.

@@ -0,0 +1 @@
+Don't use web3.py gas strategies, since that switches TX mode to legacy.

@@ -0,0 +1 @@
+Deprecate use of Goerli for Lynx testnets; use Sepolia instead.

@@ -0,0 +1 @@
+Make prometheus optional, and allow fine tuning of collection interval.

@@ -0,0 +1,5 @@
+Node blocks and remains unresponsive when another node in the cohort is
+unreachable during a dkg ritual because the ferveo public key is obtained from
+a node directly through node discovery. Instead, obtain ferveo public key
+from Coordinator contract so that connecting to the another node in
+the cohort is unnecessary.
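A sketch of the lookup this fix introduces, using the CoordinatorAgent method that the actors.py hunks below call; the names come from this diff, while the surrounding scaffolding is assumed:

# fetch a cohort member's ferveo public key from the Coordinator contract,
# without contacting the (possibly unreachable) node itself
public_key = coordinator_agent.get_provider_public_key(
    provider=staking_provider_address, ritual_id=ritual_id
)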
@@ -0,0 +1 @@
+Optimize use of decryption request WorkerPool.

@@ -0,0 +1 @@
+Fix `MAX_UPLOAD_CONTENT_LENGTH` too small for mainnet TACo rituals

@@ -0,0 +1 @@
+Add prometheus metrics for tracking total threshold decryption requests and errors.

@@ -0,0 +1 @@
+Ensure incoming request ip addresses resolution handles proxied headers.

@@ -0,0 +1 @@
+Scan for ritual events less often to be more efficient with RPC requests.
@@ -16,7 +16,7 @@ __url__ = "https://github.com/nucypher/nucypher"

 __summary__ = "A threshold access control application to empower privacy in decentralized systems."

-__version__ = "7.0.4"
+__version__ = "7.1.0"

 __author__ = "NuCypher"
@@ -203,7 +203,7 @@ class FleetState:

 class FleetSensor:
     """
-    A representation of a fleet of NuCypher nodes.
+    A representation of a fleet of nodes.

     If `this_node` is provided, it will be included in the state checksum
     (but not returned during iteration/lookups).
@@ -1,4 +1,5 @@
 import json
+import random
 import time
 from collections import defaultdict
 from decimal import Decimal

@@ -134,6 +135,8 @@ class Operator(BaseActor):
     READY_TIMEOUT = None  # (None or 0) == indefinite
     READY_POLL_RATE = 120  # seconds

+    AGGREGATION_SUBMISSION_MAX_DELAY = 60
+
     class OperatorError(BaseActor.ActorError):
         """Operator-specific errors."""
@@ -296,20 +299,11 @@ class Operator(BaseActor):
         return ritual

     def _resolve_validators(
-            self,
-            ritual: CoordinatorAgent.Ritual,
-            timeout: int = 60
+        self,
+        ritual: CoordinatorAgent.Ritual,
+        ritual_id: int,
     ) -> List[Tuple[Validator, Transcript]]:
-
-        validators = [n[0] for n in ritual.transcripts]
-        if timeout > 0:
-            nodes_to_discover = list(set(validators) - {self.checksum_address})
-            self.block_until_specific_nodes_are_known(
-                addresses=nodes_to_discover,
-                timeout=timeout,
-                allow_missing=0
-            )
-
         result = list()
         for staking_provider_address, transcript_bytes in ritual.transcripts:
             if self.checksum_address == staking_provider_address:
@@ -320,12 +314,9 @@ class Operator(BaseActor):
                 )
             else:
                 # Remote
-                try:
-                    remote_operator = self.known_nodes[staking_provider_address]
-                except KeyError:
-                    raise self.ActorError(f"Unknown node {staking_provider_address}")
-                remote_operator.mature()
-                public_key = remote_operator.public_keys(RitualisticPower)
+                public_key = self.coordinator_agent.get_provider_public_key(
+                    provider=staking_provider_address, ritual_id=ritual_id
+                )
             self.log.debug(
                 f"Ferveo public key for {staking_provider_address} is {bytes(public_key).hex()[:-8:-1]}"
             )
@@ -412,8 +403,14 @@ class Operator(BaseActor):
         # above, we know this tx is pending
         pending_tx = self.dkg_storage.get_transcript_receipt(ritual_id=ritual_id)
         if pending_tx:
+            try:
+                txhash = pending_tx["transactionHash"]
+            except TypeError:
+                txhash = pending_tx
+            txhash = bytes(txhash).hex()
             self.log.debug(
-                f"Node {self.transacting_power.account} has pending tx {pending_tx} for posting transcript for ritual {ritual_id}; skipping execution"
+                f"Node {self.transacting_power.account} has pending tx {txhash} "
+                f"for posting transcript for ritual {ritual_id}; skipping execution"
             )
             return None
@@ -423,7 +420,7 @@ class Operator(BaseActor):

         # gather the cohort
         ritual = self.coordinator_agent.get_ritual(ritual_id, with_participants=True)
-        nodes, transcripts = list(zip(*self._resolve_validators(ritual)))
+        nodes, transcripts = list(zip(*self._resolve_validators(ritual, ritual_id)))
         nodes = sorted(nodes, key=lambda n: n.address)
         if any(transcripts):
             self.log.debug(
@@ -498,7 +495,7 @@ class Operator(BaseActor):
         )

         ritual = self.coordinator_agent.get_ritual(ritual_id, with_participants=True)
-        transcripts = self._resolve_validators(ritual)
+        transcripts = self._resolve_validators(ritual, ritual_id)
         if not all([t for _, t in transcripts]):
             raise self.ActorError(
                 f"ritual #{ritual_id} is missing transcripts from {len([t for t in transcripts if not t])} nodes."
@@ -525,6 +522,11 @@ class Operator(BaseActor):

         # publish the transcript and store the receipt
         total = ritual.total_aggregations + 1
+
+        # distribute submission of aggregated transcripts - don't want all nodes submitting at
+        # the same time
+        time.sleep(random.randint(0, self.AGGREGATION_SUBMISSION_MAX_DELAY))
+
         tx_hash = self.publish_aggregated_transcript(
             ritual_id=ritual_id,
             aggregated_transcript=aggregated_transcript,
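The random sleep adds jitter: each node independently delays 0-60 seconds, so a cohort's aggregation transactions spread out rather than racing into the same block. A rough illustration, assuming uniformly distributed delays:

import random

cohort_size = 30
delays = sorted(random.randint(0, 60) for _ in range(cohort_size))
# the expected gap between consecutive submissions is roughly 60 / 30 = 2 seconds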
@@ -555,7 +557,7 @@ class Operator(BaseActor):
         if not self.coordinator_agent.is_ritual_active(ritual_id=ritual_id):
             raise self.ActorError(f"Ritual #{ritual_id} is not active.")

-        nodes, transcripts = list(zip(*self._resolve_validators(ritual)))
+        nodes, transcripts = list(zip(*self._resolve_validators(ritual, ritual_id)))
         if not all(transcripts):
             raise self.ActorError(f"Ritual #{ritual_id} is missing transcripts")
@@ -910,6 +910,7 @@ class CoordinatorAgent(EthereumContractAgent):
         )
         receipt = self.blockchain.send_transaction(
             contract_function=contract_function,
+            gas_estimation_multiplier=1.4,
             transacting_power=transacting_power,
             fire_and_forget=fire_and_forget,
         )
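The gas_estimation_multiplier pads the node's gas estimate for this transaction. A hypothetical sketch of the usual semantics (the send_transaction implementation itself is not shown in this diff):

# assumed behavior of a 1.4x gas estimation multiplier
estimated_gas = contract_function.estimate_gas(transaction_params)
transaction_params["gas"] = int(estimated_gas * 1.4)  # 40% headroom against underestimation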
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -15,7 +15,6 @@ class ChainInfo(NamedTuple):

 class EthChain(ChainInfo, Enum):
     MAINNET = (1, "mainnet")
-    GOERLI = (5, "goerli")
     SEPOLIA = (11155111, "sepolia")

@@ -83,11 +82,11 @@ MAINNET = TACoDomain(

 LYNX = TACoDomain(
     name="lynx",
-    eth_chain=EthChain.GOERLI,
+    eth_chain=EthChain.SEPOLIA,
     polygon_chain=PolygonChain.MUMBAI,
     condition_chains=(
         EthChain.MAINNET,
-        EthChain.GOERLI,
+        EthChain.SEPOLIA,
         PolygonChain.MUMBAI,
         PolygonChain.MAINNET,
     ),
@@ -1,7 +1,7 @@
 import math
 import pprint
 from pathlib import Path
-from typing import Callable, NamedTuple, Optional, Union
+from typing import Callable, Dict, NamedTuple, Optional, Union
 from urllib.parse import urlparse

 import requests

@@ -10,7 +10,6 @@ from constant_sorrow.constants import (
     NO_BLOCKCHAIN_CONNECTION,
     UNKNOWN_TX_STATUS,
 )
-from eth.typing import TransactionDict
 from eth_tester import EthereumTester
 from eth_tester.exceptions import (
     TransactionFailed as TestTransactionFailed,

@@ -260,7 +259,8 @@ class BlockchainInterface:
             self.log.debug('Injecting POA middleware at layer 0')
             self.client.inject_middleware(geth_poa_middleware, layer=0)

-        self.configure_gas_strategy()
+        # TODO: See #2770
+        # self.configure_gas_strategy()

     def configure_gas_strategy(self, gas_strategy: Optional[Callable] = None) -> None:

@@ -512,13 +512,14 @@ class BlockchainInterface:

         return transaction_dict

-    def sign_and_broadcast_transaction(self,
-                                       transacting_power: TransactingPower,
-                                       transaction_dict: TransactionDict,
-                                       transaction_name: str = "",
-                                       confirmations: int = 0,
-                                       fire_and_forget: bool = False
-                                       ) -> Union[TxReceipt, HexBytes]:
+    def sign_and_broadcast_transaction(
+        self,
+        transacting_power: TransactingPower,
+        transaction_dict: Dict,
+        transaction_name: str = "",
+        confirmations: int = 0,
+        fire_and_forget: bool = False,
+    ) -> Union[TxReceipt, HexBytes]:
         """
         Takes a transaction dictionary, signs it with the configured signer, then broadcasts the signed
         transaction using the ethereum provider's eth_sendRawTransaction RPC endpoint.
@@ -186,7 +186,7 @@ class RegistrySourceManager:
     def __init__(
         self,
         domain: TACoDomain,
-        sources: Optional[RegistrySource] = None,
+        sources: Optional[List[RegistrySource]] = None,
         only_primary: bool = False,
     ):
         if only_primary and sources:
@@ -208,11 +208,15 @@ class RegistrySourceManager:
             try:
                 source = source_class(domain=self.domain)
             except RegistrySource.Unavailable:
-                self.logger.warn(f"Fetching registry from {source_class} failed.")
+                self.logger.warn(
+                    f"Fetching registry from {source_class.__name__} failed."
+                )
                 continue
             else:
                 if not source.is_primary:
-                    message = f"Warning: {source_class} is not a primary source."
+                    message = (
+                        f"Warning: {source_class.__name__} is not a primary source."
+                    )
                     self.logger.warn(message)
                 return source
         self.logger.warn("All known registry sources failed.")
@@ -4,6 +4,7 @@ import time
 from typing import Callable, List, Optional, Tuple

 import maya
+from prometheus_client import REGISTRY, Gauge
 from twisted.internet import threads
 from web3.datastructures import AttributeDict

@@ -50,7 +51,7 @@ class EventActuator(EventScanner):
 class EventScannerTask(SimpleTask):
     """Task that runs the event scanner in a looping call."""

-    INTERVAL = 20  # seconds
+    INTERVAL = 120  # seconds

     def __init__(self, scanner: Callable, *args, **kwargs):
         self.scanner = scanner

@@ -63,7 +64,7 @@ class EventScannerTask(SimpleTask):
         self.log.warn("Error during ritual event scanning: {}".format(args[0].getTraceback()))
         if not self._task.running:
             self.log.warn("Restarting event scanner task!")
-            self.start(now=True)
+            self.start(now=False)  # take a breather


 class ActiveRitualTracker:
@@ -76,6 +77,12 @@ class ActiveRitualTracker:
     # what's the buffer for potentially receiving repeated events - 10mins?
     _RITUAL_TIMEOUT_ADDITIONAL_TTL_BUFFER = 60 * 10

+    _LAST_SCANNED_BLOCK_METRIC = Gauge(
+        "ritual_events_last_scanned_block_number",
+        "Last scanned block number for ritual events",
+        registry=REGISTRY,
+    )
+
     class ParticipationState:
         def __init__(
             self,
@@ -417,14 +424,16 @@ class ActiveRitualTracker:
         Because there might have been a minor Ethereum chain reorganisations since the last scan ended,
         we need to discard the last few blocks from the previous scan results.
         """
-        self.scanner.delete_potentially_forked_block_data(
-            self.state.get_last_scanned_block() - self.scanner.chain_reorg_rescan_window
-        )
+        last_scanned_block = self.scanner.get_last_scanned_block()
+        self._LAST_SCANNED_BLOCK_METRIC.set(last_scanned_block)

-        if self.scanner.get_last_scanned_block() == 0:
+        if last_scanned_block == 0:
             # first run so calculate starting block number based on dkg timeout
             suggested_start_block = self._get_first_scan_start_block_number()
         else:
+            self.scanner.delete_potentially_forked_block_data(
+                last_scanned_block - self.scanner.chain_reorg_rescan_window
+            )
             suggested_start_block = self.scanner.get_suggested_scan_start_block()

         end_block = self.scanner.get_suggested_scan_end_block()
@@ -0,0 +1,24 @@
+from typing import List
+
+from nucypher.utilities.prometheus.collector import MetricsCollector
+from nucypher.utilities.task import SimpleTask
+
+
+class PrometheusMetricsTracker(SimpleTask):
+    def __init__(self, collectors: List[MetricsCollector], interval: float):
+        self.metrics_collectors = collectors
+        super().__init__(interval=interval)
+
+    def run(self):
+        for collector in self.metrics_collectors:
+            collector.collect()
+
+    def handle_errors(self, *args, **kwargs):
+        self.log.warn(
+            "Error during prometheus metrics collection: {}".format(
+                args[0].getTraceback()
+            )
+        )
+        if not self._task.running:
+            self.log.warn("Restarting prometheus metrics task!")
+            self.start(now=False)  # take a breather
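Hypothetical wiring of the new tracker; the start/stop looping-call API is assumed from SimpleTask's use elsewhere in this commit:

tracker = PrometheusMetricsTracker(collectors=collectors, interval=90.0)
tracker.start(now=True)   # invoke every registered collector each interval
# ... later, on node shutdown (see Ursula.stop() below):
tracker.stop()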
@@ -4,7 +4,6 @@ import time
 from pathlib import Path
 from queue import Queue
 from typing import (
-    TYPE_CHECKING,
     Any,
     Dict,
     Iterable,

@@ -120,9 +119,10 @@ from nucypher.policy.policies import Policy
 from nucypher.utilities.emitters import StdoutEmitter
 from nucypher.utilities.logging import Logger
 from nucypher.utilities.networking import validate_operator_ip
-
-if TYPE_CHECKING:
-    from nucypher.utilities.prometheus.metrics import PrometheusMetricsConfig
+from nucypher.utilities.prometheus.metrics import (
+    PrometheusMetricsConfig,
+    start_prometheus_exporter,
+)


 class Alice(Character, actors.PolicyAuthor):
@@ -871,9 +871,6 @@ class Ursula(Teacher, Character, Operator):
             # Only *YOU* can prevent forest fires
             self.revoked_policies: Set[bytes] = set()

-            # Care to introduce yourself?
-            message = "THIS IS YOU: {}: {}".format(self.__class__.__name__, self)
-            self.log.info(message)
             self.log.info(self.banner.format(self.nickname))

         else:

@@ -890,6 +887,8 @@ class Ursula(Teacher, Character, Operator):
                 certificate_filepath=certificate_filepath,
             )

+        self._prometheus_metrics_tracker = None
+
     def _substantiate_stamp(self):
         transacting_power = self.transacting_power
         signature = transacting_power.sign_message(message=bytes(self.stamp))
@@ -962,7 +961,7 @@ class Ursula(Teacher, Character, Operator):
         ritual_tracking: bool = True,
         hendrix: bool = True,
         start_reactor: bool = True,
-        prometheus_config: "PrometheusMetricsConfig" = None,
+        prometheus_config: PrometheusMetricsConfig = None,
         preflight: bool = True,
         block_until_ready: bool = True,
         eager: bool = False,
@@ -1009,12 +1008,15 @@ class Ursula(Teacher, Character, Operator):
             emitter.message("✓ Start Operator Bonded Tracker", color="green")

         if prometheus_config:
-            # Locally scoped to prevent import without prometheus explicitly installed
-            from nucypher.utilities.prometheus.metrics import start_prometheus_exporter
-
-            start_prometheus_exporter(ursula=self, prometheus_config=prometheus_config)
+            self._prometheus_metrics_tracker = start_prometheus_exporter(
+                ursula=self, prometheus_config=prometheus_config
+            )
             if emitter:
-                emitter.message("✓ Prometheus Exporter", color="green")
+                emitter.message(
+                    f"✓ Prometheus Exporter http://{self.rest_interface.host}:"
+                    f"{prometheus_config.port}/metrics",
+                    color="green",
+                )

         if hendrix:
             if emitter:
@@ -1060,6 +1062,8 @@ class Ursula(Teacher, Character, Operator):
         self.stop_learning_loop()
         self._operator_bonded_tracker.stop()
         self.ritual_tracker.stop()
+        if self._prometheus_metrics_tracker:
+            self._prometheus_metrics_tracker.stop()
         if halt_reactor:
             reactor.stop()
@@ -1336,6 +1340,7 @@ class Ursula(Teacher, Character, Operator):
             previous_fleet_states=previous_fleet_states,
             known_nodes=known_nodes_info,
             balance_eth=balance_eth,
+            block_height=self.ritual_tracker.scanner.get_last_scanned_block(),
         )

     def as_external_validator(self) -> Validator:

@@ -1372,6 +1377,7 @@ class LocalUrsulaStatus(NamedTuple):
     previous_fleet_states: List[ArchivedFleetState]
     known_nodes: Optional[List[RemoteUrsulaStatus]]
     balance_eth: float
+    block_height: int

     def to_json(self) -> Dict[str, Any]:
         if self.known_nodes is None:

@@ -1392,6 +1398,7 @@ class LocalUrsulaStatus(NamedTuple):
             ],
             known_nodes=known_nodes_json,
             balance_eth=self.balance_eth,
+            block_height=self.block_height,
         )
@@ -64,6 +64,7 @@ from nucypher.config.migrations.common import (
     WrongConfigurationVersion,
 )
 from nucypher.crypto.keystore import Keystore
+from nucypher.utilities.prometheus.metrics import PrometheusMetricsConfig


 class UrsulaConfigOptions:
@@ -388,37 +389,52 @@ def forget(general_config, config_options, config_file):
 @option_dry_run
 @option_force
 @group_general_config
-@click.option('--prometheus', help="Run the ursula prometheus exporter", is_flag=True, default=False)
-@click.option('--metrics-port', help="Run a Prometheus metrics exporter on specified HTTP port", type=NETWORK_PORT)
-@click.option("--metrics-listen-address", help="Run a prometheus metrics exporter on specified IP address", default='')
-@click.option("--metrics-prefix", help="Create metrics params with specified prefix", default="ursula")
-@click.option("--metrics-interval", help="The frequency of metrics collection", type=click.INT, default=90)
-@click.option("--ip-checkup/--no-ip-checkup", help="Verify external IP matches configuration", default=True)
-def run(general_config, character_options, config_file, dry_run, prometheus, metrics_port,
-        metrics_listen_address, metrics_prefix, metrics_interval, force, ip_checkup):
+@click.option(
+    "--prometheus",
+    help="Enable the prometheus metrics exporter",
+    is_flag=True,
+    default=False,
+)
+@click.option(
+    "--metrics-port",
+    help="Specify the HTTP port of the Prometheus metrics exporter (if prometheus enabled)",
+    default=9101,
+    type=NETWORK_PORT,
+)
+@click.option(
+    "--metrics-interval",
+    help="The frequency of metrics collection in seconds (if prometheus enabled)",
+    type=click.INT,
+    default=90,
+)
+@click.option(
+    "--ip-checkup/--no-ip-checkup",
+    help="Verify external IP matches configuration",
+    default=True,
+)
+def run(
+    general_config,
+    character_options,
+    config_file,
+    dry_run,
+    prometheus,
+    metrics_port,
+    metrics_interval,
+    force,
+    ip_checkup,
+):
     """Run an "Ursula" node."""

     emitter = setup_emitter(general_config)
     dev_mode = character_options.config_options.dev
     lonely = character_options.config_options.lonely

-    if prometheus and not metrics_port:
-        # Require metrics port when using prometheus
-        raise click.BadOptionUsage(option_name='metrics-port',
-                                   message=click.style('--metrics-port is required when using --prometheus', fg="red"))
-
     _pre_launch_warnings(emitter, dev=dev_mode, force=None)

-    prometheus_config: "PrometheusMetricsConfig" = None
-    if prometheus and not dev_mode:
-        # Locally scoped to prevent import without prometheus explicitly installed
-        from nucypher.utilities.prometheus.metrics import PrometheusMetricsConfig
-
+    prometheus_config = None
+    if prometheus:
         prometheus_config = PrometheusMetricsConfig(
-            port=metrics_port,
-            metrics_prefix=metrics_prefix,
-            listen_address=metrics_listen_address,
-            collection_interval=metrics_interval,
+            port=metrics_port, collection_interval=metrics_interval
         )

     ursula_config, URSULA = character_options.create_character(
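With the port now defaulting to 9101, enabling metrics becomes a single flag, e.g. nucypher ursula run --prometheus, optionally tuned with --metrics-port and --metrics-interval; the previous --metrics-listen-address and --metrics-prefix options are removed along with the mandatory-port check.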
@@ -37,8 +37,8 @@ NUCYPHER_SENTRY_USER_ID = ""
 NUCYPHER_SENTRY_ENDPOINT = f"https://{NUCYPHER_SENTRY_PUBLIC_KEY}@sentry.io/{NUCYPHER_SENTRY_USER_ID}"

 # Web
-CLI_ROOT = NUCYPHER_PACKAGE / 'network' / 'templates'
-MAX_UPLOAD_CONTENT_LENGTH = 1024 * 50
+CLI_ROOT = NUCYPHER_PACKAGE / "network" / "templates"
+MAX_UPLOAD_CONTENT_LENGTH = 1024 * 250 # 250kb

 # Dev Mode
 TEMPORARY_DOMAIN_NAME = ":temporary-domain:" # for use with `--dev` node runtimes
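In bytes, the cap grows fivefold (simple arithmetic on the constants above; the motivation is the "MAX_UPLOAD_CONTENT_LENGTH too small for mainnet TACo rituals" fix in the newsfragments):

OLD_MAX = 1024 * 50    # 51,200 bytes (~50 KiB)
NEW_MAX = 1024 * 250   # 256,000 bytes (~250 KiB)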
@@ -1,4 +1,6 @@
+import math
 from http import HTTPStatus
+from random import shuffle
 from typing import Dict, List, Tuple

 from eth_typing import ChecksumAddress
@@ -12,15 +14,24 @@ from nucypher.utilities.concurrency import BatchValueFactory, WorkerPool


 class ThresholdDecryptionClient(ThresholdAccessControlClient):
-    DEFAULT_DECRYPTION_TIMEOUT = 15
+    DEFAULT_DECRYPTION_TIMEOUT = 30
     DEFAULT_STAGGER_TIMEOUT = 3

+    class ThresholdDecryptionRequestFailed(Exception):
+        """Raised when a decryption request returns a non-zero status code."""
+
     class ThresholdDecryptionRequestFactory(BatchValueFactory):
-        def __init__(self, ursula_to_contact: List[ChecksumAddress], threshold: int):
-            # TODO should we batch the ursulas to contact i.e. pass `batch_size` parameter
-            super().__init__(values=ursula_to_contact, required_successes=threshold)
+        def __init__(
+            self,
+            ursulas_to_contact: List[ChecksumAddress],
+            threshold: int,
+            batch_size: int,
+        ):
+            super().__init__(
+                values=ursulas_to_contact,
+                required_successes=threshold,
+                batch_size=batch_size,
+            )

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -30,6 +41,7 @@ class ThresholdDecryptionClient(ThresholdAccessControlClient):
         encrypted_requests: Dict[ChecksumAddress, EncryptedThresholdDecryptionRequest],
         threshold: int,
         timeout: int = DEFAULT_DECRYPTION_TIMEOUT,
+        stagger_timeout: int = DEFAULT_STAGGER_TIMEOUT,
     ) -> Tuple[
         Dict[ChecksumAddress, EncryptedThresholdDecryptionResponse],
         Dict[ChecksumAddress, str],
@@ -60,23 +72,32 @@ class ThresholdDecryptionClient(ThresholdAccessControlClient):
                     response.content
                 )
             except Exception as e:
-                self.log.warn(f"Node {ursula_address} raised {e}")
-                raise
+                message = f"Node {ursula_address} raised {e}"
+                self.log.warn(message)
+                raise self.ThresholdDecryptionRequestFailed(message)

+            message = f"Node {ursula_address} returned {response.status_code} - {response.content}."
+            self.log.warn(message)
+            raise self.ThresholdDecryptionRequestFailed(message)
+
+        # TODO: Find a better request order, perhaps based on latency data obtained from discovery loop - #3395
+        requests = list(encrypted_requests)
+        shuffle(requests)
+        # Discussion about WorkerPool parameters:
+        # "https://github.com/nucypher/nucypher/pull/3393#discussion_r1456307991"
         worker_pool = WorkerPool(
             worker=worker,
             value_factory=self.ThresholdDecryptionRequestFactory(
-                ursula_to_contact=list(encrypted_requests.keys()), threshold=threshold
+                ursulas_to_contact=requests,
+                batch_size=math.ceil(threshold * 1.25),
+                threshold=threshold,
             ),
             target_successes=threshold,
-            threadpool_size=len(
-                encrypted_requests
-            ),
+            threadpool_size=math.ceil(
+                threshold * 1.5
+            ),  # TODO should we cap this (say 40?)
             timeout=timeout,
+            stagger_timeout=stagger_timeout,
         )
         worker_pool.start()
         try:
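For a concrete sense of the new pool sizing (plugging an example threshold into the expressions above; the threshold itself varies per ritual):

import math

threshold = 16
batch_size = math.ceil(threshold * 1.25)      # 20 requests in flight per batch
threadpool_size = math.ceil(threshold * 1.5)  # 24 worker threads, instead of one per recipient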
@@ -86,7 +107,7 @@ class ThresholdDecryptionClient(ThresholdAccessControlClient):
             successes = worker_pool.get_successes()
         finally:
-            worker_pool.cancel()
+            worker_pool.join()

         failures = worker_pool.get_failures()

         return successes, failures
@@ -662,7 +662,9 @@ class Learner:
                 self.log.info(f"Learned about enough nodes after {rounds_undertaken} rounds.")
                 return True
             if not self._learning_task.running:
-                raise RuntimeError("Learning loop is not running. Start it with start_learning().")
+                raise RuntimeError(
+                    "Learning loop is not running. Start it with start_learning_loop()."
+                )
             elif not reactor.running and not learn_on_this_thread:
                 raise RuntimeError(
                     f"The reactor isn't running, but you're trying to use it for discovery. You need to start the Reactor in order to use {self} this way.")
@@ -1,6 +1,7 @@
 import json
 import weakref
 from http import HTTPStatus
+from ipaddress import AddressValueError
 from pathlib import Path

 from constant_sorrow import constants

@@ -15,6 +16,7 @@ from nucypher_core import (
     MetadataResponsePayload,
     ReencryptionRequest,
 )
+from prometheus_client import REGISTRY, Counter, Summary

 from nucypher.config.constants import MAX_UPLOAD_CONTENT_LENGTH
 from nucypher.crypto.keypairs import DecryptingKeypair
@@ -26,6 +28,25 @@ from nucypher.policy.conditions.utils import (
     evaluate_condition_lingo,
 )
 from nucypher.utilities.logging import Logger
+from nucypher.utilities.networking import get_global_source_ipv4
+
+DECRYPTION_REQUESTS_SUCCESSES = Counter(
+    "threshold_decryption_num_successes",
+    "Number of threshold decryption successes",
+    registry=REGISTRY,
+)
+DECRYPTION_REQUESTS_FAILURES = Counter(
+    "threshold_decryption_num_failures",
+    "Number of threshold decryption failures",
+    registry=REGISTRY,
+)
+
+# Summary provides both `count` (num of calls), and `sum` (time taken in method)
+DECRYPTION_REQUEST_SUMMARY = Summary(
+    "decryption_request_processing",
+    "Summary of decryption request processing",
+    registry=REGISTRY,
+)

 HERE = BASE_DIR = Path(__file__).parent
 TEMPLATES_DIR = HERE / "templates"
@@ -149,14 +170,18 @@ def _make_rest_app(this_node, log: Logger) -> Flask:
         return Response(json.dumps(payload), mimetype="application/json")

     @rest_app.route('/decrypt', methods=["POST"])
+    @DECRYPTION_REQUEST_SUMMARY.time()
     def threshold_decrypt():
         try:
-            encrypted_request = EncryptedThresholdDecryptionRequest.from_bytes(
-                request.data
-            )
-            encrypted_response = this_node.handle_threshold_decryption_request(
-                encrypted_request
-            )
+            with DECRYPTION_REQUESTS_FAILURES.count_exceptions():
+                encrypted_request = EncryptedThresholdDecryptionRequest.from_bytes(
+                    request.data
+                )
+                encrypted_response = this_node.handle_threshold_decryption_request(
+                    encrypted_request
+                )
+
+            DECRYPTION_REQUESTS_SUCCESSES.inc()
             response = Response(
                 response=bytes(encrypted_response),
                 status=HTTPStatus.OK,
@@ -274,9 +299,20 @@ def _make_rest_app(this_node, log: Logger) -> Flask:

     @rest_app.route("/ping", methods=['GET'])
     def ping():
-        """Asks this node: What is my IP address?"""
-        requester_ip_address = request.remote_addr
-        return Response(requester_ip_address, status=HTTPStatus.OK)
+        """Asks this node: What is my public IPv4 address?"""
+        try:
+            ipv4 = get_global_source_ipv4(request=request)
+        except AddressValueError as e:
+            return Response(
+                response=str(e),
+                status=HTTPStatus.BAD_REQUEST,
+            )
+        if not ipv4:
+            return Response(
+                response="No public IPv4 address detected.",
+                status=HTTPStatus.BAD_GATEWAY,
+            )
+        return Response(response=ipv4, status=HTTPStatus.OK)

     @rest_app.route('/status/', methods=['GET'])
     def status():
@@ -69,7 +69,7 @@ def character_span(character):
 <html>
 <head>
     <meta charset="UTF-8">
-    <link rel="icon" type="image/x-icon" href="https://www.nucypher.com/favicon-32x32.png"/>
+    <link rel="icon" type="image/x-icon" href="https://threshold.network/favicon-32x32.png"/>
     <link rel="stylesheet" type="text/css" href="https://fonts.googleapis.com/css?family=Open+Sans" />
 </head>
 <style type="text/css">
@@ -183,6 +183,10 @@ def character_span(character):
         %endfor
         </td>
     </tr>
+    <tr>
+        <td><i>Latest Scanned Block:</i></td>
+        <td>${ status_info.block_height }</td>
+    </tr>
 </table>

 %if status_info.known_nodes is not None:
@@ -47,7 +47,6 @@ from nucypher.utilities import logging

 _CONDITION_CHAINS = {
     1: "ethereum/mainnet",
-    5: "ethereum/goerli",
     11155111: "ethereum/sepolia",
     137: "polygon/mainnet",
     80001: "polygon/mumbai",
@@ -114,6 +114,10 @@ class Policy:

         self.publisher.block_until_number_of_known_nodes_is(self.shares, learn_on_this_thread=True, eager=True)
         reservoir = self._make_reservoir(handpicked_addresses)
+        if len(reservoir) < self.shares:
+            raise self.NotEnoughUrsulas(
+                f"There aren't enough nodes, {len(reservoir)} to sample {self.shares}"
+            )
         value_factory = PrefetchStrategy(reservoir, self.shares)

         def worker(address) -> "characters.lawful.Ursula":
@@ -49,6 +49,9 @@ class MergedReservoir:
         else:
             return None

+    def __len__(self):
+        return len(self.values) + len(self.reservoir)
+

 class PrefetchStrategy:
     """
@@ -58,10 +61,18 @@ class PrefetchStrategy:
     """

     def __init__(self, reservoir: MergedReservoir, need_successes: int):
+        if len(reservoir) < need_successes:
+            raise ValueError(
+                f"Insufficient staking providers ({len(reservoir.values)}) to draw {need_successes}"
+            )
         self.reservoir = reservoir
         self.need_successes = need_successes

     def __call__(self, successes: int) -> Optional[List[ChecksumAddress]]:
+        if successes > self.need_successes:
+            # TODO: we could raise here, but we were returning `None` before
+            return None
+
         batch = []
         for i in range(self.need_successes - successes):
             value = self.reservoir()
@@ -354,7 +354,7 @@ class BatchValueFactory:

         if batch_size is not None and batch_size <= 0:
             raise ValueError(f"Invalid batch size specified ({batch_size})")
-        self.batch_size = batch_size if batch_size else required_successes
+        self.batch_size = batch_size or required_successes

     def __call__(self, successes) -> Optional[List[Any]]:
         if successes >= self.required_successes:
@@ -1,9 +1,10 @@
 import random
 from http import HTTPStatus
-from ipaddress import ip_address
+from ipaddress import AddressValueError, IPv4Address, IPv6Address, ip_address
 from typing import Optional, Union

 import requests
+from flask import Request
 from requests.exceptions import HTTPError, RequestException

 from nucypher.acumen.perception import FleetSensor
@@ -178,6 +179,9 @@ def determine_external_ip_address(
     3. A centralized IP address service

     If the IP address cannot be determined for any reason UnknownIPAddress is raised.
+
+    This function is intended to be used by nodes operators running `nucypher ursula init`
+    to assist determine their global IPv4 address for configuration purposes only.
     """
     rest_host = None
@@ -200,4 +204,58 @@ def determine_external_ip_address(
     # complete failure!
     if not rest_host:
         raise UnknownIPAddress('External IP address detection failed')
     return rest_host
+
+
+def _resolve_ipv4(ip: str) -> Optional[IPv4Address]:
+    """
+    Resolve an IPv6 address to IPv4 if required and possible.
+    Returns None if there is no valid IPv4 address available.
+    """
+    try:
+        ip = ip_address(ip.strip())
+    except (AddressValueError, ValueError):
+        raise AddressValueError(f"'{ip}' does not appear to be an IPv4 or IPv6 address")
+    if isinstance(ip, IPv6Address):
+        return ip.ipv4_mapped  # returns IPv4Address or None
+    elif isinstance(ip, IPv4Address):
+        return ip
+
+
+def _get_header_ips(header: str, request: Request) -> Optional[str]:
+    """Yields source IP addresses in sequential order from a given request and header name."""
+    if header in request.headers:
+        for ip in request.headers[header].split(","):
+            yield ip
+
+
+def _ip_sources(request: Request) -> str:
+    """Yields all possible sources of IP addresses in a given request's headers."""
+    for header in ["X-Forwarded-For", "X-Real-IP"]:
+        yield from _get_header_ips(header, request)
+    yield request.remote_addr
+
+
+def get_global_source_ipv4(request: Request) -> Optional[str]:
+    """
+    Resolve the first global IPv4 address in a request's headers.
+
+    If the request is forwarded from a proxy, the first global IP address in the chain is returned.
+    'X-Forwarded-For' (XFF) https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-For#selecting_an_ip_address
+
+    If XFF is not present in the request headers, this method also checks for 'X-Real-IP',
+    a popular non-standard header conventionally configured in some proxies like nginx.
+
+    Finally, if neither XFF nor X-Real-IP are present, the request is assumed to be
+    direct and the remote address is returned.
+
+    In all cases, tests that the IPv4 range is global. RFC 1918 privately designated
+    address ranges must not be returned.
+    https://www.ietf.org/rfc/rfc1918.txt
+    """
+    for ip_str in _ip_sources(request=request):
+        ipv4_address = _resolve_ipv4(ip_str)
+        if ipv4_address and ipv4_address.is_global:
+            return str(ipv4_address)
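The helpers above lean on standard-library behavior for IPv4-mapped IPv6 addresses and global-range checks; a quick illustration (stdlib only, not from the commit):

from ipaddress import ip_address

assert str(ip_address("::ffff:203.0.113.7").ipv4_mapped) == "203.0.113.7"
assert ip_address("8.8.8.8").is_global          # a public address passes
assert not ip_address("10.0.0.1").is_global     # RFC 1918 ranges are never returned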
@@ -1,23 +1,16 @@
-from nucypher.blockchain.eth.domains import TACoDomain
-from nucypher.blockchain.eth.events import ContractEventsThrottler
-
-try:
-    from prometheus_client import Enum, Gauge, Info
-    from prometheus_client.registry import CollectorRegistry
-except ImportError:
-    raise ImportError('"prometheus_client" must be installed - run "pip install nucypher[ursula]" and try again.')
-
 from abc import ABC, abstractmethod
-from typing import Dict, Type
+from typing import Dict

 from eth_typing.evm import ChecksumAddress
+from prometheus_client import Gauge, Info
+from prometheus_client.registry import CollectorRegistry
 from web3 import Web3

 import nucypher
 from nucypher.blockchain.eth import actors
 from nucypher.blockchain.eth.agents import (
     ContractAgency,
     EthereumContractAgent,
     TACoApplicationAgent,
     TACoChildApplicationAgent,
 )
 from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
 from nucypher.blockchain.eth.registry import ContractRegistry
@ -26,6 +19,7 @@ from nucypher.characters import lawful
|
|||
|
||||
class MetricsCollector(ABC):
|
||||
"""Metrics Collector Interface."""
|
||||
|
||||
class CollectorError(Exception):
|
||||
pass
|
||||
|
||||
|
@ -33,7 +27,7 @@ class MetricsCollector(ABC):
|
|||
"""Raised when the Collector was not initialized before being used."""
|
||||
|
||||
@abstractmethod
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
def initialize(self, registry: CollectorRegistry) -> None:
|
||||
"""Initialize metrics collector."""
|
||||
return NotImplemented
|
||||
|
||||
|
@ -50,6 +44,7 @@ class BaseMetricsCollector(MetricsCollector):
|
|||
Subclasses should initialize the self.metrics member in their initialize() method since the
|
||||
self.metrics member is used to determine whether initialize() was called; if it wasn't, an exception is raised.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.metrics: Dict = None
|
||||
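For illustration, a hypothetical subclass under the new prefix-less initialize(registry) signature could look like this sketch (UptimeMetricsCollector and its gauge name are invented for the example; BaseMetricsCollector is the class above):

import time

from prometheus_client import Gauge
from prometheus_client.registry import CollectorRegistry


class UptimeMetricsCollector(BaseMetricsCollector):
    """Hypothetical collector tracking process uptime."""

    def __init__(self, started_at: float):
        super().__init__()
        self.started_at = started_at

    def initialize(self, registry: CollectorRegistry) -> None:
        # Populating self.metrics marks this collector as initialized.
        self.metrics = {
            "uptime": Gauge("uptime_seconds", "Process uptime", registry=registry),
        }

    def _collect_internal(self) -> None:
        self.metrics["uptime"].set(time.time() - self.started_at)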
|
||||
|
@ -77,33 +72,26 @@ class UrsulaInfoMetricsCollector(BaseMetricsCollector):
|
|||
super().__init__()
|
||||
self.ursula = ursula
|
||||
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
def initialize(self, registry: CollectorRegistry) -> None:
|
||||
self.metrics = {
|
||||
"host_info": Info(
|
||||
f"{metrics_prefix}_host", "Ursula info", registry=registry
|
||||
),
|
||||
"learning_status": Enum(
|
||||
f"{metrics_prefix}_node_discovery",
|
||||
"Learning loop status",
|
||||
states=["starting", "running", "stopped"],
|
||||
"client_info": Info("client", "TACo node client info", registry=registry),
|
||||
"node_discovery_running": Gauge(
|
||||
"node_discovery_running",
|
||||
"Node discovery loop status",
|
||||
registry=registry,
|
||||
),
|
||||
"known_nodes_gauge": Gauge(
|
||||
f"{metrics_prefix}_known_nodes",
|
||||
"known_nodes": Gauge(
|
||||
"known_nodes",
|
||||
"Number of currently known nodes",
|
||||
registry=registry,
|
||||
),
|
||||
"reencryption_requests_gauge": Gauge(
|
||||
f"{metrics_prefix}_reencryption_requests",
|
||||
"Number of accepted work orders",
|
||||
registry=registry,
|
||||
),
|
||||
}
|
||||
|
||||
def _collect_internal(self) -> None:
|
||||
# info
|
||||
payload = {
|
||||
"app_version": nucypher.__version__,
|
||||
"app": "TACo",
|
||||
"version": nucypher.__version__,
|
||||
"host": str(self.ursula.rest_interface),
|
||||
"domain": str(self.ursula.domain),
|
||||
"nickname": str(self.ursula.nickname),
|
||||
|
@ -112,36 +100,54 @@ class UrsulaInfoMetricsCollector(BaseMetricsCollector):
|
|||
"operator_address": self.ursula.operator_address,
|
||||
}
|
||||
|
||||
self.metrics["learning_status"].state('running' if self.ursula._learning_task.running else 'stopped')
|
||||
self.metrics["known_nodes_gauge"].set(len(self.ursula.known_nodes))
|
||||
self.metrics["host_info"].info(payload)
|
||||
self.metrics["client_info"].info(payload)
|
||||
self.metrics["node_discovery_running"].set(self.ursula._learning_task.running)
|
||||
self.metrics["known_nodes"].set(len(self.ursula.known_nodes))
|
||||
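Since the gauges are now unprefixed, they can be read back by their plain names; a sketch, assuming a configured ursula instance:

from prometheus_client import CollectorRegistry

registry = CollectorRegistry()
collector = UrsulaInfoMetricsCollector(ursula=ursula)  # `ursula` assumed configured
collector.initialize(registry=registry)
collector.collect()

print(registry.get_sample_value("known_nodes"))             # e.g. 30.0
print(registry.get_sample_value("node_discovery_running"))  # 1.0 while learning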
|
||||
|
||||
class BlockchainMetricsCollector(BaseMetricsCollector):
|
||||
"""Collector for Blockchain specific metrics."""
|
||||
|
||||
def __init__(self, eth_endpoint: str):
|
||||
def __init__(self, root_net_endpoint: str, child_net_endpoint: str):
|
||||
super().__init__()
|
||||
self.eth_endpoint = eth_endpoint
|
||||
self.root_net_endpoint = root_net_endpoint
|
||||
self.child_net_endpoint = child_net_endpoint
|
||||
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
def initialize(self, registry: CollectorRegistry) -> None:
|
||||
self.metrics = {
|
||||
"eth_chain_id": Gauge(
|
||||
f"{metrics_prefix}_eth_chain_id", "Ethereum Chain ID", registry=registry
|
||||
"root_net_chain_id": Gauge(
|
||||
"root_net_chain_id", "Root network Chain ID", registry=registry
|
||||
),
|
||||
"eth_current_block_number": Gauge(
|
||||
f"{metrics_prefix}_eth_block_number",
|
||||
"Current Ethereum block",
|
||||
"root_net_current_block_number": Gauge(
|
||||
"root_net_current_block_number",
|
||||
"Root network current block",
|
||||
registry=registry,
|
||||
),
|
||||
"child_net_chain_id": Gauge(
|
||||
"child_net_chain_id", "Child network Chain ID", registry=registry
|
||||
),
|
||||
"child_net_current_block_number": Gauge(
|
||||
"child_net_current_block_number",
|
||||
"Child network current block",
|
||||
registry=registry,
|
||||
),
|
||||
}
|
||||
|
||||
def _collect_internal(self) -> None:
|
||||
blockchain = BlockchainInterfaceFactory.get_or_create_interface(
|
||||
endpoint=self.eth_endpoint
|
||||
root_blockchain = BlockchainInterfaceFactory.get_or_create_interface(
|
||||
endpoint=self.root_net_endpoint
|
||||
)
|
||||
child_blockchain = BlockchainInterfaceFactory.get_or_create_interface(
|
||||
endpoint=self.child_net_endpoint
|
||||
)
|
||||
self.metrics["root_net_chain_id"].set(root_blockchain.client.chain_id)
|
||||
self.metrics["root_net_current_block_number"].set(
|
||||
root_blockchain.client.block_number
|
||||
)
|
||||
self.metrics["child_net_chain_id"].set(child_blockchain.client.chain_id)
|
||||
self.metrics["child_net_current_block_number"].set(
|
||||
child_blockchain.client.block_number
|
||||
)
|
||||
self.metrics["eth_chain_id"].set(blockchain.client.chain_id)
|
||||
self.metrics["eth_current_block_number"].set(blockchain.client.block_number)
|
||||
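Construction now takes separate root and child endpoints; a sketch with placeholder URLs:

from prometheus_client import CollectorRegistry

collector = BlockchainMetricsCollector(
    root_net_endpoint="https://mainnet.infura.io/v3/<key>",           # placeholder
    child_net_endpoint="https://polygon-mainnet.infura.io/v3/<key>",  # placeholder
)
collector.initialize(registry=CollectorRegistry())
collector.collect()  # fills *_chain_id and *_current_block_number for both chains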
|
||||
|
||||
class StakingProviderMetricsCollector(BaseMetricsCollector):
|
||||
|
@ -158,21 +164,16 @@ class StakingProviderMetricsCollector(BaseMetricsCollector):
|
|||
self.contract_registry = contract_registry
|
||||
self.eth_endpoint = eth_endpoint
|
||||
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
def initialize(self, registry: CollectorRegistry) -> None:
|
||||
self.metrics = {
|
||||
"active_stake_gauge": Gauge(
|
||||
f"{metrics_prefix}_associated_active_stake",
|
||||
"Total amount of T staked (adapted NU/KEEP and liquid T)",
|
||||
"active_stake": Gauge(
|
||||
"active_stake",
|
||||
"Total amount of T staked",
|
||||
registry=registry,
|
||||
),
|
||||
"operator_confirmed_gauge": Gauge(
|
||||
f"{metrics_prefix}_operator_confirmed",
|
||||
"Operator already confirmed",
|
||||
registry=registry,
|
||||
),
|
||||
"operator_start_gauge": Gauge(
|
||||
f"{metrics_prefix}_operator_start_timestamp",
|
||||
"Operator start timestamp",
|
||||
"operator_bonded_timestamp": Gauge(
|
||||
"operator_bonded_timestamp",
|
||||
"Timestamp operator bonded to stake",
|
||||
registry=registry,
|
||||
),
|
||||
}
|
||||
|
@ -186,15 +187,12 @@ class StakingProviderMetricsCollector(BaseMetricsCollector):
|
|||
authorized = application_agent.get_authorized_stake(
|
||||
staking_provider=self.staking_provider_address
|
||||
)
|
||||
self.metrics["active_stake_gauge"].set(int(authorized))
|
||||
self.metrics["active_stake"].set(Web3.from_wei(authorized, "ether"))
|
||||
|
||||
staking_provider_info = application_agent.get_staking_provider_info(
|
||||
staking_provider=self.staking_provider_address
|
||||
)
|
||||
self.metrics["operator_confirmed_gauge"].set(
|
||||
staking_provider_info.operator_confirmed
|
||||
)
|
||||
self.metrics["operator_start_gauge"].set(
|
||||
self.metrics["operator_bonded_timestamp"].set(
|
||||
staking_provider_info.operator_start_timestamp
|
||||
)
|
||||
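The switch from int(authorized) to Web3.from_wei means the gauge now reports whole T rather than raw wei; a quick worked check:

from web3 import Web3

authorized_wei = 40_000 * 10**18  # a 40,000 T stake, denominated in wei
assert Web3.from_wei(authorized_wei, "ether") == 40_000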
|
||||
|
@ -204,92 +202,41 @@ class OperatorMetricsCollector(BaseMetricsCollector):
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
domain: TACoDomain,
|
||||
operator_address: ChecksumAddress,
|
||||
contract_registry: ContractRegistry,
|
||||
polygon_endpoint: str,
|
||||
):
|
||||
super().__init__()
|
||||
self.domain = domain
|
||||
self.operator_address = operator_address
|
||||
self.contract_registry = contract_registry
|
||||
self.polygon_endpoint = polygon_endpoint
|
||||
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
def initialize(self, registry: CollectorRegistry) -> None:
|
||||
self.metrics = {
|
||||
"operator_eth_balance_gauge": Gauge(
|
||||
f"{metrics_prefix}_operator_eth_balance",
|
||||
"Operator Ethereum balance",
|
||||
"operator_confirmed": Gauge(
|
||||
"operator_confirmed",
|
||||
"Operator already confirmed",
|
||||
registry=registry,
|
||||
),
|
||||
"operator_matic_balance": Gauge(
|
||||
"operator_matic_balance", "Operator MATIC balance", registry=registry
|
||||
),
|
||||
}
|
||||
|
||||
def _collect_internal(self) -> None:
|
||||
operator_token_actor = actors.NucypherTokenActor(
|
||||
child_application_agent = ContractAgency.get_agent(
|
||||
TACoChildApplicationAgent,
|
||||
registry=self.contract_registry,
|
||||
domain=self.domain,
|
||||
checksum_address=self.operator_address,
|
||||
blockchain_endpoint=self.polygon_endpoint,
|
||||
)
|
||||
self.metrics["operator_eth_balance_gauge"].set(
|
||||
float(operator_token_actor.eth_balance)
|
||||
self.metrics["operator_confirmed"].set(
|
||||
child_application_agent.is_operator_confirmed(
|
||||
operator_address=self.operator_address
|
||||
)
|
||||
)
|
||||
matic_balance = child_application_agent.blockchain.client.get_balance(
|
||||
self.operator_address
|
||||
)
|
||||
self.metrics["operator_matic_balance"].set(
|
||||
Web3.from_wei(matic_balance, "ether")
|
||||
)
|
||||
|
||||
|
||||
class EventMetricsCollector(BaseMetricsCollector):
|
||||
"""General collector for emitted events."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
event_name: str,
|
||||
event_args_config: Dict[str, tuple],
|
||||
argument_filters: Dict[str, str],
|
||||
contract_agent_class: Type[EthereumContractAgent],
|
||||
contract_registry: ContractRegistry,
|
||||
):
|
||||
super().__init__()
|
||||
self.event_name = event_name
|
||||
self.contract_agent_class = contract_agent_class
|
||||
self.contract_registry = contract_registry
|
||||
|
||||
contract_agent = ContractAgency.get_agent(self.contract_agent_class, registry=self.contract_registry)
|
||||
# this way we don't have to deal with 'latest' at all
|
||||
self.filter_current_from_block = contract_agent.blockchain.client.block_number
|
||||
self.filter_arguments = argument_filters
|
||||
self.event_args_config = event_args_config
|
||||
|
||||
def initialize(self, metrics_prefix: str, registry: CollectorRegistry) -> None:
|
||||
self.metrics = dict()
|
||||
for arg_name in self.event_args_config:
|
||||
metric_class, metric_name, metric_doc = self.event_args_config[arg_name]
|
||||
metric_key = self._get_arg_metric_key(arg_name)
|
||||
self.metrics[metric_key] = metric_class(metric_name, metric_doc, registry=registry)
|
||||
|
||||
def _collect_internal(self) -> None:
|
||||
contract_agent = ContractAgency.get_agent(self.contract_agent_class, registry=self.contract_registry)
|
||||
from_block = self.filter_current_from_block
|
||||
to_block = contract_agent.blockchain.client.block_number
|
||||
if from_block >= to_block:
|
||||
# we've already checked the latest block and are waiting for a new one
|
||||
# nothing to see here
|
||||
return
|
||||
|
||||
# update last block checked for the next round - from/to block range is inclusive
|
||||
# increment before potentially long-running execution to improve concurrency handling
|
||||
self.filter_current_from_block = to_block + 1
|
||||
|
||||
events_throttler = ContractEventsThrottler(agent=contract_agent,
|
||||
event_name=self.event_name,
|
||||
from_block=from_block,
|
||||
to_block=to_block,
|
||||
**self.filter_arguments)
|
||||
for event_record in events_throttler:
|
||||
self._event_occurred(event_record.raw_event)
|
||||
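The inclusive from/to bookkeeping above can be isolated into a small sketch; get_events is a hypothetical stand-in for the ContractEventsThrottler call:

class BlockRangeScanner:
    """Tracks the next unscanned block so no range is processed twice."""

    def __init__(self, start_block: int):
        self.next_from_block = start_block

    def scan(self, current_block: int, get_events) -> list:
        from_block = self.next_from_block
        if from_block >= current_block:
            return []  # latest block already covered; wait for a new one
        # Advance the cursor before the (potentially slow) fetch so a
        # concurrent tick cannot re-scan the same inclusive range.
        self.next_from_block = current_block + 1
        return get_events(from_block, current_block)


scanner = BlockRangeScanner(start_block=100)
events = scanner.scan(105, lambda lo, hi: list(range(lo, hi + 1)))
assert events == [100, 101, 102, 103, 104, 105]  # inclusive on both ends
assert scanner.next_from_block == 106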
|
||||
def _event_occurred(self, event) -> None:
|
||||
for arg_name in self.event_args_config:
|
||||
metric_key = self._get_arg_metric_key(arg_name)
|
||||
if arg_name == "block_number":
|
||||
self.metrics[metric_key].set(event["blockNumber"])
|
||||
continue
|
||||
self.metrics[metric_key].set(event['args'][arg_name])
|
||||
|
||||
def _get_arg_metric_key(self, arg_name: str):
|
||||
return f'{self.event_name}_{arg_name}'
|
||||
|
|
|
@ -1,19 +1,16 @@
|
|||
|
||||
from nucypher.exceptions import DevelopmentInstallationRequired
|
||||
|
||||
try:
|
||||
from prometheus_client.core import Timestamp
|
||||
from prometheus_client.registry import REGISTRY, CollectorRegistry
|
||||
from prometheus_client.utils import floatToGoString
|
||||
except ImportError:
|
||||
raise DevelopmentInstallationRequired(importable_name='prometheus_client')
|
||||
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from twisted.internet import reactor, task
|
||||
from prometheus_client import GC_COLLECTOR, PLATFORM_COLLECTOR, PROCESS_COLLECTOR
|
||||
from prometheus_client.core import Timestamp
|
||||
from prometheus_client.registry import REGISTRY, CollectorRegistry
|
||||
from prometheus_client.twisted import MetricsResource
|
||||
from prometheus_client.utils import floatToGoString
|
||||
from twisted.internet import reactor
|
||||
from twisted.web.resource import Resource
|
||||
from twisted.web.server import Site
|
||||
|
||||
from nucypher.blockchain.eth.trackers.prometheus import PrometheusMetricsTracker
|
||||
from nucypher.characters import lawful
|
||||
from nucypher.utilities.prometheus.collector import (
|
||||
BlockchainMetricsCollector,
|
||||
|
@ -26,20 +23,18 @@ from nucypher.utilities.prometheus.collector import (
|
|||
|
||||
class PrometheusMetricsConfig:
|
||||
"""Prometheus configuration."""
|
||||
def __init__(self,
|
||||
port: int,
|
||||
metrics_prefix: str,
|
||||
listen_address: str = '', # default: bind all local IPv4 interfaces
|
||||
collection_interval: int = 90, # every 1.5 minutes
|
||||
start_now: bool = False):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
port: int,
|
||||
listen_address: str = "", # default to localhost ip
|
||||
collection_interval: int = 90, # every 1.5 minutes
|
||||
start_now: bool = False,
|
||||
):
|
||||
if not port:
|
||||
raise ValueError('port must be provided')
|
||||
if not metrics_prefix:
|
||||
raise ValueError('metrics prefix must be provided')
|
||||
raise ValueError("port must be provided")
|
||||
|
||||
self.port = port
|
||||
self.metrics_prefix = metrics_prefix
|
||||
self.listen_address = listen_address
|
||||
self.collection_interval = collection_interval
|
||||
self.start_now = start_now
|
||||
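A usage sketch of the slimmed-down configuration, now that the metrics_prefix parameter is gone:

from nucypher.utilities.prometheus.metrics import PrometheusMetricsConfig

config = PrometheusMetricsConfig(
    port=9101,               # required; a falsy port raises ValueError
    listen_address="",       # default: bind all local IPv4 interfaces
    collection_interval=90,  # seconds between collection passes
    start_now=False,         # defer the first pass by one interval
)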
|
@ -56,6 +51,7 @@ class JSONMetricsResource(Resource):
|
|||
"""
|
||||
Twisted ``Resource`` that serves prometheus in JSON.
|
||||
"""
|
||||
|
||||
isLeaf = True
|
||||
|
||||
def __init__(self, registry=REGISTRY):
|
||||
|
@ -63,14 +59,17 @@ class JSONMetricsResource(Resource):
|
|||
self.registry = registry
|
||||
|
||||
def render_GET(self, request):
|
||||
request.setHeader(b'Content-Type', "text/json")
|
||||
request.setHeader(b"Content-Type", "text/json")
|
||||
return self.generate_latest_json()
|
||||
|
||||
@staticmethod
|
||||
def get_exemplar(sample, metric):
|
||||
if not sample.exemplar:
|
||||
return {}
|
||||
elif metric.type not in ('histogram', 'gaugehistogram') or not sample.name.endswith('_bucket'):
|
||||
elif metric.type not in (
|
||||
"histogram",
|
||||
"gaugehistogram",
|
||||
) or not sample.name.endswith("_bucket"):
|
||||
raise ValueError(
|
||||
"Metric {} has exemplars, but is not a "
|
||||
"histogram bucket".format(metric.name)
|
||||
|
@ -78,7 +77,7 @@ class JSONMetricsResource(Resource):
|
|||
return {
|
||||
"labels": sample.exemplar.labels,
|
||||
"value": floatToGoString(sample.exemplar.value),
|
||||
"timestamp": sample.exemplar.timestamp
|
||||
"timestamp": sample.exemplar.timestamp,
|
||||
}
|
||||
|
||||
def get_sample(self, sample, metric):
|
||||
|
@ -87,14 +86,14 @@ class JSONMetricsResource(Resource):
|
|||
"labels": sample.labels,
|
||||
"value": floatToGoString(sample.value),
|
||||
"timestamp": sample.timestamp,
|
||||
"exemplar": self.get_exemplar(sample, metric)
|
||||
"exemplar": self.get_exemplar(sample, metric),
|
||||
}
|
||||
|
||||
def get_metric(self, metric):
|
||||
return {
|
||||
"samples": [self.get_sample(sample, metric) for sample in metric.samples],
|
||||
"help": metric.documentation,
|
||||
"type": metric.type
|
||||
"type": metric.type,
|
||||
}
|
||||
|
||||
def generate_latest_json(self):
|
||||
|
@ -107,10 +106,10 @@ class JSONMetricsResource(Resource):
|
|||
try:
|
||||
output[metric.name] = self.get_metric(metric)
|
||||
except Exception as exception:
|
||||
exception.args = (exception.args or ('',)) + (metric,)
|
||||
exception.args = (exception.args or ("",)) + (metric,)
|
||||
raise
|
||||
|
||||
json_dump = json.dumps(output, cls=MetricsEncoder).encode('utf-8')
|
||||
json_dump = json.dumps(output, cls=MetricsEncoder).encode("utf-8")
|
||||
return json_dump
|
||||
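For reference, the JSON served at /json_metrics takes roughly this shape; the metric and its values are illustrative, the keys follow get_metric and get_sample above:

example_payload = {
    "known_nodes": {
        "samples": [
            {
                "labels": {},
                "value": "30.0",  # floatToGoString renders numbers as strings
                "timestamp": None,
                "exemplar": {},
            },
        ],
        "help": "Number of currently known nodes",
        "type": "gauge",
    },
}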
|
||||
|
||||
|
@ -123,58 +122,54 @@ def start_prometheus_exporter(
|
|||
ursula: "lawful.Ursula",
|
||||
prometheus_config: PrometheusMetricsConfig,
|
||||
registry: CollectorRegistry = REGISTRY,
|
||||
) -> None:
|
||||
) -> PrometheusMetricsTracker:
|
||||
"""Configure, collect, and serve prometheus metrics."""
|
||||
from prometheus_client.twisted import MetricsResource
|
||||
from twisted.web.resource import Resource
|
||||
from twisted.web.server import Site
|
||||
|
||||
# Disabling default collector metrics
|
||||
registry.unregister(GC_COLLECTOR)
|
||||
registry.unregister(PLATFORM_COLLECTOR)
|
||||
registry.unregister(PROCESS_COLLECTOR)
|
||||
|
||||
metrics_collectors = create_metrics_collectors(ursula)
|
||||
# initialize collectors
|
||||
for collector in metrics_collectors:
|
||||
collector.initialize(metrics_prefix=prometheus_config.metrics_prefix, registry=registry)
|
||||
collector.initialize(registry=registry)
|
||||
|
||||
# TODO: was never used
|
||||
# "requests_counter": Counter(f'{metrics_prefix}_http_failures', 'HTTP Failures', ['method', 'endpoint']),
|
||||
|
||||
# Scheduling
|
||||
metrics_task = task.LoopingCall(collect_prometheus_metrics,
|
||||
metrics_collectors=metrics_collectors)
|
||||
metrics_task.start(interval=prometheus_config.collection_interval,
|
||||
now=prometheus_config.start_now)
|
||||
metrics_tracker = PrometheusMetricsTracker(
|
||||
collectors=metrics_collectors, interval=prometheus_config.collection_interval
|
||||
)
|
||||
metrics_tracker.start(now=prometheus_config.start_now)
|
||||
|
||||
# WSGI Service
|
||||
root = Resource()
|
||||
root.putChild(b'metrics', MetricsResource())
|
||||
root.putChild(b'json_metrics', JSONMetricsResource())
|
||||
root.putChild(b"metrics", MetricsResource())
|
||||
root.putChild(b"json_metrics", JSONMetricsResource())
|
||||
factory = Site(root)
|
||||
reactor.listenTCP(prometheus_config.port, factory, interface=prometheus_config.listen_address)
|
||||
reactor.listenTCP(
|
||||
prometheus_config.port, factory, interface=prometheus_config.listen_address
|
||||
)
|
||||
|
||||
return metrics_tracker
|
||||
|
||||
|
||||
def create_metrics_collectors(ursula: "lawful.Ursula") -> List[MetricsCollector]:
|
||||
"""Create collectors used to obtain metrics."""
|
||||
collectors: List[MetricsCollector] = [UrsulaInfoMetricsCollector(ursula=ursula)]
|
||||
|
||||
# Blockchain prometheus
|
||||
# TODO possible include information about payment
|
||||
collectors.append(BlockchainMetricsCollector(eth_endpoint=ursula.eth_endpoint))
|
||||
|
||||
# Staking Provider prometheus
|
||||
collectors.append(
|
||||
collectors: List[MetricsCollector] = [
|
||||
UrsulaInfoMetricsCollector(ursula=ursula),
|
||||
BlockchainMetricsCollector(
|
||||
root_net_endpoint=ursula.eth_endpoint,
|
||||
child_net_endpoint=ursula.polygon_endpoint,
|
||||
),
|
||||
StakingProviderMetricsCollector(
|
||||
staking_provider_address=ursula.checksum_address,
|
||||
contract_registry=ursula.registry,
|
||||
eth_endpoint=ursula.eth_endpoint,
|
||||
)
|
||||
)
|
||||
|
||||
# Operator prometheus
|
||||
collectors.append(
|
||||
),
|
||||
OperatorMetricsCollector(
|
||||
domain=ursula.domain,
|
||||
operator_address=ursula.operator_address,
|
||||
contract_registry=ursula.registry,
|
||||
polygon_endpoint=ursula.polygon_endpoint,
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
return collectors
|
||||
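Putting the pieces together, a minimal sketch (assuming a configured ursula instance) that serves /metrics and /json_metrics and later stops collection, mirroring the test usage further below:

from prometheus_client.registry import REGISTRY

from nucypher.utilities.prometheus.metrics import (
    PrometheusMetricsConfig,
    start_prometheus_exporter,
)

config = PrometheusMetricsConfig(port=9101, start_now=True)
tracker = start_prometheus_exporter(ursula, config, REGISTRY)
# ... later, on shutdown:
tracker.stop()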
|
|
|
@ -12,7 +12,8 @@ class SimpleTask(ABC):
|
|||
INTERVAL = 60 # 60s default
|
||||
CLOCK = reactor
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, interval: float = INTERVAL):
|
||||
self.interval = interval
|
||||
self.log = Logger(self.__class__.__name__)
|
||||
self._task = LoopingCall(self.run)
|
||||
# self.__task.clock = self.CLOCK
|
||||
|
@ -25,7 +26,7 @@ class SimpleTask(ABC):
|
|||
def start(self, now: bool = False):
|
||||
"""Start task."""
|
||||
if not self.running:
|
||||
d = self._task.start(interval=self.INTERVAL, now=now)
|
||||
d = self._task.start(interval=self.interval, now=now)
|
||||
d.addErrback(self.handle_errors)
|
||||
# return d
|
||||
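With the interval now a constructor argument, a hypothetical subclass can override the 60s class default per instance; HeartbeatTask and its method bodies are invented for the example, assuming run() and handle_errors() are the abstract hooks:

class HeartbeatTask(SimpleTask):
    """Hypothetical task emitting a log line on each tick."""

    def run(self):
        self.log.info("heartbeat")

    def handle_errors(self, failure):
        self.log.warn(f"heartbeat task errored: {failure}")


task = HeartbeatTask(interval=15.0)  # a quarter of the class-wide INTERVAL
task.start(now=True)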
|
||||
|
|
|
@ -1,99 +1,91 @@
|
|||
-i https://pypi.python.org/simple
|
||||
aiohttp==3.8.2; python_version >= '3.6'
|
||||
aiohttp==3.9.1; python_version >= '3.8'
|
||||
aiosignal==1.3.1; python_version >= '3.7'
|
||||
appdirs==1.4.4
|
||||
async-timeout==4.0.3; python_version >= '3.7'
|
||||
attrs==23.1.0; python_version >= '3.7'
|
||||
attrs==23.2.0; python_version >= '3.7'
|
||||
autobahn==23.6.2; python_version >= '3.9'
|
||||
automat==22.10.0
|
||||
bitarray==2.8.2
|
||||
blinker==1.6.3; python_version >= '3.7'
|
||||
bitarray==2.9.2
|
||||
blinker==1.7.0; python_version >= '3.8'
|
||||
bytestring-splitter==2.4.1
|
||||
cached-property==1.5.2
|
||||
certifi==2023.7.22; python_version >= '3.6'
|
||||
certifi==2023.11.17; python_version >= '3.6'
|
||||
cffi==1.16.0; python_version >= '3.8'
|
||||
charset-normalizer==2.1.1; python_full_version >= '3.6.0'
|
||||
charset-normalizer==3.3.2; python_full_version >= '3.7.0'
|
||||
click==8.1.7; python_version >= '3.7'
|
||||
colorama==0.4.6; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'
|
||||
constant-sorrow==0.1.0a9; python_version >= '3'
|
||||
constantly==15.1.0
|
||||
cryptography==41.0.5; python_version >= '3.7'
|
||||
constantly==23.10.4; python_version >= '3.8'
|
||||
cryptography==41.0.7; python_version >= '3.7'
|
||||
cytoolz==0.12.2; implementation_name == 'cpython'
|
||||
dateparser==1.1.8; python_version >= '3.7'
|
||||
dateparser==1.2.0; python_version >= '3.7'
|
||||
eth-abi==4.2.1; python_version < '4' and python_full_version >= '3.7.2'
|
||||
eth-account==0.8.0; python_version >= '3.6' and python_version < '4'
|
||||
eth-bloom==2.0.0; python_version >= '3.7' and python_version < '4'
|
||||
eth-hash[pycryptodome]==0.5.2; python_version >= '3.7' and python_version < '4'
|
||||
eth-hash[pycryptodome]==0.6.0; python_version >= '3.8' and python_version < '4'
|
||||
eth-keyfile==0.6.1
|
||||
eth-keys==0.4.0
|
||||
eth-rlp==0.3.0; python_version >= '3.7' and python_version < '4'
|
||||
eth-tester==0.9.1b1; python_version < '4' and python_full_version >= '3.6.8'
|
||||
eth-typing==3.5.1; python_version < '4' and python_full_version >= '3.7.2'
|
||||
eth-utils==2.3.0; python_version >= '3.7' and python_version < '4'
|
||||
eth-typing==3.5.2; python_version < '4' and python_full_version >= '3.7.2'
|
||||
eth-utils==2.3.1; python_version >= '3.7' and python_version < '4'
|
||||
flask==3.0.0; python_version >= '3.8'
|
||||
frozenlist==1.4.0; python_version >= '3.8'
|
||||
frozenlist==1.4.1; python_version >= '3.8'
|
||||
hendrix==4.0.0
|
||||
hexbytes==0.3.1; python_version >= '3.7' and python_version < '4'
|
||||
humanize==4.8.0; python_version >= '3.8'
|
||||
humanize==4.9.0; python_version >= '3.8'
|
||||
hyperlink==21.0.0
|
||||
idna==3.4; python_version >= '3.5'
|
||||
idna==3.6; python_version >= '3.5'
|
||||
incremental==22.10.0
|
||||
itsdangerous==2.1.2; python_version >= '3.7'
|
||||
jinja2==3.1.2; python_version >= '3.7'
|
||||
jsonschema==4.19.1; python_version >= '3.8'
|
||||
jsonschema-specifications==2023.7.1; python_version >= '3.8'
|
||||
jinja2==3.1.3; python_version >= '3.7'
|
||||
jsonschema==4.20.0; python_version >= '3.8'
|
||||
jsonschema-specifications==2023.12.1; python_version >= '3.8'
|
||||
lru-dict==1.2.0
|
||||
mako==1.2.4; python_version >= '3.7'
|
||||
mako==1.3.0; python_version >= '3.8'
|
||||
markupsafe==2.1.3; python_version >= '3.7'
|
||||
marshmallow==3.20.1; python_version >= '3.8'
|
||||
marshmallow==3.20.2; python_version >= '3.8'
|
||||
maya==0.6.1
|
||||
mnemonic==0.20; python_version >= '3.5'
|
||||
mnemonic==0.21; python_full_version >= '3.8.1'
|
||||
msgpack==1.0.7; python_version >= '3.8'
|
||||
msgpack-python==0.5.6
|
||||
multidict==5.2.0; python_version >= '3.6'
|
||||
mypy-extensions==1.0.0; python_version >= '3.5'
|
||||
multidict==6.0.4; python_version >= '3.7'
|
||||
nucypher-core==0.13.0
|
||||
packaging==23.2; python_version >= '3.7'
|
||||
parsimonious==0.9.0
|
||||
pendulum==3.0.0b1; python_version >= '3.8'
|
||||
protobuf==4.25.0rc2; python_version >= '3.8'
|
||||
py-ecc==6.0.0; python_version >= '3.6' and python_version < '4'
|
||||
py-evm==0.7.0a4
|
||||
pyasn1==0.5.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
||||
pendulum==3.0.0; python_version >= '3.8'
|
||||
prometheus-client==0.19.0; python_version >= '3.8'
|
||||
protobuf==4.25.2; python_version >= '3.8'
|
||||
pyasn1==0.5.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
||||
pyasn1-modules==0.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
||||
pychalk==2.0.1
|
||||
pycparser==2.21
|
||||
pycryptodome==3.19.0
|
||||
pyethash==0.1.27
|
||||
pycryptodome==3.20.0
|
||||
pynacl==1.5.0; python_version >= '3.6'
|
||||
pyopenssl==23.2.0; python_version >= '3.6'
|
||||
pyopenssl==23.3.0; python_version >= '3.7'
|
||||
python-dateutil==2.8.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
||||
pytz==2023.3.post1
|
||||
pyunormalize==15.0.0; python_version >= '3.6'
|
||||
referencing==0.30.2; python_version >= '3.8'
|
||||
regex==2023.10.3; python_version >= '3.7'
|
||||
pyunormalize==15.1.0; python_version >= '3.6'
|
||||
referencing==0.32.1; python_version >= '3.8'
|
||||
regex==2023.12.25; python_version >= '3.7'
|
||||
requests==2.31.0; python_version >= '3.7'
|
||||
rlp==3.0.0
|
||||
rpds-py==0.10.6; python_version >= '3.8'
|
||||
rpds-py==0.16.2; python_version >= '3.8'
|
||||
semantic-version==2.10.0; python_version >= '2.7'
|
||||
service-identity==23.1.0; python_version >= '3.8'
|
||||
setuptools==68.2.2; python_version >= '3.8'
|
||||
setuptools==69.0.3; python_version >= '3.8'
|
||||
six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
||||
snaptime==0.2.4
|
||||
sortedcontainers==2.4.0
|
||||
tabulate==0.9.0; python_version >= '3.7'
|
||||
time-machine==2.13.0; implementation_name != 'pypy'
|
||||
toolz==0.12.0; python_version >= '3.5'
|
||||
trie==2.1.1; python_version >= '3.7' and python_version < '4'
|
||||
twisted==23.8.0; python_full_version >= '3.7.1'
|
||||
twisted==23.10.0; python_full_version >= '3.8.0'
|
||||
txaio==23.1.1; python_version >= '3.7'
|
||||
typing-extensions==4.8.0; python_version >= '3.8'
|
||||
tzdata==2023.3; python_version >= '2'
|
||||
typing-extensions==4.9.0; python_version >= '3.8'
|
||||
tzdata==2023.4; python_version >= '2'
|
||||
tzlocal==5.2; python_version >= '3.8'
|
||||
urllib3==2.0.7; python_version >= '3.7'
|
||||
urllib3==1.26.18; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
||||
watchdog==3.0.0; python_version >= '3.7'
|
||||
web3==6.11.1; python_full_version >= '3.7.2'
|
||||
web3==6.14.0; python_full_version >= '3.7.2'
|
||||
websockets==12.0; python_version >= '3.8'
|
||||
werkzeug==3.0.1; python_version >= '3.8'
|
||||
yarl==1.9.2; python_version >= '3.7'
|
||||
yarl==1.9.4; python_version >= '3.7'
|
||||
zope-interface==6.1; python_version >= '3.7'
|
||||
|
|
|
@ -25,7 +25,6 @@ echo "Removing existing requirement files"
|
|||
pipenv --rm
|
||||
rm -f $PREFIX.txt
|
||||
rm -f dev-$PREFIX.txt
|
||||
rm -f docs-$PREFIX.txt
|
||||
|
||||
echo "Removing pip cache"
|
||||
pip cache purge
|
||||
|
@ -34,11 +33,11 @@ pip cache purge
|
|||
set -e
|
||||
|
||||
echo "Building Development Requirements"
|
||||
pipenv --python 3.11 lock --clear --pre --dev-only
|
||||
pipenv --python 3.12 lock --clear --pre --dev-only
|
||||
pipenv requirements --dev-only > dev-$PREFIX.txt
|
||||
|
||||
echo "Building Standard Requirements"
|
||||
pipenv --python 3.11 lock --clear --pre
|
||||
pipenv --python 3.12 lock --clear --pre
|
||||
pipenv requirements > $PREFIX.txt
|
||||
|
||||
echo "OK!"
|
||||
|
|
setup.py
|
@ -47,6 +47,7 @@ PYPI_CLASSIFIERS = [
|
|||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Security",
|
||||
]
|
||||
|
||||
|
@ -131,7 +132,7 @@ DEPLOY_REQUIRES = [
|
|||
'wheel'
|
||||
]
|
||||
|
||||
URSULA_REQUIRES = ['prometheus_client', 'sentry-sdk'] # TODO: Consider renaming to 'monitor', etc.
|
||||
URSULA_REQUIRES = ["sentry-sdk"]
|
||||
|
||||
EXTRAS = {
|
||||
|
||||
|
|
|
@ -4,6 +4,7 @@ import random
|
|||
import pytest
|
||||
import pytest_twisted
|
||||
from hexbytes import HexBytes
|
||||
from prometheus_client import REGISTRY
|
||||
from twisted.internet.threads import deferToThread
|
||||
|
||||
from nucypher.blockchain.eth.agents import ContractAgency, SubscriptionManagerAgent
|
||||
|
@ -196,6 +197,11 @@ def test_ursula_ritualist(
|
|||
for ursula in cohort:
|
||||
assert ursula.dkg_storage.get_transcript(RITUAL_ID) is not None
|
||||
|
||||
last_scanned_block = REGISTRY.get_sample_value(
|
||||
"ritual_events_last_scanned_block_number"
|
||||
)
|
||||
assert last_scanned_block > 0
|
||||
|
||||
def test_encrypt(_):
|
||||
"""Encrypts a message and returns the ciphertext and conditions"""
|
||||
print("==================== DKG ENCRYPTION ====================")
|
||||
|
@ -228,6 +234,14 @@ def test_ursula_ritualist(
|
|||
_ = bob.threshold_decrypt(
|
||||
threshold_message_kit=threshold_message_kit,
|
||||
)
|
||||
|
||||
# check prometheus metric for decryption requests
|
||||
# since all ursulas run on the same machine, the value is aggregated rather than per-ursula
|
||||
num_failures = REGISTRY.get_sample_value(
|
||||
"threshold_decryption_num_failures_total"
|
||||
)
|
||||
assert len(cohort) == int(num_failures) # each ursula in cohort had a failure
|
||||
|
||||
print("========= UNAUTHORIZED DECRYPTION UNSUCCESSFUL =========")
|
||||
|
||||
return threshold_message_kit
|
||||
|
@ -248,6 +262,16 @@ def test_ursula_ritualist(
|
|||
threshold_message_kit=threshold_message_kit,
|
||||
)
|
||||
assert bytes(cleartext) == PLAINTEXT.encode()
|
||||
|
||||
# check prometheus metric for decryption requests
|
||||
# since all ursulas run on the same machine, the value is aggregated rather than per-ursula
|
||||
num_successes = REGISTRY.get_sample_value(
|
||||
"threshold_decryption_num_successes_total"
|
||||
)
|
||||
|
||||
ritual = coordinator_agent.get_ritual(RITUAL_ID)
|
||||
# at least a threshold of ursulas were successful (concurrency)
|
||||
assert int(num_successes) >= ritual.threshold
|
||||
print("==================== DECRYPTION SUCCESSFUL ====================")
|
||||
|
||||
def error_handler(e):
|
||||
|
@ -271,3 +295,18 @@ def test_ursula_ritualist(
|
|||
d.addCallback(callback)
|
||||
d.addErrback(error_handler)
|
||||
yield d
|
||||
|
||||
# check prometheus metric for decryption requests
|
||||
# since all ursulas run on the same machine, the value is aggregated rather than per-ursula
|
||||
num_decryption_failures = REGISTRY.get_sample_value(
|
||||
"threshold_decryption_num_failures_total"
|
||||
)
|
||||
num_decryption_successes = REGISTRY.get_sample_value(
|
||||
"threshold_decryption_num_successes_total"
|
||||
)
|
||||
num_decryption_requests = REGISTRY.get_sample_value(
|
||||
"decryption_request_processing_count"
|
||||
)
|
||||
assert num_decryption_requests == (
|
||||
num_decryption_successes + num_decryption_failures
|
||||
)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import datetime
|
||||
|
||||
import maya
|
||||
import pytest
|
||||
from eth_account._utils.signing import to_standard_signature_bytes
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
from prometheus_client import REGISTRY
|
||||
from web3 import Web3
|
||||
|
||||
from nucypher.utilities.prometheus.metrics import (
|
||||
PrometheusMetricsConfig,
|
||||
start_prometheus_exporter,
|
||||
)
|
||||
from tests.utils.ursula import select_test_port
|
||||
|
||||
|
||||
def test_start_prometheus_exporter(ursulas, testerchain):
|
||||
ursula = ursulas[0]
|
||||
task = None
|
||||
try:
|
||||
port = select_test_port()
|
||||
config = PrometheusMetricsConfig(
|
||||
port=port, start_now=True, collection_interval=5
|
||||
)
|
||||
task = start_prometheus_exporter(ursula, config, REGISTRY)
|
||||
REGISTRY.collect()
|
||||
|
||||
assert bool(REGISTRY.get_sample_value("operator_confirmed"))
|
||||
|
||||
authorized_stake = ursula.application_agent.get_authorized_stake(
|
||||
staking_provider=ursula.checksum_address
|
||||
)
|
||||
assert REGISTRY.get_sample_value("active_stake") == float(
|
||||
Web3.from_wei(authorized_stake, "ether")
|
||||
)
|
||||
assert (
|
||||
REGISTRY.get_sample_value("root_net_chain_id")
|
||||
== ursula.application_agent.blockchain.client.chain_id
|
||||
)
|
||||
assert (
|
||||
REGISTRY.get_sample_value("child_net_chain_id")
|
||||
== ursula.child_application_agent.blockchain.client.chain_id
|
||||
)
|
||||
assert REGISTRY.get_sample_value("known_nodes") == len(ursula.known_nodes)
|
||||
assert (
|
||||
bool(REGISTRY.get_sample_value("node_discovery_running"))
|
||||
== ursula._learning_task.running
|
||||
)
|
||||
finally:
|
||||
task.stop()
|
|
@ -35,31 +35,6 @@ def test_missing_configuration_file(_default_filepath_mock, click_runner):
|
|||
command=configuration_type) in result.output
|
||||
|
||||
|
||||
@pt.inlineCallbacks
|
||||
def test_ursula_run_with_prometheus_but_no_metrics_port(click_runner):
|
||||
args = (
|
||||
"ursula",
|
||||
"run", # Stat Ursula Command
|
||||
"--debug", # Display log output; Do not attach console
|
||||
"--dev", # Run in development mode (local ephemeral node)
|
||||
"--dry-run", # Disable twisted reactor in subprocess
|
||||
"--lonely", # Do not load seednodes
|
||||
"--prometheus", # Specify collection of prometheus metrics
|
||||
"--eth-endpoint",
|
||||
TEST_ETH_PROVIDER_URI,
|
||||
"--polygon-endpoint",
|
||||
TEST_POLYGON_PROVIDER_URI,
|
||||
)
|
||||
|
||||
result = yield threads.deferToThread(
|
||||
click_runner.invoke, nucypher_cli, args, catch_exceptions=False
|
||||
)
|
||||
|
||||
assert result.exit_code != 0
|
||||
expected_error = "Error: --metrics-port is required when using --prometheus"
|
||||
assert expected_error in result.output
|
||||
|
||||
|
||||
@pt.inlineCallbacks
|
||||
def test_run_lone_default_development_ursula(click_runner, ursulas, testerchain):
|
||||
deploy_port = select_test_port()
|
||||
|
|
|
@ -263,7 +263,7 @@ def test_rpc_condition_evaluation_no_connection_to_chain(
|
|||
# condition providers for other unrelated chains
|
||||
providers = {
|
||||
1: mock.Mock(), # mainnet
|
||||
5: mock.Mock(), # Goerli
|
||||
11155111: mock.Mock(), # Sepolia
|
||||
}
|
||||
|
||||
with pytest.raises(NoConnectionToChain):
|
||||
|
|
|
@ -7,6 +7,7 @@ from datetime import timedelta
|
|||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import Tuple
|
||||
from unittest.mock import PropertyMock
|
||||
|
||||
import maya
|
||||
import pytest
|
||||
|
@ -765,3 +766,14 @@ def dkg_public_key(dkg_public_key_data) -> DkgPublicKey:
|
|||
def aggregated_transcript(dkg_public_key_data) -> AggregatedTranscript:
|
||||
aggregated_transcript, _ = dkg_public_key_data
|
||||
return aggregated_transcript
|
||||
|
||||
|
||||
#
|
||||
# DKG Ritual Aggregation
|
||||
#
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def mock_operator_aggregation_delay(module_mocker):
|
||||
module_mocker.patch(
|
||||
"nucypher.blockchain.eth.actors.Operator.AGGREGATION_SUBMISSION_MAX_DELAY",
|
||||
PropertyMock(return_value=1),
|
||||
)
|
||||
|
|
|
@ -12,6 +12,7 @@ from web3.datastructures import AttributeDict
|
|||
from nucypher.blockchain.eth.agents import CoordinatorAgent
|
||||
from nucypher.blockchain.eth.signers.software import Web3Signer
|
||||
from nucypher.characters.lawful import Enrico, Ursula
|
||||
from nucypher.crypto.powers import RitualisticPower
|
||||
from nucypher.policy.conditions.lingo import ConditionLingo, ConditionType
|
||||
from tests.constants import TESTERCHAIN_CHAIN_ID
|
||||
from tests.mock.coordinator import MockCoordinatorAgent
|
||||
|
@ -68,6 +69,9 @@ def cohort(ursulas, mock_coordinator_agent):
|
|||
mock_coordinator_agent._add_operator_to_staking_provider_mapping(
|
||||
{u.operator_address: u.checksum_address}
|
||||
)
|
||||
mock_coordinator_agent.set_provider_public_key(
|
||||
u.public_keys(RitualisticPower), u.transacting_power
|
||||
)
|
||||
u.coordinator_agent = mock_coordinator_agent
|
||||
u.ritual_tracker.coordinator_agent = mock_coordinator_agent
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ def _policy_info_kwargs(enacted_policy):
|
|||
alice_verifying_key=enacted_policy.publisher_verifying_key,
|
||||
)
|
||||
|
||||
|
||||
def test_retrieval_kit(enacted_policy, ursulas):
|
||||
messages, message_kits = make_message_kits(enacted_policy.public_key)
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ def test_node_deployer(ursulas):
|
|||
assert deployer.application == ursula.rest_app
|
||||
|
||||
|
||||
def test_goerli_and_mumbai_as_conditions_providers(lonely_ursula_maker):
|
||||
def test_no_corresponding_condition_blockchain_provider(lonely_ursula_maker):
|
||||
INVALID_CHAIN_ID = 66775827584859395569954838 # If we eventually support a chain with this ID, heaven help us.
|
||||
|
||||
with pytest.raises(Ursula.ActorError):
|
||||
|
|
|
@ -1,8 +1,3 @@
|
|||
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
import pytest
|
||||
|
||||
|
@ -62,6 +57,7 @@ def test_auto_select_config_file(
|
|||
config_file=str(config_path)) in captured.out
|
||||
|
||||
|
||||
@pytest.mark.skip("planned for removal in PR #3382")
|
||||
def test_interactive_select_config_file(
|
||||
test_emitter,
|
||||
capsys,
|
||||
|
@ -97,24 +93,8 @@ def test_interactive_select_config_file(
|
|||
assert config_path.exists()
|
||||
|
||||
mock_stdin.line(str(user_input))
|
||||
result = select_config_file(emitter=test_emitter,
|
||||
config_class=config_class,
|
||||
config_root=temp_dir_path)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
for filename, account in accounts:
|
||||
assert account.address in captured.out
|
||||
assert mock_stdin.empty()
|
||||
|
||||
table_data = captured.out.split('\n')
|
||||
table_addresses = [row.split()[1] for row in table_data[6:-2]] # escape extra lines printed before table
|
||||
|
||||
# TODO: Finish this test
|
||||
# for index, (filename, account) in enumerate(accounts):
|
||||
# assert False
|
||||
#
|
||||
# selection = config.filepath
|
||||
# assert isinstance(result, str)
|
||||
# result = Path(result)
|
||||
# assert result.exists()
|
||||
# assert result == selection
|
||||
|
|
|
@ -0,0 +1,128 @@
|
|||
from nucypher.blockchain.eth.actors import Operator
|
||||
from nucypher.cli.main import nucypher_cli
|
||||
from nucypher.config.characters import UrsulaConfiguration
|
||||
from tests.constants import FAKE_PASSWORD_CONFIRMED, MOCK_IP_ADDRESS
|
||||
|
||||
|
||||
def mock_ursula_run(mocker, ursulas, monkeypatch, ursula_test_config, mock_prometheus):
|
||||
# Mock IP determination
|
||||
target = "nucypher.cli.actions.configure.determine_external_ip_address"
|
||||
mocker.patch(target, return_value=MOCK_IP_ADDRESS)
|
||||
|
||||
ursula_test_config.rest_host = MOCK_IP_ADDRESS
|
||||
|
||||
# Mock worker qualification
|
||||
staking_provider = ursulas[1]
|
||||
|
||||
def set_staking_provider_address(operator):
|
||||
operator.checksum_address = staking_provider.checksum_address
|
||||
return True
|
||||
|
||||
monkeypatch.setattr(Operator, "block_until_ready", set_staking_provider_address)
|
||||
|
||||
# Mock Ursula configuration
|
||||
mocker.patch.object(
|
||||
UrsulaConfiguration, "from_configuration_file", return_value=ursula_test_config
|
||||
)
|
||||
|
||||
# Reset the start_prometheus_exporter mock in case it was called in another test
|
||||
mock_prometheus.reset_mock()
|
||||
|
||||
|
||||
def test_ursula_cli_prometheus(
|
||||
click_runner,
|
||||
mocker,
|
||||
ursulas,
|
||||
monkeypatch,
|
||||
ursula_test_config,
|
||||
tempfile_path,
|
||||
mock_prometheus,
|
||||
):
|
||||
mock_ursula_run(mocker, ursulas, monkeypatch, ursula_test_config, mock_prometheus)
|
||||
|
||||
run_args = (
|
||||
"ursula",
|
||||
"run",
|
||||
"--prometheus",
|
||||
"--dry-run",
|
||||
"--debug",
|
||||
"--config-file",
|
||||
str(tempfile_path.absolute()),
|
||||
)
|
||||
|
||||
result = click_runner.invoke(
|
||||
nucypher_cli, run_args, input=FAKE_PASSWORD_CONFIRMED, catch_exceptions=False
|
||||
)
|
||||
|
||||
assert result.exit_code == 0, result.output
|
||||
assert (
|
||||
f"✓ Prometheus Exporter http://{MOCK_IP_ADDRESS}:9101/metrics" in result.output
|
||||
), "CLI didn't print Prometheus exporter check"
|
||||
|
||||
mock_prometheus.assert_called_once()
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].port == 9101
|
||||
), "Wrong port set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].listen_address == ""
|
||||
), "Wrong listen address set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].collection_interval == 90
|
||||
), "Wrong collection interval set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].start_now is False
|
||||
), "Wrong value for start_now in prometheus_config"
|
||||
|
||||
|
||||
def test_ursula_cli_prometheus_metrics_non_default_port_and_interval(
|
||||
click_runner,
|
||||
mocker,
|
||||
ursulas,
|
||||
monkeypatch,
|
||||
ursula_test_config,
|
||||
tempfile_path,
|
||||
mock_prometheus,
|
||||
):
|
||||
port = 6666
|
||||
interval = 30
|
||||
|
||||
mock_ursula_run(mocker, ursulas, monkeypatch, ursula_test_config, mock_prometheus)
|
||||
|
||||
run_args = (
|
||||
"ursula",
|
||||
"run",
|
||||
"--dry-run",
|
||||
"--debug",
|
||||
"--config-file",
|
||||
str(tempfile_path.absolute()),
|
||||
"--prometheus",
|
||||
"--metrics-port",
|
||||
str(port),
|
||||
"--metrics-interval",
|
||||
str(interval),
|
||||
)
|
||||
|
||||
result = click_runner.invoke(
|
||||
nucypher_cli, run_args, input=FAKE_PASSWORD_CONFIRMED, catch_exceptions=False
|
||||
)
|
||||
|
||||
assert result.exit_code == 0, result.output
|
||||
assert (
|
||||
f"✓ Prometheus Exporter http://{MOCK_IP_ADDRESS}:{port}/metrics"
|
||||
in result.output
|
||||
), "CLI didn't print Prometheus exporter check"
|
||||
|
||||
mock_prometheus.assert_called_once()
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].port == port
|
||||
), "Wrong port set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].listen_address == ""
|
||||
), "Wrong listen address set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].collection_interval
|
||||
== interval
|
||||
), "Wrong collection interval set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].start_now is False
|
||||
), "Wrong value for start_now in prometheus_config"
|
|
@ -1,12 +1,10 @@
|
|||
|
||||
|
||||
import json
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import shutil
|
||||
|
||||
from nucypher.config.base import BaseConfiguration
|
||||
from nucypher.config.constants import DEFAULT_CONFIG_ROOT
|
||||
|
|
|
@ -296,3 +296,8 @@ def multichain_ids(module_mocker):
|
|||
def multichain_ursulas(ursulas, multichain_ids):
|
||||
setup_multichain_ursulas(ursulas=ursulas, chain_ids=multichain_ids)
|
||||
return ursulas
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def mock_prometheus(module_mocker):
|
||||
return module_mocker.patch("nucypher.characters.lawful.start_prometheus_exporter")
|
||||
|
|
|
@ -1,6 +1,3 @@
|
|||
|
||||
|
||||
|
||||
import datetime
|
||||
from functools import partial
|
||||
|
||||
|
|
|
@ -6,8 +6,9 @@ from nucypher.acumen.nicknames import Nickname
|
|||
from nucypher.acumen.perception import FleetSensor
|
||||
from nucypher.characters.unlawful import Vladimir
|
||||
from nucypher.config.constants import TEMPORARY_DOMAIN_NAME
|
||||
from nucypher.network.middleware import RestMiddleware
|
||||
from tests.constants import MOCK_ETH_PROVIDER_URI
|
||||
from tests.utils.middleware import MockRestMiddleware
|
||||
from tests.utils.middleware import EvilMiddleWare, MockRestMiddleware
|
||||
|
||||
|
||||
def test_all_ursulas_know_about_all_other_ursulas(ursulas, test_registry):
|
||||
|
@ -99,10 +100,11 @@ def test_vladimir_illegal_interface_key_does_not_propagate(ursulas):
|
|||
# assert vladimir not in other_ursula.known_nodes
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("monkeypatch_get_staking_provider_from_operator")
|
||||
def test_alice_refuses_to_select_node_unless_ursula_is_valid(
|
||||
alice, idle_policy, ursulas
|
||||
):
|
||||
|
||||
Vladimir.network_middleware = EvilMiddleWare(eth_endpoint=MOCK_ETH_PROVIDER_URI)
|
||||
target = list(ursulas)[2]
|
||||
# First, let's imagine that Alice has sampled a Vladimir while making this policy.
|
||||
vladimir = Vladimir.from_target_ursula(target,
|
||||
|
@ -117,6 +119,9 @@ def test_alice_refuses_to_select_node_unless_ursula_is_valid(
|
|||
alice.known_nodes.record_node(vladimir)
|
||||
alice.known_nodes.record_fleet_state()
|
||||
|
||||
# unmock the ping endpoint on mock rest middleware for this test.
|
||||
MockRestMiddleware.ping = RestMiddleware.ping
|
||||
|
||||
with pytest.raises(vladimir.InvalidNode):
|
||||
idle_policy._ping_node(
|
||||
address=vladimir.checksum_address,
|
||||
|
|
|
@ -1,9 +1,3 @@
|
|||
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
|
|
|
@ -3,35 +3,37 @@ import time
|
|||
from typing import List
|
||||
|
||||
import pytest
|
||||
|
||||
from nucypher.blockchain.eth.agents import ContractAgency, TACoApplicationAgent
|
||||
from tests.constants import MOCK_ETH_PROVIDER_URI
|
||||
|
||||
try:
|
||||
# all prometheus related imports
|
||||
from prometheus_client import CollectorRegistry
|
||||
|
||||
# include dependencies that have sub-dependencies on prometheus
|
||||
from nucypher.utilities.prometheus.collector import (
|
||||
BlockchainMetricsCollector,
|
||||
MetricsCollector,
|
||||
OperatorMetricsCollector,
|
||||
StakingProviderMetricsCollector,
|
||||
UrsulaInfoMetricsCollector,
|
||||
)
|
||||
from nucypher.utilities.prometheus.metrics import create_metrics_collectors
|
||||
|
||||
# flag to skip tests
|
||||
PROMETHEUS_INSTALLED = True
|
||||
except ImportError:
|
||||
PROMETHEUS_INSTALLED = False
|
||||
|
||||
from prometheus_client import CollectorRegistry
|
||||
from web3 import Web3
|
||||
from web3.types import Timestamp
|
||||
|
||||
from nucypher.blockchain.eth.agents import (
|
||||
ContractAgency,
|
||||
TACoApplicationAgent,
|
||||
TACoChildApplicationAgent,
|
||||
)
|
||||
from nucypher.utilities.prometheus.collector import (
|
||||
BlockchainMetricsCollector,
|
||||
MetricsCollector,
|
||||
OperatorMetricsCollector,
|
||||
StakingProviderMetricsCollector,
|
||||
UrsulaInfoMetricsCollector,
|
||||
)
|
||||
from nucypher.utilities.prometheus.metrics import (
|
||||
PrometheusMetricsConfig,
|
||||
create_metrics_collectors,
|
||||
)
|
||||
from tests.constants import MOCK_ETH_PROVIDER_URI
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mock_operator_confirmation(random_address, mock_taco_application_agent):
|
||||
mock_taco_application_agent.is_operator_confirmed.return_value = True
|
||||
def mock_taco_child_app_info(mock_taco_child_application_agent, testerchain):
|
||||
mock_taco_child_application_agent.is_operator_confirmed.return_value = True
|
||||
mock_taco_child_application_agent.blockchain = testerchain
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mock_taco_app_staking_provider_info(random_address, mock_taco_application_agent):
|
||||
info = TACoApplicationAgent.StakingProviderInfo(
|
||||
operator=random_address,
|
||||
operator_confirmed=True,
|
||||
|
@ -40,56 +42,88 @@ def mock_operator_confirmation(random_address, mock_taco_application_agent):
|
|||
mock_taco_application_agent.get_staking_provider_info.return_value = info
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
condition=(not PROMETHEUS_INSTALLED),
|
||||
reason="prometheus_client is required for test",
|
||||
)
|
||||
def test_ursula_info_metrics_collector(test_registry, ursulas):
|
||||
def test_start_prometheus_exporter_called(ursulas, mock_prometheus):
|
||||
port = 9101
|
||||
|
||||
# Reset start_prometheus_exporter mock just in case it was previously called
|
||||
mock_prometheus.reset_mock()
|
||||
|
||||
prometheus_config = PrometheusMetricsConfig(port=port)
|
||||
ursula = random.choice(ursulas)
|
||||
|
||||
ursula.run(
|
||||
start_reactor=False,
|
||||
prometheus_config=prometheus_config,
|
||||
preflight=False,
|
||||
block_until_ready=False,
|
||||
)
|
||||
ursula.stop()
|
||||
|
||||
mock_prometheus.assert_called_once()
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].port == port
|
||||
), "Wrong port set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].listen_address
|
||||
== prometheus_config.listen_address
|
||||
), "Wrong listen address set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].collection_interval
|
||||
== prometheus_config.collection_interval
|
||||
), "Wrong listen address set in prometheus_config"
|
||||
assert (
|
||||
mock_prometheus.call_args.kwargs["prometheus_config"].start_now
|
||||
== prometheus_config.start_now
|
||||
), "Wrong listen address set in prometheus_config"
|
||||
|
||||
|
||||
def test_ursula_info_metrics_collector(ursulas):
|
||||
ursula = random.choice(ursulas)
|
||||
collector = UrsulaInfoMetricsCollector(ursula=ursula)
|
||||
|
||||
collector_registry = CollectorRegistry()
|
||||
prefix = 'test_ursula_info_metrics_collector'
|
||||
collector.initialize(metrics_prefix=prefix, registry=collector_registry)
|
||||
collector.initialize(registry=collector_registry)
|
||||
collector.collect()
|
||||
|
||||
mode = "running" if ursula._learning_task.running else "stopped"
|
||||
learning_mode = collector_registry.get_sample_value(
|
||||
f"{prefix}_node_discovery", labels={f"{prefix}_node_discovery": f"{mode}"}
|
||||
)
|
||||
assert learning_mode == 1
|
||||
discovery_status = collector_registry.get_sample_value("node_discovery_running")
|
||||
assert discovery_status == ursula._learning_task.running
|
||||
|
||||
known_nodes = collector_registry.get_sample_value(f"{prefix}_known_nodes")
|
||||
known_nodes = collector_registry.get_sample_value("known_nodes")
|
||||
assert known_nodes == len(ursula.known_nodes)
|
||||
|
||||
reencryption_requests = collector_registry.get_sample_value(
|
||||
f"{prefix}_reencryption_requests"
|
||||
)
|
||||
assert reencryption_requests == 0
|
||||
|
||||
|
||||
@pytest.mark.skipif(condition=(not PROMETHEUS_INSTALLED), reason="prometheus_client is required for test")
|
||||
def test_blockchain_metrics_collector(testerchain):
|
||||
collector = BlockchainMetricsCollector(eth_endpoint=MOCK_ETH_PROVIDER_URI)
|
||||
collector = BlockchainMetricsCollector(
|
||||
root_net_endpoint=MOCK_ETH_PROVIDER_URI,
|
||||
child_net_endpoint=MOCK_ETH_PROVIDER_URI,
|
||||
)
|
||||
|
||||
collector_registry = CollectorRegistry()
|
||||
prefix = 'test_blockchain_metrics_collector'
|
||||
collector.initialize(metrics_prefix=prefix, registry=collector_registry)
|
||||
collector.initialize(registry=collector_registry)
|
||||
collector.collect()
|
||||
|
||||
metric_name = f"{prefix}_eth_chain_id"
|
||||
metric_name = "root_net_chain_id"
|
||||
assert metric_name in collector_registry._names_to_collectors.keys()
|
||||
chain_id = collector_registry.get_sample_value(f"{prefix}_eth_chain_id")
|
||||
chain_id = collector_registry.get_sample_value("root_net_chain_id")
|
||||
assert chain_id == testerchain.client.chain_id
|
||||
|
||||
metric_name = f"{prefix}_eth_block_number"
|
||||
metric_name = "root_net_current_block_number"
|
||||
assert metric_name in collector_registry._names_to_collectors.keys()
|
||||
block_number = collector_registry.get_sample_value(metric_name)
|
||||
assert block_number == testerchain.get_block_number()
|
||||
|
||||
metric_name = "child_net_chain_id"
|
||||
assert metric_name in collector_registry._names_to_collectors.keys()
|
||||
chain_id = collector_registry.get_sample_value("child_net_chain_id")
|
||||
assert chain_id == testerchain.client.chain_id
|
||||
|
||||
metric_name = "child_net_current_block_number"
|
||||
assert metric_name in collector_registry._names_to_collectors.keys()
|
||||
block_number = collector_registry.get_sample_value(metric_name)
|
||||
assert block_number == testerchain.get_block_number()
|
||||
|
||||
|
||||
@pytest.mark.skipif(condition=(not PROMETHEUS_INSTALLED), reason="prometheus_client is required for test")
|
||||
@pytest.mark.usefixtures("mock_operator_confirmation")
|
||||
@pytest.mark.usefixtures("mock_taco_app_staking_provider_info")
|
||||
def test_staking_provider_metrics_collector(test_registry, staking_providers):
|
||||
|
||||
staking_provider_address = random.choice(staking_providers)
|
||||
|
@ -99,23 +133,23 @@ def test_staking_provider_metrics_collector(test_registry, staking_providers):
|
|||
eth_endpoint=MOCK_ETH_PROVIDER_URI,
|
||||
)
|
||||
collector_registry = CollectorRegistry()
|
||||
prefix = "test_staking_provider_metrics_collector"
|
||||
collector.initialize(metrics_prefix=prefix, registry=collector_registry)
|
||||
collector.initialize(registry=collector_registry)
|
||||
collector.collect()
|
||||
|
||||
taco_application_agent = ContractAgency.get_agent(
|
||||
TACoApplicationAgent, registry=test_registry
|
||||
TACoApplicationAgent,
|
||||
registry=test_registry,
|
||||
blockchain_endpoint=MOCK_ETH_PROVIDER_URI,
|
||||
)
|
||||
|
||||
active_stake = collector_registry.get_sample_value(
|
||||
f"{prefix}_associated_active_stake"
|
||||
)
|
||||
active_stake = collector_registry.get_sample_value("active_stake")
|
||||
# only floats can be stored
|
||||
assert active_stake == float(
|
||||
int(
|
||||
Web3.from_wei(
|
||||
taco_application_agent.get_authorized_stake(
|
||||
staking_provider=staking_provider_address
|
||||
)
|
||||
),
|
||||
"ether",
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -123,54 +157,68 @@ def test_staking_provider_metrics_collector(test_registry, staking_providers):
|
|||
staking_provider=staking_provider_address
|
||||
)
|
||||
|
||||
operator_confirmed = collector_registry.get_sample_value(
|
||||
f"{prefix}_operator_confirmed"
|
||||
)
|
||||
assert operator_confirmed == staking_provider_info.operator_confirmed
|
||||
|
||||
operator_start = collector_registry.get_sample_value(
|
||||
f"{prefix}_operator_start_timestamp"
|
||||
)
|
||||
operator_start = collector_registry.get_sample_value("operator_bonded_timestamp")
|
||||
assert operator_start == staking_provider_info.operator_start_timestamp
|
||||
|
||||
|
||||
@pytest.mark.skipif(condition=(not PROMETHEUS_INSTALLED), reason="prometheus_client is required for test")
def test_operator_metrics_collector(test_registry, ursulas):
    ursula = random.choice(ursulas)
@pytest.mark.usefixtures("mock_taco_child_app_info")
def test_operator_metrics_collector(
    test_registry, operator_address, testerchain, mock_taco_child_application_agent
):
    collector = OperatorMetricsCollector(
        domain=ursula.domain,
        operator_address=ursula.operator_address,
        operator_address=operator_address,
        contract_registry=test_registry,
        polygon_endpoint=MOCK_ETH_PROVIDER_URI,
    )
    collector_registry = CollectorRegistry()
    prefix = 'test_worker_metrics_collector'
    collector.initialize(metrics_prefix=prefix, registry=collector_registry)
    collector.initialize(registry=collector_registry)
    collector.collect()

    operator_eth = collector_registry.get_sample_value(f"{prefix}_operator_eth_balance")
    # only floats can be stored
    assert operator_eth == float(ursula.eth_balance)
    taco_child_application_agent = ContractAgency.get_agent(
        TACoChildApplicationAgent,
        registry=test_registry,
        blockchain_endpoint=MOCK_ETH_PROVIDER_URI,
    )

    operator_confirmed = collector_registry.get_sample_value("operator_confirmed")
    assert operator_confirmed
    assert operator_confirmed == taco_child_application_agent.is_operator_confirmed(
        operator_address
    )

    operator_matic_balance = collector_registry.get_sample_value(
        "operator_matic_balance"
    )
    assert operator_matic_balance == Web3.from_wei(
        testerchain.client.get_balance(operator_address), "ether"
    )

    # switch operator confirmed and collect again
    mock_taco_child_application_agent.is_operator_confirmed.return_value = False
    collector.collect()
    operator_confirmed = collector_registry.get_sample_value("operator_confirmed")
    assert not operator_confirmed

@pytest.mark.skipif(condition=(not PROMETHEUS_INSTALLED), reason="prometheus_client is required for test")
@pytest.mark.usefixtures("mock_operator_confirmation")
@pytest.mark.usefixtures("mock_taco_child_app_info")
@pytest.mark.usefixtures("mock_taco_app_staking_provider_info")
def test_all_metrics_collectors_sanity_collect(ursulas):
    ursula = random.choice(ursulas)

    collector_registry = CollectorRegistry()
    prefix = 'test_all_metrics_collectors'

    metrics_collectors = create_metrics_collectors(ursula=ursula)
    initialize_collectors(metrics_collectors=metrics_collectors,
                          collector_registry=collector_registry,
                          prefix=prefix)
    initialize_collectors(
        metrics_collectors=metrics_collectors, collector_registry=collector_registry
    )

    for collector in metrics_collectors:
        collector.collect()


def initialize_collectors(metrics_collectors: List['MetricsCollector'],
                          collector_registry: 'CollectorRegistry',
                          prefix: str) -> None:
def initialize_collectors(
    metrics_collectors: List["MetricsCollector"],
    collector_registry: "CollectorRegistry",
) -> None:
    for collector in metrics_collectors:
        collector.initialize(metrics_prefix=prefix, registry=collector_registry)
        collector.initialize(registry=collector_registry)
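The collector tests above all follow the same prometheus_client pattern: initialize() registers metric objects against a private CollectorRegistry, collect() writes current values into them, and the assertions read samples back with get_sample_value, so no HTTP exporter is involved. A minimal sketch of that pattern; the metric name and value here are illustrative, not taken from the collectors above:

from prometheus_client import CollectorRegistry, Gauge

registry = CollectorRegistry()

# initialize(): create metric objects bound to a private registry.
chain_gauge = Gauge("child_net_chain_id", "Chain ID of the child network", registry=registry)

# collect(): write the current observation into the gauge.
chain_gauge.set(131277322940537)  # illustrative chain id value

# Tests then read samples straight from the registry.
assert registry.get_sample_value("child_net_chain_id") == 131277322940537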
@@ -170,9 +170,8 @@ class MockCoordinatorAgent(MockContractAgent):
        ritual.total_aggregations += 1
        return self.blockchain.FAKE_RECEIPT

    @staticmethod
    def is_provider_public_key_set(staking_provider: ChecksumAddress) -> bool:
        return False
    def is_provider_public_key_set(self, staking_provider: ChecksumAddress) -> bool:
        return staking_provider in self._participant_keys_history

    def set_provider_public_key(
        self, public_key: FerveoPublicKey, transacting_power: TransactingPower

@@ -187,6 +186,7 @@ class MockCoordinatorAgent(MockContractAgent):
        participant_keys = self._participant_keys_history.get(provider_address)
        if not participant_keys:
            participant_keys = []
            self._participant_keys_history[provider_address] = participant_keys

        participant_keys.append(
            self.ParticipantKey(

@@ -274,7 +274,7 @@ class MockCoordinatorAgent(MockContractAgent):
    def get_provider_public_key(
        self, provider: ChecksumAddress, ritual_id: int
    ) -> FerveoPublicKey:
        participant_keys = self._participant_keys_history.get(provider)
        participant_keys = self._participant_keys_history[provider]
        for participant_key in reversed(participant_keys):
            if participant_key.lastRitualId <= ritual_id:
                g2Point = participant_key.publicKey
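The participant-key bookkeeping exercised above is a per-provider history list: set_provider_public_key appends, is_provider_public_key_set checks membership, and get_provider_public_key walks the history newest-to-oldest for the last key registered at or before the requested ritual. A self-contained sketch of that lookup, using a simplified stand-in for ParticipantKey:

from dataclasses import dataclass
from typing import Dict, List

@dataclass
class ParticipantKey:  # simplified stand-in for the mock's ParticipantKey
    lastRitualId: int
    publicKey: bytes

keys_history: Dict[str, List[ParticipantKey]] = {}

def set_key(provider: str, ritual_id: int, public_key: bytes) -> None:
    # Create the history list on first use, then append; newest entries last.
    keys_history.setdefault(provider, []).append(ParticipantKey(ritual_id, public_key))

def get_key(provider: str, ritual_id: int) -> bytes:
    # Walk newest-to-oldest and return the most recent key registered at or
    # before the requested ritual, mirroring get_provider_public_key above.
    for participant_key in reversed(keys_history[provider]):
        if participant_key.lastRitualId <= ritual_id:
            return participant_key.publicKey
    raise ValueError("no key registered at or before this ritual")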
@@ -207,7 +207,7 @@ def _determine_good_serials(start, end):
    for i in range(start, end):
        try:
            NotAPublicKey.from_int(i).i_want_to_be_a_real_boy()
        except Exception as e:
        except Exception:
            continue
        else:
            good_serials.append(i)
@@ -2,7 +2,6 @@
from pathlib import Path

import maya
import os
import pytest

@@ -1,6 +1,7 @@

import builtins

import pytest

from nucypher.exceptions import DevelopmentInstallationRequired

@@ -49,7 +50,7 @@ def test_use_vladimir_without_development_installation(import_mocker, mocker):
    del EvilMiddleWare

    with import_mocker:
        from nucypher.characters.unlawful import Vladimir  # Import OK
        from nucypher.characters.unlawful import Vladimir  # Import OK
        with pytest.raises(DevelopmentInstallationRequired, match=message):  # Expect lazy failure
            Vladimir.from_target_ursula(target_ursula=mocker.Mock())

@@ -63,7 +64,8 @@ def test_get_pyevm_backend_without_development_installation(import_mocker):
    del constants

    with import_mocker:
        from nucypher.blockchain.eth.providers import _get_pyevm_test_backend  # Import OK
        from nucypher.blockchain.eth.providers import (
            _get_pyevm_test_backend,  # Import OK
        )
        with pytest.raises(DevelopmentInstallationRequired, match=message):  # Expect lazy failure
            _get_pyevm_test_backend()

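Both tests above pin down the same "lazy failure" contract: importing a module with development-only dependencies must succeed, and DevelopmentInstallationRequired may surface only when the dev-only code path is actually exercised. A rough sketch of the pattern under test, with simplified stand-ins rather than the actual nucypher source:

class DevelopmentInstallationRequired(ImportError):
    """Stand-in for nucypher.exceptions.DevelopmentInstallationRequired."""

def from_target_ursula(target_ursula):
    # The dev-only import lives inside the function body, so importing this
    # module always succeeds; the error is deferred until first use.
    try:
        from tests.utils.middleware import EvilMiddleWare  # dev-only dependency
    except ImportError:
        raise DevelopmentInstallationRequired(
            "tests.utils.middleware is only available in a development installation"
        )
    return EvilMiddleWare(target_ursula)  # illustrative use of the dev-only class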
@@ -5,7 +5,6 @@ from nucypher.blockchain.eth.actors import Operator
from nucypher.blockchain.eth.agents import ContractAgency
from nucypher.blockchain.eth.interfaces import BlockchainInterfaceFactory
from nucypher.blockchain.eth.registry import ContractRegistry
from nucypher.config.constants import TEMPORARY_DOMAIN_NAME
from nucypher.crypto.ferveo import dkg
from nucypher.crypto.powers import TransactingPower
from nucypher.network.nodes import Teacher
@@ -4,11 +4,7 @@ import unittest

from eth_utils import keccak

from nucypher.crypto.utils import (
    secure_random_range,
    secure_random,
    keccak_digest
)
from nucypher.crypto.utils import keccak_digest, secure_random, secure_random_range


class TestCrypto(unittest.TestCase):
@@ -1,8 +1,7 @@
import pytest

from nucypher.blockchain.eth.registry import ContractRegistry
from nucypher.config.constants import TEMPORARY_DOMAIN_NAME
from tests.constants import TESTERCHAIN_CHAIN_ID, TEMPORARY_DOMAIN
from tests.constants import TEMPORARY_DOMAIN, TESTERCHAIN_CHAIN_ID
from tests.utils.registry import MockRegistrySource

@@ -4,7 +4,7 @@ from unittest.mock import PropertyMock

import pytest
from hexbytes import HexBytes
from web3.exceptions import TransactionNotFound, TimeExhausted
from web3.exceptions import TimeExhausted, TransactionNotFound

from tests.mock.interfaces import MockEthereumClient
@@ -1,6 +1,6 @@

from unittest.mock import Mock, MagicMock
from unittest.mock import MagicMock, Mock

import pytest
@@ -1,9 +1,7 @@

import pytest
from web3.gas_strategies import time_based

from constant_sorrow.constants import ALL_OF_THEM
from web3.gas_strategies import time_based

from nucypher.blockchain.eth.interfaces import BlockchainInterface
from nucypher.utilities.gas_strategies import WEB3_GAS_STRATEGIES
@@ -3,7 +3,7 @@ import pytest

import nucypher
from nucypher.cli.main import ENTRY_POINTS, nucypher_cli
from nucypher.config.constants import USER_LOG_DIR, DEFAULT_CONFIG_ROOT
from nucypher.config.constants import DEFAULT_CONFIG_ROOT, USER_LOG_DIR


def test_echo_nucypher_version(click_runner):
@@ -6,7 +6,7 @@ from statistics import median
from unittest.mock import patch

import pytest
from constant_sorrow.constants import SLOW, MEDIUM, FAST, FASTEST
from constant_sorrow.constants import FAST, FASTEST, MEDIUM, SLOW
from requests.exceptions import ConnectionError
from web3 import Web3

@@ -15,11 +15,10 @@ from nucypher.utilities.datafeeds import (
    EtherchainGasPriceDatafeed,
    EthereumGasPriceDatafeed,
    UpvestGasPriceDatafeed,
    ZoltuGasPriceDatafeed
    ZoltuGasPriceDatafeed,
)
from nucypher.utilities.gas_strategies import construct_datafeed_median_strategy


etherchain_json = {
    "safeLow": "99.0",
    "standard": "105.0",
@@ -204,7 +203,6 @@ def test_zoltu():

def test_datafeed_median_gas_price_strategy():

    mock_etherchain_gas_price = 1000
    mock_upvest_gas_price = 2000
    mock_zoltu_gas_price = 4000
    mock_rpc_gas_price = 42
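The median-strategy test above feeds several mocked datafeed prices into construct_datafeed_median_strategy. Conceptually the strategy is a median over whichever datafeeds respond, falling back to the RPC gas price when none do; the sketch below assumes that behavior and is not the exact nucypher implementation:

from statistics import median

def datafeed_median_gas_price(feed_prices, rpc_fallback):
    # Ignore feeds that errored (None); if none respond, use the RPC node's price.
    available = [price for price in feed_prices if price is not None]
    return median(available) if available else rpc_fallback

# With the mock values from the test: median(1000, 2000, 4000) == 2000.
assert datafeed_median_gas_price([1000, 2000, 4000], rpc_fallback=42) == 2000
# If every datafeed fails, the RPC price wins.
assert datafeed_median_gas_price([None, None, None], rpc_fallback=42) == 42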
@@ -2,7 +2,10 @@

import pytest

from nucypher.blockchain.eth.decorators import InvalidChecksumAddress, validate_checksum_address
from nucypher.blockchain.eth.decorators import (
    InvalidChecksumAddress,
    validate_checksum_address,
)


def test_validate_checksum_address(get_random_checksum_address):
@@ -1,13 +1,11 @@

import itertools

import pytest
from web3 import Web3

from nucypher.utilities.gas_strategies import (
    construct_fixed_price_gas_strategy,
    max_price_gas_strategy_wrapper,
    GasStrategyError
)

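For orientation, the two helpers imported above behave roughly as sketched below: a fixed-price strategy always returns the same wei value, and the max-price wrapper caps whatever an inner strategy returns. The signatures follow web3's gas-strategy convention, strategy(web3, transaction_params) -> wei, and are assumptions rather than the exact nucypher API:

def construct_fixed_price_gas_strategy(gas_price_wei):
    # Always quote the same price, regardless of network conditions.
    def fixed_price_strategy(web3, transaction_params=None):
        return gas_price_wei
    return fixed_price_strategy

def max_price_gas_strategy_wrapper(gas_strategy, max_gas_price_wei):
    # Never pay more than the configured ceiling.
    def wrapped_strategy(web3, transaction_params=None):
        return min(gas_strategy(web3, transaction_params), max_gas_price_wei)
    return wrapped_strategy

strategy = max_price_gas_strategy_wrapper(construct_fixed_price_gas_strategy(10), 5)
assert strategy(web3=None) == 5  # the wrapper clamps the fixed quote of 10 down to 5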
@@ -4,7 +4,8 @@ from io import StringIO
from json.encoder import py_encode_basestring_ascii

import pytest
from twisted.logger import Logger as TwistedLogger, formatEvent, jsonFileLogObserver
from twisted.logger import Logger as TwistedLogger
from twisted.logger import formatEvent, jsonFileLogObserver

from nucypher.utilities.logging import Logger
@@ -66,7 +66,7 @@ def test_operator_bonded_but_becomes_unbonded(mocker, get_random_checksum_address):
    tracker.stop()


def test_operator_handle_errors(mocker, get_random_checksum_address):
def test_operator_handle_errors(mocker):
    ursula = mocker.Mock()
    tracker = OperatorBondedTracker(ursula=ursula)