Conflict resolution fixes for monitor crawl and dashboard.

remotes/upstream/doubtfire
derekpierre 2019-11-18 16:07:21 -05:00 committed by Kieran R. Prasch
parent c7593cf9cc
commit d3f1cc810d
5 changed files with 16 additions and 34 deletions

View File

@@ -41,10 +41,6 @@ def monitor():
@click.option('--network', help="Network Domain Name", type=click.STRING, default='goerli')
@click.option('--learn-on-launch', help="Conduct first learning loop on main thread at launch.", is_flag=True)
@click.option('--provider', 'provider_uri', help="Blockchain provider's URI", type=click.STRING)
@click.option('--node-storage-dirpath', 'node_storage_dir', help="Directory path for storing known nodes information",
type=click.STRING)
@click.option('--node-metadata-dbfilename', 'node_metadata_dbfile', help="DB file to store known nodes metadata",
type=click.STRING)
@nucypher_click_config
def crawl(click_config,
teacher_uri,
@@ -53,8 +49,6 @@ def crawl(click_config,
network,
learn_on_launch,
provider_uri,
node_storage_dir,
node_metadata_dbfile,
):
"""
Gather NuCypher network information.
@@ -84,8 +78,6 @@ def crawl(click_config,
federated_only=False,
start_learning_now=True,
learn_on_same_thread=learn_on_launch,
storage_dir=node_storage_dir,
db_filename=node_metadata_dbfile
)
crawler.start()
@@ -100,9 +92,6 @@ def crawl(click_config,
@click.option('--tls-key-filepath', help="TLS private key filepath")
@click.option('--provider', 'provider_uri', help="Blockchain provider's URI", type=click.STRING)
@click.option('--network', help="Network Domain Name", type=click.STRING, default='goerli')
@click.option('--node-metadata-filepath', 'node_metadata_dbfilepath',
help="Path to crawler DB file with known nodes metadata",
type=click.STRING)
@click.option('--dry-run', '-x', help="Execute normally without actually starting the node", is_flag=True)
@nucypher_click_config
def dashboard(click_config,
@@ -113,7 +102,6 @@ def dashboard(click_config,
tls_key_filepath,
provider_uri,
network,
node_metadata_dbfilepath,
dry_run,
):
"""
@@ -135,8 +123,7 @@ def dashboard(click_config,
flask_server=rest_app,
route_url='/',
registry=registry,
network=network,
node_metadata_dbfilepath=node_metadata_dbfilepath)
network=network)
#
# Server
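
With the two storage-path options dropped, the crawl command's surface shrinks to network, learning, and provider settings. A minimal, runnable sketch of that reduced interface (a hypothetical stand-alone script; the real command also builds a registry and a NetworkCrawler, omitted here):

import click

@click.command()
@click.option('--network', help="Network Domain Name", type=click.STRING, default='goerli')
@click.option('--learn-on-launch', help="Conduct first learning loop on main thread at launch.", is_flag=True)
@click.option('--provider', 'provider_uri', help="Blockchain provider's URI", type=click.STRING)
def crawl(network, learn_on_launch, provider_uri):
    """Gather NuCypher network information."""
    # The metadata DB location is now owned by SQLiteForgetfulNodeStorage itself,
    # so nothing storage-related is accepted here or forwarded to the crawler.
    click.echo(f"crawling {network} via {provider_uri or 'default provider'}")

if __name__ == '__main__':
    crawl()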

View File

@@ -283,12 +283,15 @@ class SQLiteForgetfulNodeStorage(ForgetfulNodeStorage):
SQLite forgetful storage of node metadata
"""
_name = 'sqlite'
DB_NAME = 'node_metadata.sqlite'
DEFAULT_DB_FILEPATH = os.path.join(DEFAULT_CONFIG_ROOT, DB_NAME)
DB_FILE_NAME = 'sql-storage-metadata.sqlite'
NODE_DB_NAME = 'node_info'
STATE_DB_NAME = 'fleet_state'
DEFAULT_DB_FILEPATH = os.path.join(DEFAULT_CONFIG_ROOT, DB_FILE_NAME)
def __init__(self, db_filepath: str = DEFAULT_DB_FILEPATH, *args, **kwargs):
self.__db_filepath = db_filepath
self.__db_conn = sqlite3.connect(self.__db_filepath)
self.__create_db_tables()
super().__init__(*args, **kwargs)
def __del__(self):
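
Taken together, the new constants fix both the on-disk location and the table names that the crawler and dashboard share. A condensed, self-contained sketch of that layout (DEFAULT_CONFIG_ROOT is stubbed here for illustration; the real class inherits from ForgetfulNodeStorage and builds its tables in __create_db_tables):

import os
import sqlite3

# Stand-in for nucypher.config.constants.DEFAULT_CONFIG_ROOT, purely illustrative.
DEFAULT_CONFIG_ROOT = os.path.expanduser('~/.nucypher-sketch')

class SQLiteForgetfulNodeStorageSketch:
    DB_FILE_NAME = 'sql-storage-metadata.sqlite'
    NODE_DB_NAME = 'node_info'      # table read by NodeMetadataClient.get_known_nodes_metadata()
    STATE_DB_NAME = 'fleet_state'   # table read by NodeMetadataClient.get_previous_states_metadata()
    DEFAULT_DB_FILEPATH = os.path.join(DEFAULT_CONFIG_ROOT, DB_FILE_NAME)

    def __init__(self, db_filepath: str = DEFAULT_DB_FILEPATH):
        os.makedirs(os.path.dirname(db_filepath), exist_ok=True)
        self._db_filepath = db_filepath
        # The real class opens the connection the same way and then creates the
        # node_info / fleet_state tables; the schema is omitted here.
        self._db_conn = sqlite3.connect(self._db_filepath)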

View File

@@ -26,11 +26,11 @@ class MonitorDashboardApp(NetworkStatusPage):
'fillFrame': False,
'displayModeBar': False}
def __init__(self, registry, network, node_metadata_dbfilepath, *args, **kwargs):
def __init__(self, registry, network, *args, **kwargs):
super().__init__(*args, **kwargs)
self.blockchain_db_client = NetworkCrawler.get_blockchain_crawler_client()
self.node_metadata_db_client = NodeMetadataClient(db_filepath=node_metadata_dbfilepath)
self.node_metadata_db_client = NodeMetadataClient()
self.registry = registry
self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent, registry=self.registry)
self.network = network
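
With the db filepath argument removed from the constructor, the dashboard simply builds its metadata client on the default path. A rough mirror of the new wiring, assuming NodeMetadataClient is importable from nucypher.network.status_app.db (as the later hunks suggest); agents, Dash setup, and the blockchain client are omitted:

from nucypher.network.status_app.db import NodeMetadataClient

class MonitorDashboardAppSketch:
    def __init__(self, registry, network):
        self.registry = registry
        self.network = network
        # No db_filepath is threaded through any more; the client falls back to
        # SQLiteForgetfulNodeStorage.DEFAULT_DB_FILEPATH.
        self.node_metadata_db_client = NodeMetadataClient()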

View File

@@ -1,6 +1,5 @@
from influxdb import InfluxDBClient
from maya import MayaDT
from nucypher.network.status_app.db import BlockchainCrawlerClient
from twisted.internet import task
from twisted.logger import Logger
@@ -49,8 +48,6 @@ class NetworkCrawler(Learner):
def __init__(self,
registry,
storage_dir,
db_filename,
federated_only: bool = False,
refresh_rate=DEFAULT_REFRESH_RATE,
restart_on_error=True,
@@ -58,9 +55,7 @@ class NetworkCrawler(Learner):
self.registry = registry
self.federated_only = federated_only
node_storage = SQLiteForgetfulNodeStorage(federated_only=False,
parent_dir=storage_dir,
db_filename=db_filename)
node_storage = SQLiteForgetfulNodeStorage(federated_only=False)
class MonitoringTracker(FleetStateTracker):
def record_fleet_state(self, *args, **kwargs):
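
The crawler-side effect of the same cleanup: constructing node storage no longer needs a parent_dir/db_filename pair. A short, hedged sketch using the import path shown in the next file:

from nucypher.config.storages import SQLiteForgetfulNodeStorage

# The storage class decides its own location now; federated_only mirrors the call above.
node_storage = SQLiteForgetfulNodeStorage(federated_only=False)
print(node_storage.DEFAULT_DB_FILEPATH)  # e.g. <DEFAULT_CONFIG_ROOT>/sql-storage-metadata.sqlite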

View File

@@ -1,4 +1,3 @@
import os
import sqlite3
from collections import OrderedDict
from datetime import datetime, timedelta
@@ -6,8 +5,8 @@ from datetime import datetime, timedelta
from influxdb import InfluxDBClient
from maya import MayaDT
from nucypher.config.constants import DEFAULT_CONFIG_ROOT
from nucypher.config.storages import SQLiteForgetfulNodeStorage
from typing import List, Dict
class BlockchainCrawlerClient:
@@ -80,14 +79,12 @@ class BlockchainCrawlerClient:
class NodeMetadataClient:
DEFAULT_DB_FILEPATH = os.path.join(DEFAULT_CONFIG_ROOT, 'monitor.sqlite')
def __init__(self, db_filepath: str = SQLiteForgetfulNodeStorage.DEFAULT_DB_FILEPATH):
self._db_filepath = db_filepath
def __init__(self, db_filepath: str = DEFAULT_DB_FILEPATH):
self._metadata_filepath = db_filepath
def get_known_nodes_metadata(self) -> dict:
def get_known_nodes_metadata(self) -> Dict:
# dash threading means that connection needs to be established in same thread as use
db_conn = sqlite3.connect(self._metadata_filepath)
db_conn = sqlite3.connect(self._db_filepath)
try:
result = db_conn.execute(f"SELECT * FROM {SQLiteForgetfulNodeStorage.NODE_DB_NAME}")
@@ -105,9 +102,9 @@ class NodeMetadataClient:
finally:
db_conn.close()
def get_previous_states_metadata(self, limit: int = 5) -> dict:
def get_previous_states_metadata(self, limit: int = 5) -> List[Dict]:
# dash threading means that connection needs to be established in same thread as use
db_conn = sqlite3.connect(self._metadata_filepath)
db_conn = sqlite3.connect(self._db_filepath)
states_dict_list = []
try:
result = db_conn.execute(f"SELECT * FROM {SQLiteForgetfulNodeStorage.STATE_DB_NAME} "
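
Putting this last file together, a condensed sketch of the client's read path after the change: the default location comes from SQLiteForgetfulNodeStorage, and a fresh sqlite3 connection is opened on every call because Dash serves requests from worker threads. The row-to-dict mapping is illustrative only, not the real schema:

import sqlite3
from typing import Dict

from nucypher.config.storages import SQLiteForgetfulNodeStorage

class NodeMetadataClientSketch:

    def __init__(self, db_filepath: str = SQLiteForgetfulNodeStorage.DEFAULT_DB_FILEPATH):
        self._db_filepath = db_filepath

    def get_known_nodes_metadata(self) -> Dict:
        # Dash threading: the connection must be created in the same thread that uses it.
        db_conn = sqlite3.connect(self._db_filepath)
        try:
            rows = db_conn.execute(f"SELECT * FROM {SQLiteForgetfulNodeStorage.NODE_DB_NAME}").fetchall()
            return {row[0]: row[1:] for row in rows}  # keyed on the first column, purely illustrative
        finally:
            db_conn.close()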