From 47e521228b7484f5917d70c26f160b23bde057cb Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 8 Jun 2021 09:23:51 -0700 Subject: [PATCH 01/19] Created separate file for fetch status functions. Cleaned up imports. Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 89 +++---------------- .../fetch_status_library.py | 78 ++++++++++++++++ 2 files changed, 89 insertions(+), 78 deletions(-) create mode 100644 fetch-validator-status/fetch_status_library.py diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index d5e94ea..5638209 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -1,90 +1,22 @@ import argparse import asyncio -# import base58 -# import base64 import json import os import sys -# import datetime -import urllib.request -# from typing import Tuple - -# import nacl.signing import indy_vdr -from indy_vdr.ledger import ( - build_get_validator_info_request, - build_get_txn_request, - # Request, +from fetch_status_library import ( + enable_verbose, + log, + fetch_status, + get_script_dir, + download_genesis_file, + load_network_list, + list_networks ) -from indy_vdr.pool import open_pool -from plugin_collection import PluginCollection -# import time from DidKey import DidKey -verbose = False - - -def log(*args): - if verbose: - print(*args, "\n", file=sys.stderr) - - -async def fetch_status(genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None): - # Start Of Engine - attempt = 3 - while attempt: - try: - pool = await open_pool(transactions_path=genesis_path) - except: - log("Pool Timed Out! Trying again...") - if not attempt: - print("Unable to get pool Response! 3 attempts where made. Exiting...") - exit() - attempt -= 1 - continue - break - - result = [] - verifiers = {} - - if ident: - request = build_get_validator_info_request(ident.did) - ident.sign_request(request) - else: - request = build_get_txn_request(None, 1, 1) - - from_nodes = [] - if nodes: - from_nodes = nodes.split(",") - response = await pool.submit_action(request, node_aliases = from_nodes) - try: - # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 - verifiers = await pool.get_verifiers() - except AttributeError: - pass - # End Of Engine - - result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) - print(json.dumps(result, indent=2)) - -def get_script_dir(): - return os.path.dirname(os.path.realpath(__file__)) - - -def download_genesis_file(url: str, target_local_path: str): - log("Fetching genesis file ...") - target_local_path = f"{get_script_dir()}/genesis.txn" - urllib.request.urlretrieve(url, target_local_path) - -def load_network_list(): - with open(f"{get_script_dir()}/networks.json") as json_file: - networks = json.load(json_file) - return networks - -def list_networks(): - networks = load_network_list() - return networks.keys() +from plugin_collection import PluginCollection if __name__ == "__main__": monitor_plugins = PluginCollection('plugins') @@ -102,6 +34,7 @@ def list_networks(): args, unknown = parser.parse_known_args() verbose = args.verbose + enable_verbose(verbose) monitor_plugins.load_all_parse_args(args) @@ -136,4 +69,4 @@ def list_networks(): else: ident = None - asyncio.get_event_loop().run_until_complete(fetch_status(args.genesis_path, args.nodes, ident, network_name)) \ No newline at end of file + 
asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, args.genesis_path, args.nodes, ident, network_name)) \ No newline at end of file diff --git a/fetch-validator-status/fetch_status_library.py b/fetch-validator-status/fetch_status_library.py new file mode 100644 index 0000000..9456c31 --- /dev/null +++ b/fetch-validator-status/fetch_status_library.py @@ -0,0 +1,78 @@ +import json +import os +import sys + +import urllib.request +from indy_vdr.ledger import ( + build_get_validator_info_request, + build_get_txn_request, +) +from indy_vdr.pool import open_pool +from DidKey import DidKey + +from plugin_collection import PluginCollection + +verbose = False + +def enable_verbose(enable): + global verbose + verbose = enable + +def log(*args): + if verbose: + print(*args, "\n", file=sys.stderr) + +async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None): + # Start Of Engine + attempt = 3 + while attempt: + try: + pool = await open_pool(transactions_path=genesis_path) + except: + log("Pool Timed Out! Trying again...") + if not attempt: + print("Unable to get pool Response! 3 attempts where made. Exiting...") + exit() + attempt -= 1 + continue + break + + result = [] + verifiers = {} + + if ident: + request = build_get_validator_info_request(ident.did) + ident.sign_request(request) + else: + request = build_get_txn_request(None, 1, 1) + + from_nodes = [] + if nodes: + from_nodes = nodes.split(",") + response = await pool.submit_action(request, node_aliases = from_nodes) + try: + # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 + verifiers = await pool.get_verifiers() + except AttributeError: + pass + # End Of Engine + + result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) + print(json.dumps(result, indent=2)) + +def get_script_dir(): + return os.path.dirname(os.path.realpath(__file__)) + +def download_genesis_file(url: str, target_local_path: str): + log("Fetching genesis file ...") + target_local_path = f"{get_script_dir()}/genesis.txn" + urllib.request.urlretrieve(url, target_local_path) + +def load_network_list(): + with open(f"{get_script_dir()}/networks.json") as json_file: + networks = json.load(json_file) + return networks + +def list_networks(): + networks = load_network_list() + return networks.keys() \ No newline at end of file From 8f48087cc5558cd3083a46c157493bcf60cdef31 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 8 Jun 2021 14:29:58 -0700 Subject: [PATCH 02/19] * Fix pipe to file issue on Windows - Only use `winpty` when running interactively on Windows. - This fixes the `stdout is not a tty` issue seen on Windows when piping the output of the script to a file. - For example; `./run.sh --net smn --status > status.json` Removed all trailing white-space from network names. Added network name and consistent timestamp to node summary. - Add the network name to the node summary so monitoring results can be filtered and grouped by network. - Ensure there is a consistent (UTC) timestamp on the results that can be used as the time series for the collected data. This ensures events can be compared across nodes regardless of any clock skew on the nodes themselves. Updated run script. - Add support for specifying the number of samples to collect via an environment variable (default 1); `SAMPLES`. - Samples are taken one minute apart as the Nodes refresh their own data every minute. 
- Add support for logging results directly to a timestamped file via an environment variable; `LOG`. Full Example: `SAMPLES=20 LOG=/c/tick-sandbox/telegraf/indy-node-monitor/BuilderNet/BuilderNet-AllNodes.json ./run.sh --net sbn --seed ` - Runs the `fetch_status` process 20 times (once every minute) producing a set of files in `/c/tick-sandbox/telegraf/indy-node-monitor/BuilderNet/` with a timestamped filename that looks like this; `BuilderNet-AllNodes_2021-05-23_06-48-20.json`. Signed-off-by: KoleBarnes --- fetch-validator-status/networks.json | 14 ++++++------ fetch-validator-status/plugins/analysis.py | 14 ++++++++---- fetch-validator-status/run.sh | 26 +++++++++++++++++++--- 3 files changed, 40 insertions(+), 14 deletions(-) diff --git a/fetch-validator-status/networks.json b/fetch-validator-status/networks.json index 76dd582..4bff172 100644 --- a/fetch-validator-status/networks.json +++ b/fetch-validator-status/networks.json @@ -24,7 +24,7 @@ "genesisUrl": "http://dev.bcovrin.vonx.io/genesis" }, "bct": { - "name": "BCovrin Test ", + "name": "BCovrin Test", "genesisUrl": "http://test.bcovrin.vonx.io/genesis" }, "bcp": { @@ -32,23 +32,23 @@ "genesisUrl": "http://prod.bcovrin.vonx.io/genesis" }, "gld": { - "name": "GreenLight Dev Ledger ", + "name": "GreenLight Dev Ledger", "genesisUrl": "http://dev.greenlight.bcovrin.vonx.io/genesis" }, "gl": { - "name": "GreenLight Ledger ", + "name": "GreenLight Ledger", "genesisUrl": "http://greenlight.bcovrin.vonx.io/genesis" }, "imn": { - "name": "Indicio MainNet ", + "name": "Indicio MainNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_mainnet_genesis" }, "idn": { - "name": "Indicio DemoNet ", + "name": "Indicio DemoNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_demonet_genesis" }, "itn": { - "name": "Indicio TestNet ", + "name": "Indicio TestNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_testnet_genesis" } -} +} \ No newline at end of file diff --git a/fetch-validator-status/plugins/analysis.py b/fetch-validator-status/plugins/analysis.py index 3e42729..678b18c 100644 --- a/fetch-validator-status/plugins/analysis.py +++ b/fetch-validator-status/plugins/analysis.py @@ -31,6 +31,7 @@ async def perform_operation(self, result, network_name, response, verifiers): warnings = [] info = [] entry = {"name": node} + entry["network"] = network_name try: await self.get_node_addresses(entry, verifiers) jsval = json.loads(val) @@ -88,13 +89,18 @@ async def get_primary_name(self, jsval: any, node: str) -> str: async def get_status_summary(self, jsval: any, errors: list) -> any: status = {} status["ok"] = (len(errors) <= 0) + # Ensure there is always a consistent timestamp + # Note: We are not using the timestamp from the node itself for this; result.data.timestamp + # - There could be clock skew on the node which would affect the time series data when + # recorded and graphed. This would adversely affect the comparison of events across nodes. + # - In the case of a node that is not responding, we would not receive the timestamp + # from the node. + # - The solution is to add a consistent timestamp marking the time the data was collected + # by the monitor. 
+ status["timestamp"] = datetime.datetime.now(datetime.timezone.utc).strftime('%s') if jsval and ("REPLY" in jsval["op"]): if "Node_info" in jsval["result"]["data"]: status["uptime"] = str(datetime.timedelta(seconds = jsval["result"]["data"]["Node_info"]["Metrics"]["uptime"])) - if "timestamp" in jsval["result"]["data"]: - status["timestamp"] = jsval["result"]["data"]["timestamp"] - else: - status["timestamp"] = datetime.datetime.now().strftime('%s') if "Software" in jsval["result"]["data"]: status["software"] = {} status["software"]["indy-node"] = jsval["result"]["data"]["Software"]["indy-node"] diff --git a/fetch-validator-status/run.sh b/fetch-validator-status/run.sh index b8c27b1..c9c431c 100755 --- a/fetch-validator-status/run.sh +++ b/fetch-validator-status/run.sh @@ -15,6 +15,17 @@ function getVolumeMount() { echo " --volume='${path}:/home/indy/${mountPoint}:Z' " } +function runCmd() { + _cmd=${1} + if [ ! -z ${LOG} ]; then + _cmd+=" > ${LOG%.*}_`date +\%Y-\%m-\%d_%H-%M-%S`.json" + fi + + eval ${_cmd} + # echo + # echo ${_cmd} +} + # IM is for "interactive mode" so Docker is run with the "-it" parameter. Probably never needed # but it is there. Use "IM=1 run.sh ..." to run the Docker container in interactive mode if [ -z "${IM+x}" ]; then @@ -35,7 +46,6 @@ cmd="${terminalEmu} docker run --rm ${DOCKER_INTERACTIVE} \ -e "GENESIS_PATH=${GENESIS_PATH}" \ -e "GENESIS_URL=${GENESIS_URL}" \ -e "SEED=${SEED}"" - # Dynamically mount teh 'conf' directory if it exists. if [ -d "./conf" ]; then @@ -47,5 +57,15 @@ if [ -d "./plugins" ]; then fi cmd+="fetch_status \"$@\"" -eval ${cmd} -# echo ${cmd} \ No newline at end of file + +counter=${SAMPLES:-1} +while [[ ${counter} > 0 ]] +do + runCmd "${cmd}" + counter=$(( ${counter} - 1 )) + if [[ ${counter} > 0 ]]; then + # Nodes update their validator info every minute. + # Therefore calling more than once per minute is not productive. + sleep 60 + fi +done \ No newline at end of file From 87ff7ff19a021d2d3b69eb38719306c0bf83f3b3 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 8 Jun 2021 14:45:53 -0700 Subject: [PATCH 03/19] Added requirements.txt Signed-off-by: KoleBarnes --- fetch-validator-status/Dockerfile | 5 +++++ fetch-validator-status/requirements.txt | 3 +++ 2 files changed, 8 insertions(+) create mode 100644 fetch-validator-status/requirements.txt diff --git a/fetch-validator-status/Dockerfile b/fetch-validator-status/Dockerfile index 76938a4..dbc8b2b 100644 --- a/fetch-validator-status/Dockerfile +++ b/fetch-validator-status/Dockerfile @@ -12,6 +12,11 @@ USER $user RUN pip install pynacl gspread oauth2client +USER $user + +ADD requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + ADD networks.json . ADD *.py ./ diff --git a/fetch-validator-status/requirements.txt b/fetch-validator-status/requirements.txt new file mode 100644 index 0000000..b2384ac --- /dev/null +++ b/fetch-validator-status/requirements.txt @@ -0,0 +1,3 @@ +pynacl +gspread +oauth2client \ No newline at end of file From 8744ade391dc210a95eaf764bc8b27899a68cc87 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 8 Jun 2021 15:02:43 -0700 Subject: [PATCH 04/19] General refactoring. - Added function init_network_args_test. This function returns a tuple with the network info. - Added more logging to pool connection. - Added asyncio to requirements.txt - Minor clean-up. - Removed Commented code. 
Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 27 +++------------- .../fetch_status_library.py | 31 ++++++++++++++++++- fetch-validator-status/requirements.txt | 1 + 3 files changed, 36 insertions(+), 23 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 5638209..a9afa77 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -12,7 +12,8 @@ get_script_dir, download_genesis_file, load_network_list, - list_networks + list_networks, + init_network_args ) from DidKey import DidKey @@ -33,8 +34,7 @@ monitor_plugins.get_parse_args(parser) args, unknown = parser.parse_known_args() - verbose = args.verbose - enable_verbose(verbose) + enable_verbose(args.verbose) monitor_plugins.load_all_parse_args(args) @@ -42,24 +42,6 @@ print(json.dumps(load_network_list(), indent=2)) exit() - network_name = None - if args.net: - log("Loading known network list ...") - networks = load_network_list() - if args.net in networks: - log("Connecting to '{0}' ...".format(networks[args.net]["name"])) - args.genesis_url = networks[args.net]["genesisUrl"] - network_name = networks[args.net]["name"] - - if args.genesis_url: - download_genesis_file(args.genesis_url, args.genesis_path) - if not network_name: - network_name = args.genesis_url - if not os.path.exists(args.genesis_path): - print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) - parser.print_help() - exit() - did_seed = None if not args.seed else args.seed log("indy-vdr version:", indy_vdr.version()) @@ -69,4 +51,5 @@ else: ident = None - asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, args.genesis_path, args.nodes, ident, network_name)) \ No newline at end of file + network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) + asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) \ No newline at end of file diff --git a/fetch-validator-status/fetch_status_library.py b/fetch-validator-status/fetch_status_library.py index 9456c31..3ceaa9a 100644 --- a/fetch-validator-status/fetch_status_library.py +++ b/fetch-validator-status/fetch_status_library.py @@ -1,6 +1,7 @@ import json import os import sys +from collections import namedtuple import urllib.request from indy_vdr.ledger import ( @@ -27,6 +28,7 @@ async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nod attempt = 3 while attempt: try: + log("Connecting to Pool ...") pool = await open_pool(transactions_path=genesis_path) except: log("Pool Timed Out! 
Trying again...") @@ -35,6 +37,8 @@ async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nod exit() attempt -= 1 continue + else: + log("Connected to Pool ...") break result = [] @@ -75,4 +79,29 @@ def load_network_list(): def list_networks(): networks = load_network_list() - return networks.keys() \ No newline at end of file + return networks.keys() + +def init_network_args(network: str = None, genesis_url: str = None, genesis_path: str = None): + + if network: + log("Loading known network list ...") + networks = load_network_list() + if network in networks: + log("Connecting to '{0}' ...".format(networks[network]["name"])) + genesis_url = networks[network]["genesisUrl"] + network_name = networks[network]["name"] + + if genesis_url: + download_genesis_file(genesis_url, genesis_path) + if not network_name: + network_name = genesis_url + log(f"Setting network name = {network_name} ...") + + if not os.path.exists(genesis_path): + print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) + exit() + + Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path']) + network_info = Network_Info(network_name, genesis_url, genesis_path) + + return network_info \ No newline at end of file diff --git a/fetch-validator-status/requirements.txt b/fetch-validator-status/requirements.txt index b2384ac..da7b106 100644 --- a/fetch-validator-status/requirements.txt +++ b/fetch-validator-status/requirements.txt @@ -1,3 +1,4 @@ +asyncio pynacl gspread oauth2client \ No newline at end of file From 1cd82eaddfaaa288f1af37334a5f571f4a311e84 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Sat, 12 Jun 2021 14:58:39 -0700 Subject: [PATCH 05/19] Created rest_api.py fetch_status.py - Added parser arg to start Fast API - Moved default for genesis_path to fetch_status_library.py in init_network_args so it can be used by the command line and REST API fetch_status_library.py - Added return to fetch_status - Added default for genesis_path run.sh - Opened port Added requirements Cleaned up imports. Fixed fetch_status_library.py verbose not being enabled when using REST API. Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 16 +++- .../fetch_status_library.py | 3 + fetch-validator-status/requirements.txt | 7 +- fetch-validator-status/rest_api.py | 96 +++++++++++++++++++ fetch-validator-status/run.sh | 5 +- 5 files changed, 119 insertions(+), 8 deletions(-) create mode 100644 fetch-validator-status/rest_api.py diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index a9afa77..f791150 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -9,8 +9,6 @@ enable_verbose, log, fetch_status, - get_script_dir, - download_genesis_file, load_network_list, list_networks, init_network_args @@ -26,10 +24,11 @@ parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.") parser.add_argument("--list-nets", action="store_true", help="List known networks.") parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") - parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH") or f"{get_script_dir()}/genesis.txn" , help="The path to the genesis file describing the ledger pool. 
Can be specified using the 'GENESIS_PATH' environment variable.") + parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.") parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. The default is all of the nodes in the pool.") parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") + parser.add_argument("--web", action="store_true", help="Start API server.") monitor_plugins.get_parse_args(parser) args, unknown = parser.parse_known_args() @@ -51,5 +50,12 @@ else: ident = None - network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) - asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) \ No newline at end of file + if args.web: + log("Starting web server ...") + # Pass verbose to rest api through env var + os.environ['VERBOSE'] = str(args.verbose) + os.system('uvicorn rest_api:app --reload --host 0.0.0.0 --port 8080') + else: + network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) + log("Starting from the command line ...") + asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) \ No newline at end of file diff --git a/fetch-validator-status/fetch_status_library.py b/fetch-validator-status/fetch_status_library.py index 3ceaa9a..5ed6819 100644 --- a/fetch-validator-status/fetch_status_library.py +++ b/fetch-validator-status/fetch_status_library.py @@ -63,6 +63,7 @@ async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nod result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) print(json.dumps(result, indent=2)) + return result def get_script_dir(): return os.path.dirname(os.path.realpath(__file__)) @@ -82,6 +83,8 @@ def list_networks(): return networks.keys() def init_network_args(network: str = None, genesis_url: str = None, genesis_path: str = None): + if not genesis_path: + genesis_path = f"{get_script_dir()}/genesis.txn" if network: log("Loading known network list ...") diff --git a/fetch-validator-status/requirements.txt b/fetch-validator-status/requirements.txt index da7b106..e46b35b 100644 --- a/fetch-validator-status/requirements.txt +++ b/fetch-validator-status/requirements.txt @@ -1,4 +1,9 @@ asyncio pynacl gspread -oauth2client \ No newline at end of file +oauth2client +fastapi +pydantic +httpx +uvicorn +gunicorn \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py new file mode 100644 index 0000000..884d287 --- /dev/null +++ b/fetch-validator-status/rest_api.py @@ -0,0 +1,96 @@ +import os +import json +import argparse + +from typing import Optional +from fastapi import FastAPI, Header +from pydantic import BaseModel +from httpx import AsyncClient + +from fetch_status_library import ( + enable_verbose, + log, + fetch_status, + load_network_list, + init_network_args +) +from DidKey 
import DidKey +from plugin_collection import PluginCollection + +APP_NAME='test_name' +APP_DESCRIPTION='test_description' +APP_VERSION='app_version' + +# https://fastapi.tiangolo.com/tutorial/metadata/ +app = FastAPI( + title = APP_NAME, + description = APP_DESCRIPTION, + version = APP_VERSION +) + +args = None +monitor_plugins = None + +def set_plugin_parameters(status: bool = False, alerts: bool = False): + + # Store args and monitor_plugins for lazy loading. + global args + global monitor_plugins + + if not args: + # Create plugin instance and set default args + monitor_plugins = PluginCollection('plugins') + parser = argparse.ArgumentParser() + parser.add_argument("-v", "--verbose", default=(os.environ.get('VERBOSE', 'False').lower() == 'true'), action="store_true") + monitor_plugins.get_parse_args(parser) + args, unknown = parser.parse_known_args() + enable_verbose(args.verbose) + + # Create namspace with default args + api_args = argparse.Namespace() + for name, value in args._get_kwargs(): + setattr(api_args, name, value) + + setattr(api_args, 'status', status) + setattr(api_args, 'alerts', alerts) + + monitor_plugins.load_all_parse_args(api_args) + + return monitor_plugins + +@app.get("/networks") +async def networks(): + data = load_network_list() + return data + +@app.get("/networks/{network}") +async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): + monitor_plugins = set_plugin_parameters(status, alerts) + network_info = init_network_args(network=network) + + ident = None + if seed: + try: + ident = DidKey(seed) + log("DID:", ident.did, " Verkey:", ident.verkey) + except: + log("Invalid seed. Continuing anonymously ...") + + result = await fetch_status(monitor_plugins=monitor_plugins, genesis_path=network_info.genesis_path, ident=ident, network_name=network_info.network_name) + return result + +@app.get("/networks/{network}/{node}") +async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): + monitor_plugins = set_plugin_parameters(status, alerts) + network_info = init_network_args(network=network) + + ident = None + if seed: + try: + ident = DidKey(seed) + log("DID:", ident.did, " Verkey:", ident.verkey) + except: + log("Invalid seed. Continuing anonymously ...") + + result = await fetch_status(monitor_plugins, network_info.genesis_path, node, ident, network_info.network_name) + return result \ No newline at end of file diff --git a/fetch-validator-status/run.sh b/fetch-validator-status/run.sh index c9c431c..6f567ae 100755 --- a/fetch-validator-status/run.sh +++ b/fetch-validator-status/run.sh @@ -40,12 +40,13 @@ else fi fi -docker build -t fetch_status . > /dev/null 2>&1 +docker build -t fetch_status . #> /dev/null 2>&1 cmd="${terminalEmu} docker run --rm ${DOCKER_INTERACTIVE} \ -e "GENESIS_PATH=${GENESIS_PATH}" \ -e "GENESIS_URL=${GENESIS_URL}" \ - -e "SEED=${SEED}"" + -e "SEED=${SEED}" \ + --publish 8080:8080" # Dynamically mount teh 'conf' directory if it exists. if [ -d "./conf" ]; then From 5bbb1ae706bce4222369ebb3740250203f735d23 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Sun, 13 Jun 2021 08:22:32 -0700 Subject: [PATCH 06/19] Sorted fetch_status.py Created did helper function. Created separate instances for plug-ins in REST API. One to set default args and one for a separate user instance. Added and fixed logging. Fixed results being printed to console when running REST API. 
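
As a quick illustration of the new DID helper (a sketch based on this patch, not part of the diff; the seed shown is a placeholder, and an invalid seed simply falls back to anonymous requests):

    from fetch_status_library import create_did

    # Returns a DidKey when given a valid privileged DID seed, otherwise None,
    # in which case fetch_status() builds an anonymous request.
    ident = create_did("<privileged-did-seed>")
    if ident:
        print("DID:", ident.did, "Verkey:", ident.verkey)
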
Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 39 +++++++++---------- .../fetch_status_library.py | 26 +++++++++++-- fetch-validator-status/plugin_collection.py | 5 +-- fetch-validator-status/rest_api.py | 39 +++++++------------ 4 files changed, 55 insertions(+), 54 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index f791150..8a80aba 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -11,15 +11,14 @@ fetch_status, load_network_list, list_networks, - init_network_args + init_network_args, + create_did ) from DidKey import DidKey from plugin_collection import PluginCollection if __name__ == "__main__": - monitor_plugins = PluginCollection('plugins') - parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.") parser.add_argument("--list-nets", action="store_true", help="List known networks.") @@ -30,32 +29,30 @@ parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") parser.add_argument("--web", action="store_true", help="Start API server.") + monitor_plugins = PluginCollection('plugins') monitor_plugins.get_parse_args(parser) args, unknown = parser.parse_known_args() - - enable_verbose(args.verbose) - monitor_plugins.load_all_parse_args(args) - if args.list_nets: - print(json.dumps(load_network_list(), indent=2)) - exit() - - did_seed = None if not args.seed else args.seed - - log("indy-vdr version:", indy_vdr.version()) - if did_seed: - ident = DidKey(did_seed) - log("DID:", ident.did, " Verkey:", ident.verkey) - else: - ident = None - if args.web: log("Starting web server ...") # Pass verbose to rest api through env var os.environ['VERBOSE'] = str(args.verbose) os.system('uvicorn rest_api:app --reload --host 0.0.0.0 --port 8080') else: - network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) log("Starting from the command line ...") - asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) \ No newline at end of file + + enable_verbose(args.verbose) + + if args.list_nets: + print(json.dumps(load_network_list(), indent=2)) + exit() + + log("indy-vdr version:", indy_vdr.version()) + + did_seed = None if not args.seed else args.seed + ident = create_did(did_seed) + + network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) + result = asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) + print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/fetch_status_library.py b/fetch-validator-status/fetch_status_library.py index 5ed6819..515d7ff 100644 --- a/fetch-validator-status/fetch_status_library.py +++ b/fetch-validator-status/fetch_status_library.py @@ -31,9 +31,9 @@ async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nod log("Connecting to Pool ...") pool = await open_pool(transactions_path=genesis_path) except: - log("Pool Timed Out! Trying again...") + log("Pool Timed Out! Trying again ...") if not attempt: - print("Unable to get pool Response! 3 attempts where made. 
Exiting...") + print("Unable to get pool Response! 3 attempts where made. Exiting ...") exit() attempt -= 1 continue @@ -45,24 +45,32 @@ async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nod verifiers = {} if ident: + log(f"Building request with did: {ident.did} ...") request = build_get_validator_info_request(ident.did) ident.sign_request(request) else: + log("Building anonymous request ...") request = build_get_txn_request(None, 1, 1) from_nodes = [] if nodes: from_nodes = nodes.split(",") + log("Submitting request ...") + response = await pool.submit_action(request, node_aliases = from_nodes) + try: # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 + log("Getting list of verifiers ...") verifiers = await pool.get_verifiers() except AttributeError: + log("Unable to get list of verifiers. Plesase make sure you have the latest verson of indy-vdr.") pass # End Of Engine + log("Passing results to plugins for processing ...") result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) - print(json.dumps(result, indent=2)) + log("Processing complete.") return result def get_script_dir(): @@ -107,4 +115,14 @@ def init_network_args(network: str = None, genesis_url: str = None, genesis_path Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path']) network_info = Network_Info(network_name, genesis_url, genesis_path) - return network_info \ No newline at end of file + return network_info + +def create_did(seed): + ident = None + if seed: + try: + ident = DidKey(seed) + log("DID:", ident.did, " Verkey:", ident.verkey) + except: + log("Invalid seed. Continuing anonymously ...") + return ident \ No newline at end of file diff --git a/fetch-validator-status/plugin_collection.py b/fetch-validator-status/plugin_collection.py index 67cb0de..1cb2ee0 100644 --- a/fetch-validator-status/plugin_collection.py +++ b/fetch-validator-status/plugin_collection.py @@ -81,12 +81,11 @@ def reload_plugins(self): async def apply_all_plugins_on_value(self, result, network_name, response, verifiers): """Apply all of the plugins with the argument supplied to this function """ - self.log(f'\033[38;5;37mRunning plugins...\033[0m\n') + self.log(f'\033[38;5;37mRunning plugins ...\033[0m\n') for plugin in self.plugins: if plugin.enabled: - self.log(f'\033[38;5;37mRunning {plugin.name}...\033[0m') + self.log(f'\033[38;5;37mRunning {plugin.name} ...\033[0m') result = await plugin.perform_operation(result, network_name, response, verifiers) - self.log((f'\033[38;5;37m{plugin.name} yields value\033[0m\n')) #{result} else: self.log(f"\033[38;5;3m{plugin.name} disabled.\033[0m\n") return result diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 884d287..8afcfe1 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -12,7 +12,8 @@ log, fetch_status, load_network_list, - init_network_args + init_network_args, + create_did ) from DidKey import DidKey from plugin_collection import PluginCollection @@ -28,32 +29,32 @@ version = APP_VERSION ) -args = None +default_args = None monitor_plugins = None def set_plugin_parameters(status: bool = False, alerts: bool = False): # Store args and monitor_plugins for lazy loading. 
- global args - global monitor_plugins + global default_args - if not args: + if not default_args: # Create plugin instance and set default args - monitor_plugins = PluginCollection('plugins') + default_monitor_plugins = PluginCollection('plugins') parser = argparse.ArgumentParser() parser.add_argument("-v", "--verbose", default=(os.environ.get('VERBOSE', 'False').lower() == 'true'), action="store_true") - monitor_plugins.get_parse_args(parser) - args, unknown = parser.parse_known_args() - enable_verbose(args.verbose) + default_monitor_plugins.get_parse_args(parser) + default_args, unknown = parser.parse_known_args() + enable_verbose(default_args.verbose) # Create namspace with default args api_args = argparse.Namespace() - for name, value in args._get_kwargs(): + for name, value in default_args._get_kwargs(): setattr(api_args, name, value) setattr(api_args, 'status', status) setattr(api_args, 'alerts', alerts) + monitor_plugins = PluginCollection('plugins') monitor_plugins.load_all_parse_args(api_args) return monitor_plugins @@ -67,14 +68,7 @@ async def networks(): async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) network_info = init_network_args(network=network) - - ident = None - if seed: - try: - ident = DidKey(seed) - log("DID:", ident.did, " Verkey:", ident.verkey) - except: - log("Invalid seed. Continuing anonymously ...") + ident = create_did(seed) result = await fetch_status(monitor_plugins=monitor_plugins, genesis_path=network_info.genesis_path, ident=ident, network_name=network_info.network_name) return result @@ -83,14 +77,7 @@ async def network(network, status: bool = False, alerts: bool = False, seed: Opt async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) network_info = init_network_args(network=network) - - ident = None - if seed: - try: - ident = DidKey(seed) - log("DID:", ident.did, " Verkey:", ident.verkey) - except: - log("Invalid seed. Continuing anonymously ...") + ident = create_did(seed) result = await fetch_status(monitor_plugins, network_info.genesis_path, node, ident, network_info.network_name) return result \ No newline at end of file From 8c9d0ecd80641a66c77a0d3c72c20fbd2a9bc949 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Sun, 13 Jun 2021 15:53:43 -0700 Subject: [PATCH 07/19] Node Monitor Refactoring - Fetch status renamed to main. - Fetch status library renamed to fetch status. - Fetching of pool connection and fetch status are now classes. - Created util file for helper functions. - Fixed and added logging. - Removed some imports and requirements. 
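
For context, the refactored pieces wire together roughly like this (condensed from main.py in this patch; plug-in argument parsing is omitted for brevity, the "sbn" network id is an assumption, and the request runs anonymously because no DID is supplied):

    import asyncio
    from plugin_collection import PluginCollection
    from pool import PoolCollection
    from fetch_status import FetchStatus

    monitor_plugins = PluginCollection('plugins')  # plug-in args would normally be parsed and loaded here
    pool_collection = PoolCollection(verbose=False)
    network_info = pool_collection.get_network_info(network="sbn")
    status = FetchStatus(False, pool_collection, monitor_plugins, ident=None)
    result = asyncio.get_event_loop().run_until_complete(status.fetch(network_info))
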
Signed-off-by: KoleBarnes --- fetch-validator-status/Dockerfile | 2 +- fetch-validator-status/fetch_status.py | 103 +++++++------- .../fetch_status_library.py | 128 ------------------ fetch-validator-status/main.py | 53 ++++++++ fetch-validator-status/plugin_collection.py | 7 +- .../plugins/Example/example.py | 2 +- fetch-validator-status/pool.py | 87 ++++++++++++ fetch-validator-status/requirements.txt | 2 - fetch-validator-status/rest_api.py | 36 ++--- fetch-validator-status/util.py | 22 +++ 10 files changed, 233 insertions(+), 209 deletions(-) delete mode 100644 fetch-validator-status/fetch_status_library.py create mode 100644 fetch-validator-status/main.py create mode 100644 fetch-validator-status/pool.py create mode 100644 fetch-validator-status/util.py diff --git a/fetch-validator-status/Dockerfile b/fetch-validator-status/Dockerfile index dbc8b2b..2ca58e1 100644 --- a/fetch-validator-status/Dockerfile +++ b/fetch-validator-status/Dockerfile @@ -20,4 +20,4 @@ RUN pip install --no-cache-dir -r requirements.txt ADD networks.json . ADD *.py ./ -ENTRYPOINT ["bash", "-c", "python fetch_status.py $@", "--"] \ No newline at end of file +ENTRYPOINT ["bash", "-c", "python main.py $@", "--"] \ No newline at end of file diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 8a80aba..4f92b8a 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -1,58 +1,49 @@ -import argparse -import asyncio -import json -import os -import sys - -import indy_vdr -from fetch_status_library import ( - enable_verbose, - log, - fetch_status, - load_network_list, - list_networks, - init_network_args, - create_did +from indy_vdr.ledger import ( + build_get_validator_info_request, + build_get_txn_request, ) -from DidKey import DidKey - +from util import log from plugin_collection import PluginCollection - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") - parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.") - parser.add_argument("--list-nets", action="store_true", help="List known networks.") - parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") - parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") - parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.") - parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. 
The default is all of the nodes in the pool.") - parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") - parser.add_argument("--web", action="store_true", help="Start API server.") - - monitor_plugins = PluginCollection('plugins') - monitor_plugins.get_parse_args(parser) - args, unknown = parser.parse_known_args() - monitor_plugins.load_all_parse_args(args) - - if args.web: - log("Starting web server ...") - # Pass verbose to rest api through env var - os.environ['VERBOSE'] = str(args.verbose) - os.system('uvicorn rest_api:app --reload --host 0.0.0.0 --port 8080') - else: - log("Starting from the command line ...") - - enable_verbose(args.verbose) - - if args.list_nets: - print(json.dumps(load_network_list(), indent=2)) - exit() - - log("indy-vdr version:", indy_vdr.version()) - - did_seed = None if not args.seed else args.seed - ident = create_did(did_seed) - - network_info = init_network_args(network=args.net, genesis_url=args.genesis_url, genesis_path=args.genesis_path) - result = asyncio.get_event_loop().run_until_complete(fetch_status(monitor_plugins, network_info.genesis_path, args.nodes, ident, network_info.network_name)) - print(json.dumps(result, indent=2)) \ No newline at end of file +from DidKey import DidKey +from pool import PoolCollection + +class FetchStatus(): + def __init__(self, verbose, pool_collection: PoolCollection, monitor_plugins: PluginCollection, ident: DidKey = None): + self.verbose = verbose + self.pool_collection = pool_collection + self.monitor_plugins = monitor_plugins + self.ident = ident + + async def fetch(self, network_info, nodes: str = None): + result = [] + verifiers = {} + + pool = await self.pool_collection.get_pool(network_info) + + if self.ident: + log(f"Building request with did: {self.ident.did} ...") + request = build_get_validator_info_request(self.ident.did) + self.ident.sign_request(request) + else: + log("Building anonymous request ...") + request = build_get_txn_request(None, 1, 1) + + from_nodes = [] + if nodes: + from_nodes = nodes.split(",") + log("Submitting request ...") + + response = await pool.submit_action(request, node_aliases = from_nodes) + + try: + # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 + log("Getting list of verifiers ...") + verifiers = await pool.get_verifiers() + except AttributeError: + log("Unable to get list of verifiers. 
Plesase make sure you have the latest verson of indy-vdr.") + pass + + log("Passing results to plugins for processing ...") + result = await self.monitor_plugins.apply_all_plugins_on_value(result, network_info.network_name, response, verifiers) + log("Processing complete.") + return result \ No newline at end of file diff --git a/fetch-validator-status/fetch_status_library.py b/fetch-validator-status/fetch_status_library.py deleted file mode 100644 index 515d7ff..0000000 --- a/fetch-validator-status/fetch_status_library.py +++ /dev/null @@ -1,128 +0,0 @@ -import json -import os -import sys -from collections import namedtuple - -import urllib.request -from indy_vdr.ledger import ( - build_get_validator_info_request, - build_get_txn_request, -) -from indy_vdr.pool import open_pool -from DidKey import DidKey - -from plugin_collection import PluginCollection - -verbose = False - -def enable_verbose(enable): - global verbose - verbose = enable - -def log(*args): - if verbose: - print(*args, "\n", file=sys.stderr) - -async def fetch_status(monitor_plugins: PluginCollection, genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None): - # Start Of Engine - attempt = 3 - while attempt: - try: - log("Connecting to Pool ...") - pool = await open_pool(transactions_path=genesis_path) - except: - log("Pool Timed Out! Trying again ...") - if not attempt: - print("Unable to get pool Response! 3 attempts where made. Exiting ...") - exit() - attempt -= 1 - continue - else: - log("Connected to Pool ...") - break - - result = [] - verifiers = {} - - if ident: - log(f"Building request with did: {ident.did} ...") - request = build_get_validator_info_request(ident.did) - ident.sign_request(request) - else: - log("Building anonymous request ...") - request = build_get_txn_request(None, 1, 1) - - from_nodes = [] - if nodes: - from_nodes = nodes.split(",") - log("Submitting request ...") - - response = await pool.submit_action(request, node_aliases = from_nodes) - - try: - # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 - log("Getting list of verifiers ...") - verifiers = await pool.get_verifiers() - except AttributeError: - log("Unable to get list of verifiers. 
Plesase make sure you have the latest verson of indy-vdr.") - pass - # End Of Engine - - log("Passing results to plugins for processing ...") - result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) - log("Processing complete.") - return result - -def get_script_dir(): - return os.path.dirname(os.path.realpath(__file__)) - -def download_genesis_file(url: str, target_local_path: str): - log("Fetching genesis file ...") - target_local_path = f"{get_script_dir()}/genesis.txn" - urllib.request.urlretrieve(url, target_local_path) - -def load_network_list(): - with open(f"{get_script_dir()}/networks.json") as json_file: - networks = json.load(json_file) - return networks - -def list_networks(): - networks = load_network_list() - return networks.keys() - -def init_network_args(network: str = None, genesis_url: str = None, genesis_path: str = None): - if not genesis_path: - genesis_path = f"{get_script_dir()}/genesis.txn" - - if network: - log("Loading known network list ...") - networks = load_network_list() - if network in networks: - log("Connecting to '{0}' ...".format(networks[network]["name"])) - genesis_url = networks[network]["genesisUrl"] - network_name = networks[network]["name"] - - if genesis_url: - download_genesis_file(genesis_url, genesis_path) - if not network_name: - network_name = genesis_url - log(f"Setting network name = {network_name} ...") - - if not os.path.exists(genesis_path): - print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) - exit() - - Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path']) - network_info = Network_Info(network_name, genesis_url, genesis_path) - - return network_info - -def create_did(seed): - ident = None - if seed: - try: - ident = DidKey(seed) - log("DID:", ident.did, " Verkey:", ident.verkey) - except: - log("Invalid seed. Continuing anonymously ...") - return ident \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py new file mode 100644 index 0000000..b829ad0 --- /dev/null +++ b/fetch-validator-status/main.py @@ -0,0 +1,53 @@ +import argparse +import asyncio +import json +import os + +import indy_vdr +from util import ( + enable_verbose, + log, + create_did +) +from fetch_status import FetchStatus +from pool import PoolCollection +from plugin_collection import PluginCollection + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") + parser.add_argument("--net", choices=PoolCollection.list_networks(), help="Connect to a known network using an ID.") + parser.add_argument("--list-nets", action="store_true", help="List known networks.") + parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") + parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") + parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. 
If DID seed is not given the request will run anonymously.") + parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. The default is all of the nodes in the pool.") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") + parser.add_argument("--web", action="store_true", help="Start API server.") + + monitor_plugins = PluginCollection('plugins') + monitor_plugins.get_parse_args(parser) + args, unknown = parser.parse_known_args() + monitor_plugins.load_all_parse_args(args) + + if args.web: + log("Starting web server ...") + # Pass verbose to rest api through env var + os.environ['VERBOSE'] = str(args.verbose) + os.system('uvicorn rest_api:app --reload --host 0.0.0.0 --port 8080') + else: + log("Starting from the command line ...") + enable_verbose(args.verbose) + + if args.list_nets: + print(json.dumps(PoolCollection.load_network_list(), indent=2)) + exit() + + log("indy-vdr version:", indy_vdr.version()) + did_seed = None if not args.seed else args.seed + ident = create_did(did_seed) + + pool_collection = PoolCollection(args.verbose) + network_info = pool_collection.get_network_info(args.net, args.genesis_url, args.genesis_path) + status = FetchStatus(args.verbose, pool_collection, monitor_plugins, ident) + result = asyncio.get_event_loop().run_until_complete(status.fetch(network_info, args.nodes)) + print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/plugin_collection.py b/fetch-validator-status/plugin_collection.py index 1cb2ee0..e7d865e 100644 --- a/fetch-validator-status/plugin_collection.py +++ b/fetch-validator-status/plugin_collection.py @@ -84,7 +84,7 @@ async def apply_all_plugins_on_value(self, result, network_name, response, verif self.log(f'\033[38;5;37mRunning plugins ...\033[0m\n') for plugin in self.plugins: if plugin.enabled: - self.log(f'\033[38;5;37mRunning {plugin.name} ...\033[0m') + self.log(f'\033[38;5;37mRunning {plugin.name} ...\033[0m\n') result = await plugin.perform_operation(result, network_name, response, verifiers) else: self.log(f"\033[38;5;3m{plugin.name} disabled.\033[0m\n") @@ -144,6 +144,7 @@ def log(self, *args): print(*args, file=sys.stderr) def plugin_list(self): - self.log("\033[38;5;37m--- Plug-ins ---\033[0m") + self.log("\n\033[38;5;37m--- Plug-ins ---\033[0m") for plugin in self.plugins: - self.log(f"\033[38;5;37m{plugin.name}: {plugin.__class__.__module__}.{plugin.__class__.__name__}\033[0m") \ No newline at end of file + self.log(f"\033[38;5;37m{plugin.name}: {plugin.__class__.__module__}.{plugin.__class__.__name__}\033[0m") + self.log(f"\n") \ No newline at end of file diff --git a/fetch-validator-status/plugins/Example/example.py b/fetch-validator-status/plugins/Example/example.py index 626ba28..d312f20 100644 --- a/fetch-validator-status/plugins/Example/example.py +++ b/fetch-validator-status/plugins/Example/example.py @@ -17,7 +17,7 @@ def __init__(self): self.type = '' def parse_args(self, parser): - # Declear your parser arguments here. This will add them to the fetch_status.py parser arguments. + # Declear your parser arguments here. This will add them to the main.py parser arguments. 
parser.add_argument("--example", action="store_true", help="Example Plug-in: Runs expample plug-in") # Here you set your variables with the arguments from the parser diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py new file mode 100644 index 0000000..3b2c295 --- /dev/null +++ b/fetch-validator-status/pool.py @@ -0,0 +1,87 @@ +import os +import json +import urllib.request +import sys +from collections import namedtuple +from util import log +from indy_vdr.pool import open_pool + +class PoolCollection(object): + def __init__(self, verbose): + self.verbose = verbose + + async def fetch_pool_connection(self, genesis_path): + attempt = 3 + while attempt: + try: + log("Connecting to Pool ...") + pool = await open_pool(transactions_path=genesis_path) + except: + log("Pool Timed Out! Trying again ...") + if not attempt: + print("Unable to get pool Response! 3 attempts where made. Exiting ...") + exit() + attempt -= 1 + continue + else: + log("Connected to Pool ...") + break + return pool + + async def get_pool(self, network_info): + # manage dict + # get network_info use as key + # look into dict with network_info as tupl or network_info.network_name as key + # if key found return value pool + # other wise fetch pool connection + # add value to dict with key + # return value + + return await self.fetch_pool_connection(network_info.genesis_path) + + def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None): + if not genesis_path: + genesis_path = f"{PoolCollection.get_script_dir()}/genesis.txn" # use as base dir save file with using network name or genesis url + + if network: + log("Loading known network list ...") + networks = PoolCollection.load_network_list() + if network in networks: + log("Connecting to '{0}' ...".format(networks[network]["name"])) + genesis_url = networks[network]["genesisUrl"] + network_name = networks[network]["name"] # if dosen't exist brake down the url ^ + + if genesis_url: + self.download_genesis_file(genesis_url, genesis_path) + if not network_name: + network_name = genesis_url + log(f"Setting network name = {network_name} ...") + + if not os.path.exists(genesis_path): + print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) + exit() + + Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path']) + network_info = Network_Info(network_name, genesis_url, genesis_path) + + return network_info + + def download_genesis_file(self, url: str, target_local_path: str): + log("Fetching genesis file ...") + target_local_path = f"{PoolCollection.get_script_dir()}/genesis.txn" + urllib.request.urlretrieve(url, target_local_path) + + @staticmethod + def get_script_dir(): + return os.path.dirname(os.path.realpath(__file__)) + + @staticmethod + def load_network_list(): + with open(f"{PoolCollection.get_script_dir()}/networks.json") as json_file: + networks = json.load(json_file) + return networks + + @staticmethod + def list_networks(): + networks = PoolCollection.load_network_list() + return networks.keys() diff --git a/fetch-validator-status/requirements.txt b/fetch-validator-status/requirements.txt index e46b35b..fd5f597 100644 --- a/fetch-validator-status/requirements.txt +++ b/fetch-validator-status/requirements.txt @@ -3,7 +3,5 @@ pynacl gspread oauth2client fastapi -pydantic -httpx uvicorn gunicorn \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 8afcfe1..ca23577 100644 
--- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -1,21 +1,17 @@ import os -import json import argparse from typing import Optional from fastapi import FastAPI, Header -from pydantic import BaseModel -from httpx import AsyncClient -from fetch_status_library import ( +from util import ( enable_verbose, - log, - fetch_status, - load_network_list, - init_network_args, +# log, create_did ) -from DidKey import DidKey + +from pool import PoolCollection +from fetch_status import FetchStatus from plugin_collection import PluginCollection APP_NAME='test_name' @@ -33,7 +29,6 @@ monitor_plugins = None def set_plugin_parameters(status: bool = False, alerts: bool = False): - # Store args and monitor_plugins for lazy loading. global default_args @@ -46,7 +41,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): default_args, unknown = parser.parse_known_args() enable_verbose(default_args.verbose) - # Create namspace with default args + # Create namespace with default args api_args = argparse.Namespace() for name, value in default_args._get_kwargs(): setattr(api_args, name, value) @@ -61,23 +56,28 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): @app.get("/networks") async def networks(): - data = load_network_list() + data = PoolCollection.load_network_list() return data @app.get("/networks/{network}") async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) - network_info = init_network_args(network=network) ident = create_did(seed) - - result = await fetch_status(monitor_plugins=monitor_plugins, genesis_path=network_info.genesis_path, ident=ident, network_name=network_info.network_name) + pool_collection = PoolCollection(default_args.verbose) + network_info = pool_collection.get_network_info(network=network) + status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) + result = await status.fetch(network_info=network_info) return result @app.get("/networks/{network}/{node}") async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) - network_info = init_network_args(network=network) + ident = create_did(seed) - - result = await fetch_status(monitor_plugins, network_info.genesis_path, node, ident, network_info.network_name) + pool_collection = PoolCollection(default_args.verbose) + network_info = pool_collection.get_network_info(network=network) + status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) + result = await status.fetch(network_info, node) + # result = await status.fetch(network_info, ident, node) + return result \ No newline at end of file diff --git a/fetch-validator-status/util.py b/fetch-validator-status/util.py new file mode 100644 index 0000000..53522e4 --- /dev/null +++ b/fetch-validator-status/util.py @@ -0,0 +1,22 @@ +import sys +from DidKey import DidKey + +verbose = False + +def enable_verbose(enable): + global verbose + verbose = enable + +def log(*args): + if verbose: + print(*args, "\n", file=sys.stderr) + +def create_did(seed): + ident = None + if seed: + try: + ident = DidKey(seed) + log("DID:", ident.did, " Verkey:", ident.verkey) + except: + log("Invalid seed. 
Continuing anonymously ...") + return ident \ No newline at end of file From 93d2fa538839f8d0506c793e32dc98a7f95045cf Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 15 Jun 2021 15:11:12 -0700 Subject: [PATCH 08/19] Added caching of pool connections with async thread lock to avoid multi request errors when running REST API. Added Singleton meta class to Pool class to have a global pool cache. Added refactoring Notes. Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 1 + fetch-validator-status/pool.py | 58 +++++++++++++++++++------- fetch-validator-status/rest_api.py | 14 +++++-- 3 files changed, 54 insertions(+), 19 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 4f92b8a..cd0df22 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -18,6 +18,7 @@ async def fetch(self, network_info, nodes: str = None): result = [] verifiers = {} + # network_info = pool_collection.get_network_info(network=network) pool = await self.pool_collection.get_pool(network_info) if self.ident: diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index 3b2c295..eb9a9ea 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -1,14 +1,26 @@ import os +from os import path import json import urllib.request import sys +import asyncio from collections import namedtuple from util import log from indy_vdr.pool import open_pool -class PoolCollection(object): +# https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python +class Singleton(type): + _instances = {} + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] + +class PoolCollection(object, metaclass=Singleton): def __init__(self, verbose): self.verbose = verbose + self.network_cache = {} + self.lock = asyncio.Lock() async def fetch_pool_connection(self, genesis_path): attempt = 3 @@ -29,34 +41,51 @@ async def fetch_pool_connection(self, genesis_path): return pool async def get_pool(self, network_info): - # manage dict - # get network_info use as key - # look into dict with network_info as tupl or network_info.network_name as key - # if key found return value pool - # other wise fetch pool connection - # add value to dict with key - # return value - return await self.fetch_pool_connection(network_info.genesis_path) + async with self.lock: + if network_info.network_name in self.network_cache: + log(f"Pool for {network_info.network_name} found in cache ... ") + pool = self.network_cache[network_info.network_name]['pool'] + else: + log(f"Pool for {network_info.network_name} not found in cache, creating new connection ... 
") + self.network_cache[network_info.network_name] = {} + self.network_cache[network_info.network_name]['genesis_path'] = network_info.genesis_path + self.network_cache[network_info.network_name]['genesis_url'] = network_info.genesis_url + pool = await self.fetch_pool_connection(network_info.genesis_path) + self.network_cache[network_info.network_name]['pool'] = pool - def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None): - if not genesis_path: - genesis_path = f"{PoolCollection.get_script_dir()}/genesis.txn" # use as base dir save file with using network name or genesis url + return pool + def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None): + network_name = None + genesis_path_base = f"{PoolCollection.get_script_dir()}/" + if network: log("Loading known network list ...") networks = PoolCollection.load_network_list() if network in networks: log("Connecting to '{0}' ...".format(networks[network]["name"])) genesis_url = networks[network]["genesisUrl"] - network_name = networks[network]["name"] # if dosen't exist brake down the url ^ + network_name = networks[network]["name"] if genesis_url: - self.download_genesis_file(genesis_url, genesis_path) if not network_name: network_name = genesis_url log(f"Setting network name = {network_name} ...") + if not genesis_path: + network_name_path = network_name.replace("https://", "") + network_name_path = network_name_path.replace(" ", "_") + network_name_path = network_name_path.replace("/", "_") + network_name_path = network_name_path.replace(".", "_") + genesis_path = f"{genesis_path_base}{network_name_path}/" + if not path.exists(genesis_path): + os.makedirs(genesis_path) + genesis_path = f"{genesis_path}genesis.txn" + # genesis_path = f"{genesis_path_base}/genesis.txn" # use as base dir save file with using network name or genesis url + + self.download_genesis_file(genesis_url, genesis_path) + if not os.path.exists(genesis_path): print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) exit() @@ -68,7 +97,6 @@ def get_network_info(self, network: str = None, genesis_url: str = None, genesis def download_genesis_file(self, url: str, target_local_path: str): log("Fetching genesis file ...") - target_local_path = f"{PoolCollection.get_script_dir()}/genesis.txn" urllib.request.urlretrieve(url, target_local_path) @staticmethod diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index ca23577..0bc1500 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -27,6 +27,7 @@ default_args = None monitor_plugins = None +pool_collection = None def set_plugin_parameters(status: bool = False, alerts: bool = False): # Store args and monitor_plugins for lazy loading. 
@@ -40,6 +41,8 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): default_monitor_plugins.get_parse_args(parser) default_args, unknown = parser.parse_known_args() enable_verbose(default_args.verbose) + global pool_collection + pool_collection = PoolCollection(default_args.verbose) # Create namespace with default args api_args = argparse.Namespace() @@ -63,7 +66,6 @@ async def networks(): async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - pool_collection = PoolCollection(default_args.verbose) network_info = pool_collection.get_network_info(network=network) status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) result = await status.fetch(network_info=network_info) @@ -74,10 +76,14 @@ async def node(network, node, status: bool = False, alerts: bool = False, seed: monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - pool_collection = PoolCollection(default_args.verbose) + network_info = pool_collection.get_network_info(network=network) status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) + # TODO status = FetchStatus(default_args.verbose, pool_collection) result = await status.fetch(network_info, node) - # result = await status.fetch(network_info, ident, node) + # TODO result = await status.fetch(network, ident, node, monitor_plugins, network) - return result \ No newline at end of file + return result + +# TODO fetchstatus.fetch for be refactored to take network, node, ident and monitor_plugins +# TODO FetchStatus as singleton \ No newline at end of file From 3a3a3342a954f95bb4569039f76a5110535e9730 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Thu, 17 Jun 2021 08:34:07 -0700 Subject: [PATCH 09/19] FetchStatus Class refactor. 
Fixed import in pool.py Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 18 ++++++++---------- fetch-validator-status/main.py | 7 ++----- fetch-validator-status/pool.py | 3 +-- fetch-validator-status/rest_api.py | 19 ++++--------------- 4 files changed, 15 insertions(+), 32 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index cd0df22..3748773 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -8,23 +8,21 @@ from pool import PoolCollection class FetchStatus(): - def __init__(self, verbose, pool_collection: PoolCollection, monitor_plugins: PluginCollection, ident: DidKey = None): + def __init__(self, verbose, pool_collection: PoolCollection): self.verbose = verbose self.pool_collection = pool_collection - self.monitor_plugins = monitor_plugins - self.ident = ident - async def fetch(self, network_info, nodes: str = None): + async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None): result = [] verifiers = {} - # network_info = pool_collection.get_network_info(network=network) + network_info = self.pool_collection.get_network_info(network=network) pool = await self.pool_collection.get_pool(network_info) - if self.ident: - log(f"Building request with did: {self.ident.did} ...") - request = build_get_validator_info_request(self.ident.did) - self.ident.sign_request(request) + if ident: + log(f"Building request with did: {ident.did} ...") + request = build_get_validator_info_request(ident.did) + ident.sign_request(request) else: log("Building anonymous request ...") request = build_get_txn_request(None, 1, 1) @@ -45,6 +43,6 @@ async def fetch(self, network_info, nodes: str = None): pass log("Passing results to plugins for processing ...") - result = await self.monitor_plugins.apply_all_plugins_on_value(result, network_info.network_name, response, verifiers) + result = await monitor_plugins.apply_all_plugins_on_value(result, network_info.network_name, response, verifiers) log("Processing complete.") return result \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index b829ad0..a402316 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -2,7 +2,6 @@ import asyncio import json import os - import indy_vdr from util import ( enable_verbose, @@ -45,9 +44,7 @@ log("indy-vdr version:", indy_vdr.version()) did_seed = None if not args.seed else args.seed ident = create_did(did_seed) - pool_collection = PoolCollection(args.verbose) - network_info = pool_collection.get_network_info(args.net, args.genesis_url, args.genesis_path) - status = FetchStatus(args.verbose, pool_collection, monitor_plugins, ident) - result = asyncio.get_event_loop().run_until_complete(status.fetch(network_info, args.nodes)) + status = FetchStatus(args.verbose, pool_collection) + result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident)) print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index eb9a9ea..69838d6 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -1,5 +1,4 @@ import os -from os import path import json import urllib.request import sys @@ -79,7 +78,7 @@ def get_network_info(self, network: str = None, genesis_url: str = None, genesis network_name_path = network_name_path.replace("/", "_") 
network_name_path = network_name_path.replace(".", "_") genesis_path = f"{genesis_path_base}{network_name_path}/" - if not path.exists(genesis_path): + if not os.path.exists(genesis_path): os.makedirs(genesis_path) genesis_path = f"{genesis_path}genesis.txn" # genesis_path = f"{genesis_path_base}/genesis.txn" # use as base dir save file with using network name or genesis url diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 0bc1500..c3663db 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -1,15 +1,12 @@ import os import argparse - from typing import Optional from fastapi import FastAPI, Header - from util import ( enable_verbose, # log, create_did ) - from pool import PoolCollection from fetch_status import FetchStatus from plugin_collection import PluginCollection @@ -66,24 +63,16 @@ async def networks(): async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - network_info = pool_collection.get_network_info(network=network) - status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) - result = await status.fetch(network_info=network_info) + status = FetchStatus(default_args.verbose, pool_collection) + result = await status.fetch(network=network, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) - ident = create_did(seed) - - network_info = pool_collection.get_network_info(network=network) - status = FetchStatus(default_args.verbose, pool_collection, monitor_plugins, ident) - # TODO status = FetchStatus(default_args.verbose, pool_collection) - result = await status.fetch(network_info, node) - # TODO result = await status.fetch(network, ident, node, monitor_plugins, network) - + status = FetchStatus(default_args.verbose, pool_collection) + result = await status.fetch(network, monitor_plugins, node, ident) return result -# TODO fetchstatus.fetch for be refactored to take network, node, ident and monitor_plugins # TODO FetchStatus as singleton \ No newline at end of file From 65cc9ef61a98765eebe6350d35d6b2ba641e1b24 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Thu, 17 Jun 2021 08:53:55 -0700 Subject: [PATCH 10/19] Fetch status class has been made a singleton. Singleton meta class now has its own file. Changed string manipulation in fetch pool connection for something better. Added genesis path and url to fetch status class. Added comments. 
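For reference, the behaviour the Singleton metaclass gives PoolCollection and FetchStatus can be shown with a minimal, self-contained sketch; the Demo class and values are purely illustrative, and only the singleton.py module introduced in this patch is assumed:

from singleton import Singleton

class Demo(object, metaclass=Singleton):
    def __init__(self, verbose):
        self.verbose = verbose

first = Demo(True)
second = Demo(False)   # returns the cached instance; __init__ does not run again
assert first is second
assert second.verbose is True

Because the metaclass __call__ short-circuits once an instance exists, constructor arguments on later calls are ignored, which is how the REST API handlers can construct PoolCollection and FetchStatus repeatedly and still share a single pool cache.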
Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 7 ++++--- fetch-validator-status/main.py | 2 +- fetch-validator-status/pool.py | 23 +++++++--------------- fetch-validator-status/rest_api.py | 27 +++++++++++++++++--------- fetch-validator-status/singleton.py | 8 ++++++++ 5 files changed, 38 insertions(+), 29 deletions(-) create mode 100644 fetch-validator-status/singleton.py diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 3748773..b70f971 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -6,17 +6,18 @@ from plugin_collection import PluginCollection from DidKey import DidKey from pool import PoolCollection +from singleton import Singleton -class FetchStatus(): +class FetchStatus(object, metaclass=Singleton): def __init__(self, verbose, pool_collection: PoolCollection): self.verbose = verbose self.pool_collection = pool_collection - async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None): + async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None, genesis_url: str = None, genesis_path: str = None): result = [] verifiers = {} - network_info = self.pool_collection.get_network_info(network=network) + network_info = self.pool_collection.get_network_info(network, genesis_url, genesis_path) pool = await self.pool_collection.get_pool(network_info) if ident: diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index a402316..143e405 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -46,5 +46,5 @@ ident = create_did(did_seed) pool_collection = PoolCollection(args.verbose) status = FetchStatus(args.verbose, pool_collection) - result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident)) + result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident, args.genesis_url, args.genesis_path)) print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index 69838d6..2904ba6 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -3,17 +3,11 @@ import urllib.request import sys import asyncio +import re from collections import namedtuple from util import log from indy_vdr.pool import open_pool - -# https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python -class Singleton(type): - _instances = {} - def __call__(cls, *args, **kwargs): - if cls not in cls._instances: - cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) - return cls._instances[cls] +from singleton import Singleton class PoolCollection(object, metaclass=Singleton): def __init__(self, verbose): @@ -40,19 +34,20 @@ async def fetch_pool_connection(self, genesis_path): return pool async def get_pool(self, network_info): - + # Network pool connection cache with async thread lock for REST API. async with self.lock: if network_info.network_name in self.network_cache: + # Use cache. log(f"Pool for {network_info.network_name} found in cache ... ") pool = self.network_cache[network_info.network_name]['pool'] else: + # Create cache. log(f"Pool for {network_info.network_name} not found in cache, creating new connection ... 
") self.network_cache[network_info.network_name] = {} self.network_cache[network_info.network_name]['genesis_path'] = network_info.genesis_path self.network_cache[network_info.network_name]['genesis_url'] = network_info.genesis_url pool = await self.fetch_pool_connection(network_info.genesis_path) self.network_cache[network_info.network_name]['pool'] = pool - return pool def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None): @@ -71,20 +66,16 @@ def get_network_info(self, network: str = None, genesis_url: str = None, genesis if not network_name: network_name = genesis_url log(f"Setting network name = {network_name} ...") - if not genesis_path: + # Remove and replace parts of the string to make a file name to create the path. network_name_path = network_name.replace("https://", "") - network_name_path = network_name_path.replace(" ", "_") - network_name_path = network_name_path.replace("/", "_") - network_name_path = network_name_path.replace(".", "_") + network_name_path = re.sub('[ /.]', '_', network_name_path) genesis_path = f"{genesis_path_base}{network_name_path}/" if not os.path.exists(genesis_path): os.makedirs(genesis_path) genesis_path = f"{genesis_path}genesis.txn" - # genesis_path = f"{genesis_path_base}/genesis.txn" # use as base dir save file with using network name or genesis url self.download_genesis_file(genesis_url, genesis_path) - if not os.path.exists(genesis_path): print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) exit() diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index c3663db..2514e2f 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -11,9 +11,9 @@ from fetch_status import FetchStatus from plugin_collection import PluginCollection -APP_NAME='test_name' +APP_NAME='Node Monitor' APP_DESCRIPTION='test_description' -APP_VERSION='app_version' +APP_VERSION='0.0.0' # https://fastapi.tiangolo.com/tutorial/metadata/ app = FastAPI( @@ -26,6 +26,9 @@ monitor_plugins = None pool_collection = None +# TODO fix +status_test = None + def set_plugin_parameters(status: bool = False, alerts: bool = False): # Store args and monitor_plugins for lazy loading. 
global default_args @@ -41,14 +44,20 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): global pool_collection pool_collection = PoolCollection(default_args.verbose) - # Create namespace with default args + # TODO fix + global status_test + status_test = FetchStatus(default_args.verbose, pool_collection) + + # Create namespace with default args and load them into api_args api_args = argparse.Namespace() for name, value in default_args._get_kwargs(): setattr(api_args, name, value) + # Set api_args with the values from the parameters setattr(api_args, 'status', status) setattr(api_args, 'alerts', alerts) + # Create anf load plugins with api_args monitor_plugins = PluginCollection('plugins') monitor_plugins.load_all_parse_args(api_args) @@ -63,16 +72,16 @@ async def networks(): async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - status = FetchStatus(default_args.verbose, pool_collection) - result = await status.fetch(network=network, monitor_plugins=monitor_plugins, ident=ident) + + # TODO fix + result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - status = FetchStatus(default_args.verbose, pool_collection) - result = await status.fetch(network, monitor_plugins, node, ident) - return result -# TODO FetchStatus as singleton \ No newline at end of file + # TODO fix + result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + return result \ No newline at end of file diff --git a/fetch-validator-status/singleton.py b/fetch-validator-status/singleton.py new file mode 100644 index 0000000..fd27736 --- /dev/null +++ b/fetch-validator-status/singleton.py @@ -0,0 +1,8 @@ +# Meta Class +# https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python +class Singleton(type): + _instances = {} + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] \ No newline at end of file From f5808ba32776a0ab37a03b51695d6cc78ba04892 Mon Sep 17 00:00:00 2001 From: Wade Barnes Date: Sun, 20 Jun 2021 17:58:12 -0700 Subject: [PATCH 11/19] Refactor PoolCollection - Pull the known network related code out into it's own class; Networks - Refactor network and pool resolution to use network ids. - Update dependent code. 
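As a rough usage sketch of the refactored classes (the network id "sbn" is a placeholder for any key in networks.json, and plugin handling is omitted), pool resolution now flows through Networks and PoolCollection:

import asyncio
from pool import PoolCollection, Networks

networks = Networks()
pool_collection = PoolCollection(verbose=True, networks=networks)

async def demo():
    # get_pool() resolves the id, downloads the genesis file if needed,
    # and caches the open pool connection for subsequent calls.
    pool, network_name = await pool_collection.get_pool("sbn")
    print("connected to", network_name)

asyncio.get_event_loop().run_until_complete(demo())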
Testing is not complete Signed-off-by: Wade Barnes --- fetch-validator-status/fetch_status.py | 8 +- fetch-validator-status/main.py | 16 ++- fetch-validator-status/pool.py | 170 +++++++++++++++---------- fetch-validator-status/rest_api.py | 10 +- 4 files changed, 122 insertions(+), 82 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index b70f971..91ffc86 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -13,13 +13,11 @@ def __init__(self, verbose, pool_collection: PoolCollection): self.verbose = verbose self.pool_collection = pool_collection - async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None, genesis_url: str = None, genesis_path: str = None): + async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None): result = [] verifiers = {} - network_info = self.pool_collection.get_network_info(network, genesis_url, genesis_path) - pool = await self.pool_collection.get_pool(network_info) - + pool, network_name = await self.pool_collection.get_pool(network_id) if ident: log(f"Building request with did: {ident.did} ...") request = build_get_validator_info_request(ident.did) @@ -44,6 +42,6 @@ async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = N pass log("Passing results to plugins for processing ...") - result = await monitor_plugins.apply_all_plugins_on_value(result, network_info.network_name, response, verifiers) + result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) log("Processing complete.") return result \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index 143e405..0f22b81 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -10,11 +10,12 @@ ) from fetch_status import FetchStatus from pool import PoolCollection +from pool import Networks from plugin_collection import PluginCollection if __name__ == "__main__": parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") - parser.add_argument("--net", choices=PoolCollection.list_networks(), help="Connect to a known network using an ID.") + parser.add_argument("--net", choices=Networks.get_names(), help="Connect to a known network using an ID.") parser.add_argument("--list-nets", action="store_true", help="List known networks.") parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") @@ -38,13 +39,20 @@ enable_verbose(args.verbose) if args.list_nets: - print(json.dumps(PoolCollection.load_network_list(), indent=2)) + print(json.dumps(Networks.get_all(), indent=2)) exit() log("indy-vdr version:", indy_vdr.version()) did_seed = None if not args.seed else args.seed ident = create_did(did_seed) - pool_collection = PoolCollection(args.verbose) + networks = Networks() + + # ToDo: + # - Flesh out Networks.resolve so this registers a adhoc network + # (i.e. 
user passed in args.genesis_url, or args.genesis_path rather than a known network id) + networks.resolve(args.net, args.genesis_url, args.genesis_path) + + pool_collection = PoolCollection(args.verbose, networks) status = FetchStatus(args.verbose, pool_collection) - result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident, args.genesis_url, args.genesis_path)) + result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident)) print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index 2904ba6..25073cb 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -8,11 +8,91 @@ from util import log from indy_vdr.pool import open_pool from singleton import Singleton - + + +Network = namedtuple('Network', ['name', 'genesis_url', 'genesis_path']) + +class Networks(object, metaclass=Singleton): + def __init__(self): + self._networks = self.__load_network_list() + + def __get_script_dir(self): + return os.path.dirname(os.path.realpath(__file__)) + + def __load_network_list(self): + log("Loading known network list ...") + with open(f"{self.__get_script_dir()}/networks.json") as json_file: + networks = json.load(json_file) + return networks + + @property + def names(self): + return self._networks.keys() + + @property + def networks(self): + return self._networks + + @staticmethod + def get_names(): + networks = Networks() + return networks.names + + @staticmethod + def get_all(): + networks = Networks() + return networks.networks + + @staticmethod + def __download_genesis_file(url: str, target_local_path: str): + log("Fetching genesis file ...") + urllib.request.urlretrieve(url, target_local_path) + + # ToDo: + # - Refactor to maintain the list of networks dynamically using self._networks + # - In the case a network does not existing the list add it. + # - For example in the case a user provides a genesis_url or genesis_path rather than a named named (known) network. + # - The key for the network should be dynamically generated, could simply be Network#; Network1, Network2, etc. + # - As genesis files are downloaded (or provided) the entries should be updated with the genesis_path information. + # - Genesis files should only be downloaded for entries without genesis_path info. + def resolve(self, network: str = None, genesis_url: str = None, genesis_path: str = None): + network_id = None + genesis_path_base = f"{self.__get_script_dir()}/" + + if network: + if network in self.names: + log("Connecting to '{0}' ...".format(self.networks[network]["name"])) + genesis_url = self.networks[network]["genesisUrl"] + network_id = self.networks[network]["name"] + + if genesis_url: + if not network_id: + network_id = genesis_url + log(f"Setting network name = {network_id} ...") + if not genesis_path: + # Remove and replace parts of the string to make a file name to create the path. 
+ network_id_path = network_id.replace("https://", "") + network_id_path = re.sub('[ /.]', '_', network_id_path) + genesis_path = f"{genesis_path_base}{network_id_path}/" + if not os.path.exists(genesis_path): + os.makedirs(genesis_path) + genesis_path = f"{genesis_path}genesis.txn" + + Networks.__download_genesis_file(genesis_url, genesis_path) + if not os.path.exists(genesis_path): + print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) + exit() + + network = Network(network_id, genesis_url, genesis_path) + + return network + + class PoolCollection(object, metaclass=Singleton): - def __init__(self, verbose): + def __init__(self, verbose, networks: Networks): self.verbose = verbose - self.network_cache = {} + self.networks = networks + self.pool_cache = {} self.lock = asyncio.Lock() async def fetch_pool_connection(self, genesis_path): @@ -33,73 +113,25 @@ async def fetch_pool_connection(self, genesis_path): break return pool - async def get_pool(self, network_info): + # ToDo: + # - Once Networks.resolve is fully fleshed out and the Networks class managaes all of the network properties + # this class no longer has to manage the 'genesis_path' and 'genesis_url' properties, it can use the + # networks instance for lookup, and cache and look up information by network key rather than network name; + # Networks.names (the network keys), rather than full Networks.networks[key].name (network name). + async def get_pool(self, network_id): + network = self.networks.resolve(network_id) # Network pool connection cache with async thread lock for REST API. async with self.lock: - if network_info.network_name in self.network_cache: + if network.name in self.pool_cache: # Use cache. - log(f"Pool for {network_info.network_name} found in cache ... ") - pool = self.network_cache[network_info.network_name]['pool'] - else: + log(f"Pool for {network.name} found in cache ... ") + pool = self.pool_cache[network.name]['pool'] + else: # Create cache. - log(f"Pool for {network_info.network_name} not found in cache, creating new connection ... ") - self.network_cache[network_info.network_name] = {} - self.network_cache[network_info.network_name]['genesis_path'] = network_info.genesis_path - self.network_cache[network_info.network_name]['genesis_url'] = network_info.genesis_url - pool = await self.fetch_pool_connection(network_info.genesis_path) - self.network_cache[network_info.network_name]['pool'] = pool - return pool - - def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None): - network_name = None - genesis_path_base = f"{PoolCollection.get_script_dir()}/" - - if network: - log("Loading known network list ...") - networks = PoolCollection.load_network_list() - if network in networks: - log("Connecting to '{0}' ...".format(networks[network]["name"])) - genesis_url = networks[network]["genesisUrl"] - network_name = networks[network]["name"] - - if genesis_url: - if not network_name: - network_name = genesis_url - log(f"Setting network name = {network_name} ...") - if not genesis_path: - # Remove and replace parts of the string to make a file name to create the path. 
- network_name_path = network_name.replace("https://", "") - network_name_path = re.sub('[ /.]', '_', network_name_path) - genesis_path = f"{genesis_path_base}{network_name_path}/" - if not os.path.exists(genesis_path): - os.makedirs(genesis_path) - genesis_path = f"{genesis_path}genesis.txn" - - self.download_genesis_file(genesis_url, genesis_path) - if not os.path.exists(genesis_path): - print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) - exit() - - Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path']) - network_info = Network_Info(network_name, genesis_url, genesis_path) - - return network_info - - def download_genesis_file(self, url: str, target_local_path: str): - log("Fetching genesis file ...") - urllib.request.urlretrieve(url, target_local_path) - - @staticmethod - def get_script_dir(): - return os.path.dirname(os.path.realpath(__file__)) - - @staticmethod - def load_network_list(): - with open(f"{PoolCollection.get_script_dir()}/networks.json") as json_file: - networks = json.load(json_file) - return networks - - @staticmethod - def list_networks(): - networks = PoolCollection.load_network_list() - return networks.keys() + log(f"Pool for {network.name} not found in cache, creating new connection ... ") + self.pool_cache[network.name] = {} + self.pool_cache[network.name]['genesis_path'] = network.genesis_path + self.pool_cache[network.name]['genesis_url'] = network.genesis_url + pool = await self.fetch_pool_connection(network.genesis_path) + self.pool_cache[network.name]['pool'] = pool + return pool, network.name \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 2514e2f..7890a8c 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -8,6 +8,7 @@ create_did ) from pool import PoolCollection +from pool import Networks from fetch_status import FetchStatus from plugin_collection import PluginCollection @@ -26,6 +27,7 @@ monitor_plugins = None pool_collection = None + # TODO fix status_test = None @@ -42,7 +44,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): default_args, unknown = parser.parse_known_args() enable_verbose(default_args.verbose) global pool_collection - pool_collection = PoolCollection(default_args.verbose) + pool_collection = PoolCollection(default_args.verbose, Networks()) # TODO fix global status_test @@ -65,7 +67,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): @app.get("/networks") async def networks(): - data = PoolCollection.load_network_list() + data = Networks.get_all() return data @app.get("/networks/{network}") @@ -74,7 +76,7 @@ async def network(network, status: bool = False, alerts: bool = False, seed: Opt ident = create_did(seed) # TODO fix - result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, ident=ident) + result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") @@ -83,5 +85,5 @@ async def node(network, node, status: bool = False, alerts: bool = False, seed: ident = create_did(seed) # TODO fix - result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) return result \ No newline at end of file From 
c659c6737929f9bcf321a605ed568ffbe604242d Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Wed, 23 Jun 2021 13:02:20 -0700 Subject: [PATCH 12/19] Made seed_as_bytes an instance method in DidKey. Renamed Networks.get_names to Networks.get_ids, and Networks.get_all to Networks.get_details. Renamed status_test to node_info and cleaned up global variables in rest_api.py. Fixed comments and logs. Signed-off-by: KoleBarnes --- fetch-validator-status/DidKey.py | 17 +++++++++-------- fetch-validator-status/fetch_status.py | 2 +- fetch-validator-status/main.py | 4 ++-- fetch-validator-status/pool.py | 10 +++++----- fetch-validator-status/rest_api.py | 26 ++++++++------------------ 5 files changed, 25 insertions(+), 34 deletions(-) diff --git a/fetch-validator-status/DidKey.py b/fetch-validator-status/DidKey.py index 1a92597..efb5948 100644 --- a/fetch-validator-status/DidKey.py +++ b/fetch-validator-status/DidKey.py @@ -5,8 +5,9 @@ class DidKey: def __init__(self, seed): - seed = seed_as_bytes(seed) - self.sk = nacl.signing.SigningKey(seed) + self.seed = seed + self.seed = self.seed_as_bytes() + self.sk = nacl.signing.SigningKey(self.seed) self.vk = bytes(self.sk.verify_key) self.did = base58.b58encode(self.vk[:16]).decode("ascii") self.verkey = base58.b58encode(self.vk).decode("ascii") @@ -15,9 +16,9 @@ def sign_request(self, req: Request): signed = self.sk.sign(req.signature_input) req.set_signature(signed.signature) -def seed_as_bytes(seed): - if not seed or isinstance(seed, bytes): - return seed - if len(seed) != 32: - return base64.b64decode(seed) - return seed.encode("ascii") \ No newline at end of file + def seed_as_bytes(self): + if not self.seed or isinstance(self.seed, bytes): + return self.seed + if len(self.seed) != 32: + return base64.b64decode(self.seed) + return self.seed.encode("ascii") \ No newline at end of file diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 91ffc86..e3f4fc8 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -38,7 +38,7 @@ async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: log("Getting list of verifiers ...") verifiers = await pool.get_verifiers() except AttributeError: - log("Unable to get list of verifiers. Plesase make sure you have the latest verson of indy-vdr.") + log("Unable to get list of verifiers. Please make sure you have the latest version of indy-vdr.") pass log("Passing results to plugins for processing ...") diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index 0f22b81..6dd6c70 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -15,7 +15,7 @@ if __name__ == "__main__": parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") - parser.add_argument("--net", choices=Networks.get_names(), help="Connect to a known network using an ID.") + parser.add_argument("--net", choices=Networks.get_ids(), help="Connect to a known network using an ID.") parser.add_argument("--list-nets", action="store_true", help="List known networks.") parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. 
Can be specified using the 'GENESIS_PATH' environment variable.") @@ -39,7 +39,7 @@ enable_verbose(args.verbose) if args.list_nets: - print(json.dumps(Networks.get_all(), indent=2)) + print(json.dumps(Networks.get_details(), indent=2)) exit() log("indy-vdr version:", indy_vdr.version()) diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index 25073cb..d87ea08 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -34,12 +34,12 @@ def networks(self): return self._networks @staticmethod - def get_names(): + def get_ids(): networks = Networks() return networks.names @staticmethod - def get_all(): + def get_details(): networks = Networks() return networks.networks @@ -95,7 +95,7 @@ def __init__(self, verbose, networks: Networks): self.pool_cache = {} self.lock = asyncio.Lock() - async def fetch_pool_connection(self, genesis_path): + async def __fetch_pool_connection(self, genesis_path): attempt = 3 while attempt: try: @@ -114,7 +114,7 @@ async def fetch_pool_connection(self, genesis_path): return pool # ToDo: - # - Once Networks.resolve is fully fleshed out and the Networks class managaes all of the network properties + # - Once Networks.resolve is fully fleshed out and the Networks class manages all of the network properties # this class no longer has to manage the 'genesis_path' and 'genesis_url' properties, it can use the # networks instance for lookup, and cache and look up information by network key rather than network name; # Networks.names (the network keys), rather than full Networks.networks[key].name (network name). @@ -132,6 +132,6 @@ async def get_pool(self, network_id): self.pool_cache[network.name] = {} self.pool_cache[network.name]['genesis_path'] = network.genesis_path self.pool_cache[network.name]['genesis_url'] = network.genesis_url - pool = await self.fetch_pool_connection(network.genesis_path) + pool = await self.__fetch_pool_connection(network.genesis_path) self.pool_cache[network.name]['pool'] = pool return pool, network.name \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 7890a8c..296f2bd 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -23,17 +23,15 @@ version = APP_VERSION ) +# global variables default_args = None monitor_plugins = None pool_collection = None - - -# TODO fix -status_test = None +node_info = None def set_plugin_parameters(status: bool = False, alerts: bool = False): # Store args and monitor_plugins for lazy loading. 
- global default_args + global default_args, pool_collection, node_info if not default_args: # Create plugin instance and set default args @@ -43,12 +41,8 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): default_monitor_plugins.get_parse_args(parser) default_args, unknown = parser.parse_known_args() enable_verbose(default_args.verbose) - global pool_collection pool_collection = PoolCollection(default_args.verbose, Networks()) - - # TODO fix - global status_test - status_test = FetchStatus(default_args.verbose, pool_collection) + node_info = FetchStatus(default_args.verbose, pool_collection) # Create namespace with default args and load them into api_args api_args = argparse.Namespace() @@ -59,7 +53,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): setattr(api_args, 'status', status) setattr(api_args, 'alerts', alerts) - # Create anf load plugins with api_args + # Create and load plugins with api_args monitor_plugins = PluginCollection('plugins') monitor_plugins.load_all_parse_args(api_args) @@ -67,23 +61,19 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): @app.get("/networks") async def networks(): - data = Networks.get_all() + data = Networks.get_details() return data @app.get("/networks/{network}") async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - - # TODO fix - result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident) + result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - - # TODO fix - result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) return result \ No newline at end of file From 86128a4ce596025e21f791de6965aa5825237b3a Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Fri, 25 Jun 2021 15:29:57 -0700 Subject: [PATCH 13/19] API refactoring. - Created an exception for when a requested node is not found on the requested network. - Refactored Pool.resolve to deal with genesis url's using the new Networks class. - Cleaned up pool cacheing. - Clean-up variable names and logging messages. - Add redirect to API, from `/` to `/docs`. 
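With the API running locally (the base URL, port, and ids below are placeholders), the new error handling can be exercised from the standard library alone; a request for a node that is not a member of the network now comes back as HTTP 400 carrying the NodeNotFound message:

import json
import urllib.error
import urllib.request

base_url = "http://localhost:8080"   # wherever rest_api:app is being served
try:
    with urllib.request.urlopen(f"{base_url}/networks/sbn/SomeNode") as response:
        print(json.dumps(json.load(response), indent=2))
except urllib.error.HTTPError as error:
    print(error.code, error.read().decode())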
Signed-off-by: Wade Barnes Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 17 ++++-- fetch-validator-status/main.py | 15 ++--- fetch-validator-status/pool.py | 84 ++++++++++++-------------- fetch-validator-status/rest_api.py | 24 ++++++-- 4 files changed, 73 insertions(+), 67 deletions(-) diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index e3f4fc8..453b632 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -1,3 +1,4 @@ +from argparse import ArgumentError from indy_vdr.ledger import ( build_get_validator_info_request, build_get_txn_request, @@ -8,6 +9,9 @@ from pool import PoolCollection from singleton import Singleton +class NodeNotFound(Exception): + pass + class FetchStatus(object, metaclass=Singleton): def __init__(self, verbose, pool_collection: PoolCollection): self.verbose = verbose @@ -23,16 +27,13 @@ async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: request = build_get_validator_info_request(ident.did) ident.sign_request(request) else: - log("Building anonymous request ...") + log("Building an anonymous request ...") request = build_get_txn_request(None, 1, 1) from_nodes = [] if nodes: from_nodes = nodes.split(",") - log("Submitting request ...") - response = await pool.submit_action(request, node_aliases = from_nodes) - try: # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 log("Getting list of verifiers ...") @@ -41,6 +42,14 @@ async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: log("Unable to get list of verifiers. Please make sure you have the latest version of indy-vdr.") pass + if verifiers and from_nodes: + for node in from_nodes: + if not node in verifiers: + raise NodeNotFound(f'{node} is not a member of {network_name}.') + + log("Submitting request ...") + response = await pool.submit_action(request, node_aliases = from_nodes) + log("Passing results to plugins for processing ...") result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) log("Processing complete.") diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index 6dd6c70..0a8ab4d 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -39,20 +39,15 @@ enable_verbose(args.verbose) if args.list_nets: - print(json.dumps(Networks.get_details(), indent=2)) + print(json.dumps(Networks.get_networks(), indent=2)) exit() log("indy-vdr version:", indy_vdr.version()) - did_seed = None if not args.seed else args.seed + did_seed = None if not args.seed else args.seed ident = create_did(did_seed) networks = Networks() - - # ToDo: - # - Flesh out Networks.resolve so this registers a adhoc network - # (i.e. 
user passed in args.genesis_url, or args.genesis_path rather than a known network id) - networks.resolve(args.net, args.genesis_url, args.genesis_path) - pool_collection = PoolCollection(args.verbose, networks) - status = FetchStatus(args.verbose, pool_collection) - result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident)) + network = networks.resolve(args.net, args.genesis_url, args.genesis_path) + node_info = FetchStatus(args.verbose, pool_collection) + result = asyncio.get_event_loop().run_until_complete(node_info.fetch(network.id, monitor_plugins, args.nodes, ident)) print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index d87ea08..66118da 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -10,7 +10,7 @@ from singleton import Singleton -Network = namedtuple('Network', ['name', 'genesis_url', 'genesis_path']) +Network = namedtuple('Network', ['id', 'name', 'genesis_url', 'genesis_path']) class Networks(object, metaclass=Singleton): def __init__(self): @@ -26,7 +26,7 @@ def __load_network_list(self): return networks @property - def names(self): + def ids(self): return self._networks.keys() @property @@ -36,55 +36,51 @@ def networks(self): @staticmethod def get_ids(): networks = Networks() - return networks.names + return networks.ids @staticmethod - def get_details(): + def get_networks(): networks = Networks() return networks.networks @staticmethod - def __download_genesis_file(url: str, target_local_path: str): + def __download_genesis_file(genesis_url: str, destination_path: str): log("Fetching genesis file ...") - urllib.request.urlretrieve(url, target_local_path) - - # ToDo: - # - Refactor to maintain the list of networks dynamically using self._networks - # - In the case a network does not existing the list add it. - # - For example in the case a user provides a genesis_url or genesis_path rather than a named named (known) network. - # - The key for the network should be dynamically generated, could simply be Network#; Network1, Network2, etc. - # - As genesis files are downloaded (or provided) the entries should be updated with the genesis_path information. - # - Genesis files should only be downloaded for entries without genesis_path info. 
- def resolve(self, network: str = None, genesis_url: str = None, genesis_path: str = None): - network_id = None + urllib.request.urlretrieve(genesis_url, destination_path) + + def resolve(self, network_id: str = None, genesis_url: str = None, genesis_path: str = None): + network_name = None genesis_path_base = f"{self.__get_script_dir()}/" - if network: - if network in self.names: - log("Connecting to '{0}' ...".format(self.networks[network]["name"])) - genesis_url = self.networks[network]["genesisUrl"] - network_id = self.networks[network]["name"] + if network_id and network_id in self.ids: + log("Connecting to '{0}' ...".format(self.networks[network_id]["name"])) + network_name = self.networks[network_id]["name"] + genesis_url = self.networks[network_id]["genesisUrl"] + if 'genesisPath' in self.networks[network_id]: + genesis_path = self.networks[network_id]['genesisPath'] if genesis_url: - if not network_id: - network_id = genesis_url - log(f"Setting network name = {network_id} ...") + if not network_name: + network_name = genesis_url + network_id = network_name + log(f"Setting network name = {network_name} ...") + if not genesis_path: - # Remove and replace parts of the string to make a file name to create the path. - network_id_path = network_id.replace("https://", "") - network_id_path = re.sub('[ /.]', '_', network_id_path) - genesis_path = f"{genesis_path_base}{network_id_path}/" + # Remove and replace parts of the string to make a valid path based on the network name. + sub_path = network_name.replace("https://", "") + sub_path = re.sub('[ /.]', '_', sub_path) + genesis_path = f"{genesis_path_base}{sub_path}/" if not os.path.exists(genesis_path): os.makedirs(genesis_path) genesis_path = f"{genesis_path}genesis.txn" + Networks.__download_genesis_file(genesis_url, genesis_path) + self._networks[network_id] = {'name': network_name, 'genesisUrl': genesis_url, 'genesisPath': genesis_path} - Networks.__download_genesis_file(genesis_url, genesis_path) if not os.path.exists(genesis_path): print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) exit() - network = Network(network_id, genesis_url, genesis_path) - + network = Network(network_id, network_name, genesis_url, genesis_path) return network @@ -104,7 +100,7 @@ async def __fetch_pool_connection(self, genesis_path): except: log("Pool Timed Out! Trying again ...") if not attempt: - print("Unable to get pool Response! 3 attempts where made. Exiting ...") + print("Unable to get response from pool! 3 attempts where made. Exiting ...") exit() attempt -= 1 continue @@ -113,25 +109,19 @@ async def __fetch_pool_connection(self, genesis_path): break return pool - # ToDo: - # - Once Networks.resolve is fully fleshed out and the Networks class manages all of the network properties - # this class no longer has to manage the 'genesis_path' and 'genesis_url' properties, it can use the - # networks instance for lookup, and cache and look up information by network key rather than network name; - # Networks.names (the network keys), rather than full Networks.networks[key].name (network name). async def get_pool(self, network_id): network = self.networks.resolve(network_id) # Network pool connection cache with async thread lock for REST API. async with self.lock: - if network.name in self.pool_cache: - # Use cache. - log(f"Pool for {network.name} found in cache ... ") - pool = self.pool_cache[network.name]['pool'] + if network.id in self.pool_cache: + # Cache hit ... 
+ log(f"The pool for {network.name} was found in the cache ...") + pool = self.pool_cache[network.id]['pool'] else: - # Create cache. - log(f"Pool for {network.name} not found in cache, creating new connection ... ") - self.pool_cache[network.name] = {} - self.pool_cache[network.name]['genesis_path'] = network.genesis_path - self.pool_cache[network.name]['genesis_url'] = network.genesis_url + # Cache miss ... + log(f"A pool for {network.name} was not found in the cache, creating new connection ...") pool = await self.__fetch_pool_connection(network.genesis_path) - self.pool_cache[network.name]['pool'] = pool + self.pool_cache[network.id] = {} + self.pool_cache[network.id]['pool'] = pool + log(f"Cached the pool for {network.name} ...") return pool, network.name \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 296f2bd..1829fe8 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -1,7 +1,8 @@ import os import argparse from typing import Optional -from fastapi import FastAPI, Header +from fastapi import FastAPI, Header, HTTPException +from starlette.responses import RedirectResponse from util import ( enable_verbose, # log, @@ -9,7 +10,7 @@ ) from pool import PoolCollection from pool import Networks -from fetch_status import FetchStatus +from fetch_status import FetchStatus, NodeNotFound from plugin_collection import PluginCollection APP_NAME='Node Monitor' @@ -49,19 +50,25 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False): for name, value in default_args._get_kwargs(): setattr(api_args, name, value) - # Set api_args with the values from the parameters + # Set api_args with the values from the parameters setattr(api_args, 'status', status) setattr(api_args, 'alerts', alerts) # Create and load plugins with api_args - monitor_plugins = PluginCollection('plugins') + monitor_plugins = PluginCollection('plugins') monitor_plugins.load_all_parse_args(api_args) return monitor_plugins +# Redirect users to the '/docs' page but don't include this endpoint in the docs. +@app.get("/", include_in_schema=False) +async def redirect(): + response = RedirectResponse(url='/docs') + return response + @app.get("/networks") async def networks(): - data = Networks.get_details() + data = Networks.get_networks() return data @app.get("/networks/{network}") @@ -75,5 +82,10 @@ async def network(network, status: bool = False, alerts: bool = False, seed: Opt async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + try: + result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + except NodeNotFound as error: + print(error) + raise HTTPException(status_code=400, detail=str(error)) + return result \ No newline at end of file From d449280d24bc285e506a2e3f9b3da862090a686c Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 29 Jun 2021 11:32:06 -0700 Subject: [PATCH 14/19] Moved Networks class into its own file. - Removed import argparse ArgumentError. 
Signed-off-by: KoleBarnes --- fetch-validator-status/fetch_status.py | 1 - fetch-validator-status/main.py | 2 +- fetch-validator-status/networks.py | 81 +++++++++++++++++++++++++ fetch-validator-status/pool.py | 82 +------------------------- fetch-validator-status/rest_api.py | 2 +- 5 files changed, 84 insertions(+), 84 deletions(-) create mode 100644 fetch-validator-status/networks.py diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index 453b632..b7c3a77 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -1,4 +1,3 @@ -from argparse import ArgumentError from indy_vdr.ledger import ( build_get_validator_info_request, build_get_txn_request, diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index 0a8ab4d..af4c844 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -10,7 +10,7 @@ ) from fetch_status import FetchStatus from pool import PoolCollection -from pool import Networks +from networks import Networks from plugin_collection import PluginCollection if __name__ == "__main__": diff --git a/fetch-validator-status/networks.py b/fetch-validator-status/networks.py new file mode 100644 index 0000000..6ee9e55 --- /dev/null +++ b/fetch-validator-status/networks.py @@ -0,0 +1,81 @@ +import os +import json +import urllib.request +import sys +import re +from collections import namedtuple +from util import log +from singleton import Singleton + +Network = namedtuple('Network', ['id', 'name', 'genesis_url', 'genesis_path']) + +class Networks(object, metaclass=Singleton): + def __init__(self): + self._networks = self.__load_network_list() + + def __get_script_dir(self): + return os.path.dirname(os.path.realpath(__file__)) + + def __load_network_list(self): + log("Loading known network list ...") + with open(f"{self.__get_script_dir()}/networks.json") as json_file: + networks = json.load(json_file) + return networks + + @property + def ids(self): + return self._networks.keys() + + @property + def networks(self): + return self._networks + + @staticmethod + def get_ids(): + networks = Networks() + return networks.ids + + @staticmethod + def get_networks(): + networks = Networks() + return networks.networks + + @staticmethod + def __download_genesis_file(genesis_url: str, destination_path: str): + log("Fetching genesis file ...") + urllib.request.urlretrieve(genesis_url, destination_path) + + def resolve(self, network_id: str = None, genesis_url: str = None, genesis_path: str = None): + network_name = None + genesis_path_base = f"{self.__get_script_dir()}/" + + if network_id and network_id in self.ids: + log("Connecting to '{0}' ...".format(self.networks[network_id]["name"])) + network_name = self.networks[network_id]["name"] + genesis_url = self.networks[network_id]["genesisUrl"] + if 'genesisPath' in self.networks[network_id]: + genesis_path = self.networks[network_id]['genesisPath'] + + if genesis_url: + if not network_name: + network_name = genesis_url + network_id = network_name + log(f"Setting network name = {network_name} ...") + + if not genesis_path: + # Remove and replace parts of the string to make a valid path based on the network name. 
+ sub_path = network_name.replace("https://", "") + sub_path = re.sub('[ /.]', '_', sub_path) + genesis_path = f"{genesis_path_base}{sub_path}/" + if not os.path.exists(genesis_path): + os.makedirs(genesis_path) + genesis_path = f"{genesis_path}genesis.txn" + Networks.__download_genesis_file(genesis_url, genesis_path) + self._networks[network_id] = {'name': network_name, 'genesisUrl': genesis_url, 'genesisPath': genesis_path} + + if not os.path.exists(genesis_path): + print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) + exit() + + network = Network(network_id, network_name, genesis_url, genesis_path) + return network \ No newline at end of file diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py index 66118da..35ca298 100644 --- a/fetch-validator-status/pool.py +++ b/fetch-validator-status/pool.py @@ -1,88 +1,8 @@ -import os -import json -import urllib.request -import sys import asyncio -import re -from collections import namedtuple from util import log from indy_vdr.pool import open_pool from singleton import Singleton - - -Network = namedtuple('Network', ['id', 'name', 'genesis_url', 'genesis_path']) - -class Networks(object, metaclass=Singleton): - def __init__(self): - self._networks = self.__load_network_list() - - def __get_script_dir(self): - return os.path.dirname(os.path.realpath(__file__)) - - def __load_network_list(self): - log("Loading known network list ...") - with open(f"{self.__get_script_dir()}/networks.json") as json_file: - networks = json.load(json_file) - return networks - - @property - def ids(self): - return self._networks.keys() - - @property - def networks(self): - return self._networks - - @staticmethod - def get_ids(): - networks = Networks() - return networks.ids - - @staticmethod - def get_networks(): - networks = Networks() - return networks.networks - - @staticmethod - def __download_genesis_file(genesis_url: str, destination_path: str): - log("Fetching genesis file ...") - urllib.request.urlretrieve(genesis_url, destination_path) - - def resolve(self, network_id: str = None, genesis_url: str = None, genesis_path: str = None): - network_name = None - genesis_path_base = f"{self.__get_script_dir()}/" - - if network_id and network_id in self.ids: - log("Connecting to '{0}' ...".format(self.networks[network_id]["name"])) - network_name = self.networks[network_id]["name"] - genesis_url = self.networks[network_id]["genesisUrl"] - if 'genesisPath' in self.networks[network_id]: - genesis_path = self.networks[network_id]['genesisPath'] - - if genesis_url: - if not network_name: - network_name = genesis_url - network_id = network_name - log(f"Setting network name = {network_name} ...") - - if not genesis_path: - # Remove and replace parts of the string to make a valid path based on the network name. 
- sub_path = network_name.replace("https://", "") - sub_path = re.sub('[ /.]', '_', sub_path) - genesis_path = f"{genesis_path_base}{sub_path}/" - if not os.path.exists(genesis_path): - os.makedirs(genesis_path) - genesis_path = f"{genesis_path}genesis.txn" - Networks.__download_genesis_file(genesis_url, genesis_path) - self._networks[network_id] = {'name': network_name, 'genesisUrl': genesis_url, 'genesisPath': genesis_path} - - if not os.path.exists(genesis_path): - print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) - exit() - - network = Network(network_id, network_name, genesis_url, genesis_path) - return network - +from networks import Networks class PoolCollection(object, metaclass=Singleton): def __init__(self, verbose, networks: Networks): diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 1829fe8..db356d0 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -9,7 +9,7 @@ create_did ) from pool import PoolCollection -from pool import Networks +from networks import Networks from fetch_status import FetchStatus, NodeNotFound from plugin_collection import PluginCollection From 77ae7446026a26ab767ff954849014ec7e313493 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 14 Sep 2021 13:19:05 -0700 Subject: [PATCH 15/19] Added debug and production HTTP servers. - uvicorn for debug. - gunicorn for production. Signed-off-by: KoleBarnes --- fetch-validator-status/gunicorn_conf.py | 73 +++++++++++++++++++++++++ fetch-validator-status/main.py | 24 +++++++- 2 files changed, 94 insertions(+), 3 deletions(-) create mode 100644 fetch-validator-status/gunicorn_conf.py diff --git a/fetch-validator-status/gunicorn_conf.py b/fetch-validator-status/gunicorn_conf.py new file mode 100644 index 0000000..8f8ae3e --- /dev/null +++ b/fetch-validator-status/gunicorn_conf.py @@ -0,0 +1,73 @@ +""" +File created by tiangolo. 
+https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/master/docker-images/gunicorn_conf.py +""" + +import json +import multiprocessing +import os + +workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1") +max_workers_str = os.getenv("MAX_WORKERS") +use_max_workers = None +if max_workers_str: + use_max_workers = int(max_workers_str) +web_concurrency_str = os.getenv("WEB_CONCURRENCY", None) + +host = os.getenv("HOST", "0.0.0.0") +port = os.getenv("PORT", "8080") +bind_env = os.getenv("BIND", None) +use_loglevel = os.getenv("LOG_LEVEL", "info") +if bind_env: + use_bind = bind_env +else: + use_bind = f"{host}:{port}" + +cores = multiprocessing.cpu_count() +workers_per_core = float(workers_per_core_str) +default_web_concurrency = workers_per_core * cores +if web_concurrency_str: + web_concurrency = int(web_concurrency_str) + assert web_concurrency > 0 +else: + web_concurrency = max(int(default_web_concurrency), 2) + if use_max_workers: + web_concurrency = min(web_concurrency, use_max_workers) +accesslog_var = os.getenv("ACCESS_LOG", "-") +use_accesslog = accesslog_var or None +errorlog_var = os.getenv("ERROR_LOG", "-") +use_errorlog = errorlog_var or None +graceful_timeout_str = os.getenv("GRACEFUL_TIMEOUT", "120") +timeout_str = os.getenv("TIMEOUT", "120") +keepalive_str = os.getenv("KEEP_ALIVE", "5") + +# Gunicorn config variables +loglevel = use_loglevel +workers = web_concurrency +bind = use_bind +errorlog = use_errorlog +worker_tmp_dir = "/dev/shm" +accesslog = use_accesslog +graceful_timeout = int(graceful_timeout_str) +timeout = int(timeout_str) +keepalive = int(keepalive_str) + + +# For debugging and testing +log_data = { + "loglevel": loglevel, + "workers": workers, + "bind": bind, + "graceful_timeout": graceful_timeout, + "timeout": timeout, + "keepalive": keepalive, + "errorlog": errorlog, + "accesslog": accesslog, + # Additional, non-gunicorn variables + "workers_per_core": workers_per_core, + "use_max_workers": use_max_workers, + "host": host, + "port": port, +} +print('gunicorn config:') +print(json.dumps(log_data, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index af4c844..6a3be37 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -23,20 +23,38 @@ parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. 
The default is all of the nodes in the pool.") parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") parser.add_argument("--web", action="store_true", help="Start API server.") + parser.add_argument("--debug", action="store_true", help="Run in debug mode.") monitor_plugins = PluginCollection('plugins') monitor_plugins.get_parse_args(parser) args, unknown = parser.parse_known_args() monitor_plugins.load_all_parse_args(args) + enable_verbose(args.verbose) + if args.web: - log("Starting web server ...") # Pass verbose to rest api through env var os.environ['VERBOSE'] = str(args.verbose) - os.system('uvicorn rest_api:app --reload --host 0.0.0.0 --port 8080') + + MODULE_NAME = os.environ.get('MODULE_NAME', "rest_api") + VARIABLE_NAME = os.environ.get('VARIABLE_NAME', "app") + APP_MODULE = os.environ.get('APP_MODULE', f"{MODULE_NAME}:{VARIABLE_NAME}") + + if args.debug: + HOST = os.environ.get('HOST', '0.0.0.0') + PORT = os.environ.get('PORT', '8080') + LOG_LEVEL = os.environ.get('LOG_LEVEL', 'info') + + log("Starting web server in debug mode ...") + os.system(f'uvicorn --reload --host {HOST} --port {PORT} --log-level {LOG_LEVEL} "{APP_MODULE}"') + else: + GUNICORN_CONF = os.environ.get('GUNICORN_CONF', 'gunicorn_conf.py') + WORKER_CLASS = os.environ.get('WORKER_CLASS', "uvicorn.workers.UvicornWorker") + + log("Starting web server ...") + os.system(f'gunicorn -k "{WORKER_CLASS}" -c "{GUNICORN_CONF}" "{APP_MODULE}"') else: log("Starting from the command line ...") - enable_verbose(args.verbose) if args.list_nets: print(json.dumps(Networks.get_networks(), indent=2)) From 0247fe91a6987146104f9160571d7d0478ddc814 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Thu, 25 Nov 2021 12:52:28 -0800 Subject: [PATCH 16/19] Added documentation on how to use the REST API. Added code to check if a seed is being used while running the REST API. If so it will exit. Changed App name and description. Signed-off-by: KoleBarnes --- fetch-validator-status/README.md | 4 ++++ fetch-validator-status/REST_API.md | 8 ++++++++ fetch-validator-status/main.py | 6 ++++++ fetch-validator-status/rest_api.py | 4 ++-- fetch-validator-status/run.sh | 2 +- 5 files changed, 21 insertions(+), 3 deletions(-) create mode 100644 fetch-validator-status/REST_API.md diff --git a/fetch-validator-status/README.md b/fetch-validator-status/README.md index bf5f4c2..1757897 100644 --- a/fetch-validator-status/README.md +++ b/fetch-validator-status/README.md @@ -171,6 +171,10 @@ Note that there are three different formats for the timestamps in the data struc For info on plug-ins see the plug-ins [readme](plugins/README.md) +## Rest API + +For info on Rest API see [REST API](REST_API.md) + ### Running against other Indy Networks To see the validator info against any other Indy network, you need a URL for the Genesis file for the network, and the seed for a suitably authorized DID. The pool Genesis file URLs are easy, since that is published data needed by agents connecting to Indy networks. Sovrin genesis URLs can be found [here](https://github.com/sovrin-foundation/sovrin/tree/master/sovrin). You need the URL for the raw version of the pool transaction files. 
For example, the URL you need for the Sovrin MainNet is: diff --git a/fetch-validator-status/REST_API.md b/fetch-validator-status/REST_API.md new file mode 100644 index 0000000..2fa2b88 --- /dev/null +++ b/fetch-validator-status/REST_API.md @@ -0,0 +1,8 @@ +# Rest API + +To run [fetch validator](README.md) as a webAPI `cd fetch-validator-status` and `IM=1 ./run.sh --web -v` to start the server. +To run in debug mode add `--debug`. + +## How To Use + +After running the command above. Go to http://localhost:8080/ in your browser. Then click on one of the colored drop downs and click the 'Try it out' button. Fill out any required fields then click 'execute'. This will give you a response with a, curl command, request url, and response body. \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py index 6a3be37..c86429f 100644 --- a/fetch-validator-status/main.py +++ b/fetch-validator-status/main.py @@ -33,6 +33,12 @@ enable_verbose(args.verbose) if args.web: + if args.seed: + print("WARNING: You are trying to run the REST API with a SEED.") + print("Please remove your SEED and try again.") + print("Exiting...") + exit() + # Pass verbose to rest api through env var os.environ['VERBOSE'] = str(args.verbose) diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index db356d0..80e27e0 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -13,8 +13,8 @@ from fetch_status import FetchStatus, NodeNotFound from plugin_collection import PluginCollection -APP_NAME='Node Monitor' -APP_DESCRIPTION='test_description' +APP_NAME='Hyperledger Indy Node Monitor REST API' +APP_DESCRIPTION='https://github.com/hyperledger/indy-node-monitor' APP_VERSION='0.0.0' # https://fastapi.tiangolo.com/tutorial/metadata/ diff --git a/fetch-validator-status/run.sh b/fetch-validator-status/run.sh index 6f567ae..9b9a62f 100755 --- a/fetch-validator-status/run.sh +++ b/fetch-validator-status/run.sh @@ -40,7 +40,7 @@ else fi fi -docker build -t fetch_status . #> /dev/null 2>&1 +docker build -t fetch_status . > /dev/null 2>&1 cmd="${terminalEmu} docker run --rm ${DOCKER_INTERACTIVE} \ -e "GENESIS_PATH=${GENESIS_PATH}" \ From 924ea8438d947e81b42fc72dbf754d00623b2ac9 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 30 Nov 2021 14:33:28 -0800 Subject: [PATCH 17/19] Add node time stamp. Add API documentation. Signed-off-by: KoleBarnes --- fetch-validator-status/plugins/analysis.py | 2 ++ fetch-validator-status/rest_api.py | 13 ++++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/fetch-validator-status/plugins/analysis.py b/fetch-validator-status/plugins/analysis.py index 678b18c..66511ce 100644 --- a/fetch-validator-status/plugins/analysis.py +++ b/fetch-validator-status/plugins/analysis.py @@ -99,6 +99,8 @@ async def get_status_summary(self, jsval: any, errors: list) -> any: # by the monitor. 
status["timestamp"] = datetime.datetime.now(datetime.timezone.utc).strftime('%s') if jsval and ("REPLY" in jsval["op"]): + if "timestamp" in jsval["result"]["data"]: + status["node_timestamp"] = jsval["result"]["data"]["timestamp"] if "Node_info" in jsval["result"]["data"]: status["uptime"] = str(datetime.timedelta(seconds = jsval["result"]["data"]["Node_info"]["Metrics"]["uptime"])) if "Software" in jsval["result"]["data"]: diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index 80e27e0..cdd76af 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -1,7 +1,7 @@ import os import argparse from typing import Optional -from fastapi import FastAPI, Header, HTTPException +from fastapi import FastAPI, Header, HTTPException, Path, Query from starlette.responses import RedirectResponse from util import ( enable_verbose, @@ -72,14 +72,21 @@ async def networks(): return data @app.get("/networks/{network}") -async def network(network, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): +async def network(network: str = Path(..., example="sbn", description="The network code."), + status: bool = Query(False, description="Filter results to status only."), + alerts: bool = Query(False, description="Filter results to alerts only."), + seed: Optional[str] = Header(None, description="Your network monitor seed.")): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") -async def node(network, node, status: bool = False, alerts: bool = False, seed: Optional[str] = Header(None)): +async def node(network: str = Path(..., example="sbn", description="The network code."), + node: str = Path(..., example="FoundationBuilder", description="The node name."), + status: bool = Query(False, description="Filter results to status only."), + alerts: bool = Query(False, description="Filter results to alerts only."), + seed: Optional[str] = Header(None, description="Your network monitor seed.")): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) try: From 418de4f56224a489e5b839710bbaf145ebd7e3b0 Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Tue, 30 Nov 2021 14:37:27 -0800 Subject: [PATCH 18/19] Fix merge issues with Dockerfile. Signed-off-by: KoleBarnes --- fetch-validator-status/Dockerfile | 4 ---- 1 file changed, 4 deletions(-) diff --git a/fetch-validator-status/Dockerfile b/fetch-validator-status/Dockerfile index 2ca58e1..21bb8f8 100644 --- a/fetch-validator-status/Dockerfile +++ b/fetch-validator-status/Dockerfile @@ -10,10 +10,6 @@ RUN apt-get update -y && \ rm -rf /var/lib/apt/lists/* USER $user -RUN pip install pynacl gspread oauth2client - -USER $user - ADD requirements.txt . RUN pip install --no-cache-dir -r requirements.txt From 01acb3cc7bfbbaf43250af4603b1e8da2b11a45b Mon Sep 17 00:00:00 2001 From: KoleBarnes Date: Mon, 20 Dec 2021 08:14:50 -0800 Subject: [PATCH 19/19] Added dynamically generating list that is used to select the available Networks. 
Signed-off-by: KoleBarnes --- fetch-validator-status/networks.py | 12 ++++++++++++ fetch-validator-status/rest_api.py | 12 +++++++----- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/fetch-validator-status/networks.py b/fetch-validator-status/networks.py index 6ee9e55..d079d74 100644 --- a/fetch-validator-status/networks.py +++ b/fetch-validator-status/networks.py @@ -3,12 +3,17 @@ import urllib.request import sys import re +from enum import Enum from collections import namedtuple from util import log from singleton import Singleton Network = namedtuple('Network', ['id', 'name', 'genesis_url', 'genesis_path']) +class NetworkEnum(Enum): + def _generate_next_value_(name, start, count, last_values): + return name + class Networks(object, metaclass=Singleton): def __init__(self): self._networks = self.__load_network_list() @@ -45,6 +50,13 @@ def __download_genesis_file(genesis_url: str, destination_path: str): log("Fetching genesis file ...") urllib.request.urlretrieve(genesis_url, destination_path) + @staticmethod + def get_NetworkEnum() -> NetworkEnum: + """Dynamically generates a NetworkEnum that can be used to select the available Networks. + """ + networks = Networks() + return NetworkEnum('Network', list(networks.ids)) + def resolve(self, network_id: str = None, genesis_url: str = None, genesis_path: str = None): network_name = None genesis_path_base = f"{self.__get_script_dir()}/" diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py index cdd76af..22a11ff 100644 --- a/fetch-validator-status/rest_api.py +++ b/fetch-validator-status/rest_api.py @@ -9,7 +9,7 @@ create_did ) from pool import PoolCollection -from networks import Networks +from networks import Networks, NetworkEnum from fetch_status import FetchStatus, NodeNotFound from plugin_collection import PluginCollection @@ -30,6 +30,8 @@ pool_collection = None node_info = None +Network: NetworkEnum = Networks.get_NetworkEnum() + def set_plugin_parameters(status: bool = False, alerts: bool = False): # Store args and monitor_plugins for lazy loading. 
global default_args, pool_collection, node_info @@ -72,17 +74,17 @@ async def networks(): return data @app.get("/networks/{network}") -async def network(network: str = Path(..., example="sbn", description="The network code."), +async def network(network: Network = Path(Network.sbn, example="sbn", description="The network code."), status: bool = Query(False, description="Filter results to status only."), alerts: bool = Query(False, description="Filter results to alerts only."), seed: Optional[str] = Header(None, description="Your network monitor seed.")): monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) - result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident) + result = await node_info.fetch(network_id=network.value, monitor_plugins=monitor_plugins, ident=ident) return result @app.get("/networks/{network}/{node}") -async def node(network: str = Path(..., example="sbn", description="The network code."), +async def node(network: Network = Path(Network.sbn, example="sbn", description="The network code."), node: str = Path(..., example="FoundationBuilder", description="The node name."), status: bool = Query(False, description="Filter results to status only."), alerts: bool = Query(False, description="Filter results to alerts only."), @@ -90,7 +92,7 @@ async def node(network: str = Path(..., example="sbn", description="The network monitor_plugins = set_plugin_parameters(status, alerts) ident = create_did(seed) try: - result = await node_info.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + result = await node_info.fetch(network_id=network.value, monitor_plugins=monitor_plugins, nodes=node, ident=ident) except NodeNotFound as error: print(error) raise HTTPException(status_code=400, detail=str(error))
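
For reference, a couple of illustrative client calls against the running API (not part of the patch), assuming the server is listening on localhost:8080 as described in REST_API.md. The network code "sbn" and node name "FoundationBuilder" are simply the example values used in the endpoint definitions above, and the seed value is a placeholder for an authorized network monitor seed.

    # Illustrative client sketch using only the standard library.
    import json
    import urllib.request

    def get(path, seed=None):
        req = urllib.request.Request(f"http://localhost:8080{path}")
        if seed:
            # The endpoints read the network monitor seed from the 'seed' header.
            req.add_header("seed", seed)
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())

    print(get("/networks"))                                   # known networks from networks.json
    print(get("/networks/sbn?status=true", seed="<placeholder seed>"))
    print(get("/networks/sbn/FoundationBuilder?alerts=true", seed="<placeholder seed>"))
    # An unknown node name now returns HTTP 400 carrying the NodeNotFound message.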