diff --git a/fetch-validator-status/DidKey.py b/fetch-validator-status/DidKey.py index 1a92597..efb5948 100644 --- a/fetch-validator-status/DidKey.py +++ b/fetch-validator-status/DidKey.py @@ -5,8 +5,9 @@ class DidKey: def __init__(self, seed): - seed = seed_as_bytes(seed) - self.sk = nacl.signing.SigningKey(seed) + self.seed = seed + self.seed = self.seed_as_bytes() + self.sk = nacl.signing.SigningKey(self.seed) self.vk = bytes(self.sk.verify_key) self.did = base58.b58encode(self.vk[:16]).decode("ascii") self.verkey = base58.b58encode(self.vk).decode("ascii") @@ -15,9 +16,9 @@ def sign_request(self, req: Request): signed = self.sk.sign(req.signature_input) req.set_signature(signed.signature) -def seed_as_bytes(seed): - if not seed or isinstance(seed, bytes): - return seed - if len(seed) != 32: - return base64.b64decode(seed) - return seed.encode("ascii") \ No newline at end of file + def seed_as_bytes(self): + if not self.seed or isinstance(self.seed, bytes): + return self.seed + if len(self.seed) != 32: + return base64.b64decode(self.seed) + return self.seed.encode("ascii") \ No newline at end of file diff --git a/fetch-validator-status/Dockerfile b/fetch-validator-status/Dockerfile index 76938a4..21bb8f8 100644 --- a/fetch-validator-status/Dockerfile +++ b/fetch-validator-status/Dockerfile @@ -10,9 +10,10 @@ RUN apt-get update -y && \ rm -rf /var/lib/apt/lists/* USER $user -RUN pip install pynacl gspread oauth2client +ADD requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt ADD networks.json . 
ADD *.py ./ -ENTRYPOINT ["bash", "-c", "python fetch_status.py $@", "--"] +ENTRYPOINT ["bash", "-c", "python main.py $@", "--"] \ No newline at end of file diff --git a/fetch-validator-status/README.md b/fetch-validator-status/README.md index bf5f4c2..1757897 100644 --- a/fetch-validator-status/README.md +++ b/fetch-validator-status/README.md @@ -171,6 +171,10 @@ Note that there are three different formats for the timestamps in the data struc For info on plug-ins see the plug-ins [readme](plugins/README.md) +## REST API + +For info on the REST API see [REST API](REST_API.md) + ### Running against other Indy Networks To see the validator info against any other Indy network, you need a URL for the Genesis file for the network, and the seed for a suitably authorized DID. The pool Genesis file URLs are easy, since that is published data needed by agents connecting to Indy networks. Sovrin genesis URLs can be found [here](https://github.com/sovrin-foundation/sovrin/tree/master/sovrin). You need the URL for the raw version of the pool transaction files. For example, the URL you need for the Sovrin MainNet is: diff --git a/fetch-validator-status/REST_API.md b/fetch-validator-status/REST_API.md new file mode 100644 index 0000000..2fa2b88 --- /dev/null +++ b/fetch-validator-status/REST_API.md @@ -0,0 +1,8 @@ +# REST API + +To run [fetch validator](README.md) as a web API, `cd fetch-validator-status` and `IM=1 ./run.sh --web -v` to start the server. +To run in debug mode add `--debug`. + +## How To Use + +After running the command above, go to http://localhost:8080/ in your browser. Then click on one of the colored drop downs and click the 'Try it out' button. Fill out any required fields then click 'execute'. This will give you a response with a curl command, request URL, and response body. 
\ No newline at end of file diff --git a/fetch-validator-status/fetch_status.py b/fetch-validator-status/fetch_status.py index d5e94ea..b7c3a77 100644 --- a/fetch-validator-status/fetch_status.py +++ b/fetch-validator-status/fetch_status.py @@ -1,139 +1,55 @@ -import argparse -import asyncio -# import base58 -# import base64 -import json -import os -import sys -# import datetime -import urllib.request -# from typing import Tuple - -# import nacl.signing - -import indy_vdr from indy_vdr.ledger import ( build_get_validator_info_request, build_get_txn_request, - # Request, ) -from indy_vdr.pool import open_pool +from util import log from plugin_collection import PluginCollection -# import time from DidKey import DidKey +from pool import PoolCollection +from singleton import Singleton + +class NodeNotFound(Exception): + pass + +class FetchStatus(object, metaclass=Singleton): + def __init__(self, verbose, pool_collection: PoolCollection): + self.verbose = verbose + self.pool_collection = pool_collection + + async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None): + result = [] + verifiers = {} + + pool, network_name = await self.pool_collection.get_pool(network_id) + if ident: + log(f"Building request with did: {ident.did} ...") + request = build_get_validator_info_request(ident.did) + ident.sign_request(request) + else: + log("Building an anonymous request ...") + request = build_get_txn_request(None, 1, 1) + + from_nodes = [] + if nodes: + from_nodes = nodes.split(",") -verbose = False - - -def log(*args): - if verbose: - print(*args, "\n", file=sys.stderr) - - -async def fetch_status(genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None): - # Start Of Engine - attempt = 3 - while attempt: try: - pool = await open_pool(transactions_path=genesis_path) - except: - log("Pool Timed Out! Trying again...") - if not attempt: - print("Unable to get pool Response! 3 attempts where made. 
Exiting...") - exit() - attempt -= 1 - continue - break - - result = [] - verifiers = {} - - if ident: - request = build_get_validator_info_request(ident.did) - ident.sign_request(request) - else: - request = build_get_txn_request(None, 1, 1) - - from_nodes = [] - if nodes: - from_nodes = nodes.split(",") - response = await pool.submit_action(request, node_aliases = from_nodes) - try: - # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 - verifiers = await pool.get_verifiers() - except AttributeError: - pass - # End Of Engine - - result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) - print(json.dumps(result, indent=2)) - -def get_script_dir(): - return os.path.dirname(os.path.realpath(__file__)) - - -def download_genesis_file(url: str, target_local_path: str): - log("Fetching genesis file ...") - target_local_path = f"{get_script_dir()}/genesis.txn" - urllib.request.urlretrieve(url, target_local_path) - -def load_network_list(): - with open(f"{get_script_dir()}/networks.json") as json_file: - networks = json.load(json_file) - return networks - -def list_networks(): - networks = load_network_list() - return networks.keys() - -if __name__ == "__main__": - monitor_plugins = PluginCollection('plugins') - - parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") - parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.") - parser.add_argument("--list-nets", action="store_true", help="List known networks.") - parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. 
Can be specified using the 'GENESIS_URL' environment variable.") - parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH") or f"{get_script_dir()}/genesis.txn" , help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") - parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.") - parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. The default is all of the nodes in the pool.") - parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") - - monitor_plugins.get_parse_args(parser) - args, unknown = parser.parse_known_args() - - verbose = args.verbose - - monitor_plugins.load_all_parse_args(args) - - if args.list_nets: - print(json.dumps(load_network_list(), indent=2)) - exit() - - network_name = None - if args.net: - log("Loading known network list ...") - networks = load_network_list() - if args.net in networks: - log("Connecting to '{0}' ...".format(networks[args.net]["name"])) - args.genesis_url = networks[args.net]["genesisUrl"] - network_name = networks[args.net]["name"] - - if args.genesis_url: - download_genesis_file(args.genesis_url, args.genesis_path) - if not network_name: - network_name = args.genesis_url - if not os.path.exists(args.genesis_path): - print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) - parser.print_help() - exit() - - did_seed = None if not args.seed else args.seed - - log("indy-vdr version:", indy_vdr.version()) - if did_seed: - ident = DidKey(did_seed) - log("DID:", ident.did, " Verkey:", ident.verkey) - else: - ident = None - - asyncio.get_event_loop().run_until_complete(fetch_status(args.genesis_path, args.nodes, ident, 
network_name)) \ No newline at end of file + # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 + log("Getting list of verifiers ...") + verifiers = await pool.get_verifiers() + except AttributeError: + log("Unable to get list of verifiers. Please make sure you have the latest version of indy-vdr.") + pass + + if verifiers and from_nodes: + for node in from_nodes: + if not node in verifiers: + raise NodeNotFound(f'{node} is not a member of {network_name}.') + + log("Submitting request ...") + response = await pool.submit_action(request, node_aliases = from_nodes) + + log("Passing results to plugins for processing ...") + result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) + log("Processing complete.") + return result \ No newline at end of file diff --git a/fetch-validator-status/gunicorn_conf.py b/fetch-validator-status/gunicorn_conf.py new file mode 100644 index 0000000..8f8ae3e --- /dev/null +++ b/fetch-validator-status/gunicorn_conf.py @@ -0,0 +1,73 @@ +""" +File created by tiangolo. 
+https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/master/docker-images/gunicorn_conf.py +""" + +import json +import multiprocessing +import os + +workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1") +max_workers_str = os.getenv("MAX_WORKERS") +use_max_workers = None +if max_workers_str: + use_max_workers = int(max_workers_str) +web_concurrency_str = os.getenv("WEB_CONCURRENCY", None) + +host = os.getenv("HOST", "0.0.0.0") +port = os.getenv("PORT", "8080") +bind_env = os.getenv("BIND", None) +use_loglevel = os.getenv("LOG_LEVEL", "info") +if bind_env: + use_bind = bind_env +else: + use_bind = f"{host}:{port}" + +cores = multiprocessing.cpu_count() +workers_per_core = float(workers_per_core_str) +default_web_concurrency = workers_per_core * cores +if web_concurrency_str: + web_concurrency = int(web_concurrency_str) + assert web_concurrency > 0 +else: + web_concurrency = max(int(default_web_concurrency), 2) + if use_max_workers: + web_concurrency = min(web_concurrency, use_max_workers) +accesslog_var = os.getenv("ACCESS_LOG", "-") +use_accesslog = accesslog_var or None +errorlog_var = os.getenv("ERROR_LOG", "-") +use_errorlog = errorlog_var or None +graceful_timeout_str = os.getenv("GRACEFUL_TIMEOUT", "120") +timeout_str = os.getenv("TIMEOUT", "120") +keepalive_str = os.getenv("KEEP_ALIVE", "5") + +# Gunicorn config variables +loglevel = use_loglevel +workers = web_concurrency +bind = use_bind +errorlog = use_errorlog +worker_tmp_dir = "/dev/shm" +accesslog = use_accesslog +graceful_timeout = int(graceful_timeout_str) +timeout = int(timeout_str) +keepalive = int(keepalive_str) + + +# For debugging and testing +log_data = { + "loglevel": loglevel, + "workers": workers, + "bind": bind, + "graceful_timeout": graceful_timeout, + "timeout": timeout, + "keepalive": keepalive, + "errorlog": errorlog, + "accesslog": accesslog, + # Additional, non-gunicorn variables + "workers_per_core": workers_per_core, + "use_max_workers": use_max_workers, + "host": host, + 
"port": port, +} +print('gunicorn config:') +print(json.dumps(log_data, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/main.py b/fetch-validator-status/main.py new file mode 100644 index 0000000..c86429f --- /dev/null +++ b/fetch-validator-status/main.py @@ -0,0 +1,77 @@ +import argparse +import asyncio +import json +import os +import indy_vdr +from util import ( + enable_verbose, + log, + create_did +) +from fetch_status import FetchStatus +from pool import PoolCollection +from networks import Networks +from plugin_collection import PluginCollection + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.") + parser.add_argument("--net", choices=Networks.get_ids(), help="Connect to a known network using an ID.") + parser.add_argument("--list-nets", action="store_true", help="List known networks.") + parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.") + parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.") + parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.") + parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. 
The default is all of the nodes in the pool.") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.") + parser.add_argument("--web", action="store_true", help="Start API server.") + parser.add_argument("--debug", action="store_true", help="Run in debug mode.") + + monitor_plugins = PluginCollection('plugins') + monitor_plugins.get_parse_args(parser) + args, unknown = parser.parse_known_args() + monitor_plugins.load_all_parse_args(args) + + enable_verbose(args.verbose) + + if args.web: + if args.seed: + print("WARNING: You are trying to run the REST API with a SEED.") + print("Please remove your SEED and try again.") + print("Exiting...") + exit() + + # Pass verbose to rest api through env var + os.environ['VERBOSE'] = str(args.verbose) + + MODULE_NAME = os.environ.get('MODULE_NAME', "rest_api") + VARIABLE_NAME = os.environ.get('VARIABLE_NAME', "app") + APP_MODULE = os.environ.get('APP_MODULE', f"{MODULE_NAME}:{VARIABLE_NAME}") + + if args.debug: + HOST = os.environ.get('HOST', '0.0.0.0') + PORT = os.environ.get('PORT', '8080') + LOG_LEVEL = os.environ.get('LOG_LEVEL', 'info') + + log("Starting web server in debug mode ...") + os.system(f'uvicorn --reload --host {HOST} --port {PORT} --log-level {LOG_LEVEL} "{APP_MODULE}"') + else: + GUNICORN_CONF = os.environ.get('GUNICORN_CONF', 'gunicorn_conf.py') + WORKER_CLASS = os.environ.get('WORKER_CLASS', "uvicorn.workers.UvicornWorker") + + log("Starting web server ...") + os.system(f'gunicorn -k "{WORKER_CLASS}" -c "{GUNICORN_CONF}" "{APP_MODULE}"') + else: + log("Starting from the command line ...") + + if args.list_nets: + print(json.dumps(Networks.get_networks(), indent=2)) + exit() + + log("indy-vdr version:", indy_vdr.version()) + did_seed = None if not args.seed else args.seed + ident = create_did(did_seed) + networks = Networks() + pool_collection = PoolCollection(args.verbose, networks) + network = networks.resolve(args.net, args.genesis_url, args.genesis_path) + node_info = 
FetchStatus(args.verbose, pool_collection) + result = asyncio.get_event_loop().run_until_complete(node_info.fetch(network.id, monitor_plugins, args.nodes, ident)) + print(json.dumps(result, indent=2)) \ No newline at end of file diff --git a/fetch-validator-status/networks.json b/fetch-validator-status/networks.json index 76dd582..4bff172 100644 --- a/fetch-validator-status/networks.json +++ b/fetch-validator-status/networks.json @@ -24,7 +24,7 @@ "genesisUrl": "http://dev.bcovrin.vonx.io/genesis" }, "bct": { - "name": "BCovrin Test ", + "name": "BCovrin Test", "genesisUrl": "http://test.bcovrin.vonx.io/genesis" }, "bcp": { @@ -32,23 +32,23 @@ "genesisUrl": "http://prod.bcovrin.vonx.io/genesis" }, "gld": { - "name": "GreenLight Dev Ledger ", + "name": "GreenLight Dev Ledger", "genesisUrl": "http://dev.greenlight.bcovrin.vonx.io/genesis" }, "gl": { - "name": "GreenLight Ledger ", + "name": "GreenLight Ledger", "genesisUrl": "http://greenlight.bcovrin.vonx.io/genesis" }, "imn": { - "name": "Indicio MainNet ", + "name": "Indicio MainNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_mainnet_genesis" }, "idn": { - "name": "Indicio DemoNet ", + "name": "Indicio DemoNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_demonet_genesis" }, "itn": { - "name": "Indicio TestNet ", + "name": "Indicio TestNet", "genesisUrl": "https://raw.githubusercontent.com/Indicio-tech/indicio-network/main/genesis_files/pool_transactions_testnet_genesis" } -} +} \ No newline at end of file diff --git a/fetch-validator-status/networks.py b/fetch-validator-status/networks.py new file mode 100644 index 0000000..d079d74 --- /dev/null +++ b/fetch-validator-status/networks.py @@ -0,0 +1,93 @@ +import os +import json +import urllib.request +import sys +import re +from enum import Enum +from collections import namedtuple +from util import log +from singleton 
import Singleton + +Network = namedtuple('Network', ['id', 'name', 'genesis_url', 'genesis_path']) + +class NetworkEnum(Enum): + def _generate_next_value_(name, start, count, last_values): + return name + +class Networks(object, metaclass=Singleton): + def __init__(self): + self._networks = self.__load_network_list() + + def __get_script_dir(self): + return os.path.dirname(os.path.realpath(__file__)) + + def __load_network_list(self): + log("Loading known network list ...") + with open(f"{self.__get_script_dir()}/networks.json") as json_file: + networks = json.load(json_file) + return networks + + @property + def ids(self): + return self._networks.keys() + + @property + def networks(self): + return self._networks + + @staticmethod + def get_ids(): + networks = Networks() + return networks.ids + + @staticmethod + def get_networks(): + networks = Networks() + return networks.networks + + @staticmethod + def __download_genesis_file(genesis_url: str, destination_path: str): + log("Fetching genesis file ...") + urllib.request.urlretrieve(genesis_url, destination_path) + + @staticmethod + def get_NetworkEnum() -> NetworkEnum: + """Dynamically generates a NetworkEnum that can be used to select the available Networks. 
+ """ + networks = Networks() + return NetworkEnum('Network', list(networks.ids)) + + def resolve(self, network_id: str = None, genesis_url: str = None, genesis_path: str = None): + network_name = None + genesis_path_base = f"{self.__get_script_dir()}/" + + if network_id and network_id in self.ids: + log("Connecting to '{0}' ...".format(self.networks[network_id]["name"])) + network_name = self.networks[network_id]["name"] + genesis_url = self.networks[network_id]["genesisUrl"] + if 'genesisPath' in self.networks[network_id]: + genesis_path = self.networks[network_id]['genesisPath'] + + if genesis_url: + if not network_name: + network_name = genesis_url + network_id = network_name + log(f"Setting network name = {network_name} ...") + + if not genesis_path: + # Remove and replace parts of the string to make a valid path based on the network name. + sub_path = network_name.replace("https://", "") + sub_path = re.sub('[ /.]', '_', sub_path) + genesis_path = f"{genesis_path_base}{sub_path}/" + if not os.path.exists(genesis_path): + os.makedirs(genesis_path) + genesis_path = f"{genesis_path}genesis.txn" + Networks.__download_genesis_file(genesis_url, genesis_path) + self._networks[network_id] = {'name': network_name, 'genesisUrl': genesis_url, 'genesisPath': genesis_path} + + if not os.path.exists(genesis_path): + print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr) + exit() + + network = Network(network_id, network_name, genesis_url, genesis_path) + return network \ No newline at end of file diff --git a/fetch-validator-status/plugin_collection.py b/fetch-validator-status/plugin_collection.py index 67cb0de..e7d865e 100644 --- a/fetch-validator-status/plugin_collection.py +++ b/fetch-validator-status/plugin_collection.py @@ -81,12 +81,11 @@ def reload_plugins(self): async def apply_all_plugins_on_value(self, result, network_name, response, verifiers): """Apply all of the plugins with the argument supplied to this function """ 
- self.log(f'\033[38;5;37mRunning plugins...\033[0m\n') + self.log(f'\033[38;5;37mRunning plugins ...\033[0m\n') for plugin in self.plugins: if plugin.enabled: - self.log(f'\033[38;5;37mRunning {plugin.name}...\033[0m') + self.log(f'\033[38;5;37mRunning {plugin.name} ...\033[0m\n') result = await plugin.perform_operation(result, network_name, response, verifiers) - self.log((f'\033[38;5;37m{plugin.name} yields value\033[0m\n')) #{result} else: self.log(f"\033[38;5;3m{plugin.name} disabled.\033[0m\n") return result @@ -145,6 +144,7 @@ def log(self, *args): print(*args, file=sys.stderr) def plugin_list(self): - self.log("\033[38;5;37m--- Plug-ins ---\033[0m") + self.log("\n\033[38;5;37m--- Plug-ins ---\033[0m") for plugin in self.plugins: - self.log(f"\033[38;5;37m{plugin.name}: {plugin.__class__.__module__}.{plugin.__class__.__name__}\033[0m") \ No newline at end of file + self.log(f"\033[38;5;37m{plugin.name}: {plugin.__class__.__module__}.{plugin.__class__.__name__}\033[0m") + self.log(f"\n") \ No newline at end of file diff --git a/fetch-validator-status/plugins/Example/example.py b/fetch-validator-status/plugins/Example/example.py index 626ba28..d312f20 100644 --- a/fetch-validator-status/plugins/Example/example.py +++ b/fetch-validator-status/plugins/Example/example.py @@ -17,7 +17,7 @@ def __init__(self): self.type = '' def parse_args(self, parser): - # Declear your parser arguments here. This will add them to the fetch_status.py parser arguments. + # Declare your parser arguments here. This will add them to the main.py parser arguments.
parser.add_argument("--example", action="store_true", help="Example Plug-in: Runs expample plug-in") # Here you set your variables with the arguments from the parser diff --git a/fetch-validator-status/plugins/analysis.py b/fetch-validator-status/plugins/analysis.py index 3e42729..66511ce 100644 --- a/fetch-validator-status/plugins/analysis.py +++ b/fetch-validator-status/plugins/analysis.py @@ -31,6 +31,7 @@ async def perform_operation(self, result, network_name, response, verifiers): warnings = [] info = [] entry = {"name": node} + entry["network"] = network_name try: await self.get_node_addresses(entry, verifiers) jsval = json.loads(val) @@ -88,13 +89,20 @@ async def get_primary_name(self, jsval: any, node: str) -> str: async def get_status_summary(self, jsval: any, errors: list) -> any: status = {} status["ok"] = (len(errors) <= 0) + # Ensure there is always a consistent timestamp + # Note: We are not using the timestamp from the node itself for this; result.data.timestamp + # - There could be clock skew on the node which would affect the time series data when + # recorded and graphed. This would adversely affect the comparison of events across nodes. + # - In the case of a node that is not responding, we would not receive the timestamp + # from the node. + # - The solution is to add a consistent timestamp marking the time the data was collected + # by the monitor. 
+ status["timestamp"] = datetime.datetime.now(datetime.timezone.utc).strftime('%s') if jsval and ("REPLY" in jsval["op"]): + if "timestamp" in jsval["result"]["data"]: + status["node_timestamp"] = jsval["result"]["data"]["timestamp"] if "Node_info" in jsval["result"]["data"]: status["uptime"] = str(datetime.timedelta(seconds = jsval["result"]["data"]["Node_info"]["Metrics"]["uptime"])) - if "timestamp" in jsval["result"]["data"]: - status["timestamp"] = jsval["result"]["data"]["timestamp"] - else: - status["timestamp"] = datetime.datetime.now().strftime('%s') if "Software" in jsval["result"]["data"]: status["software"] = {} status["software"]["indy-node"] = jsval["result"]["data"]["Software"]["indy-node"] diff --git a/fetch-validator-status/pool.py b/fetch-validator-status/pool.py new file mode 100644 index 0000000..35ca298 --- /dev/null +++ b/fetch-validator-status/pool.py @@ -0,0 +1,47 @@ +import asyncio +from util import log +from indy_vdr.pool import open_pool +from singleton import Singleton +from networks import Networks + +class PoolCollection(object, metaclass=Singleton): + def __init__(self, verbose, networks: Networks): + self.verbose = verbose + self.networks = networks + self.pool_cache = {} + self.lock = asyncio.Lock() + + async def __fetch_pool_connection(self, genesis_path): + attempt = 3 + while attempt: + try: + log("Connecting to Pool ...") + pool = await open_pool(transactions_path=genesis_path) + except: + log("Pool Timed Out! Trying again ...") + if not attempt: + print("Unable to get response from pool! 3 attempts were made. Exiting ...") + exit() + attempt -= 1 + continue + else: + log("Connected to Pool ...") + break + return pool + + async def get_pool(self, network_id): + network = self.networks.resolve(network_id) + # Network pool connection cache with async thread lock for REST API. + async with self.lock: + if network.id in self.pool_cache: + # Cache hit ... 
+ log(f"The pool for {network.name} was found in the cache ...") + pool = self.pool_cache[network.id]['pool'] + else: + # Cache miss ... + log(f"A pool for {network.name} was not found in the cache, creating new connection ...") + pool = await self.__fetch_pool_connection(network.genesis_path) + self.pool_cache[network.id] = {} + self.pool_cache[network.id]['pool'] = pool + log(f"Cached the pool for {network.name} ...") + return pool, network.name \ No newline at end of file diff --git a/fetch-validator-status/requirements.txt b/fetch-validator-status/requirements.txt new file mode 100644 index 0000000..fd5f597 --- /dev/null +++ b/fetch-validator-status/requirements.txt @@ -0,0 +1,7 @@ +asyncio +pynacl +gspread +oauth2client +fastapi +uvicorn +gunicorn \ No newline at end of file diff --git a/fetch-validator-status/rest_api.py b/fetch-validator-status/rest_api.py new file mode 100644 index 0000000..22a11ff --- /dev/null +++ b/fetch-validator-status/rest_api.py @@ -0,0 +1,100 @@ +import os +import argparse +from typing import Optional +from fastapi import FastAPI, Header, HTTPException, Path, Query +from starlette.responses import RedirectResponse +from util import ( + enable_verbose, +# log, + create_did +) +from pool import PoolCollection +from networks import Networks, NetworkEnum +from fetch_status import FetchStatus, NodeNotFound +from plugin_collection import PluginCollection + +APP_NAME='Hyperledger Indy Node Monitor REST API' +APP_DESCRIPTION='https://github.com/hyperledger/indy-node-monitor' +APP_VERSION='0.0.0' + +# https://fastapi.tiangolo.com/tutorial/metadata/ +app = FastAPI( + title = APP_NAME, + description = APP_DESCRIPTION, + version = APP_VERSION +) + +# global variables +default_args = None +monitor_plugins = None +pool_collection = None +node_info = None + +Network: NetworkEnum = Networks.get_NetworkEnum() + +def set_plugin_parameters(status: bool = False, alerts: bool = False): + # Store args and monitor_plugins for lazy loading. 
+ global default_args, pool_collection, node_info + + if not default_args: + # Create plugin instance and set default args + default_monitor_plugins = PluginCollection('plugins') + parser = argparse.ArgumentParser() + parser.add_argument("-v", "--verbose", default=(os.environ.get('VERBOSE', 'False').lower() == 'true'), action="store_true") + default_monitor_plugins.get_parse_args(parser) + default_args, unknown = parser.parse_known_args() + enable_verbose(default_args.verbose) + pool_collection = PoolCollection(default_args.verbose, Networks()) + node_info = FetchStatus(default_args.verbose, pool_collection) + + # Create namespace with default args and load them into api_args + api_args = argparse.Namespace() + for name, value in default_args._get_kwargs(): + setattr(api_args, name, value) + + # Set api_args with the values from the parameters + setattr(api_args, 'status', status) + setattr(api_args, 'alerts', alerts) + + # Create and load plugins with api_args + monitor_plugins = PluginCollection('plugins') + monitor_plugins.load_all_parse_args(api_args) + + return monitor_plugins + +# Redirect users to the '/docs' page but don't include this endpoint in the docs. 
+@app.get("/", include_in_schema=False) +async def redirect(): + response = RedirectResponse(url='/docs') + return response + +@app.get("/networks") +async def networks(): + data = Networks.get_networks() + return data + +@app.get("/networks/{network}") +async def network(network: Network = Path(Network.sbn, example="sbn", description="The network code."), + status: bool = Query(False, description="Filter results to status only."), + alerts: bool = Query(False, description="Filter results to alerts only."), + seed: Optional[str] = Header(None, description="Your network monitor seed.")): + monitor_plugins = set_plugin_parameters(status, alerts) + ident = create_did(seed) + result = await node_info.fetch(network_id=network.value, monitor_plugins=monitor_plugins, ident=ident) + return result + +@app.get("/networks/{network}/{node}") +async def node(network: Network = Path(Network.sbn, example="sbn", description="The network code."), + node: str = Path(..., example="FoundationBuilder", description="The node name."), + status: bool = Query(False, description="Filter results to status only."), + alerts: bool = Query(False, description="Filter results to alerts only."), + seed: Optional[str] = Header(None, description="Your network monitor seed.")): + monitor_plugins = set_plugin_parameters(status, alerts) + ident = create_did(seed) + try: + result = await node_info.fetch(network_id=network.value, monitor_plugins=monitor_plugins, nodes=node, ident=ident) + except NodeNotFound as error: + print(error) + raise HTTPException(status_code=400, detail=str(error)) + + return result \ No newline at end of file diff --git a/fetch-validator-status/run.sh b/fetch-validator-status/run.sh index b8c27b1..9b9a62f 100755 --- a/fetch-validator-status/run.sh +++ b/fetch-validator-status/run.sh @@ -15,6 +15,17 @@ function getVolumeMount() { echo " --volume='${path}:/home/indy/${mountPoint}:Z' " } +function runCmd() { + _cmd=${1} + if [ ! 
-z ${LOG} ]; then + _cmd+=" > ${LOG%.*}_`date +\%Y-\%m-\%d_%H-%M-%S`.json" + fi + + eval ${_cmd} + # echo + # echo ${_cmd} +} + # IM is for "interactive mode" so Docker is run with the "-it" parameter. Probably never needed # but it is there. Use "IM=1 run.sh ..." to run the Docker container in interactive mode if [ -z "${IM+x}" ]; then @@ -34,8 +45,8 @@ docker build -t fetch_status . > /dev/null 2>&1 cmd="${terminalEmu} docker run --rm ${DOCKER_INTERACTIVE} \ -e "GENESIS_PATH=${GENESIS_PATH}" \ -e "GENESIS_URL=${GENESIS_URL}" \ - -e "SEED=${SEED}"" - + -e "SEED=${SEED}" \ + --publish 8080:8080" # Dynamically mount teh 'conf' directory if it exists. if [ -d "./conf" ]; then @@ -47,5 +58,15 @@ if [ -d "./plugins" ]; then fi cmd+="fetch_status \"$@\"" -eval ${cmd} -# echo ${cmd} \ No newline at end of file + +counter=${SAMPLES:-1} +while [[ ${counter} > 0 ]] +do + runCmd "${cmd}" + counter=$(( ${counter} - 1 )) + if [[ ${counter} > 0 ]]; then + # Nodes update their validator info every minute. + # Therefore calling more than once per minute is not productive. 
+ sleep 60 + fi +done \ No newline at end of file diff --git a/fetch-validator-status/singleton.py b/fetch-validator-status/singleton.py new file mode 100644 index 0000000..fd27736 --- /dev/null +++ b/fetch-validator-status/singleton.py @@ -0,0 +1,8 @@ +# Meta Class +# https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python +class Singleton(type): + _instances = {} + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] \ No newline at end of file diff --git a/fetch-validator-status/util.py b/fetch-validator-status/util.py new file mode 100644 index 0000000..53522e4 --- /dev/null +++ b/fetch-validator-status/util.py @@ -0,0 +1,22 @@ +import sys +from DidKey import DidKey + +verbose = False + +def enable_verbose(enable): + global verbose + verbose = enable + +def log(*args): + if verbose: + print(*args, "\n", file=sys.stderr) + +def create_did(seed): + ident = None + if seed: + try: + ident = DidKey(seed) + log("DID:", ident.did, " Verkey:", ident.verkey) + except: + log("Invalid seed. Continuing anonymously ...") + return ident \ No newline at end of file