Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for running as an API server. #35

Merged
merged 19 commits into from
Dec 24, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
17 changes: 9 additions & 8 deletions fetch-validator-status/DidKey.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@

class DidKey:
def __init__(self, seed):
seed = seed_as_bytes(seed)
self.sk = nacl.signing.SigningKey(seed)
self.seed = seed
self.seed = self.seed_as_bytes()
self.sk = nacl.signing.SigningKey(self.seed)
self.vk = bytes(self.sk.verify_key)
self.did = base58.b58encode(self.vk[:16]).decode("ascii")
self.verkey = base58.b58encode(self.vk).decode("ascii")
Expand All @@ -15,9 +16,9 @@ def sign_request(self, req: Request):
signed = self.sk.sign(req.signature_input)
req.set_signature(signed.signature)

def seed_as_bytes(seed):
if not seed or isinstance(seed, bytes):
return seed
if len(seed) != 32:
return base64.b64decode(seed)
return seed.encode("ascii")
def seed_as_bytes(self):
    """Return the stored seed normalized to bytes.

    Falsy or already-bytes seeds pass through untouched. A 32-character
    string is taken as a raw ASCII seed; any other length is assumed to
    be base64-encoded and is decoded.
    """
    raw = self.seed
    if not raw or isinstance(raw, bytes):
        return raw
    if len(raw) == 32:
        return raw.encode("ascii")
    return base64.b64decode(raw)
5 changes: 3 additions & 2 deletions fetch-validator-status/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,10 @@ RUN apt-get update -y && \
rm -rf /var/lib/apt/lists/*
USER $user

RUN pip install pynacl gspread oauth2client
ADD requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

ADD networks.json .
ADD *.py ./

ENTRYPOINT ["bash", "-c", "python fetch_status.py $@", "--"]
ENTRYPOINT ["bash", "-c", "python main.py $@", "--"]
4 changes: 4 additions & 0 deletions fetch-validator-status/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,10 @@ Note that there are three different formats for the timestamps in the data struc

For info on plug-ins see the plug-ins [readme](plugins/README.md)

## Rest API

For info on Rest API see [REST API](REST_API.md)

### Running against other Indy Networks

To see the validator info against any other Indy network, you need a URL for the Genesis file for the network, and the seed for a suitably authorized DID. The pool Genesis file URLs are easy, since that is published data needed by agents connecting to Indy networks. Sovrin genesis URLs can be found [here](https://github.com/sovrin-foundation/sovrin/tree/master/sovrin). You need the URL for the raw version of the pool transaction files. For example, the URL you need for the Sovrin MainNet is:
Expand Down
8 changes: 8 additions & 0 deletions fetch-validator-status/REST_API.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Rest API

To run [fetch validator](README.md) as a web API, `cd fetch-validator-status` and run `IM=1 ./run.sh --web -v` to start the server.
To run in debug mode, add `--debug`.

## How To Use

After running the command above, go to http://localhost:8080/ in your browser. Then click on one of the colored drop-downs and click the 'Try it out' button. Fill out any required fields, then click 'execute'. This will give you a response with a curl command, request URL, and response body.
178 changes: 47 additions & 131 deletions fetch-validator-status/fetch_status.py
Original file line number Diff line number Diff line change
@@ -1,139 +1,55 @@
import argparse
import asyncio
# import base58
# import base64
import json
import os
import sys
# import datetime
import urllib.request
# from typing import Tuple

# import nacl.signing

import indy_vdr
from indy_vdr.ledger import (
build_get_validator_info_request,
build_get_txn_request,
# Request,
)
from indy_vdr.pool import open_pool
from util import log
from plugin_collection import PluginCollection
# import time
from DidKey import DidKey
from pool import PoolCollection
from singleton import Singleton

class NodeNotFound(Exception):
    """Raised when a requested node alias is not a member of the target network."""

class FetchStatus(object, metaclass=Singleton):
def __init__(self, verbose, pool_collection: PoolCollection):
self.verbose = verbose
self.pool_collection = pool_collection

async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None):
result = []
verifiers = {}

pool, network_name = await self.pool_collection.get_pool(network_id)
if ident:
log(f"Building request with did: {ident.did} ...")
request = build_get_validator_info_request(ident.did)
ident.sign_request(request)
else:
log("Building an anonymous request ...")
request = build_get_txn_request(None, 1, 1)

from_nodes = []
if nodes:
from_nodes = nodes.split(",")

# Global verbosity flag; flipped on by the -v/--verbose CLI option.
verbose = False


def log(*args):
    """Write *args* to stderr (with a trailing blank line) when verbose is on."""
    if not verbose:
        return
    print(*args, "\n", file=sys.stderr)


async def fetch_status(genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None):
    """Open the ledger pool, submit a status request and print the processed result.

    Args:
        genesis_path: Path to the pool genesis transactions file.
        nodes: Optional comma-delimited node aliases; default is all nodes.
        ident: Optional privileged DID; anonymous request when None.
        network_name: Human-readable network label passed to the plugins.

    Fixes over the previous version:
    - The old retry loop checked ``if not attempt`` *before* decrementing, so
      after three failures it fell out of the loop with ``pool`` unbound and
      crashed with NameError instead of printing the error and exiting.
    - Bare ``except:`` narrowed to ``except Exception``.
    - Typo in the error message ("where made" -> "were made").
    """
    # Start Of Engine
    # Try up to 3 times to open the pool before giving up cleanly.
    pool = None
    for _ in range(3):
        try:
            pool = await open_pool(transactions_path=genesis_path)
            break
        except Exception:
            log("Pool Timed Out! Trying again...")
    if pool is None:
        print("Unable to get pool Response! 3 attempts were made. Exiting...")
        exit()

    result = []
    verifiers = {}

    if ident:
        # Privileged request signed with the supplied DID.
        request = build_get_validator_info_request(ident.did)
        ident.sign_request(request)
    else:
        # Anonymous fallback: fetch txn 1 from ledger 1.
        request = build_get_txn_request(None, 1, 1)

    from_nodes = []
    if nodes:
        from_nodes = nodes.split(",")
    response = await pool.submit_action(request, node_aliases = from_nodes)
    try:
        # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7
        verifiers = await pool.get_verifiers()
    except AttributeError:
        # Older indy-vdr without get_verifiers(); proceed with an empty map.
        pass
    # End Of Engine

    result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers)
    print(json.dumps(result, indent=2))

def get_script_dir():
    """Return the absolute directory containing this script."""
    script_path = os.path.realpath(__file__)
    return os.path.dirname(script_path)


def download_genesis_file(url: str, target_local_path: str):
    """Download the pool genesis file from *url* to *target_local_path*.

    Fix: the previous version immediately overwrote *target_local_path*
    with a hard-coded "<script dir>/genesis.txn", silently ignoring the
    caller-supplied path (so the GENESIS_PATH setting had no effect).
    The default behaviour is unchanged because the CLI default for the
    path is that same location.
    """
    log("Fetching genesis file ...")
    urllib.request.urlretrieve(url, target_local_path)

def load_network_list():
    """Parse and return the known-network registry (networks.json) as a dict."""
    networks_path = f"{get_script_dir()}/networks.json"
    with open(networks_path) as networks_file:
        return json.load(networks_file)

def list_networks():
    """Return the IDs (dict keys) of all known networks."""
    return load_network_list().keys()

if __name__ == "__main__":
    # Discover and instantiate all monitoring plug-ins from the plugins/ package.
    monitor_plugins = PluginCollection('plugins')

    parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.")
    parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.")
    parser.add_argument("--list-nets", action="store_true", help="List known networks.")
    parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.")
    parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH") or f"{get_script_dir()}/genesis.txn" , help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.")
    parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.")
    parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. The default is all of the nodes in the pool.")
    parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.")

    # Let each plug-in register its own CLI options before parsing.
    monitor_plugins.get_parse_args(parser)
    args, unknown = parser.parse_known_args()

    # Enables log() output for the rest of the run (module-level flag).
    verbose = args.verbose

    monitor_plugins.load_all_parse_args(args)

    # --list-nets is informational only: dump the registry and stop.
    if args.list_nets:
        print(json.dumps(load_network_list(), indent=2))
        exit()

    network_name = None
    if args.net:
        # A known network ID supplies both the genesis URL and display name.
        log("Loading known network list ...")
        networks = load_network_list()
        if args.net in networks:
            log("Connecting to '{0}' ...".format(networks[args.net]["name"]))
            args.genesis_url = networks[args.net]["genesisUrl"]
            network_name = networks[args.net]["name"]

    if args.genesis_url:
        download_genesis_file(args.genesis_url, args.genesis_path)
        if not network_name:
            # Fall back to the URL as the label when no friendly name is known.
            network_name = args.genesis_url
    if not os.path.exists(args.genesis_path):
        # No genesis file available by any route: explain and bail out.
        print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr)
        parser.print_help()
        exit()

    did_seed = None if not args.seed else args.seed

    log("indy-vdr version:", indy_vdr.version())
    if did_seed:
        # Privileged run: derive the DID/verkey from the seed.
        ident = DidKey(did_seed)
        log("DID:", ident.did, " Verkey:", ident.verkey)
    else:
        # Anonymous run.
        ident = None

    asyncio.get_event_loop().run_until_complete(fetch_status(args.genesis_path, args.nodes, ident, network_name))
# Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7
log("Getting list of verifiers ...")
verifiers = await pool.get_verifiers()
except AttributeError:
log("Unable to get list of verifiers. Please make sure you have the latest version of indy-vdr.")
pass

if verifiers and from_nodes:
for node in from_nodes:
if not node in verifiers:
raise NodeNotFound(f'{node} is not a member of {network_name}.')

log("Submitting request ...")
response = await pool.submit_action(request, node_aliases = from_nodes)

log("Passing results to plugins for processing ...")
result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers)
log("Processing complete.")
return result
73 changes: 73 additions & 0 deletions fetch-validator-status/gunicorn_conf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
"""
File created by tiangolo.
https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/master/docker-images/gunicorn_conf.py
"""

import json
import multiprocessing
import os

workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1")
max_workers_str = os.getenv("MAX_WORKERS")
use_max_workers = None
if max_workers_str:
use_max_workers = int(max_workers_str)
web_concurrency_str = os.getenv("WEB_CONCURRENCY", None)

host = os.getenv("HOST", "0.0.0.0")
port = os.getenv("PORT", "8080")
bind_env = os.getenv("BIND", None)
use_loglevel = os.getenv("LOG_LEVEL", "info")
if bind_env:
use_bind = bind_env
else:
use_bind = f"{host}:{port}"

cores = multiprocessing.cpu_count()
workers_per_core = float(workers_per_core_str)
default_web_concurrency = workers_per_core * cores
if web_concurrency_str:
web_concurrency = int(web_concurrency_str)
assert web_concurrency > 0
else:
web_concurrency = max(int(default_web_concurrency), 2)
if use_max_workers:
web_concurrency = min(web_concurrency, use_max_workers)
accesslog_var = os.getenv("ACCESS_LOG", "-")
use_accesslog = accesslog_var or None
errorlog_var = os.getenv("ERROR_LOG", "-")
use_errorlog = errorlog_var or None
graceful_timeout_str = os.getenv("GRACEFUL_TIMEOUT", "120")
timeout_str = os.getenv("TIMEOUT", "120")
keepalive_str = os.getenv("KEEP_ALIVE", "5")

# Gunicorn config variables
loglevel = use_loglevel
workers = web_concurrency
bind = use_bind
errorlog = use_errorlog
worker_tmp_dir = "/dev/shm"
accesslog = use_accesslog
graceful_timeout = int(graceful_timeout_str)
timeout = int(timeout_str)
keepalive = int(keepalive_str)


# For debugging and testing
log_data = {
"loglevel": loglevel,
"workers": workers,
"bind": bind,
"graceful_timeout": graceful_timeout,
"timeout": timeout,
"keepalive": keepalive,
"errorlog": errorlog,
"accesslog": accesslog,
# Additional, non-gunicorn variables
"workers_per_core": workers_per_core,
"use_max_workers": use_max_workers,
"host": host,
"port": port,
}
print('gunicorn config:')
print(json.dumps(log_data, indent=2))