Merge pull request hyperledger#5 from WadeBarnes/feature/fastapi
Refactor PoolCollection
KoleBarnes committed Jun 23, 2021
2 parents 9793a24 + 1bb37e9 commit 03c5167
Showing 4 changed files with 122 additions and 82 deletions.
8 changes: 3 additions & 5 deletions fetch-validator-status/fetch_status.py
@@ -13,13 +13,11 @@ def __init__(self, verbose, pool_collection: PoolCollection):
self.verbose = verbose
self.pool_collection = pool_collection

async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None, genesis_url: str = None, genesis_path: str = None):
async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None):
result = []
verifiers = {}

network_info = self.pool_collection.get_network_info(network, genesis_url, genesis_path)
pool = await self.pool_collection.get_pool(network_info)

pool, network_name = await self.pool_collection.get_pool(network_id)
if ident:
log(f"Building request with did: {ident.did} ...")
request = build_get_validator_info_request(ident.did)
@@ -44,6 +42,6 @@ async def fetch(self, network, monitor_plugins: PluginCollection, nodes: str = N
pass

log("Passing results to plugins for processing ...")
result = await monitor_plugins.apply_all_plugins_on_value(result, network_info.network_name, response, verifiers)
result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers)
log("Processing complete.")
return result
16 changes: 12 additions & 4 deletions fetch-validator-status/main.py
@@ -10,11 +10,12 @@
)
from fetch_status import FetchStatus
from pool import PoolCollection
from pool import Networks
from plugin_collection import PluginCollection

if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.")
parser.add_argument("--net", choices=PoolCollection.list_networks(), help="Connect to a known network using an ID.")
parser.add_argument("--net", choices=Networks.get_names(), help="Connect to a known network using an ID.")
parser.add_argument("--list-nets", action="store_true", help="List known networks.")
parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.")
parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH"), help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.")
@@ -38,13 +39,20 @@
enable_verbose(args.verbose)

if args.list_nets:
print(json.dumps(PoolCollection.load_network_list(), indent=2))
print(json.dumps(Networks.get_all(), indent=2))
exit()

log("indy-vdr version:", indy_vdr.version())
did_seed = None if not args.seed else args.seed
ident = create_did(did_seed)
pool_collection = PoolCollection(args.verbose)
networks = Networks()

# ToDo:
# - Flesh out Networks.resolve so this registers an ad hoc network
# (i.e. the user passed in args.genesis_url or args.genesis_path rather than a known network id).
networks.resolve(args.net, args.genesis_url, args.genesis_path)

pool_collection = PoolCollection(args.verbose, networks)
status = FetchStatus(args.verbose, pool_collection)
result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident, args.genesis_url, args.genesis_path))
result = asyncio.get_event_loop().run_until_complete(status.fetch(args.net, monitor_plugins, args.nodes, ident))
print(json.dumps(result, indent=2))
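Note (not part of this diff): with this refactor, main.py resolves the network up front and FetchStatus.fetch only needs the network id. Below is a minimal sketch of the same wiring used as a library, assuming the fetch-validator-status modules are importable and that a PluginCollection has already been built elsewhere (its construction is not shown in this diff).

    import asyncio

    from fetch_status import FetchStatus
    from plugin_collection import PluginCollection
    from pool import Networks, PoolCollection

    async def fetch_network_status(network_id: str, monitor_plugins: PluginCollection):
        networks = Networks()                 # singleton; loads networks.json on first use
        networks.resolve(network=network_id)  # ensures a local genesis file exists for a known network id
        pool_collection = PoolCollection(False, networks)
        status = FetchStatus(False, pool_collection)
        return await status.fetch(network_id, monitor_plugins)

    # Example ("sbn" is a hypothetical network id from networks.json):
    # result = asyncio.get_event_loop().run_until_complete(fetch_network_status("sbn", monitor_plugins))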
170 changes: 101 additions & 69 deletions fetch-validator-status/pool.py
@@ -8,11 +8,91 @@
from util import log
from indy_vdr.pool import open_pool
from singleton import Singleton



Network = namedtuple('Network', ['name', 'genesis_url', 'genesis_path'])

class Networks(object, metaclass=Singleton):
def __init__(self):
self._networks = self.__load_network_list()

def __get_script_dir(self):
return os.path.dirname(os.path.realpath(__file__))

def __load_network_list(self):
log("Loading known network list ...")
with open(f"{self.__get_script_dir()}/networks.json") as json_file:
networks = json.load(json_file)
return networks

@property
def names(self):
return self._networks.keys()

@property
def networks(self):
return self._networks

@staticmethod
def get_names():
networks = Networks()
return networks.names

@staticmethod
def get_all():
networks = Networks()
return networks.networks

@staticmethod
def __download_genesis_file(url: str, target_local_path: str):
log("Fetching genesis file ...")
urllib.request.urlretrieve(url, target_local_path)

# ToDo:
# - Refactor to maintain the list of networks dynamically using self._networks
# - In the case a network does not exist in the list, add it.
# - For example, in the case a user provides a genesis_url or genesis_path rather than a named (known) network.
# - The key for the network should be dynamically generated, could simply be Network#; Network1, Network2, etc.
# - As genesis files are downloaded (or provided) the entries should be updated with the genesis_path information.
# - Genesis files should only be downloaded for entries without genesis_path info.
def resolve(self, network: str = None, genesis_url: str = None, genesis_path: str = None):
network_id = None
genesis_path_base = f"{self.__get_script_dir()}/"

if network:
if network in self.names:
log("Connecting to '{0}' ...".format(self.networks[network]["name"]))
genesis_url = self.networks[network]["genesisUrl"]
network_id = self.networks[network]["name"]

if genesis_url:
if not network_id:
network_id = genesis_url
log(f"Setting network name = {network_id} ...")
if not genesis_path:
# Remove and replace parts of the string to make a file name to create the path.
network_id_path = network_id.replace("https://", "")
network_id_path = re.sub('[ /.]', '_', network_id_path)
genesis_path = f"{genesis_path_base}{network_id_path}/"
if not os.path.exists(genesis_path):
os.makedirs(genesis_path)
genesis_path = f"{genesis_path}genesis.txn"

Networks.__download_genesis_file(genesis_url, genesis_path)
if not os.path.exists(genesis_path):
print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr)
exit()

network = Network(network_id, genesis_url, genesis_path)

return network


class PoolCollection(object, metaclass=Singleton):
def __init__(self, verbose):
def __init__(self, verbose, networks: Networks):
self.verbose = verbose
self.network_cache = {}
self.networks = networks
self.pool_cache = {}
self.lock = asyncio.Lock()

async def fetch_pool_connection(self, genesis_path):
@@ -33,73 +113,25 @@ async def fetch_pool_connection(self, genesis_path):
break
return pool

async def get_pool(self, network_info):
# ToDo:
# - Once Networks.resolve is fully fleshed out and the Networks class manages all of the network properties,
# this class no longer has to manage the 'genesis_path' and 'genesis_url' properties; it can use the
# networks instance for lookups, and cache and look up information by network key rather than by network name;
# Networks.names (the network keys), rather than full Networks.networks[key].name (network name).
async def get_pool(self, network_id):
network = self.networks.resolve(network_id)
# Network pool connection cache with async thread lock for REST API.
async with self.lock:
if network_info.network_name in self.network_cache:
if network.name in self.pool_cache:
# Use cache.
log(f"Pool for {network_info.network_name} found in cache ... ")
pool = self.network_cache[network_info.network_name]['pool']
else:
log(f"Pool for {network.name} found in cache ... ")
pool = self.pool_cache[network.name]['pool']
else:
# Create cache.
log(f"Pool for {network_info.network_name} not found in cache, creating new connection ... ")
self.network_cache[network_info.network_name] = {}
self.network_cache[network_info.network_name]['genesis_path'] = network_info.genesis_path
self.network_cache[network_info.network_name]['genesis_url'] = network_info.genesis_url
pool = await self.fetch_pool_connection(network_info.genesis_path)
self.network_cache[network_info.network_name]['pool'] = pool
return pool

def get_network_info(self, network: str = None, genesis_url: str = None, genesis_path: str = None):
network_name = None
genesis_path_base = f"{PoolCollection.get_script_dir()}/"

if network:
log("Loading known network list ...")
networks = PoolCollection.load_network_list()
if network in networks:
log("Connecting to '{0}' ...".format(networks[network]["name"]))
genesis_url = networks[network]["genesisUrl"]
network_name = networks[network]["name"]

if genesis_url:
if not network_name:
network_name = genesis_url
log(f"Setting network name = {network_name} ...")
if not genesis_path:
# Remove and replace parts of the string to make a file name to create the path.
network_name_path = network_name.replace("https://", "")
network_name_path = re.sub('[ /.]', '_', network_name_path)
genesis_path = f"{genesis_path_base}{network_name_path}/"
if not os.path.exists(genesis_path):
os.makedirs(genesis_path)
genesis_path = f"{genesis_path}genesis.txn"

self.download_genesis_file(genesis_url, genesis_path)
if not os.path.exists(genesis_path):
print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr)
exit()

Network_Info = namedtuple('Network_Info', ['network_name', 'genesis_url', 'genesis_path'])
network_info = Network_Info(network_name, genesis_url, genesis_path)

return network_info

def download_genesis_file(self, url: str, target_local_path: str):
log("Fetching genesis file ...")
urllib.request.urlretrieve(url, target_local_path)

@staticmethod
def get_script_dir():
return os.path.dirname(os.path.realpath(__file__))

@staticmethod
def load_network_list():
with open(f"{PoolCollection.get_script_dir()}/networks.json") as json_file:
networks = json.load(json_file)
return networks

@staticmethod
def list_networks():
networks = PoolCollection.load_network_list()
return networks.keys()
log(f"Pool for {network.name} not found in cache, creating new connection ... ")
self.pool_cache[network.name] = {}
self.pool_cache[network.name]['genesis_path'] = network.genesis_path
self.pool_cache[network.name]['genesis_url'] = network.genesis_url
pool = await self.fetch_pool_connection(network.genesis_path)
self.pool_cache[network.name]['pool'] = pool
return pool, network.name
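Note (not part of this diff): both Networks and PoolCollection use the Singleton metaclass imported from singleton.py, which this commit does not touch. A typical metaclass-based singleton looks roughly like the sketch below (an assumption about its shape, not the repository's actual code); it is what lets the static helpers Networks.get_names() and Networks.get_all() call Networks() cheaply and reuse the instance that already loaded networks.json.

    class Singleton(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            # Construct the class only once; later calls return the cached instance.
            if cls not in cls._instances:
                cls._instances[cls] = super().__call__(*args, **kwargs)
            return cls._instances[cls]

It also means main.py and rest_api.py end up sharing a single PoolCollection instance, so the pool_cache above is effectively process-wide.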
10 changes: 6 additions & 4 deletions fetch-validator-status/rest_api.py
@@ -8,6 +8,7 @@
create_did
)
from pool import PoolCollection
from pool import Networks
from fetch_status import FetchStatus
from plugin_collection import PluginCollection

@@ -26,6 +27,7 @@
monitor_plugins = None
pool_collection = None


# TODO fix
status_test = None

@@ -42,7 +44,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False):
default_args, unknown = parser.parse_known_args()
enable_verbose(default_args.verbose)
global pool_collection
pool_collection = PoolCollection(default_args.verbose)
pool_collection = PoolCollection(default_args.verbose, Networks())

# TODO fix
global status_test
@@ -65,7 +67,7 @@ def set_plugin_parameters(status: bool = False, alerts: bool = False):

@app.get("/networks")
async def networks():
data = PoolCollection.load_network_list()
data = Networks.get_all()
return data

@app.get("/networks/{network}")
Expand All @@ -74,7 +76,7 @@ async def network(network, status: bool = False, alerts: bool = False, seed: Opt
ident = create_did(seed)

# TODO fix
result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, ident=ident)
result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, ident=ident)
return result

@app.get("/networks/{network}/{node}")
Expand All @@ -83,5 +85,5 @@ async def node(network, node, status: bool = False, alerts: bool = False, seed:
ident = create_did(seed)

# TODO fix
result = await status_test.fetch(network=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident)
result = await status_test.fetch(network_id=network, monitor_plugins=monitor_plugins, nodes=node, ident=ident)
return result
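Note (not part of this diff): the REST API keeps the same endpoint paths; only the Networks/PoolCollection plumbing changes. One way to exercise the routes without standing up a server is FastAPI's TestClient, a sketch that assumes the project's dependencies (fastapi, indy-vdr, etc.) are installed and that rest_api imports cleanly.

    from fastapi.testclient import TestClient

    from rest_api import app

    client = TestClient(app)

    # Backed by Networks.get_all()
    print(client.get("/networks").json())

    # "sbn" is a hypothetical network id; "seed" is the optional DID seed query parameter.
    # print(client.get("/networks/sbn", params={"seed": "0" * 32}).json())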
