Cached return values and lazy evaluation instead of constants
mkoura committed Feb 28, 2021
1 parent 9b07e33 commit ed37353
Showing 16 changed files with 75 additions and 66 deletions.
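The same refactoring pattern repeats throughout the diff: module-level constants that were computed when a module was imported are replaced by functions whose result is cached, so the work happens lazily on first use and only once. A minimal sketch of the before/after shape (names are illustrative, not taken from the repository):

    import functools
    import time


    # Before: a module-level constant is computed at import time, whether or not it is ever used.
    # EXPENSIVE = expensive_setup()

    # After: the same value is computed on the first call and cached for all later calls.
    @functools.lru_cache(maxsize=None)
    def get_expensive() -> float:
        time.sleep(1)  # stand-in for expensive setup work
        return time.time()


    assert get_expensive() == get_expensive()  # the second call returns the cached value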
2 changes: 1 addition & 1 deletion cardano_node_tests/prepare_cluster_scripts.py
@@ -59,7 +59,7 @@ def prepare_scripts_files(
if not (start_script and stop_script):
raise RuntimeError(f"Start/stop scripts not found in '{scriptsdir}'")

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.prepare_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.prepare_scripts_files(
destdir=destdir,
instance_num=instance_num,
start_script=start_script,
2 changes: 1 addition & 1 deletion cardano_node_tests/testnet_cleanup.py
@@ -226,7 +226,7 @@ def main() -> None:
)
args = get_args()

-cluster_obj = cluster_nodes.CLUSTER_TYPE.get_cluster_obj()
+cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj()
cleanup(cluster_obj=cluster_obj, location=args.artifacts_base_dir)


6 changes: 4 additions & 2 deletions cardano_node_tests/tests/conftest.py
@@ -49,8 +49,10 @@ def pytest_configure(config: Any) -> None:
config._metadata["cardano-node"] = str(VERSIONS.node)
config._metadata["cardano-node rev"] = VERSIONS.git_rev
config._metadata["ghc"] = VERSIONS.ghc
-config._metadata["cardano-node-tests rev"] = helpers.CURRENT_COMMIT
-config._metadata["cardano-node-tests url"] = helpers.GITHUB_TREE_URL
+config._metadata["cardano-node-tests rev"] = helpers.get_current_commit()
+config._metadata[
+    "cardano-node-tests url"
+] = f"{helpers.GITHUB_URL}/tree/{helpers.get_current_commit()}"


def _skip_all_tests(config: Any, items: list) -> None:
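The helpers side of this change is not shown above; presumably `helpers.get_current_commit()` now resolves the git revision lazily instead of at import time, and the tree URL is assembled from it on demand. A sketch under that assumption (the GITHUB_URL value shown here is illustrative):

    import functools
    import subprocess

    GITHUB_URL = "https://github.com/input-output-hk/cardano-node-tests"  # illustrative value


    @functools.lru_cache(maxsize=None)
    def get_current_commit() -> str:
        """Return the currently checked-out revision; `git rev-parse` runs only once."""
        return subprocess.check_output(["git", "rev-parse", "HEAD"]).decode().strip()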
4 changes: 2 additions & 2 deletions cardano_node_tests/tests/test_configuration.py
@@ -45,7 +45,7 @@ def epoch_length_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
if destdir_ls:
return destdir_ls[0]

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.copy_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
@@ -74,7 +74,7 @@ def slot_length_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
if destdir_ls:
return destdir_ls[0]

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.copy_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
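For context, fixtures such as `epoch_length_start_cluster` and `slot_length_start_cluster` copy the startup scripts and then rewrite the copied genesis spec before a custom cluster is started. A sketch of the kind of edit that follows the `copy_scripts_files` call (the key name and value are illustrative, not the file's actual body):

    import json
    from pathlib import Path


    def customize_genesis_spec(genesis_spec_file: Path, epoch_length: int) -> None:
        """Rewrite a copied genesis spec with a custom epoch length (illustrative helper)."""
        with open(genesis_spec_file) as fp_in:
            genesis_spec = json.load(fp_in)
        genesis_spec["epochLength"] = epoch_length
        with open(genesis_spec_file, "w") as fp_out:
            json.dump(genesis_spec, fp_out, indent=2)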
2 changes: 1 addition & 1 deletion cardano_node_tests/tests/test_kes.py
@@ -54,7 +54,7 @@ def short_kes_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
if destdir_ls:
return destdir_ls[0]

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.copy_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
8 changes: 5 additions & 3 deletions cardano_node_tests/tests/test_metrics.py
@@ -119,9 +119,11 @@ def test_available_metrics(
):
"""Test that list of available metrics == list of expected metrics."""
# pylint: disable=unused-argument
-prometheus_port = cluster_nodes.CLUSTER_TYPE.cluster_scripts.get_instance_ports(
-    cluster_nodes.get_cluster_env().instance_num
-).prometheus_bft1
+prometheus_port = (
+    cluster_nodes.get_cluster_type()
+    .cluster_scripts.get_instance_ports(cluster_nodes.get_cluster_env().instance_num)
+    .prometheus_bft1
+)

response = get_prometheus_metrics(prometheus_port)

2 changes: 1 addition & 1 deletion cardano_node_tests/tests/test_native_tokens.py
@@ -1277,7 +1277,7 @@ def _mint_tx(
max_size=1000,
)
)
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
def test_long_name(
self,
cluster: clusterlib.ClusterLib,
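`helpers.HYPOTHESIS_SETTINGS` was presumably a settings object built at import time; as a `hypothesis_settings()` call (used here and throughout the test modules below) the decorator is constructed only when a test module actually needs it. A sketch of an assumed implementation — the concrete settings arguments are illustrative:

    import hypothesis


    def hypothesis_settings() -> hypothesis.settings:
        """Build the shared Hypothesis settings decorator on demand."""
        return hypothesis.settings(
            deadline=None,
            suppress_health_check=(hypothesis.HealthCheck.too_slow,),
        )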
14 changes: 7 additions & 7 deletions cardano_node_tests/tests/test_pools.py
@@ -55,7 +55,7 @@ def pool_cost_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
if destdir_ls:
return destdir_ls[0]

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.copy_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
@@ -1356,7 +1356,7 @@ def pool_owners(
return pool_owners

@hypothesis.given(pool_cost=st.integers(max_value=499)) # minPoolCost is now 500
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_low_cost(
self,
@@ -1779,7 +1779,7 @@ def test_stake_pool_metadata_no_homepage(
assert 'key "homepage" not found' in str(excinfo.value)

@hypothesis.given(pool_name=st.text(min_size=51))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_metadata_long_name(
self,
@@ -1812,7 +1812,7 @@ def test_stake_pool_metadata_long_name(
)

@hypothesis.given(pool_description=st.text(min_size=256))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_metadata_long_description(
self,
@@ -1845,7 +1845,7 @@ def test_stake_pool_metadata_long_description(
)

@hypothesis.given(pool_ticker=st.text())
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_metadata_long_ticker(
self,
@@ -1876,7 +1876,7 @@ def test_stake_pool_metadata_long_ticker(
assert '"ticker" must have at least 3 and at most 5 characters' in str(excinfo.value)

@hypothesis.given(pool_homepage=st.text(min_size=425))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_metadata_long_homepage(
self,
@@ -1907,7 +1907,7 @@ def test_stake_pool_metadata_long_homepage(
@hypothesis.given(
metadata_url=st.text(alphabet=st.characters(blacklist_categories=["C"]), min_size=25)
)
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_stake_pool_long_metadata_url(
self,
2 changes: 1 addition & 1 deletion cardano_node_tests/tests/test_staking.py
@@ -683,7 +683,7 @@ class TestRewards:
@allure.link(helpers.get_vcs_link())
@pytest.mark.testnets
@pytest.mark.skipif(
-cluster_nodes.CLUSTER_TYPE.type != cluster_nodes.ClusterType.TESTNET,
+cluster_nodes.get_cluster_type().type != cluster_nodes.ClusterType.TESTNET,
reason="supposed to run on testnet with pools",
)
def test_reward_simple(
2 changes: 1 addition & 1 deletion cardano_node_tests/tests/test_transaction_fees.py
@@ -68,7 +68,7 @@ def payment_addrs(
return addrs

@hypothesis.given(fee=st.integers(max_value=-1))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_negative_fee(
self,
16 changes: 8 additions & 8 deletions cardano_node_tests/tests/test_transactions.py
@@ -924,7 +924,7 @@ def test_negative_change(
)

@hypothesis.given(transfer_add=st.integers(), change_amount=st.integers(min_value=0))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_wrong_balance(
self,
@@ -1250,7 +1250,7 @@ def test_send_funds_to_utxo_address(
)

@hypothesis.given(addr=st.text(alphabet=ADDR_ALPHABET, min_size=98, max_size=98))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_to_non_existent_address(
self,
@@ -1266,7 +1266,7 @@ def test_send_funds_to_non_existent_address(
self._send_funds_to_invalid_address(cluster_obj=cluster, pool_users=pool_users, addr=addr)

@hypothesis.given(addr=st.text(alphabet=ADDR_ALPHABET, min_size=50, max_size=250))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_to_invalid_length_address(
self,
@@ -1284,7 +1284,7 @@ def test_send_funds_to_invalid_length_address(
@hypothesis.given(
addr=st.text(alphabet=st.characters(blacklist_categories=["C"]), min_size=98, max_size=98)
)
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_to_invalid_chars_address(
self,
@@ -1300,7 +1300,7 @@ def test_send_funds_to_invalid_chars_address(
self._send_funds_to_invalid_address(cluster_obj=cluster, pool_users=pool_users, addr=addr)

@hypothesis.given(addr=st.text(alphabet=ADDR_ALPHABET, min_size=98, max_size=98))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_from_non_existent_address(
self,
@@ -1316,7 +1316,7 @@ def test_send_funds_from_non_existent_address(
self._send_funds_from_invalid_address(cluster_obj=cluster, pool_users=pool_users, addr=addr)

@hypothesis.given(addr=st.text(alphabet=ADDR_ALPHABET, min_size=50, max_size=250))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_from_invalid_length_address(
self,
@@ -1334,7 +1334,7 @@ def test_send_funds_from_invalid_length_address(
@hypothesis.given(
addr=st.text(alphabet=st.characters(blacklist_categories=["C"]), min_size=98, max_size=98)
)
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_send_funds_from_invalid_chars_address(
self,
@@ -1389,7 +1389,7 @@ def test_nonexistent_utxo_hash(
assert "BadInputsUTxO" in err

@hypothesis.given(utxo_hash=st.text(alphabet=ADDR_ALPHABET, min_size=10, max_size=550))
-@helpers.HYPOTHESIS_SETTINGS
+@helpers.hypothesis_settings()
@allure.link(helpers.get_vcs_link())
def test_invalid_lenght_utxo_hash(
self,
20 changes: 11 additions & 9 deletions cardano_node_tests/utils/cluster_management.py
@@ -56,9 +56,9 @@

def _kill_supervisor(instance_num: int) -> None:
"""Kill supervisor process."""
-port_num = cluster_nodes.CLUSTER_TYPE.cluster_scripts.get_instance_ports(
-    instance_num
-).supervisor
+port_num = (
+    cluster_nodes.get_cluster_type().cluster_scripts.get_instance_ports(instance_num).supervisor
+)
port_str = f":{port_num}"
netstat = helpers.run_command("netstat -plnt").decode().splitlines()
for line in netstat:
@@ -158,7 +158,9 @@ def instance_dir(self) -> Path:
@property
def ports(self) -> cluster_scripts.InstancePorts:
"""Return port mappings for current cluster instance."""
-return cluster_nodes.CLUSTER_TYPE.cluster_scripts.get_instance_ports(self.cluster_instance)
+return cluster_nodes.get_cluster_type().cluster_scripts.get_instance_ports(
+    self.cluster_instance
+)

def _init_log(self) -> Path:
"""Return path to run log file."""
@@ -226,7 +228,7 @@ def stop_all_clusters(self) -> None:
self._log(f"cluster instance {instance_num} not running")
continue

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.prepare_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.prepare_scripts_files(
destdir=self._create_startup_files_dir(instance_num),
instance_num=instance_num,
)
@@ -359,7 +361,7 @@ def _restart(self, start_cmd: str = "", stop_cmd: str = "") -> None: # noqa: C9
f"stop_cmd='{stop_cmd}'"
)

-startup_files = cluster_nodes.CLUSTER_TYPE.cluster_scripts.prepare_scripts_files(
+startup_files = cluster_nodes.get_cluster_type().cluster_scripts.prepare_scripts_files(
destdir=self.cm._create_startup_files_dir(self.cm.cluster_instance),
instance_num=self.cm.cluster_instance,
start_script=start_cmd,
@@ -536,7 +538,7 @@ def _reload_cluster_obj(self, state_dir: Path) -> None:
# save CLI coverage collected by the old `cluster_obj` instance
self._save_cli_coverage()
# replace the old `cluster_obj` instance and reload data
-self.cm.cache.cluster_obj = cluster_nodes.CLUSTER_TYPE.get_cluster_obj()
+self.cm.cache.cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj()
self.cm.cache.test_data = {}
self.cm.cache.addrs_data = cluster_nodes.load_addrs_data()
self.cm.cache.last_checksum = addrs_data_checksum
@@ -553,7 +555,7 @@ def _reuse_dev_cluster(self) -> clusterlib.ClusterLib:

cluster_obj = self.cm.cache.cluster_obj
if not cluster_obj:
-cluster_obj = cluster_nodes.CLUSTER_TYPE.get_cluster_obj()
+cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj()

# setup faucet addresses
if not (state_dir / cluster_nodes.ADDRS_DATA).exists():
@@ -934,7 +936,7 @@ def get( # noqa: C901

cluster_obj = self.cm.cache.cluster_obj
if not cluster_obj:
-cluster_obj = cluster_nodes.CLUSTER_TYPE.get_cluster_obj()
+cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj()

# `cluster_obj` is ready, we can start the test
break
16 changes: 7 additions & 9 deletions cardano_node_tests/utils/cluster_nodes.py
@@ -1,4 +1,5 @@
"""Functionality for cluster setup and interaction with cluster nodes."""
+import functools
import json
import logging
import os
@@ -298,6 +299,7 @@ def __init__(self) -> None:
)


+@functools.lru_cache
def get_cluster_type() -> ClusterType:
"""Return instance of the cluster type indicated by configuration."""
if configuration.BOOTSTRAP_DIR and configuration.NOPOOLS:
@@ -309,10 +311,6 @@ def get_cluster_type() -> ClusterType:
return LocalCluster()


-# cluster type doesn't change during test run, so it can be used as constant
-CLUSTER_TYPE = get_cluster_type()


def _get_cardano_node_socket_path(instance_num: int) -> Path:
"""Return path to socket file in the given cluster instance."""
socket_path = Path(os.environ["CARDANO_NODE_SOCKET_PATH"]).resolve()
@@ -355,7 +353,7 @@ def start_cluster(cmd: str, args: List[str]) -> clusterlib.ClusterLib:
LOGGER.info(f"Starting cluster with `{cmd}{args_str}`.")
helpers.run_shell_command(f"{cmd}{args_str}", workdir=get_cluster_env().work_dir)
LOGGER.info("Cluster started.")
-return CLUSTER_TYPE.get_cluster_obj()
+return get_cluster_type().get_cluster_obj()


def stop_cluster(cmd: str) -> None:
@@ -368,9 +366,9 @@ def restart_node(node_name: str) -> None:
"""Restart single node of the running cluster."""
LOGGER.info(f"Restarting cluster node `{node_name}`.")
cluster_env = get_cluster_env()
-supervisor_port = CLUSTER_TYPE.cluster_scripts.get_instance_ports(
-    cluster_env.instance_num
-).supervisor
+supervisor_port = (
+    get_cluster_type().cluster_scripts.get_instance_ports(cluster_env.instance_num).supervisor
+)
try:
helpers.run_command(
f"supervisorctl -s http://localhost:{supervisor_port} restart {node_name}",
@@ -442,7 +440,7 @@ def setup_test_addrs(cluster_obj: clusterlib.ClusterLib, destination_dir: FileTy
cluster_env = get_cluster_env()

LOGGER.debug("Creating addresses and keys for tests.")
-addrs_data = CLUSTER_TYPE.create_addrs_data(
+addrs_data = get_cluster_type().create_addrs_data(
cluster_obj=cluster_obj, destination_dir=destination_dir
)

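Because `get_cluster_type` is now wrapped in `@functools.lru_cache` and takes no arguments, the first call constructs the `ClusterType` instance and every later call returns that same object — which is what the removed `CLUSTER_TYPE` constant provided, minus the import-time evaluation. A standalone illustration of that caching behaviour (not the repository's class):

    import functools


    class _DemoClusterType:
        instances = 0

        def __init__(self) -> None:
            _DemoClusterType.instances += 1


    @functools.lru_cache(maxsize=None)
    def get_demo_cluster_type() -> _DemoClusterType:
        return _DemoClusterType()


    assert get_demo_cluster_type() is get_demo_cluster_type()
    assert _DemoClusterType.instances == 1  # constructed exactly once, on first use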
8 changes: 4 additions & 4 deletions cardano_node_tests/utils/clusterlib_utils.py
@@ -108,7 +108,7 @@ def fund_from_genesis(
if not fund_dst:
return

-with helpers.FileLockIfXdist(f"{helpers.TEST_TEMP_DIR}/{cluster_obj.genesis_utxo_addr}.lock"):
+with helpers.FileLockIfXdist(f"{helpers.get_basetemp()}/{cluster_obj.genesis_utxo_addr}.lock"):
tx_name = tx_name or helpers.get_timestamped_rand_str()
tx_name = f"{tx_name}_genesis_funding"
fund_tx_files = clusterlib.TxFiles(
@@ -142,7 +142,7 @@ def return_funds_to_faucet(
"""
tx_name = tx_name or helpers.get_timestamped_rand_str()
tx_name = f"{tx_name}_return_funds"
-with helpers.FileLockIfXdist(f"{helpers.TEST_TEMP_DIR}/{faucet_addr}.lock"):
+with helpers.FileLockIfXdist(f"{helpers.get_basetemp()}/{faucet_addr}.lock"):
try:
logging.disable(logging.ERROR)
for src in src_addrs:
@@ -188,7 +188,7 @@ def fund_from_faucet(
return

src_address = faucet_data["payment"].address
-with helpers.FileLockIfXdist(f"{helpers.TEST_TEMP_DIR}/{src_address}.lock"):
+with helpers.FileLockIfXdist(f"{helpers.get_basetemp()}/{src_address}.lock"):
tx_name = tx_name or helpers.get_timestamped_rand_str()
tx_name = f"{tx_name}_funding"
fund_tx_files = clusterlib.TxFiles(signing_key_files=[faucet_data["payment"].skey_file])
@@ -383,7 +383,7 @@ def update_params(
_cli_args = [(u.arg, str(u.value)) for u in update_proposals]
cli_args = list(itertools.chain.from_iterable(_cli_args))

-with helpers.FileLockIfXdist(f"{helpers.TEST_TEMP_DIR}/update_params.lock"):
+with helpers.FileLockIfXdist(f"{helpers.get_basetemp()}/update_params.lock"):
LOGGER.info("Waiting for new epoch to submit proposal.")
cluster_obj.wait_for_new_epoch()

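`helpers.TEST_TEMP_DIR` becomes `helpers.get_basetemp()` in every file-lock path. The helper itself is not shown in the excerpt above; a cached getter that resolves the base temporary directory on first use would fit the commit's pattern, e.g. (the environment variable and fallback are assumptions):

    import functools
    import os
    import tempfile
    from pathlib import Path


    @functools.lru_cache(maxsize=None)
    def get_basetemp() -> Path:
        """Return the base directory for lock files, resolved lazily (assumed implementation)."""
        basetemp = Path(os.environ.get("PYTEST_BASETEMP", tempfile.gettempdir())) / "lock_files"
        basetemp.mkdir(parents=True, exist_ok=True)
        return basetemp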
