Skip to content

Commit

Permalink
Refactoring part 2
Browse files Browse the repository at this point in the history
  • Loading branch information
ArturWieczorek committed Jun 5, 2023
1 parent 156c814 commit e29d88f
Show file tree
Hide file tree
Showing 3 changed files with 98 additions and 119 deletions.
26 changes: 17 additions & 9 deletions .github/workflows/node_sync_test.yaml
Expand Up @@ -5,8 +5,17 @@ on:
branches:
- sync_tests
inputs:
environment:
description: Environment on which Buildkite agent will run tests
type: choice
options:
- mainnet
- preprod
- preview
- shelley-qa
default: mainnet
build_mode:
description: how to get the node & cli executables - currently only nix is supported and windows agents can use only cabal
description: Currently only nix is supported, there are no prebuilt CI binaries available anymore. Windows agents can only build with cabal, nix is not supported.
type: choice
options:
- nix
Expand All @@ -16,37 +25,37 @@ on:
required: true
default: "None"
node_rev1:
description: desired cardano-node revision (used for initial sync) - cardano-node (tags/1.33.0-rc2) tag or branch
description: Desired cardano-node revision (used for initial sync) - cardano-node (tags/1.33.0-rc2) tag or branch
required: true
default: "None"
node_topology1:
description: desired cardano-node topology type (used for initial sync) - legacy, p2p
description: Desired cardano-node topology type (used for initial sync) - legacy, p2p
type: choice
options:
- legacy
- p2p
default: legacy
node_start_arguments1:
description: extra arguments to be used when starting the node using tag_no1 (--a1 --a2 21)
description: Extra arguments to be used when starting the node using tag_no1 (--a1 --a2 21)
required: false
default: "None"
tag_no2:
description: rev_label in db/visuals (1.33.0-rc2 (tag number) or 1.33.0 (release number) or 1.33.0_PR2124 (for not released and not tagged runs with a specific node PR/version))
required: true
default: "None"
node_rev2:
description: desired cardano-node revision (used for final sync) - cardano-node (tags/1.33.0-rc2) tag or branch
description: Desired cardano-node revision (used for final sync) - cardano-node (tags/1.33.0-rc2) tag or branch
required: true
default: "None"
node_topology2:
description: desired cardano-node topology type (used for final sync) - legacy, p2p
description: Desired cardano-node topology type (used for final sync) - legacy, p2p
type: choice
options:
- legacy
- p2p
default: legacy
node_start_arguments2:
description: extra arguments to be used when starting the node using tag_no2 (--a1 --a2 21)
description: Extra arguments to be used when starting the node using tag_no2 (--a1 --a2 21)
required: false
default: "None"
jobs:
Expand All @@ -66,7 +75,7 @@ jobs:
AWS_DB_HOSTNAME: ${{ secrets.AWS_DB_HOSTNAME }}
BLOCKFROST_API_KEY: ${{ secrets.BLOCKFROST_API_KEY }}
BUILD_ENV_VARS: '{
"env":"preprod",
"env":"${{ github.event.inputs.environment }}",
"build_mode":"${{ github.event.inputs.build_mode }}",
"node_rev1":"${{ github.event.inputs.node_rev1 }}",
"node_rev2":"${{ github.event.inputs.node_rev2 }}",
Expand Down Expand Up @@ -175,7 +184,6 @@ jobs:
pip install colorama
- name: Run sync test
# ${{ (runner.os == 'Windows' && '$env:PATH=("C:\msys64\mingw64\bin;D:\a\cardano-node-tests\cardano-node-tests\cardano_node_tests\cardano-node;D:\a\cardano-node-tests\cardano-node-tests\cardano_node_tests\cardano-cli;{0}" -f $env:PATH)') || '' }}
env:
BLOCKFROST_API_KEY: ${{ secrets.BLOCKFROST_API_KEY }}
run: |
Expand Down
102 changes: 37 additions & 65 deletions sync_tests/node_sync_test.py
Expand Up @@ -730,13 +730,13 @@ def copy_node_executables(src_location, dst_location, build_mode):
shutil.copy2(Path(src_location) / node_binary_location / "cardano-node",
Path(dst_location) / "cardano-node")
except Exception as e:
print(f" !!! ERROR - could not copy the cardano-cli file - {e}")
print_error(f" !!! ERROR - could not copy the cardano-cli file - {e}")
exit(1)
try:
shutil.copy2(Path(src_location) / cli_binary_location / "cardano-cli",
Path(dst_location) / "cardano-cli")
except Exception as e:
print(f" !!! ERROR - could not copy the cardano-cli file - {e}")
print_error(f" !!! ERROR - could not copy the cardano-cli file - {e}")
exit(1)
time.sleep(5)
if build_mode == "cabal":
Expand All @@ -746,83 +746,57 @@ def copy_node_executables(src_location, dst_location, build_mode):
try:
shutil.copy2(node_binary_location, Path(dst_location) / "cardano-node")
except Exception as e:
print(f" !!! ERROR - could not copy the cardano-cli file - {e}")
print_error(f" !!! ERROR - could not copy the cardano-cli file - {e}")
exit(1)
try:
shutil.copy2(cli_binary_location, Path(dst_location) / "cardano-cli")
except Exception as e:
print(f" !!! ERROR - could not copy the cardano-cli file - {e}")
print_error(f" !!! ERROR - could not copy the cardano-cli file - {e}")
exit(1)
time.sleep(5)


def get_node_files(node_rev, repository=None, build_tool='nix'):
    """Check out cardano-node at *node_rev* and build the node/cli executables.

    Unified replacement for the former ``get_node_files_using_nix`` /
    ``get_node_files_using_cabal`` pair; the build strategy is selected by
    *build_tool*.

    Args:
        node_rev: cardano-node revision (tag or branch) to build.
        repository: existing repo object from a previous call; when the local
            clone already exists it is checked out instead of re-cloned.
        build_tool: 'nix' (default) or 'cabal'.

    Returns:
        The repo object produced by git_checkout/git_clone_iohk_repo.

    Side effects: changes the CWD temporarily, clones/checks out the
    cardano-node repo, builds binaries, copies them into the test directory
    and marks them executable.
    """
    test_directory = Path.cwd()
    repo = None
    print(f"test_directory: {test_directory}")

    repo_name = 'cardano-node'
    repo_dir = test_directory / 'cardano_node_dir'

    # Reuse an existing clone when possible; otherwise clone fresh at node_rev.
    if repo_dir.is_dir():
        repo = git_checkout(repository, node_rev)
    else:
        repo = git_clone_iohk_repo(repo_name, repo_dir, node_rev)

    if build_tool == 'nix':
        os.chdir(repo_dir)
        # Remove stale nix output symlinks before rebuilding.
        Path("cardano-node-bin").unlink(missing_ok=True)
        Path("cardano-cli-bin").unlink(missing_ok=True)
        execute_command("nix build -v .#cardano-node -o cardano-node-bin")
        execute_command("nix build -v .#cardano-cli -o cardano-cli-bin")
        copy_node_executables(repo_dir, test_directory, "nix")
    elif build_tool == 'cabal':
        os.chdir(Path(ROOT_TEST_PATH))
        # NOTE(review): the repo was already checked out/cloned above, so this
        # second checkout looks redundant — confirm whether it can be removed.
        repo = git_checkout(repository, node_rev) if repo_dir.is_dir() else git_clone_iohk_repo(repo_name, repo_dir, node_rev)
        cabal_local_file = Path(ROOT_TEST_PATH) / 'sync_tests' / 'cabal.project.local'
        shutil.copy2(cabal_local_file, repo_dir)
        os.chdir(repo_dir)
        # Disable the test components so the cabal build is faster.
        for line in fileinput.input("cabal.project", inplace=True):
            print(line.replace("tests: True", "tests: False"), end="")
        execute_command("cabal update")
        execute_command("cabal build cardano-node cardano-cli")
        copy_node_executables(repo_dir, test_directory, "cabal")
        # Restore the mutated cabal.project so the working tree stays clean.
        git_checkout(repo, 'cabal.project')

    os.chdir(test_directory)
    subprocess.check_call(['chmod', '+x', NODE])
    subprocess.check_call(['chmod', '+x', CLI])
    print_info("files permissions inside test folder:")
    subprocess.check_call(['ls', '-la'])
    return repo


def get_node_files_using_cabal(node_rev, repository = None):
    """Check out cardano-node at *node_rev* and build node/cli with cabal.

    Clones (or re-checks-out) the cardano-node repo, copies the local cabal
    overrides in, disables test components in cabal.project, builds the
    executables and copies them back into the test directory.

    Returns the repo object from git_checkout/git_clone_iohk_repo.
    """
    work_dir = Path.cwd()
    print(f"test_directory: {work_dir}")
    print(f" - listdir test_directory: {os.listdir(work_dir)}")

    os.chdir(Path(ROOT_TEST_PATH))
    print(f" - listdir ROOT_TEST_PATH: {os.listdir(ROOT_TEST_PATH)}")

    node_repo_dir = Path(work_dir) / 'cardano_node_dir'

    # Reuse an existing clone when present, otherwise clone fresh at node_rev.
    if is_dir(node_repo_dir) is True:
        repo = git_checkout(repository, node_rev)
    else:
        repo = git_clone_iohk_repo('cardano-node', node_repo_dir, node_rev)

    overrides = Path(ROOT_TEST_PATH) / 'sync_tests' / 'cabal.project.local'
    shutil.copy2(overrides, Path(node_repo_dir))
    os.chdir(Path(node_repo_dir))
    print(f" - listdir repo_dir: {os.listdir(node_repo_dir)}")

    # Turn test components off so the build is faster.
    for cfg_line in fileinput.input('cabal.project', inplace=True):
        print(cfg_line.replace("tests: True", "tests: False"), end="")

    # Echo the rewritten project file for debugging.
    print('cabal.project :')
    with open('cabal.project', 'r') as cfg:
        print(cfg.read())

    for build_cmd in ("cabal update", "cabal build cardano-node cardano-cli"):
        execute_command(build_cmd)
    print(f" - listdir repo_dir after cabal build: {os.listdir(node_repo_dir)}")
    copy_node_executables(node_repo_dir, work_dir, "cabal")
    # Restore the mutated cabal.project so the working tree stays clean.
    git_checkout(repo, 'cabal.project')

    os.chdir(Path(work_dir))
    print(f" - listdir test_directory after copying executables: {os.listdir(work_dir)}")
    return repo


def main():
global NODE, CLI
secs_to_start1, secs_to_start2 = 0, 0
Expand Down Expand Up @@ -862,9 +836,9 @@ def main():
print_info(f"Get the cardano-node and cardano-cli files using - {node_build_mode}")
start_build_time = get_current_date_time()
if node_build_mode == "nix" and "windows" not in platform_system.lower():
repository = get_node_files_using_nix(node_rev1)
repository = get_node_files(node_rev1)
elif node_build_mode == "nix" and "windows" in platform_system.lower():
repository = get_node_files_using_cabal(node_rev1)
repository = get_node_files(node_rev1, build_tool="cabal")
else:
print_error(
f"ERROR: method not implemented yet!!! Only building with NIX is supported at this moment - {node_build_mode}")
Expand All @@ -884,9 +858,7 @@ def main():
get_node_config_files(env, node_topology_type1)

print("Enabling the desired cardano node tracers")
if env == "preprod":
enable_cardano_node_resources_monitoring("config.json")

enable_cardano_node_resources_monitoring("config.json")
enable_cardano_node_tracers("config.json")

print(f"--- Start node sync test using node_rev1: {node_rev1}")
Expand Down Expand Up @@ -934,10 +906,10 @@ def main():
print("==============================================================================")

print("Get the cardano-node and cardano-cli files")
if node_build_mode == "nix" and "windows" not in platform_system.lower():
get_node_files_using_nix(node_rev2, repository)
elif node_build_mode == "nix" and "windows" in platform_system.lower():
get_node_files_using_cabal(node_rev2, repository)
if node_build_mode == 'nix' and 'windows' not in platform_system.lower():
get_node_files(node_rev2, repository)
elif node_build_mode == 'nix' and 'windows' in platform_system.lower():
get_node_files(node_rev2, repository, build_tool='cabal')
else:
print_error(
f"ERROR: method not implemented yet!!! Only building with NIX is supported at this moment - {node_build_mode}")
Expand Down
89 changes: 44 additions & 45 deletions sync_tests/node_write_sync_values_to_db.py
Expand Up @@ -78,53 +78,52 @@ def main():
print(f"val_to_insert: {val_to_insert}")
exit(1)

if env == "preprod":
print_info(f" ==== Write test values into the {env + '_logs'} DB table")
log_values_dict = ast.literal_eval(str((sync_test_results_dict["log_values"])))

df1_column_names = ["identifier", "timestamp", "slot_no", "ram_bytes", "cpu_percent"]
df1 = pd.DataFrame(columns=df1_column_names)

print_info(f" ==== Creating the dataframe with the test values")
for key, val in log_values_dict.items():
new_row_data = {"identifier": sync_test_results_dict["identifier"],
"timestamp": key,
"slot_no": val["tip"],
"ram_bytes": val["ram"],
"cpu_percent": val["cpu"]}
print_info(f" ==== Write test values into the {env + '_logs'} DB table")
log_values_dict = ast.literal_eval(str((sync_test_results_dict["log_values"])))

df1_column_names = ["identifier", "timestamp", "slot_no", "ram_bytes", "cpu_percent"]
df1 = pd.DataFrame(columns=df1_column_names)

print_info(f" ==== Creating the dataframe with the test values")
for key, val in log_values_dict.items():
new_row_data = {"identifier": sync_test_results_dict["identifier"],
"timestamp": key,
"slot_no": val["tip"],
"ram_bytes": val["ram"],
"cpu_percent": val["cpu"]}

new_row = pd.DataFrame([new_row_data])
df1 = pd.concat([df1, new_row], ignore_index=True)
new_row = pd.DataFrame([new_row_data])
df1 = pd.concat([df1, new_row], ignore_index=True)

col_to_insert = list(df1.columns)
val_to_insert = df1.values.tolist()
if not add_bulk_values_into_db(env + '_logs', col_to_insert, val_to_insert):
print(f"col_to_insert: {col_to_insert}")
print(f"val_to_insert: {val_to_insert}")
exit(1)

print_info(f" ==== Write test values into the {env + '_epoch_duration'} DB table")
sync_duration_values_dict = ast.literal_eval(
str(sync_test_results_dict["sync_duration_per_epoch"]))
epoch_list = list(sync_duration_values_dict.keys())

df2_column_names = ["identifier", "epoch_no", "sync_duration_secs"]
df2 = pd.DataFrame(columns=df2_column_names)

# ignoring the current/last epoch that is not synced completely
for epoch in epoch_list[:-1]:
new_row = {"identifier": sync_test_results_dict["identifier"],
"epoch_no": epoch,
"sync_duration_secs": sync_duration_values_dict[epoch]}
row_df = pd.DataFrame([new_row])
df2 = pd.concat([row_df, df2], ignore_index=True)

col_to_insert = list(df2.columns)
val_to_insert = df2.values.tolist()
if not add_bulk_values_into_db(env + '_epoch_duration', col_to_insert, val_to_insert):
print(f"col_to_insert: {col_to_insert}")
print(f"val_to_insert: {val_to_insert}")
exit(1)
col_to_insert = list(df1.columns)
val_to_insert = df1.values.tolist()
if not add_bulk_values_into_db(env + '_logs', col_to_insert, val_to_insert):
print(f"col_to_insert: {col_to_insert}")
print(f"val_to_insert: {val_to_insert}")
exit(1)

print_info(f" ==== Write test values into the {env + '_epoch_duration'} DB table")
sync_duration_values_dict = ast.literal_eval(
str(sync_test_results_dict["sync_duration_per_epoch"]))
epoch_list = list(sync_duration_values_dict.keys())

df2_column_names = ["identifier", "epoch_no", "sync_duration_secs"]
df2 = pd.DataFrame(columns=df2_column_names)

# ignoring the current/last epoch that is not synced completely
for epoch in epoch_list[:-1]:
new_row = {"identifier": sync_test_results_dict["identifier"],
"epoch_no": epoch,
"sync_duration_secs": sync_duration_values_dict[epoch]}
row_df = pd.DataFrame([new_row])
df2 = pd.concat([row_df, df2], ignore_index=True)

col_to_insert = list(df2.columns)
val_to_insert = df2.values.tolist()
if not add_bulk_values_into_db(env + '_epoch_duration', col_to_insert, val_to_insert):
print(f"col_to_insert: {col_to_insert}")
print(f"val_to_insert: {val_to_insert}")
exit(1)


if __name__ == "__main__":
Expand Down

0 comments on commit e29d88f

Please sign in to comment.