From a2ea4956963cc1365115c92131066be0b1a58ec0 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 6 May 2024 10:00:42 +0200 Subject: [PATCH 01/11] Added checkpoint prefix handling needed for migration cron job --- .../migration/mina-berkeley-migration-script | 114 +++++++++++------- 1 file changed, 69 insertions(+), 45 deletions(-) diff --git a/scripts/archive/migration/mina-berkeley-migration-script b/scripts/archive/migration/mina-berkeley-migration-script index a0180715bb6..4e9e885405f 100755 --- a/scripts/archive/migration/mina-berkeley-migration-script +++ b/scripts/archive/migration/mina-berkeley-migration-script @@ -79,18 +79,19 @@ function initial_help(){ echo "" echo "Parameters:" echo "" - printf " %-25s %s\n" "-h | --help" "show help"; - printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; - printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; - printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; - printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-h | --help" "show help"; + printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; + printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; + printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; + printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; printf " %-25s %s\n" "-bs | --blocks-batch-size" "[int] number of precomputed blocks to be fetch at once from Gcloud. Bigger number like 1000 can help speed up migration process"; - printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; - printf " %-25s %s\n" "-d | --delete-blocks" "[flag] delete blocks after they are processed (saves space with -sb)" - printf " %-25s %s\n" "-p | --prefetch-blocks" "[flag] downloads all blocks at once instead of incrementally" - printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" - printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" - printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" + printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-d | --delete-blocks" "[flag] delete blocks after they are processed (saves space with -sb)" + printf " %-25s %s\n" "-p | --prefetch-blocks" "[flag] downloads all blocks at once instead of incrementally" + printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" + printf " %-25s %s\n" "-cp | --checkpoint-prefix" "[string] replayer checkpoint prefix. 
Default: $CHECKPOINT_PREFIX" + printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" + printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" echo "" echo "Example:" echo "" @@ -122,6 +123,7 @@ function initial(){ local __checkpoint_output_path='.' local __precomputed_blocks_local_path='.' local __checkpoint_interval=$CHECKPOINT_INTERVAL + local __checkpoint_prefix=$CHECKPOINT_PREFIX while [ ${#} -gt 0 ]; do error_message="Error: a value is needed for '$1'"; @@ -169,6 +171,10 @@ function initial(){ __checkpoint_interval=${2:?$error_message} shift 2; ;; + -cp | --checkpoint-prefix ) + __checkpoint_prefix=${2:?$error_message} + shift 2; + ;; -l | --precomputed-blocks-local-path ) __precomputed_blocks_local_path=${2:?$error_message} shift 2; @@ -225,7 +231,8 @@ function initial(){ "$__network" \ "$__checkpoint_output_path" \ "$__precomputed_blocks_local_path" \ - "$__checkpoint_interval" + "$__checkpoint_interval" \ + "$__checkpoint_prefix" } function check_log_for_error() { @@ -272,6 +279,7 @@ function run_initial_migration() { local __checkpoint_output_path=$9 local __precomputed_blocks_local_path=${10} local __checkpoint_interval=${11} + local __checkpoint_prefix=${12} local __date=$(date '+%Y-%m-%d_%H_%M_%S') local __berkely_migration_log="berkeley_migration_$__date.log" @@ -305,7 +313,7 @@ function run_initial_migration() { --archive-uri "$__migrated_archive_uri" \ --input-file "$__config_file" \ --checkpoint-interval "$__checkpoint_interval" \ - --checkpoint-file-prefix "$CHECKPOINT_PREFIX" \ + --checkpoint-file-prefix "$__checkpoint_prefix" \ --checkpoint-output-folder "$__checkpoint_output_path" \ --log-file "$__replayer_log" @@ -313,7 +321,7 @@ function run_initial_migration() { set -e # exit immediately on errors - check_output_replayer_for_initial "$CHECKPOINT_PREFIX" + check_output_replayer_for_initial "$__checkpoint_prefix" mina-berkeley-migration-verifier pre-fork \ --mainnet-archive-uri "$__mainnet_archive_uri" \ @@ -329,19 +337,20 @@ function incremental_help(){ echo "" echo "Parameters:" echo "" - printf " %-25s %s\n" "-h | --help" "show help"; - printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; - printf " %-25s %s\n" "-r | --replayer-checkpoint" "[file] path to genesis ledger file"; - printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; - printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; - printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-h | --help" "show help"; + printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; + printf " %-25s %s\n" "-r | --replayer-checkpoint" "[file] path to genesis ledger file"; + printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; + printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; + printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. 
NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; printf " %-25s %s\n" "-bs | --blocks-batch-size" "[int] number of precomputed blocks to be fetch at once from Gcloud. Bigger number like 1000 can help speed up migration process"; - printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; - printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)" - printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally" - printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" - printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" - printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" + printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)" + printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally" + printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" + printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" + printf " %-25s %s\n" "-cp | --checkpoint-prefix" "[string] replayer checkpoint prefix. Default: $CHECKPOINT_PREFIX" + printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" echo "" echo "Example:" echo "" @@ -401,6 +410,7 @@ function incremental(){ local __checkpoint_interval=$CHECKPOINT_INTERVAL local __checkpoint_output_path='.' local __precomputed_blocks_local_path='.' 
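+    # A note on the option-parsing idiom shared by all three subcommand
+    # parsers in this script (illustrative sketch only, not migration
+    # logic): "${2:?$error_message}" expands to the flag's value, but
+    # aborts the script and prints $error_message when the flag is given
+    # without a value. Note that the "$" matters: ${2:?error_message}
+    # would print the literal word "error_message" instead.
+    #
+    #   while [ ${#} -gt 0 ]; do
+    #       error_message="Error: a value is needed for '$1'"
+    #       case $1 in
+    #           -cp | --checkpoint-prefix )
+    #               __checkpoint_prefix=${2:?$error_message}
+    #               shift 2;
+    #               ;;
+    #           * )
+    #               echo "Unknown option '$1'"; exit 1
+    #               ;;
+    #       esac
+    #   done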
+ local __checkpoint_prefix="$CHECKPOINT_PREFIX" while [ ${#} -gt 0 ]; do error_message="Error: a value is needed for '$1'"; @@ -452,6 +462,10 @@ function incremental(){ __checkpoint_output_path=${2:?$error_message} shift 2; ;; + -cp | --checkpoint-prefix ) + __checkpoint_prefix=${2:?$error_message} + shift 2; + ;; -l | --precomputed-blocks-local-path ) __precomputed_blocks_local_path=${2:?$error_message} shift 2; @@ -511,7 +525,8 @@ function incremental(){ "$__checkpoint_interval" \ "$__replayer_checkpoint" \ "$__checkpoint_output_path" \ - "$__precomputed_blocks_local_path" + "$__precomputed_blocks_local_path" \ + "$__checkpoint_prefix" } @@ -528,6 +543,7 @@ function run_incremental_migration() { local __replayer_checkpoint=${10} local __checkpoint_output_path=${11} local __precomputed_blocks_local_path=${12} + local __checkpoint_prefix=${13} local __date=$(date '+%Y-%m-%d_%H%M') local __berkely_migration_log="berkeley_migration_$__date.log" @@ -557,7 +573,7 @@ function run_incremental_migration() { --archive-uri "$__migrated_archive_uri" \ --input-file "$__replayer_checkpoint" \ --checkpoint-interval "$__checkpoint_interval" \ - --checkpoint-file-prefix "$CHECKPOINT_PREFIX" \ + --checkpoint-file-prefix "$__checkpoint_prefix" \ --checkpoint-output-folder "$__checkpoint_output_path" \ --log-file "$__replayer_log" @@ -565,7 +581,7 @@ function run_incremental_migration() { set -e # exit immediately on errors - check_new_replayer_checkpoints_for_incremental "$CHECKPOINT_PREFIX" + check_new_replayer_checkpoints_for_incremental "$__checkpoint_prefix" mina-berkeley-migration-verifier pre-fork \ --mainnet-archive-uri "$__mainnet_archive_uri" \ @@ -582,20 +598,21 @@ function final_help(){ echo "" echo "Parameters:" echo "" - printf " %-25s %s\n" "-h | --help" "show help"; - printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; - printf " %-25s %s\n" "-r | --replayer-checkpoint" "[file] path to genesis ledger file"; - printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; - printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; - printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-h | --help" "show help"; + printf " %-25s %s\n" "-g | --genesis-ledger" "[file] path to genesis ledger file"; + printf " %-25s %s\n" "-r | --replayer-checkpoint" "[file] path to genesis ledger file"; + printf " %-25s %s\n" "-s | --source-db" "[connection_str] connection string to database to be migrated"; + printf " %-25s %s\n" "-t | --target-db" "[connection_str] connection string to database which will hold migrated data"; + printf " %-25s %s\n" "-b | --blocks-bucket" "[string] name of precomputed blocks bucket. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; printf " %-25s %s\n" "-bs | --blocks-batch-size" "[int] number of precomputed blocks to be fetch at once from Gcloud. Bigger number like 1000 can help speed up migration process"; - printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. 
NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; + printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json"; printf " %-25s %s\n" "-fc | --fork-genesis-config" "[file] Genesis config file for the fork network. It should be provied by MF or O(1)Labs team after fork block is announced"; - printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)" - printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally" - printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" - printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" - printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" + printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)" + printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally" + printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints" + printf " %-25s %s\n" "-cp | --checkpoint-prefix" "[string] replayer checkpoint prefix. Default: $CHECKPOINT_PREFIX" + printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000" + printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location" echo "" echo "Example:" echo "" @@ -627,6 +644,7 @@ function final(){ local __fork_genesis_config='' local __checkpoint_output_path='.' local __precomputed_blocks_local_path='.' 
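+    # The run_*_migration helpers below receive their configuration as a
+    # long positional-argument list. One bash subtlety worth spelling out
+    # (plain-bash sketch, nothing project-specific): positional parameters
+    # past $9 must be written with braces, which is why the helpers read
+    # ${10} through ${14}; an unbraced "$13" would expand as "$1" followed
+    # by a literal "3".
+    #
+    #   demo() { echo "13th argument: ${13}"; }
+    #   demo a b c d e f g h i j k l m   # prints: 13th argument: m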
+ local __checkpoint_prefix="$CHECKPOINT_PREFIX" while [ ${#} -gt 0 ]; do error_message="Error: a value is needed for '$1'"; @@ -682,6 +700,10 @@ function final(){ __checkpoint_output_path=${2:?$error_message} shift 2; ;; + -cp | --checkpoint-prefix ) + __checkpoint_prefix=${2:?error_message} + shift 2; + ;; -l | --precomputed-blocks-local-path ) __precomputed_blocks_local_path=${2:?$error_message} shift 2; @@ -747,7 +769,8 @@ function final(){ "$__replayer_checkpoint" \ "$__fork_genesis_config" \ "$__checkpoint_output_path" \ - "$__precomputed_blocks_local_path" + "$__precomputed_blocks_local_path" \ + "$__checkpoint_prefix" } function run_final_migration() { @@ -764,6 +787,7 @@ function run_final_migration() { local __fork_genesis_config=${11} local __checkpoint_output_path=${12} local __precomputed_blocks_local_path=${13} + local __checkpoint_prefix=${14} local __fork_state_hash="$(jq -r .proof.fork.state_hash "$__fork_genesis_config")" local __date=$(date '+%Y-%m-%d_%H%M') @@ -795,7 +819,7 @@ function run_final_migration() { --archive-uri "$__migrated_archive_uri" \ --input-file "$__replayer_checkpoint" \ --checkpoint-interval "$__checkpoint_interval" \ - --checkpoint-file-prefix "$CHECKPOINT_PREFIX" \ + --checkpoint-file-prefix "$__checkpoint_prefix" \ --checkpoint-output-folder "$__checkpoint_output_path" \ --log-file "$__replayer_log" @@ -803,10 +827,10 @@ function run_final_migration() { set -e # exit immediately on errors check_incremental_migration_progress "$__replayer_checkpoint" "$__migrated_archive_uri" - check_new_replayer_checkpoints_for_incremental "$CHECKPOINT_PREFIX" + check_new_replayer_checkpoints_for_incremental "$__checkpoint_prefix" # sort by https://stackoverflow.com/questions/60311787/how-to-sort-by-numbers-that-are-part-of-a-filename-in-bash - local migrated_replayer_output=$(find . -maxdepth 1 -name "$CHECKPOINT_PREFIX-checkpoint*.json" | sort -Vrt - -k3,3 | head -n 1) + local migrated_replayer_output=$(find . -maxdepth 1 -name "${__checkpoint_prefix}-checkpoint*.json" | sort -Vrt - -k3,3 | head -n 1) mina-berkeley-migration-verifier post-fork \ --mainnet-archive-uri "$__mainnet_archive_uri" \ From 0d08a5b09b295afb081760bdb682fe5e164fbddf Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 7 May 2024 23:21:34 +0200 Subject: [PATCH 02/11] fix getting the most recent checkpoint --- .../migration/mina-berkeley-migration-script | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/scripts/archive/migration/mina-berkeley-migration-script b/scripts/archive/migration/mina-berkeley-migration-script index 4e9e885405f..ce08e824df3 100755 --- a/scripts/archive/migration/mina-berkeley-migration-script +++ b/scripts/archive/migration/mina-berkeley-migration-script @@ -773,6 +773,23 @@ function final(){ "$__checkpoint_prefix" } +function find_most_recent_checkpoint() { + + FILTER=$1 + declare -A checkpoints + + for i in $FILTER ; do + SLOT=$(cat $i | jq .start_slot_since_genesis) + checkpoints["$SLOT"]="$i" + done + + for k in "${!checkpoints[@]}" + do + echo $k ',' ${checkpoints["$k"]} + done | + sort -rn | head -n 1 | cut -d',' -f2 | xargs +} + function run_final_migration() { local __batch_size=$1 local __mainnet_archive_uri=$2 @@ -830,7 +847,14 @@ function run_final_migration() { check_new_replayer_checkpoints_for_incremental "$__checkpoint_prefix" # sort by https://stackoverflow.com/questions/60311787/how-to-sort-by-numbers-that-are-part-of-a-filename-in-bash - local migrated_replayer_output=$(find . 
-maxdepth 1 -name "${__checkpoint_prefix}-checkpoint*.json" | sort -Vrt - -k3,3 | head -n 1) + + migrated_replayer_output=$(find_most_recent_checkpoint $CHECKPOINT_PREFIX-checkpoint*.json) + + echo "Running validations for final migration: " + echo " mainnet-archive-uri: '$__mainnet_archive_uri'" + echo " migrated-archive-uri: '$__migrated_archive_uri'" + echo " fork-genesis-config: '$__fork_genesis_config'" + echo " migrated-replayer-output: '$migrated_replayer_output'" mina-berkeley-migration-verifier post-fork \ --mainnet-archive-uri "$__mainnet_archive_uri" \ From 723ac6ed6b5451fb7f807c5b7759051dfb842e0f Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 7 May 2024 23:44:11 +0200 Subject: [PATCH 03/11] use set checkpoint prefix rather than default one --- scripts/archive/migration/mina-berkeley-migration-script | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/archive/migration/mina-berkeley-migration-script b/scripts/archive/migration/mina-berkeley-migration-script index ce08e824df3..acdefb2748f 100755 --- a/scripts/archive/migration/mina-berkeley-migration-script +++ b/scripts/archive/migration/mina-berkeley-migration-script @@ -848,7 +848,7 @@ function run_final_migration() { # sort by https://stackoverflow.com/questions/60311787/how-to-sort-by-numbers-that-are-part-of-a-filename-in-bash - migrated_replayer_output=$(find_most_recent_checkpoint $CHECKPOINT_PREFIX-checkpoint*.json) + migrated_replayer_output=$(find_most_recent_checkpoint $__checkpoint_prefix-checkpoint*.json) echo "Running validations for final migration: " echo " mainnet-archive-uri: '$__mainnet_archive_uri'" From 0421902b1f299caa0f94879d0d656a3b4abc1a00 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 9 May 2024 14:38:27 +0200 Subject: [PATCH 04/11] Archive script needed for emergency hardfork --- scripts/archive/convert_chain_to_canonical.sh | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100755 scripts/archive/convert_chain_to_canonical.sh diff --git a/scripts/archive/convert_chain_to_canonical.sh b/scripts/archive/convert_chain_to_canonical.sh new file mode 100755 index 00000000000..eb2d88023d9 --- /dev/null +++ b/scripts/archive/convert_chain_to_canonical.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +if [[ $# -ne 2 ]]; then + echo "Usage: $0 " + echo "" + echo "Example: $0 postgres://postgres:postgres@localhost:5432/archive 3NLDtQqXRk7QybHS1b4quNoTKZDHUPeYRkRpKM641mxYjJEBwKCq" + exit 1 +fi + +CONN_STR=$1 +LAST_BLOCK_HASH=$2 + +GENESIS_HASH=$(psql "$CONN_STR" -t -c \ + "select state_hash from blocks where id = 1;" | xargs) + +PARENT_HASH=$(psql "$CONN_STR" -t -c \ + "select parent_hash from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) + +if [ -z "$PARENT_HASH" ]; then + echo "Error: Cannot find parent hash for $LAST_BLOCK_HASH. Please ensure block exists and database has no missing blocks" + exit 1 +fi + +canon_chain=("$LAST_BLOCK_HASH") + +echo "Calculating canonical chain..." +while [ -n "$PARENT_HASH" ] && [ "$PARENT_HASH" != "$GENESIS_HASH" ] +do + PARENT_HASH=$(psql "$CONN_STR" -q -t -c \ + "select parent_hash from blocks where state_hash = '$PARENT_HASH';" | xargs) + + canon_chain[${#canon_chain[@]}]="$PARENT_HASH" + +done + +echo "Updating non canonical blocks to orphaned..." +psql "$CONN_STR" -c "update blocks set chain_status = 'orphaned' where chain_status = 'pending';" + +echo "Updating blocks statuses in canonical chain to canonical (${#canon_chain[@]})..." 
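+# The chain walk above issues one psql round trip per ancestor block, and
+# the loop below one UPDATE per block. As a sketch, the same ancestry walk
+# can be done in a single recursive query (the emergency_hf rewrite of
+# this script, introduced later in this series, takes this approach):
+#
+#   psql "$CONN_STR" -t -c "WITH RECURSIVE chain AS (
+#       SELECT id, parent_hash, state_hash FROM blocks
+#       WHERE state_hash = '$LAST_BLOCK_HASH'
+#     UNION ALL
+#       SELECT b.id, b.parent_hash, b.state_hash FROM blocks b
+#       INNER JOIN chain c ON b.state_hash = c.parent_hash
+#     ) SELECT state_hash FROM chain;"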
+for block in "${canon_chain[@]}"; do + psql "$CONN_STR" -q -c "update blocks set chain_status = 'canonical' where state_hash = '$block'" +done \ No newline at end of file From d8199dec6f4139032fe240d16d4fa98189be7453 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 9 May 2024 23:04:10 +0200 Subject: [PATCH 05/11] Added automated tests and buildkite job --- buildkite/src/Jobs/Test/EmergencyHfTest.dhall | 41 +++++ scripts/archive/convert_chain_to_canonical.sh | 42 ----- .../convert_chain_to_canonical.sh | 80 +++++++++ scripts/archive/emergency_hf/test/runner.sh | 158 ++++++++++++++++++ .../test_fork_on_canonical_in_the_middle.sql | 47 ++++++ .../test/test_fork_on_last_canonical.sql | 44 +++++ .../test/test_fork_on_new_network.sql | 27 +++ .../test/test_fork_on_orphaned.sql | 46 +++++ .../test/test_fork_on_pending.sql | 40 +++++ .../test/test_surrounded_by_pendings.sql | 38 +++++ 10 files changed, 521 insertions(+), 42 deletions(-) create mode 100755 buildkite/src/Jobs/Test/EmergencyHfTest.dhall delete mode 100755 scripts/archive/convert_chain_to_canonical.sh create mode 100755 scripts/archive/emergency_hf/convert_chain_to_canonical.sh create mode 100755 scripts/archive/emergency_hf/test/runner.sh create mode 100644 scripts/archive/emergency_hf/test/test_fork_on_canonical_in_the_middle.sql create mode 100644 scripts/archive/emergency_hf/test/test_fork_on_last_canonical.sql create mode 100644 scripts/archive/emergency_hf/test/test_fork_on_new_network.sql create mode 100644 scripts/archive/emergency_hf/test/test_fork_on_orphaned.sql create mode 100644 scripts/archive/emergency_hf/test/test_fork_on_pending.sql create mode 100644 scripts/archive/emergency_hf/test/test_surrounded_by_pendings.sql diff --git a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall new file mode 100755 index 00000000000..0018022ef1b --- /dev/null +++ b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall @@ -0,0 +1,41 @@ +let S = ../../Lib/SelectFiles.dhall + +let Pipeline = ../../Pipeline/Dsl.dhall +let PipelineTag = ../../Pipeline/Tag.dhall +let JobSpec = ../../Pipeline/JobSpec.dhall + + +let Command = ../../Command/Base.dhall +let Docker = ../../Command/Docker/Type.dhall +let Size = ../../Command/Size.dhall + + +let ReplayerTest = ../../Command/ReplayerTest.dhall +let Profiles = ../../Constants/Profiles.dhall +let Dockers = ../../Constants/DockerVersions.dhall + +let Cmd = ../../Lib/Cmds.dhall + +in Pipeline.build + Pipeline.Config::{ + , spec = JobSpec::{ + , dirtyWhen = + [ S.strictlyStart (S.contains "scripts/archive/emergency_hf") + , S.strictlyStart (S.contains "src/app/archive") + ] + , path = "Test" + , name = "EmergencyHfTest" + , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ] + } + , steps = [ + Command.build + Command.Config::{ + commands = [ + Cmd.run "./scripts/archive/emergency_hg/runner.sh" + ], + label = "Emergency HF test", + key = "emergency-hf-test", + target = Size.Large + } + ] + } diff --git a/scripts/archive/convert_chain_to_canonical.sh b/scripts/archive/convert_chain_to_canonical.sh deleted file mode 100755 index eb2d88023d9..00000000000 --- a/scripts/archive/convert_chain_to_canonical.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash - -if [[ $# -ne 2 ]]; then - echo "Usage: $0 " - echo "" - echo "Example: $0 postgres://postgres:postgres@localhost:5432/archive 3NLDtQqXRk7QybHS1b4quNoTKZDHUPeYRkRpKM641mxYjJEBwKCq" - exit 1 -fi - -CONN_STR=$1 -LAST_BLOCK_HASH=$2 - -GENESIS_HASH=$(psql "$CONN_STR" -t -c \ - "select state_hash from blocks where id = 1;" 
| xargs) - -PARENT_HASH=$(psql "$CONN_STR" -t -c \ - "select parent_hash from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) - -if [ -z "$PARENT_HASH" ]; then - echo "Error: Cannot find parent hash for $LAST_BLOCK_HASH. Please ensure block exists and database has no missing blocks" - exit 1 -fi - -canon_chain=("$LAST_BLOCK_HASH") - -echo "Calculating canonical chain..." -while [ -n "$PARENT_HASH" ] && [ "$PARENT_HASH" != "$GENESIS_HASH" ] -do - PARENT_HASH=$(psql "$CONN_STR" -q -t -c \ - "select parent_hash from blocks where state_hash = '$PARENT_HASH';" | xargs) - - canon_chain[${#canon_chain[@]}]="$PARENT_HASH" - -done - -echo "Updating non canonical blocks to orphaned..." -psql "$CONN_STR" -c "update blocks set chain_status = 'orphaned' where chain_status = 'pending';" - -echo "Updating blocks statuses in canonical chain to canonical (${#canon_chain[@]})..." -for block in "${canon_chain[@]}"; do - psql "$CONN_STR" -q -c "update blocks set chain_status = 'canonical' where state_hash = '$block'" -done \ No newline at end of file diff --git a/scripts/archive/emergency_hf/convert_chain_to_canonical.sh b/scripts/archive/emergency_hf/convert_chain_to_canonical.sh new file mode 100755 index 00000000000..46263a7878f --- /dev/null +++ b/scripts/archive/emergency_hf/convert_chain_to_canonical.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +if [[ $# -ne 3 ]]; then + echo "Usage: $0 " + echo "" + echo "Example: $0 postgres://postgres:postgres@localhost:5432/archive 3NLDtQqXRk7QybHS1b4quNoTKZDHUPeYRkRpKM641mxYjJEBwKCq 1" + exit 1 +fi + +CONN_STR=$1 +LAST_BLOCK_HASH=$2 +PROTOCOL_VERSION=$3 + +GENESIS_HASH=$(psql "$CONN_STR" -t -c \ + "select state_hash from blocks where protocol_version_id = $PROTOCOL_VERSION and global_slot_since_hard_fork = 0 ;" | xargs) + +GENESIS_ID=$(psql "$CONN_STR" -t -c \ + "select id from blocks where protocol_version_id = $PROTOCOL_VERSION and global_slot_since_hard_fork = 0 ;" | xargs) + +HEIGHT=$(psql "$CONN_STR" -t -c \ + "select height from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) + +ID=$(psql "$CONN_STR" -t -c \ + "select id from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) + + +if [ -z "$ID" ]; then + echo "Error: Cannot find id for $LAST_BLOCK_HASH. Please ensure block exists and database has no missing blocks" + exit 1 +else + echo Fork block id $ID +fi + +if [ -z "$HEIGHT" ]; then + echo "Error: Cannot find height for $LAST_BLOCK_HASH. Please ensure block exists and database has no missing blocks" + exit 1 +else + echo Fork block height $HEIGHT +fi + + +echo "Calculating canonical chain..." +canon_chain=$(psql $CONN_STR -U postgres -t -c "WITH RECURSIVE chain AS ( + SELECT id, parent_id, height,state_hash + FROM blocks b WHERE b.id = $ID and b.protocol_version_id = $PROTOCOL_VERSION + + UNION ALL + + SELECT b.id, b.parent_id, b.height,b.state_hash + FROM blocks b + + INNER JOIN chain + + ON b.id = chain.parent_id AND (chain.id <> $GENESIS_ID OR b.id = $GENESIS_ID) WHERE b.protocol_version_id = $PROTOCOL_VERSION + + ) + + SELECT id + FROM chain ORDER BY height ASC" | xargs) + +canon_chain=(${canon_chain// / }) + +echo "Updating non canonical blocks to orphaned..." +psql "$CONN_STR" -c "update blocks set chain_status = 'orphaned' where protocol_version_id = $PROTOCOL_VERSION;" + +function join_by { + local d=${1-} f=${2-} + if shift 2; then + printf %s "$f" "${@/#/$d}" + fi +} + +echo "Updating blocks statuses in canonical chain to canonical (${#canon_chain[@]})..." 
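+# Illustrative sketch of the join_by helper defined above: it prints its
+# remaining arguments joined by the first argument, which the batching
+# loop below uses to build SQL IN clauses.
+#
+#   ids=(1 2 3 4 5)
+#   join_by , "${ids[@]}"        # prints 1,2,3,4,5
+#   join_by , "${ids[@]:0:3}"    # first batch of 3: 1,2,3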
+bs=500 +for ((i=0; i<${#canon_chain[@]}; i+=bs)); do + echo " - $i of ${#canon_chain[@]}" + IN_CLAUSE=$(join_by , ${canon_chain[@]:i:bs}) + psql "$CONN_STR" -q -c "update blocks set chain_status = 'canonical' where id in (${IN_CLAUSE}) and protocol_version_id = $PROTOCOL_VERSION" +done +echo " - ${#canon_chain[@]} of ${#canon_chain[@]}" \ No newline at end of file diff --git a/scripts/archive/emergency_hf/test/runner.sh b/scripts/archive/emergency_hf/test/runner.sh new file mode 100755 index 00000000000..2a5d4b5fcbf --- /dev/null +++ b/scripts/archive/emergency_hf/test/runner.sh @@ -0,0 +1,158 @@ +#!/bin/bash + +DOCKER_IMAGE=12.4-alpine +CONTAINER_FILE=docker.container + +PG_PORT=5433 +PG_PASSWORD=somepassword + +function cleanup () { + CONTAINER=$(cat $CONTAINER_FILE) + + if [[ -n $CONTAINER ]] ; then + echo "Killing, removing docker container" + for action in kill rm; do + docker container $action "$CONTAINER" + done + fi + + rm -f $CONTAINER_FILE +} + +DOCKET_NETWORK=emergency_hf +docker network create $DOCKET_NETWORK || true + +# -v mounts dir with Unix socket on host +echo "Starting docker with Postgresql" +docker run \ + --network $DOCKET_NETWORK \ + --volume "$BUILDKITE_BUILD_CHECKOUT_PATH":/workdir \ + --name replayer-postgres -d -p $PG_PORT:5432 \ + -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=$PG_PASSWORD -e POSTGRES_DB="$DB" postgres:$DOCKER_IMAGE > $CONTAINER_FILE + +trap "cleanup; exit 1" SIGINT + +# wait for Postgresql to become available +sleep 5 + +echo "Populating archive databases" + +NETWORK_GATEWAY=$(docker network inspect -f "{{(index .IPAM.Config 0).Gateway}}" $DOCKET_NETWORK) + +PG_CONN="postgres://postgres:$PG_PASSWORD@$NETWORK_GATEWAY:$PG_PORT" + + +function assert () { + local expected="$1"; local name="$2"; local db_name=$3; + + ACTUAL=$(docker exec replayer-postgres psql "$PG_CONN/$db_name" -AF '->' -t -c "select state_hash,chain_status from blocks order by state_hash asc" ) + compare "$expected" "$ACTUAL" "$name" +} + +function compare () { + local left="$1"; local right="$2"; local test_name=$3; + + if [ "$left" = "$right" ]; then + echo "PASSED: actual vs expected blocks comparision for $test_name" + else + echo "FAILED: comparision failed for $test_name" + echo "EXPECTED:" + echo "$left" + echo "ACTUAL:" + echo "$right" + exit 1 + fi +} + +function test_fork_on_canonical_in_the_middle_assert() { + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'C' 2 + + EXPECTED="A->canonical +B->canonical +C->canonical +D->orphaned +E->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + + +function test_fork_on_new_network_assert() { + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'C' 2 + + EXPECTED="A->canonical +B->canonical +C->canonical +D->orphaned +E->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + +function test_fork_on_last_canonical_assert() { + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'C' 2 + + EXPECTED="A->canonical +B->canonical +C->canonical +D->orphaned +E->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + +function test_fork_on_orphaned_assert() { + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'B' 2 + + EXPECTED="A->canonical +B->canonical +C->orphaned +D->orphaned +E->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + +function test_fork_on_pending_assert() 
{ + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'C' 2 + + EXPECTED="A->canonical +B->orphaned +C->canonical +D->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + +function test_surrounded_by_pendings_assert() { + local __db_name=$1 + + ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh "$PG_CONN"/"$__db_name" 'C' 2 + + EXPECTED="A->canonical +B->orphaned +C->canonical +D->orphaned +E->orphaned" + + assert "$EXPECTED" "$FUNCNAME" "$__db_name" +} + +for file in ./scripts/archive/emergency_hf/test/*.sql; do + DB_NAME=$(basename "$file" .sql) + + docker exec replayer-postgres psql "$PG_CONN" -c "create database $DB_NAME" + docker exec replayer-postgres psql "$PG_CONN/$DB_NAME" -f /workdir/scripts/archive/emergency_hf/test/"$DB_NAME".sql + + "${DB_NAME}_assert" "$DB_NAME" +done \ No newline at end of file diff --git a/scripts/archive/emergency_hf/test/test_fork_on_canonical_in_the_middle.sql b/scripts/archive/emergency_hf/test/test_fork_on_canonical_in_the_middle.sql new file mode 100644 index 00000000000..f6ad9e3301f --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_fork_on_canonical_in_the_middle.sql @@ -0,0 +1,47 @@ +-- Fork on canonical in the new network + +-- Before: + +--A (canonical 1) +-- |--B (canonical 1) +-- `--C (canonical 2 ) +-- `--D (canonical 2 [fork]) +-- `--E (pending 2) + + +--After: + +--A (canonical 1) +-- |--B (canonical 1) +-- `--C (canonical 2) +-- `--D (canonical 2 ) +-- `--E (pending 2) + +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", "protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 2, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 2, 'canonical'), +(3, 'C', 2 , 'B', 2, 2, 3, 2, 'canonical'), +(4, 'D', 3 , 'C', 3, 3, 4, 2, 'canonical'), +(5, 'E', 4 , 'D', 4, 4, 5, 2, 'pending'); + + diff --git a/scripts/archive/emergency_hf/test/test_fork_on_last_canonical.sql b/scripts/archive/emergency_hf/test/test_fork_on_last_canonical.sql new file mode 100644 index 00000000000..f70957b0373 --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_fork_on_last_canonical.sql @@ -0,0 +1,44 @@ +--Before: + +--A (canonical) +-- |--B (orphaned) +-- `--C (canonical [fork]) +-- `--D (pending) + +--After: + +--A (canonical) +-- |--B (orphaned) +-- `--C (canonical [fork]) +-- `--D (orphaned) + + + + + +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", "protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 2, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 2, 'canonical'), +(3, 'C', 2 , 'B', 2, 2, 3, 
2, 'canonical'), +(4, 'D', 3 , 'C', 3, 3, 4, 2, 'pending'), +(5, 'E', 4 , 'D', 4, 4, 5, 2, 'pending'); diff --git a/scripts/archive/emergency_hf/test/test_fork_on_new_network.sql b/scripts/archive/emergency_hf/test/test_fork_on_new_network.sql new file mode 100644 index 00000000000..54eed0facac --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_fork_on_new_network.sql @@ -0,0 +1,27 @@ +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", "protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 1, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 1, 'canonical'), +(3, 'C', 1 , 'A', 2, 0, 3, 2, 'canonical'), +(4, 'D', 3 , 'C', 3, 1, 4, 2, 'pending'), +(5, 'E', 4 , 'D', 4, 2, 5, 2, 'pending'); + diff --git a/scripts/archive/emergency_hf/test/test_fork_on_orphaned.sql b/scripts/archive/emergency_hf/test/test_fork_on_orphaned.sql new file mode 100644 index 00000000000..78393d73244 --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_fork_on_orphaned.sql @@ -0,0 +1,46 @@ +--Before: + +--A (canonical) +-- |--B (orphaned [fork]) +-- `--C (canonical) +-- `--D (canonical) +-- `--E (pending) + +--After: + +--A (canonical) +-- |--B (canonical [fork]) +-- `--C (orphaned) +-- `--D (orphaned) +-- `--E (orphaned) + + + +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", "protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 2, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 2, 'orphaned'), +(3, 'C', 1 , 'A', 2, 2, 3, 2, 'canonical'), +(4, 'D', 2 , 'B', 3, 3, 4, 2, 'canonical'), +(5, 'E', 4 , 'D', 4, 4, 5, 2, 'pending'); + + diff --git a/scripts/archive/emergency_hf/test/test_fork_on_pending.sql b/scripts/archive/emergency_hf/test/test_fork_on_pending.sql new file mode 100644 index 00000000000..55316d5ac5e --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_fork_on_pending.sql @@ -0,0 +1,40 @@ +--A (canonical) +-- |--B (pending) +-- `--C (pending [fork]) +-- `--D (pending) + + + +--A (canonical) +-- |--B (orphaned) +-- `--C (canonical [fork]) +-- `--D (orphaned) + + +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", 
"protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 2, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 2, 'orphaned'), +(3, 'C', 1 , 'A', 2, 2, 3, 2, 'pending'), +(4, 'D', 3 , 'C', 3, 3, 4, 2, 'pending'); + + diff --git a/scripts/archive/emergency_hf/test/test_surrounded_by_pendings.sql b/scripts/archive/emergency_hf/test/test_surrounded_by_pendings.sql new file mode 100644 index 00000000000..675b314b812 --- /dev/null +++ b/scripts/archive/emergency_hf/test/test_surrounded_by_pendings.sql @@ -0,0 +1,38 @@ +--A (canonical) +-- |--B (pending) +-- `--C (canonical [fork]) +-- `--D (pending) + +--A (canonical) +-- |--B (orphaned) +-- `--C (canonical [fork]) +-- `--D (orphaned) + + + +CREATE TABLE + blocks ( + id serial NOT NULL, + state_hash text NOT NULL, + parent_id integer NULL, + parent_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + chain_status text NOT NULL + ); + +ALTER TABLE + blocks +ADD + CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +insert into blocks ("id", "state_hash", "parent_id", "parent_hash", "global_slot_since_genesis", "global_slot_since_hard_fork", "height", "protocol_version_id","chain_status") +values +(1, 'A', null, '0', 0, 0, 1, 2, 'canonical'), +(2, 'B', 1 , 'A', 1, 1, 2, 2, 'pending'), +(3, 'C', 1 , 'A', 2, 2, 3, 2, 'canonical'), +(4, 'D', 3 , 'C', 3, 3, 4, 2, 'pending'), +(5, 'E', 4 , 'D', 4, 4, 5, 2, 'pending'); From e636a717d8b06d155540474ec00ecb540769404e Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 9 May 2024 23:27:19 +0200 Subject: [PATCH 06/11] typo --- buildkite/src/Jobs/Test/EmergencyHfTest.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall index 0018022ef1b..7aabd26e841 100755 --- a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall +++ b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall @@ -31,7 +31,7 @@ in Pipeline.build Command.build Command.Config::{ commands = [ - Cmd.run "./scripts/archive/emergency_hg/runner.sh" + Cmd.run "./scripts/archive/emergency_hf/runner.sh" ], label = "Emergency HF test", key = "emergency-hf-test", From e16a02d7e1bb1a95927821c21d51c708e507629e Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 9 May 2024 23:35:32 +0200 Subject: [PATCH 07/11] fix path --- buildkite/src/Jobs/Test/EmergencyHfTest.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall index 7aabd26e841..00f6472bac1 100755 --- a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall +++ b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall @@ -31,7 +31,7 @@ in Pipeline.build Command.build Command.Config::{ commands = [ - Cmd.run "./scripts/archive/emergency_hf/runner.sh" + Cmd.run "./scripts/archive/emergency_hf/test/runner.sh" ], label = "Emergency HF test", key = "emergency-hf-test", From 08f4dfdbd3bd44028522861cb2e47993051ccee6 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 9 May 2024 23:49:05 +0200 Subject: [PATCH 08/11] fix lack of psql --- buildkite/src/Jobs/Test/EmergencyHfTest.dhall | 2 +- .../emergency_hf/convert_chain_to_canonical.sh | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall index 00f6472bac1..876afed84de 100755 --- a/buildkite/src/Jobs/Test/EmergencyHfTest.dhall +++ 
b/buildkite/src/Jobs/Test/EmergencyHfTest.dhall @@ -31,7 +31,7 @@ in Pipeline.build Command.build Command.Config::{ commands = [ - Cmd.run "./scripts/archive/emergency_hf/test/runner.sh" + Cmd.run "PSQL=\"docker exec replayer-postgres psql\" ./scripts/archive/emergency_hf/test/runner.sh " ], label = "Emergency HF test", key = "emergency-hf-test", diff --git a/scripts/archive/emergency_hf/convert_chain_to_canonical.sh b/scripts/archive/emergency_hf/convert_chain_to_canonical.sh index 46263a7878f..fd5ed9448ef 100755 --- a/scripts/archive/emergency_hf/convert_chain_to_canonical.sh +++ b/scripts/archive/emergency_hf/convert_chain_to_canonical.sh @@ -7,20 +7,21 @@ if [[ $# -ne 3 ]]; then exit 1 fi +PSQL=${PSQL:-psql} CONN_STR=$1 LAST_BLOCK_HASH=$2 PROTOCOL_VERSION=$3 -GENESIS_HASH=$(psql "$CONN_STR" -t -c \ +GENESIS_HASH=$($PSQL "$CONN_STR" -t -c \ "select state_hash from blocks where protocol_version_id = $PROTOCOL_VERSION and global_slot_since_hard_fork = 0 ;" | xargs) -GENESIS_ID=$(psql "$CONN_STR" -t -c \ +GENESIS_ID=$($PSQL "$CONN_STR" -t -c \ "select id from blocks where protocol_version_id = $PROTOCOL_VERSION and global_slot_since_hard_fork = 0 ;" | xargs) -HEIGHT=$(psql "$CONN_STR" -t -c \ +HEIGHT=$($PSQL "$CONN_STR" -t -c \ "select height from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) -ID=$(psql "$CONN_STR" -t -c \ +ID=$($PSQL "$CONN_STR" -t -c \ "select id from blocks where state_hash = '$LAST_BLOCK_HASH';" | xargs) @@ -40,7 +41,7 @@ fi echo "Calculating canonical chain..." -canon_chain=$(psql $CONN_STR -U postgres -t -c "WITH RECURSIVE chain AS ( +canon_chain=$($PSQL $CONN_STR -U postgres -t -c "WITH RECURSIVE chain AS ( SELECT id, parent_id, height,state_hash FROM blocks b WHERE b.id = $ID and b.protocol_version_id = $PROTOCOL_VERSION @@ -61,7 +62,7 @@ canon_chain=$(psql $CONN_STR -U postgres -t -c "WITH RECURSIVE chain AS ( canon_chain=(${canon_chain// / }) echo "Updating non canonical blocks to orphaned..." 
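+# With the PSQL override introduced above, callers can route every psql
+# invocation through a wrapper without editing this script. A usage sketch
+# matching how the Buildkite job and the test runner combine ($PG_CONN and
+# $DB_NAME as defined in the test runner):
+#
+#   PSQL="docker exec replayer-postgres psql" \
+#       ./scripts/archive/emergency_hf/convert_chain_to_canonical.sh \
+#       "$PG_CONN/$DB_NAME" 'C' 2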
-psql "$CONN_STR" -c "update blocks set chain_status = 'orphaned' where protocol_version_id = $PROTOCOL_VERSION;" +$PSQL "$CONN_STR" -c "update blocks set chain_status = 'orphaned' where protocol_version_id = $PROTOCOL_VERSION;" function join_by { local d=${1-} f=${2-} @@ -75,6 +76,6 @@ bs=500 for ((i=0; i<${#canon_chain[@]}; i+=bs)); do echo " - $i of ${#canon_chain[@]}" IN_CLAUSE=$(join_by , ${canon_chain[@]:i:bs}) - psql "$CONN_STR" -q -c "update blocks set chain_status = 'canonical' where id in (${IN_CLAUSE}) and protocol_version_id = $PROTOCOL_VERSION" + $PSQL "$CONN_STR" -q -c "update blocks set chain_status = 'canonical' where id in (${IN_CLAUSE}) and protocol_version_id = $PROTOCOL_VERSION" done echo " - ${#canon_chain[@]} of ${#canon_chain[@]}" \ No newline at end of file From a980e1cb54f9c0ef6f487cfc6c58d5f9582d9c9d Mon Sep 17 00:00:00 2001 From: Tang Jiawei Date: Thu, 16 May 2024 23:23:00 +0800 Subject: [PATCH 09/11] add a wait of 5 sec --- src/lib/genesis_ledger_helper/genesis_ledger_helper.ml | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml index 20b64dcbf18..2c95bdfeefd 100644 --- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml +++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml @@ -325,6 +325,7 @@ module Ledger = struct ; ("path", `String tar_path) ; ("dir", `String dirname) ] ; + let%bind () = after (Time.Span.of_int_sec 5) in let open Deferred.Or_error.Let_syntax in let%map () = Tar.create ~root:dirname ~file:tar_path ~directory:"." () in tar_path From a8cb1fa53d7e656d9565b884bb03fc1d047af0c5 Mon Sep 17 00:00:00 2001 From: Tang Jiawei Date: Fri, 17 May 2024 01:50:28 +0800 Subject: [PATCH 10/11] update the docker image used by genesis config job --- buildkite/src/Command/MinaArtifact.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 1e7ff52d6e3..dab45b1f665 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -101,7 +101,7 @@ let hardforkPipeline : DebianVersions.DebVersion -> Pipeline.Config.Type = [ Command.build Command.Config::{ commands = [ Cmd.runInDocker Cmd.Docker::{ - image = "\\\${MINA_DAEMON_DOCKER_IMAGE:-gcr.io/o1labs-192920/mina-daemon@sha256:6da66879aacab050a6955c84347f587e43044987b04b9de4522a942f770cc5e7}" + image = "\\\${MINA_DAEMON_DOCKER_IMAGE:-gcr.io/o1labs-192920/mina-daemon:3.0.1-migration-tooling-package-generation-fix-tarball-a980e1c-bullseye-mainnet-hardfork}" , extraEnv = [ "NETWORK_NAME=\$NETWORK_NAME" , "CONFIG_JSON_GZ_URL=\$CONFIG_JSON_GZ_URL" , "AWS_ACCESS_KEY_ID" From 36dc7e5b3e2206e000212e1fc67c258924dd409c Mon Sep 17 00:00:00 2001 From: Tang Jiawei Date: Fri, 17 May 2024 02:21:56 +0800 Subject: [PATCH 11/11] add comments for my hack --- buildkite/src/Command/MinaArtifact.dhall | 2 ++ src/lib/genesis_ledger_helper/genesis_ledger_helper.ml | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index dab45b1f665..b5d700fde22 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -101,6 +101,8 @@ let hardforkPipeline : DebianVersions.DebVersion -> Pipeline.Config.Type = [ Command.build Command.Config::{ commands = [ Cmd.runInDocker Cmd.Docker::{ + -- we are using a hard-coded docker image here because we don't 
want to wait for the image to be built.
+ -- this job exists solely for saving us time during the hard fork process.
 image = "\\\${MINA_DAEMON_DOCKER_IMAGE:-gcr.io/o1labs-192920/mina-daemon:3.0.1-migration-tooling-package-generation-fix-tarball-a980e1c-bullseye-mainnet-hardfork}"
 , extraEnv = [ "NETWORK_NAME=\$NETWORK_NAME"
 , "CONFIG_JSON_GZ_URL=\$CONFIG_JSON_GZ_URL"
 , "AWS_ACCESS_KEY_ID"
diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
index 2c95bdfeefd..550576b40b7 100644
--- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
+++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
@@ -325,7 +325,9 @@ module Ledger = struct
 ; ("path", `String tar_path)
 ; ("dir", `String dirname)
 ] ;
- let%bind () = after (Time.Span.of_int_sec 5) in
+ (* This 5s sleep is a hack for rocksdb: it seems that rocksdb needs some
+ time to stabilize before the directory is tarred. *)
+ let%bind () = after (Time.Span.of_int_sec 5) in
 let open Deferred.Or_error.Let_syntax in
 let%map () = Tar.create ~root:dirname ~file:tar_path ~directory:"." () in
 tar_path
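A closing note on how these pieces fit together: the checkpoint-prefix
plumbing added in this series exists so that a recurring (cron) job can
re-run the incremental migration and resume from its own newest checkpoint.
A hypothetical wrapper sketch follows; the flags come from the help text
above, while the incremental subcommand form, paths, database URLs, and
prefix are assumptions for illustration only:

    #!/bin/bash
    # Hypothetical cron wrapper for the incremental migration stage.
    # Assumes at least one checkpoint from a previous run already exists.
    set -euo pipefail
    PREFIX=migration
    CHECKPOINT_DIR=/var/lib/migration/checkpoints
    # Pick the checkpoint with the highest start slot, mirroring the
    # find_most_recent_checkpoint logic above (filename order alone is
    # not a reliable proxy for recency).
    LATEST=$(for f in "$CHECKPOINT_DIR/$PREFIX"-checkpoint*.json; do
                 echo "$(jq .start_slot_since_genesis "$f"),$f"
             done | sort -rn | head -n 1 | cut -d, -f2)
    mina-berkeley-migration-script incremental \
        --genesis-ledger /etc/mina/genesis_ledger.json \
        --replayer-checkpoint "$LATEST" \
        --source-db "$SOURCE_DB" \
        --target-db "$TARGET_DB" \
        --blocks-bucket "$BLOCKS_BUCKET" \
        --network mainnet \
        --checkpoint-output-path "$CHECKPOINT_DIR" \
        --checkpoint-prefix "$PREFIX"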