diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0c81cc813a..b0d9b665eb 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -5,10 +5,10 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++20")
find_program(CCACHE_PROGRAM ccache)
-if(CCACHE_PROGRAM)
+if (CCACHE_PROGRAM)
message(STATUS "Found ccache")
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE "${CCACHE_PROGRAM}")
-endif()
+endif ()
project(Zilliqa)
@@ -19,11 +19,11 @@ message(STATUS "We are on a ${CMAKE_SYSTEM_NAME} system")
# x64-osx-dynamic (see https://github.com/microsoft/vcpkg/issues/8421).
# This is a simple workaround against this issue.
if (APPLE)
- file(CREATE_LINK
- ${CMAKE_BINARY_DIR}/vcpkg_installed/${VCPKG_TARGET_TRIPLET}/lib
- ${CMAKE_BINARY_DIR}/vcpkg_installed/${VCPKG_TARGET_TRIPLET}/tools/lib
- SYMBOLIC)
-endif()
+ file(CREATE_LINK
+ ${CMAKE_BINARY_DIR}/vcpkg_installed/${VCPKG_TARGET_TRIPLET}/lib
+ ${CMAKE_BINARY_DIR}/vcpkg_installed/${VCPKG_TARGET_TRIPLET}/tools/lib
+ SYMBOLIC)
+endif ()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
#
@@ -35,9 +35,7 @@ find_package(bsoncxx CONFIG REQUIRED)
find_package(leveldb CONFIG REQUIRED)
find_package(g3log REQUIRED)
find_package(g3sinks REQUIRED)
-#TODO: upgrade to OpenSSL 1.1.1a
find_package(OpenSSL REQUIRED)
-
find_package(protobuf CONFIG REQUIRED)
set(THREADS_PREFER_PTHREAD_FLAG ON)
@@ -60,117 +58,117 @@ if(OPENCL_MINE)
message(STATUS "OpenCL enabled")
find_package(OpenCL REQUIRED)
add_definitions(-DOPENCL_MINE)
-endif()
+endif ()
# VC related test scenario
# For DS Block Consensus
-if(VC_TEST_DS_SUSPEND_1)
+if (VC_TEST_DS_SUSPEND_1)
message(STATUS "VC 1 test enabled")
add_definitions(-DVC_TEST_DS_SUSPEND_1)
-endif()
+endif ()
-if(VC_TEST_DS_SUSPEND_3)
+if (VC_TEST_DS_SUSPEND_3)
message(STATUS "VC 2 test enabled")
add_definitions(-DVC_TEST_DS_SUSPEND_3)
-endif()
+endif ()
-if(GOVVC_TEST_DS_SUSPEND_3)
+if (GOVVC_TEST_DS_SUSPEND_3)
message(STATUS "GOVVC 2 test enabled")
add_definitions(-DGOVVC_TEST_DS_SUSPEND_3)
-endif()
+endif ()
# For Final Block Consensus
-if(VC_TEST_FB_SUSPEND_1)
+if (VC_TEST_FB_SUSPEND_1)
message(STATUS "VC 3 test enabled")
add_definitions(-DVC_TEST_FB_SUSPEND_1)
-endif()
+endif ()
-if(VC_TEST_FB_SUSPEND_3)
+if (VC_TEST_FB_SUSPEND_3)
message(STATUS "VC 4 test enabled")
add_definitions(-DVC_TEST_FB_SUSPEND_3)
-endif()
+endif ()
# For View change Block Consensus
-if(VC_TEST_VC_SUSPEND_1)
+if (VC_TEST_VC_SUSPEND_1)
message(STATUS "VC 5 test enabled")
add_definitions(-DVC_TEST_VC_SUSPEND_1)
-endif()
+endif ()
-if(VC_TEST_VC_SUSPEND_3)
+if (VC_TEST_VC_SUSPEND_3)
message(STATUS "VC 6 test enabled")
add_definitions(-DVC_TEST_VC_SUSPEND_3)
-endif()
+endif ()
-if(VC_TEST_VC_PRECHECK_1)
+if (VC_TEST_VC_PRECHECK_1)
message(STATUS "VC 7 test enabled")
add_definitions(-DVC_TEST_VC_PRECHECK_1)
-endif()
+endif ()
-if(VC_TEST_VC_PRECHECK_2)
+if (VC_TEST_VC_PRECHECK_2)
message(STATUS "VC 8 test enabled")
add_definitions(-DVC_TEST_VC_PRECHECK_2)
-endif()
+endif ()
-if(VC_TEST_FB_SUSPEND_RESPONSE)
+if (VC_TEST_FB_SUSPEND_RESPONSE)
message(STATUS "VC 9 test enabled")
add_definitions(-DVC_TEST_FB_SUSPEND_RESPONSE)
-endif()
+endif ()
# For Merging DSMB into FINALBLOCK
-if(DM_TEST_DM_LESSTXN_ONE)
+if (DM_TEST_DM_LESSTXN_ONE)
message(STATUS "DM 1 test enabled")
add_definitions(-DDM_TEST_DM_LESSTXN_ONE)
-endif()
+endif ()
-if(DM_TEST_DM_LESSTXN_ALL)
+if (DM_TEST_DM_LESSTXN_ALL)
message(STATUS "DM 2 test enabled")
add_definitions(-DDM_TEST_DM_LESSTXN_ALL)
-endif()
+endif ()
-if(DM_TEST_DM_LESSMB_ONE)
+if (DM_TEST_DM_LESSMB_ONE)
message(STATUS "DM 3 test enabled")
add_definitions(-DDM_TEST_DM_LESSMB_ONE)
-endif()
+endif ()
-if(DM_TEST_DM_LESSMB_ALL)
+if (DM_TEST_DM_LESSMB_ALL)
message(STATUS "DM 4 test enabled")
add_definitions(-DDM_TEST_DM_LESSMB_ALL)
-endif()
+endif ()
-if(DM_TEST_DM_BAD_ANNOUNCE)
+if (DM_TEST_DM_BAD_ANNOUNCE)
message(STATUS "DM 5 test enabled")
add_definitions(-DDM_TEST_DM_BAD_ANNOUNCE)
-endif()
+endif ()
-if(DM_TEST_DM_BAD_MB_ANNOUNCE)
+if (DM_TEST_DM_BAD_MB_ANNOUNCE)
message(STATUS "DM 6 test enabled")
add_definitions(-DDM_TEST_DM_BAD_MB_ANNOUNCE)
-endif()
+endif ()
-if(DM_TEST_DM_MORETXN_LEADER)
+if (DM_TEST_DM_MORETXN_LEADER)
message(STATUS "DM 7 test enabled")
add_definitions(-DDM_TEST_DM_MORETXN_LEADER)
-endif()
+endif ()
-if(DM_TEST_DM_MORETXN_HALF)
+if (DM_TEST_DM_MORETXN_HALF)
message(STATUS "DM 8 test enabled")
add_definitions(-DDM_TEST_DM_MORETXN_HALF)
-endif()
+endif ()
-if(DM_TEST_DM_MOREMB_HALF)
+if (DM_TEST_DM_MOREMB_HALF)
message(STATUS "DM 9 test enabled")
add_definitions(-DDM_TEST_DM_MOREMB_HALF)
-endif()
+endif ()
-if(SJ_TEST_SJ_TXNBLKS_PROCESS_SLOW)
+if (SJ_TEST_SJ_TXNBLKS_PROCESS_SLOW)
message(STATUS "SJ 1 test enabled")
add_definitions(-DSJ_TEST_SJ_TXNBLKS_PROCESS_SLOW)
-endif()
+endif ()
-if(SJ_TEST_SJ_MISSING_MBTXNS)
+if (SJ_TEST_SJ_MISSING_MBTXNS)
message(STATUS "SJ 2 test enabled")
add_definitions(-DSJ_TEST_SJ_MISSING_MBTXNS)
-endif()
+endif ()
include_directories(${PROTOBUF_INCLUDE_DIR})
include_directories(${CMAKE_SOURCE_DIR}/src/depends/cryptoutils/include/)
@@ -199,37 +197,37 @@ set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Members of maintainers@zilliqa.com")
# compiler and linker options
add_compile_options(-Wall)
-add_compile_options(-Werror)
+#add_compile_options(-Werror)
add_compile_options(-pedantic)
add_compile_options(-Wextra)
-if(CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang|AppleClang")
+if (CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang|AppleClang")
add_compile_options(-Wno-error=deprecated-declarations)
add_compile_options(-Wno-unused-parameter)
-endif()
+endif ()
if (THREAD_SANITIZER AND ADDRESS_SANITIZER)
message(FATAL_ERROR "Cannot use ThreadSanitizer (THREAD_SANITIZER=ON) and AddressSanitizer (ADDRESS_SANITIZER=ON) at the same time")
-endif()
+endif ()
if (THREAD_SANITIZER)
add_compile_options(-fsanitize=thread)
link_libraries(-fsanitize=thread)
message(STATUS "THREAD SANITIZER enabled")
-endif()
+endif ()
if (ADDRESS_SANITIZER)
add_compile_options(-fsanitize=address)
add_compile_options(-fno-omit-frame-pointer)
link_libraries(-fsanitize=address)
message(STATUS "ADDRESS SANITIZER enabled")
-endif()
+endif ()
if (UNDEF_BEHAVIOR_SANITIZER)
add_compile_options(-fsanitize=undefined)
link_libraries(-fsanitize=undefined)
message(STATUS "UNDEFINED BEHAVIOR SANITIZER enabled")
-endif()
+endif ()
if (LIBFUZZER_SANITIZER)
include(CodeCoverage)
@@ -239,7 +237,7 @@ if (LIBFUZZER_SANITIZER)
add_compile_options(-fno-omit-frame-pointer)
link_libraries(-fsanitize=fuzzer,address,undefined)
message(STATUS "Libfuzzer with address and undefined behavior sanitizer enabled")
-endif()
+endif ()
# FIXME: This following
#
@@ -252,19 +250,19 @@ endif()
# Once fixed, the following if-case for allowing duplciate targets
# (which isn't supported in Xcode) can be removed.
if (NOT ${CMAKE_GENERATOR} STREQUAL "Xcode")
- set_property(GLOBAL PROPERTY ALLOW_DUPLICATE_CUSTOM_TARGETS 1)
-endif()
+ set_property(GLOBAL PROPERTY ALLOW_DUPLICATE_CUSTOM_TARGETS 1)
+endif ()
if (ENABLE_COVERAGE AND CMAKE_COMPILER_IS_GNUCXX)
if (NOT TESTS)
message(FATAL_ERROR "TESTS is not ON")
- endif()
+ endif ()
include(CodeCoverage)
add_compile_options(--coverage)
link_libraries(--coverage)
add_custom_target(ctest COMMAND ${CMAKE_CTEST_COMMAND})
# TODO: remove the hardcoded number in -j option
setup_target_for_coverage(${PROJECT_NAME}_coverage ctest coverage "--output-on-failure;--timeout;100")
-endif()
+endif ()
# using internal jsonrpc variant
include_directories(BEFORE ${CMAKE_SOURCE_DIR}/src/depends)
@@ -275,21 +273,21 @@ add_subdirectory(daemon)
if (TESTS)
enable_testing()
add_subdirectory(tests)
-endif()
+endif ()
# installation
-set_target_properties(buildTxBlockHashesToNums genaccounts genkeypair genTxnBodiesFromS3
- getpub getaddr gentxn signmultisig verifymultisig getnetworkhistory
- getrewardhistory isolatedServer sendcmd validateDB zilliqa zilliqad
+set_target_properties(connectivity buildTxBlockHashesToNums genaccounts genkeypair genTxnBodiesFromS3
+ getpub getaddr gentxn signmultisig verifymultisig getnetworkhistory
+ getrewardhistory isolatedServer sendcmd validateDB zilliqa zilliqad asio_multiplier
PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
set_target_properties(Common Trie NAT
PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
-if(OPENCL_MINE)
+if (OPENCL_MINE)
set_target_properties(ethash-cl PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
-endif()
+endif ()
install(
DIRECTORY ${CMAKE_BINARY_DIR}/bin ${CMAKE_BINARY_DIR}/lib
@@ -298,10 +296,10 @@ install(
)
# add clang-format and clang-tidy targets lastly
-if(LLVM_EXTRA_TOOLS)
+if (LLVM_EXTRA_TOOLS)
include(LLVMExtraTools)
-else()
+else ()
message(STATUS "LLVM extra tools NOT found (for clang format/tidy)")
-endif()
+endif ()
include(CPack)
diff --git a/README.md b/README.md
index 6d244c42c0..e8baec09f1 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ Zilliqa is a scalable smart contract platform that aims to tackle the congestion
## Zilliqa Mainnet
-The current live version on the Zilliqa Mainnet is Zilliqa [v9.0.1](https://github.com/Zilliqa/Zilliqa/releases/tag/v9.0.1) and Scilla [v0.13.1](https://github.com/Zilliqa/scilla/releases/tag/v0.13.1).
+The current live version on the Zilliqa Mainnet is Zilliqa [v9.2.3](https://github.com/Zilliqa/Zilliqa/releases/tag/v9.2.3) and Scilla [v0.13.3](https://github.com/Zilliqa/scilla/releases/tag/v0.13.3).
| | URL(s) |
|:---------|:-------|
@@ -32,7 +32,7 @@ The current live version on the Zilliqa Mainnet is Zilliqa [v9.0.1](https://gith
## Developer Testnet
-The current live version on the Developer Testnet is Zilliqa [v9.0.1](https://github.com/Zilliqa/Zilliqa/releases/tag/v9.0.1) and Scilla [v0.13.1](https://github.com/Zilliqa/scilla/releases/tag/v0.13.1).
+The current live version on the Developer Testnet is Zilliqa [v9.2.5](https://github.com/Zilliqa/Zilliqa/releases/tag/v9.2.5) and Scilla [v0.13.3](https://github.com/Zilliqa/scilla/releases/tag/v0.13.3).
| | URL(s) |
|:---------|:-------|
diff --git a/build.sh b/build.sh
index eb403707f9..ddcb804891 100755
--- a/build.sh
+++ b/build.sh
@@ -91,10 +91,6 @@ CMAKE_EXTRA_OPTIONS="-DCOMMIT_ID=\"${commit_id}\" ${CMAKE_EXTRA_OPTIONS}"
for option in "$@"
do
case $option in
- opencl)
- CMAKE_EXTRA_OPTIONS="-DOPENCL_MINE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with OpenCL"
- ;;
tsan)
CMAKE_EXTRA_OPTIONS="-DTHREAD_SANITIZER=ON ${CMAKE_EXTRA_OPTIONS}"
echo "Build with ThreadSanitizer"
@@ -125,94 +121,10 @@ do
CMAKE_EXTRA_OPTIONS="-DHEARTBEATTEST=1 ${CMAKE_EXTRA_OPTIONS}"
echo "Build with HeartBeat test"
;;
- vc1)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_DS_SUSPEND_1=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test DS Suspend 1 - Suspend DS leader for 1 time (before DS block consensus)"
- ;;
- vc2)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_DS_SUSPEND_3=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test DS Suspend 3 - Suspend DS leader for 3 times (before DS block consensus)"
- ;;
- govvc2)
- CMAKE_EXTRA_OPTIONS="-DGOVVC_TEST_DS_SUSPEND_3=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with GOVVC test - Suspend DS leader for 3 times (before DS block consensus)"
- ;;
- vc3)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_FB_SUSPEND_1=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test FB Suspend 1 - Suspend DS leader for 1 time (before Final block consensus)"
- ;;
- vc4)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_FB_SUSPEND_3=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test FB Suspend 3- Suspend DS leader for 3 times (before Final block consensus)"
- ;;
- vc5)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_VC_SUSPEND_1=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test VC Suspend 1 - Suspend DS leader for 1 time (before VC block consensus)"
- ;;
- vc6)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_VC_SUSPEND_3=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test VC Suspend 3 - Suspend DS leader for 3 times (before VC block consensus)"
- ;;
- vc7)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_VC_PRECHECK_1=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test VC Precheck 1 - Caused the node to lag behind at ds epoch"
- ;;
- vc8)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_VC_PRECHECK_2=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test VC Precheck 2 - Caused the node to lag behind at tx epoch"
- ;;
- vc9)
- CMAKE_EXTRA_OPTIONS="-DVC_TEST_FB_SUSPEND_RESPONSE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with VC test FB Suspend consensus at commit done 1 - Caused the node to lag behind at tx epoch"
- ;;
- dm1)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_LESSTXN_ONE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader has some txn that one of the backups doesn't have"
- ;;
- dm2)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_LESSTXN_ALL=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader has some txn that all of backups don't have"
- ;;
- dm3)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_LESSMB_ONE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader has more microblock received than one of the backups"
- ;;
- dm4)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_LESSMB_ALL=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader has more microblock received than all of the backups"
- ;;
- dm5)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_BAD_ANNOUNCE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader composed invalid TxBlock"
- ;;
- dm6)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_BAD_MB_ANNOUNCE=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader composed invalid DSMicroBlock"
- ;;
- dm7)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_MORETXN_LEADER=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader doesn't have some txn"
- ;;
- dm8)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_MORETXN_HALF=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader and half of the DS doesn't have some txn"
- ;;
- dm9)
- CMAKE_EXTRA_OPTIONS="-DDM_TEST_DM_MOREMB_HALF=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with DSMBMerging test - DS leader and half of the DS doesn't have some microblock"
- ;;
- sj1)
- CMAKE_EXTRA_OPTIONS="-DSJ_TEST_SJ_TXNBLKS_PROCESS_SLOW=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with SJ test - New Seed take long time to process txnblocks during syncup"
- ;;
- sj2)
- CMAKE_EXTRA_OPTIONS="-DSJ_TEST_SJ_MISSING_MBTXNS=1 ${CMAKE_EXTRA_OPTIONS}"
- echo "Build with SJ test - New Seed misses the mbtxns message from multiplier"
- ;;
evm)
echo "Build EVM"
- evm_build_result=$(cd evm-ds; cargo build --release)
- exit "$evm_build_result"
+ evm_build_result=$(cd evm-ds; cargo build --release)
+ exit "$evm_build_result"
;;
ninja)
CMAKE_EXTRA_OPTIONS="-G Ninja ${CMAKE_EXTRA_OPTIONS}"
@@ -254,15 +166,11 @@ else
install_dir="${INSTALL_DIR}"
fi
-echo "Currenct directory: $(pwd)"
+echo "Current directory: $(pwd)"
echo "Build directory: ${build_dir}"
echo "Install directory: ${install_dir}"
-if [ "$n_parallel" -gt "10" ]; then
- jobs=$(( (n_parallel / 10) * 9 ))
-else
- jobs=$(( n_parallel / 2 ))
-fi
+jobs=$((n_parallel - 1))
echo building using $jobs jobs
diff --git a/configuration_files/mainnet-constants.xml b/configuration_files/mainnet-constants.xml
index d32bd7561e..8137ea4f4e 100644
--- a/configuration_files/mainnet-constants.xml
+++ b/configuration_files/mainnet-constants.xml
@@ -187,7 +187,6 @@
0
-
600
10
diff --git a/constants.xml b/constants.xml
index 5568936e12..c487099fee 100644
--- a/constants.xml
+++ b/constants.xml
@@ -2,7 +2,7 @@
- 3
+ 4
false
false
25
@@ -85,15 +85,15 @@
10
3000
3000
- 3000
+ 500
180
80
500
60
150
5
- 30000
- 15000
+ 6000
+ 6000
55
5
3
@@ -126,8 +126,8 @@
127.0.0.1
- true
- true
+ false
+ false
true
2
@@ -353,35 +353,28 @@
- 0.0.0.0
- 8085
-
+ localhost
+ 8090
PROMETHEUS
- 15000
- 10000
+ 1000
+ 500
https://opentelemetry.io/schemas/1.2.0
1.2.0
-
- ALL
+ NONE
- tempo.monitoring.svc.cluster.local
- 4317
-
- OTLPGRPC
-
- ALL
+ 0.0.0.0
+ 4318
+ OTLPHTTP
+ NONE
-
-
+ localhost
+ 8090
NONE
diff --git a/constants.xml.native.tests b/constants.xml.native.tests
new file mode 100644
index 0000000000..099b921b58
--- /dev/null
+++ b/constants.xml.native.tests
@@ -0,0 +1,523 @@
+
+
+
+
+ 1
+ false
+ false
+ 25
+ 1
+ 3
+
+ 03B70CF2ABEAE4E86DAEF1A36243E44CD61138B89055099C0D220B58FB86FF588A
+
+ .
+ 250000
+ true
+ 200
+ false
+ 0
+ 15
+ 15360
+ true
+
+
+ 1
+ 1
+ 2
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+
+
+ false
+ false
+ true
+ 5
+ 100000
+ 5
+ 10
+ false
+ 33135
+ 20
+ 2
+
+
+
+ txn
+ 27017
+ false
+
+ 1000
+ 10000
+
+
+ 0.667
+ 10
+ 10
+ 20
+ 2
+ 1
+ 80
+ 1000
+
+
+ true
+ 10
+ 10
+ 3
+ 5
+ 5
+ 10
+ 5
+
+
+ false
+ 1
+
+
+
+ 2000
+ 10
+ 500
+ 3000
+ 500
+ 180
+ 80
+ 500
+ 60
+ 20
+ 5
+ 3500
+ 1000
+ 55
+ 5
+ 3
+ 5
+ 5
+ 10
+ 3
+ 3
+ 12
+ 10
+ 1
+
+
+ 21000
+ 500000
+ 400000
+ 50
+ 10
+ 50
+ 80
+ 10
+ 70
+ 2000000000
+ 2
+ 10
+ 5
+ 15
+ 10
+ 5
+ 127.0.0.1
+
+
+ false
+ false
+ true
+
+ 2
+ 3
+ 6
+
+ 10
+ 10
+ 1000
+ 0
+ 18
+ true
+ true
+
+
+
+ 0
+
+ 128
+ 8192
+ 0
+
+
+ 128
+ 8192
+ 2
+ 4
+
+
+
+ false
+ false
+
+ 300
+ 0.334
+ 200
+
+
+ 10
+
+
+
+ 4201
+ 5
+
+ 4301
+ 127.0.0.1
+ true
+ /tmp/zilliqa.sock
+ /tmp/scilla-server.sock
+ scilla-server
+ false
+ 4401
+
+ false
+ 4501
+ false
+ 2500
+ 3
+ 1000
+ true
+
+ 60
+
+ 0
+ true
+ /home/stephen/dev/Zilliqa/evm-ds/target/release/evm-ds
+ /tmp/evm-server.sock
+ /usr/local/etc/log4rs.yml
+
+ 1000000
+
+ 59
+
+ true
+
+ true
+
+
+
+ 200
+ 5
+
+ 0.05
+
+ 3
+
+ 50
+ 10
+ 10
+
+
+ 60
+ 600
+ 3
+ 600
+ 200
+ 3
+ 128
+ 1
+ 128
+ 5000000
+ 0
+ 10000000
+ 5
+ 100
+ 20
+ 5
+ 5
+
+
+ true
+ false
+ false
+ http://127.0.0.1:4202/api
+ 15000
+ 5
+
+ 5
+
+ false
+ 4202
+ 5
+ 3
+ 8
+ 32
+ 2
+ 1000
+
+ 99
+
+ 5
+ 4
+
+ 1800
+
+ 1900
+ 4
+ 5
+
+ 120
+ 10
+ 2
+ 3
+ 360
+ 5
+ false
+
+
+
+ 0
+ 40
+ 5
+ true
+ 30
+ 5
+ false
+ 1
+ 2
+ 5
+ 1
+
+
+ true
+ /home/stephen/dev/scilla
+ bin/scilla-checker
+ bin/scilla-runner
+ scilla_files
+ _build
+ src/stdlib
+ init.json
+ input_state.json
+ input_blockchain.json
+ input_message.json
+ output.json
+ input
+ .scilla
+ .scillib
+ scilla_libs
+ false
+ false
+ false
+ 1500
+ 10
+
+
+ false
+ 100
+ false
+ false
+ 0
+ 0
+
+
+
+ 8400000000000000000000
+ 176000000000000000
+ 12600000000000000000000
+ 20
+ 40
+ 76800
+ 20
+ 100
+ 300
+ 3600
+ 50
+ 0
+ 1
+ 1572864
+ 1024
+ 2000000
+ true
+ 0
+ 10
+ 5
+ xxxxxxxxxxx
+ txnsbackup
+ false
+ false
+ 0.000001
+ 3
+
+
+
+ localhost
+ 8090
+
+ PROMETHEUS
+ 1000
+ 500
+ https://opentelemetry.io/schemas/1.2.0
+ 1.2.0
+
+ NONE
+
+
+
+
+ 0.0.0.0
+ 4318
+
+ OTLPHTTP
+
+ NONE
+
+
+
+
+ localhost
+ 8090
+
+ NONE
+
+
+
+
+
+ 175334
+ 0
+
+
+ 175637
+ 0
+
+
+ 175637
+ 1
+
+
+ 175637
+ 2
+
+
+ 175701
+ 0
+
+
+ 175701
+ 1
+
+
+ false
+
+
+ 1664279
+ 0
+
+
+
+
+ 10
+ 15
+ 10
+ 600
+
+
+
+
+ cc02a3c906612cc5bdb087a30e6093c9f0aa04fc
+
+
+
+
+
+ ee06b3c906612cc5bdb087a30e6093c9f0aa04fd
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/constants.xml.normal b/constants.xml.normal
new file mode 100644
index 0000000000..4da7086c4b
--- /dev/null
+++ b/constants.xml.normal
@@ -0,0 +1,523 @@
+
+
+
+
+ 3
+ false
+ false
+ 25
+ 1
+ 3
+
+ 03B70CF2ABEAE4E86DAEF1A36243E44CD61138B89055099C0D220B58FB86FF588A
+
+ .
+ 250000
+ true
+ 200
+ false
+ 0
+ 15
+ 15360
+ true
+
+
+ 1
+ 1
+ 2
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+
+
+ false
+ false
+ true
+ 5
+ 100000
+ 5
+ 10
+ false
+ 33135
+ 20
+ 2
+
+
+
+ txn
+ 27017
+ false
+
+ 1000
+ 10000
+
+
+ 0.667
+ 10
+ 10
+ 20
+ 2
+ 1
+ 80
+ 1000
+
+
+ true
+ 10
+ 10
+ 3
+ 5
+ 5
+ 10
+ 5
+
+
+ false
+ 1
+
+
+
+ 2000
+ 10
+ 500
+ 3000
+ 500
+ 180
+ 80
+ 500
+ 60
+ 20
+ 5
+ 3500
+ 1000
+ 55
+ 5
+ 3
+ 5
+ 5
+ 10
+ 3
+ 3
+ 12
+ 10
+ 1
+
+
+ 21000
+ 500000
+ 400000
+ 50
+ 10
+ 50
+ 80
+ 10
+ 70
+ 2000000000
+ 2
+ 10
+ 5
+ 15
+ 10
+ 5
+ 127.0.0.1
+
+
+ false
+ false
+ true
+
+ 2
+ 3
+ 6
+
+ 10
+ 10
+ 1000
+ 0
+ 18
+ true
+ true
+
+
+
+ 0
+
+ 128
+ 8192
+ 0
+
+
+ 128
+ 8192
+ 2
+ 4
+
+
+
+ false
+ false
+
+ 300
+ 0.334
+ 200
+
+
+ 10
+
+
+
+ 4201
+ 5
+
+ 4301
+ 127.0.0.1
+ true
+ /tmp/zilliqa.sock
+ /tmp/scilla-server.sock
+ scilla-server
+ false
+ 4401
+
+ false
+ 4501
+ false
+ 2500
+ 3
+ 1000
+ true
+
+ 60
+
+ 0
+ true
+ /usr/local/bin/evm-ds
+ /tmp/evm-server.sock
+ /usr/local/etc/log4rs.yml
+
+ 1000000
+
+ 59
+
+ true
+
+ true
+
+
+
+ 200
+ 5
+
+ 0.05
+
+ 3
+
+ 50
+ 10
+ 10
+
+
+ 60
+ 600
+ 3
+ 600
+ 200
+ 3
+ 128
+ 1
+ 128
+ 5000000
+ 0
+ 10000000
+ 5
+ 100
+ 20
+ 5
+ 5
+
+
+ true
+ false
+ false
+ http://127.0.0.1:4202/api
+ 15000
+ 5
+
+ 5
+
+ false
+ 4202
+ 5
+ 3
+ 8
+ 32
+ 2
+ 1000
+
+ 99
+
+ 5
+ 4
+
+ 1800
+
+ 1900
+ 4
+ 5
+
+ 120
+ 10
+ 2
+ 3
+ 360
+ 5
+ false
+
+
+
+ 0
+ 40
+ 5
+ true
+ 30
+ 5
+ false
+ 1
+ 2
+ 5
+ 1
+
+
+ true
+ /scilla
+ bin/scilla-checker
+ bin/scilla-runner
+ scilla_files
+ _build
+ src/stdlib
+ init.json
+ input_state.json
+ input_blockchain.json
+ input_message.json
+ output.json
+ input
+ .scilla
+ .scillib
+ scilla_libs
+ true
+ false
+ false
+ 1500
+ 10
+
+
+ false
+ 100
+ false
+ false
+ 0
+ 0
+
+
+
+ 8400000000000000000000
+ 176000000000000000
+ 12600000000000000000000
+ 20
+ 40
+ 76800
+ 20
+ 100
+ 300
+ 3600
+ 50
+ 0
+ 1
+ 1572864
+ 1024
+ 2000000
+ true
+ 0
+ 10
+ 5
+ xxxxxxxxxxx
+ txnsbackup
+ false
+ false
+ 0.000001
+ 3
+
+
+
+ localhost
+ 8090
+
+ PROMETHEUS
+ 1000
+ 500
+ https://opentelemetry.io/schemas/1.2.0
+ 1.2.0
+
+ NONE
+
+
+
+
+ 0.0.0.0
+ 4318
+
+ OTLPHTTP
+
+ NONE
+
+
+
+
+ localhost
+ 8090
+
+ NONE
+
+
+
+
+
+ 175334
+ 0
+
+
+ 175637
+ 0
+
+
+ 175637
+ 1
+
+
+ 175637
+ 2
+
+
+ 175701
+ 0
+
+
+ 175701
+ 1
+
+
+ false
+
+
+ 1664279
+ 0
+
+
+
+
+ 10
+ 15
+ 10
+ 600
+
+
+
+
+ cc02a3c906612cc5bdb087a30e6093c9f0aa04fc
+
+
+
+
+
+ ee06b3c906612cc5bdb087a30e6093c9f0aa04fd
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/daemon/ZilliqaDaemon.cpp b/daemon/ZilliqaDaemon.cpp
index ab663c1f39..7fb25c893f 100644
--- a/daemon/ZilliqaDaemon.cpp
+++ b/daemon/ZilliqaDaemon.cpp
@@ -68,7 +68,8 @@ ZilliqaDaemon::ZilliqaDaemon(int argc, const char* argv[], std::ofstream& log)
m_recovery(0),
m_nodeIndex(0),
m_syncType(0),
- m_cseed(false) {
+ m_cseed(false),
+ m_kill(true) {
if (ReadInputs(argc, argv) != SUCCESS) {
ZilliqaDaemon::LOG(m_log, "Failed to read inputs.");
exit(EXIT_FAILURE);
@@ -138,13 +139,13 @@ void ZilliqaDaemon::MonitorProcess(const string& name,
for (const pid_t& pid : pids) {
// If sig is 0 (the null signal), error checking is performed but no signal
// is actually sent
- if (kill(pid, 0) < 0) {
+ if (m_kill and kill(pid, 0) < 0) {
if (errno == EPERM) {
ZilliqaDaemon::LOG(m_log, "Daemon does not have permission Name: " +
name + " Id: " + to_string(pid));
} else if (errno == ESRCH) {
- ZilliqaDaemon::LOG(
- m_log, "Process died Name: " + name + " Id: " + to_string(pid));
+ ZilliqaDaemon::LOG(m_log, "We think Process died Name: " + name +
+ " Id: " + to_string(pid));
m_died[pid] = true;
} else {
ZilliqaDaemon::LOG(m_log, "Kill failed due to " + to_string(errno) +
@@ -397,8 +398,13 @@ void ZilliqaDaemon::StartScripts() {
string cmdToRun = "ps axf | grep " + script +
" | grep -v grep | awk '{print \"kill -9 \" $1}'| sh &";
- ZilliqaDaemon::LOG(m_log, "Start to run command: \"" + cmdToRun + "\"");
- ZilliqaDaemon::LOG(m_log, "\" " + Execute(cmdToRun + " 2>&1") + " \"");
+
+ if (m_kill) {
+ ZilliqaDaemon::LOG(m_log, "Start to run command: \"" + cmdToRun + "\"");
+ ZilliqaDaemon::LOG(m_log, "\" " + Execute(cmdToRun + " 2>&1") + " \"");
+ } else {
+ ZilliqaDaemon::LOG(m_log, "Not running command: \"" + cmdToRun + "\"");
+ }
cmdToRun = "python3 " + m_curPath + script +
(0 == m_nodeIndex ? "" : " -f 10") + " &";
@@ -408,13 +414,11 @@ void ZilliqaDaemon::StartScripts() {
Exit(0);
}
-void ZilliqaDaemon::Exit(int exitCode)
-{
+void ZilliqaDaemon::Exit(int exitCode) {
// Since the updater uses the Logger and the daemon keeps fork-ing
// we can't realy on exit() because it will hang when the logger
// tries to shutdown (since the child won't have the same running threads).
- if (m_updater)
- {
+ if (m_updater) {
m_log.flush();
_exit(exitCode);
}
@@ -425,10 +429,13 @@ void ZilliqaDaemon::Exit(int exitCode)
void ZilliqaDaemon::KillProcess(const string& procName) {
vector pids = ZilliqaDaemon::GetProcIdByName(procName);
for (const auto& pid : pids) {
- ZilliqaDaemon::LOG(
- m_log, "Killing " + procName + " process before launching daemon...");
- kill(pid, SIGTERM);
- ZilliqaDaemon::LOG(m_log, procName + " process killed successfully.");
+ if (m_kill) {
+ ZilliqaDaemon::LOG(
+ m_log, "Killing " + procName + " process before launching daemon...");
+
+ kill(pid, SIGTERM);
+ ZilliqaDaemon::LOG(m_log, procName + " process killed successfully.");
+ }
}
}
@@ -453,7 +460,8 @@ int ZilliqaDaemon::ReadInputs(int argc, const char* argv[]) {
"logpath,g", po::value(&m_logPath),
"customized log path, could be relative path (e.g., \"./logs/\"), or "
"absolute path (e.g., \"/usr/local/test/logs/\")")(
- "cseed,c", "Runs as cummunity seed node if set");
+        "cseed,c", "Runs as community seed node if set")(
+        "killnone,k", "Does not kill existing processes if set");
po::variables_map vm;
@@ -474,6 +482,11 @@ int ZilliqaDaemon::ReadInputs(int argc, const char* argv[]) {
ZilliqaDaemon::LOG(m_log, "Running Daemon for community seed node.");
m_cseed = true;
}
+ if (vm.count("killnone")) {
+      ZilliqaDaemon::LOG(
+          m_log, "Will not kill processes - useful for experimental native runs.");
+ m_kill = false;
+ }
} catch (boost::program_options::required_option& e) {
ZilliqaDaemon::LOG(m_log, "ERROR: " + string(e.what()));
return ERROR_IN_COMMAND_LINE;
@@ -528,6 +541,7 @@ int main(int argc, const char* argv[]) {
bool startNewByDaemon = true;
while (1) {
for (const auto& name : programName) {
+ std::cout << "Monitoring " << name << " process..." << std::endl;
daemon.MonitorProcess(name, startNewByDaemon);
}
diff --git a/daemon/ZilliqaDaemon.h b/daemon/ZilliqaDaemon.h
index 67e8c0bc1f..9cba95669f 100644
--- a/daemon/ZilliqaDaemon.h
+++ b/daemon/ZilliqaDaemon.h
@@ -43,6 +43,7 @@ class ZilliqaDaemon final {
int m_port, m_recovery, m_nodeIndex;
unsigned int m_syncType;
bool m_cseed;
+ bool m_kill;
std::unique_ptr m_updater;
static std::string CurrentTimeStamp();
diff --git a/docs/localdev.md b/docs/localdev.md
index 23cb2a57a1..2053dd3ac1 100644
--- a/docs/localdev.md
+++ b/docs/localdev.md
@@ -1,6 +1,6 @@
# Using localdev
-The instructions obtained by running
+The instructions obtained by running
```sh
scripts/localdev.py
diff --git a/isolated-server-accounts.json b/isolated-server-accounts.json
index 7f6b4d0004..79cb848d80 100644
--- a/isolated-server-accounts.json
+++ b/isolated-server-accounts.json
@@ -39,6 +39,41 @@
         "amount": "1000000000000000000",
         "nonce": 0
     },
+ "d90f2e538ce0df89c8273cad3b63ec44a3c4ed82": {
+ "privateKey": "e53d1c3edaffc7a7bab5418eb836cf75819a82872b4a1a0f1c7fcf5c3e020b89",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "f0cb24ac66ba7375bf9b9c4fa91e208d9eaabd2e": {
+ "privateKey": "d96e9eb5b782a80ea153c937fa83e5948485fbfc8b7e7c069d7b914dbc350aba",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "05a321d0b9541ca08d7e32315ca186cc67a1602c": {
+ "privateKey": "e7f59a4beb997a02a13e0d5e025b39a6f0adc64d37bb1e6a849a4863b4680411",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "cf671756a8238cbeb19bcb4d77fc9091e2fce1a3": {
+ "privateKey": "589417286a3213dceb37f8f89bd164c3505a4cec9200c61f7c6db13a30a71b45",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "10200e3da08ee88729469d6eabc055cb225821e7": {
+ "privateKey": "5430365143ce0154b682301d0ab731897221906a7054bbf5bd83c7663a6cbc40",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "ac941274c3b6a50203cc5e7939b7dad9f32a0c12": {
+ "privateKey": "1080d2cca18ace8225354ac021f9977404cee46f1d12e9981af8c36322eac1a4",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
+ "ec902fe17d90203d0bddd943d97b29576ece3177": {
+ "privateKey": "254d9924fc1dcdca44ce92d80255c6a0bb690f867abde80e626fbfef4d357004",
+ "amount": "1000000000000000000",
+ "nonce": 0
+ },
"6e2cf2789c5b705e0990c05ca959b5001c70ba87": {
"privateKey": "410b0e0a86625a10c554f8248a77c7198917bd9135c15bb28922684826bb9f14",
"amount": "1000000000000000000",
diff --git a/primary_account.json b/primary_account.json
new file mode 100644
index 0000000000..10c7dc5879
--- /dev/null
+++ b/primary_account.json
@@ -0,0 +1,7 @@
+{
+ "7bb3b0e8a59f3f61d9bff038f4aeb42cae2ecce8": {
+ "privateKey": "db11cfa086b92497c8ed5a4cc6edb3a5bfe3a640c43ffb9fc6aa0873c56f2ee3",
+ "amount": "100000000000000000000",
+ "nonce": 0
+ }
+}
diff --git a/scripts/download_incr_DB.py b/scripts/download_incr_DB.py
index 6c8bc56e12..082dd94e92 100644
--- a/scripts/download_incr_DB.py
+++ b/scripts/download_incr_DB.py
@@ -56,8 +56,13 @@
DOWNLOADED_LIST = []
DOWNLOAD_STARTED_LIST = []
+def isGCP():
+ return AWS_ENDPOINT_URL and '.googleapis.com' in AWS_ENDPOINT_URL
+
def getURL():
- if AWS_ENDPOINT_URL:
+ if isGCP():
+ return "http://"+BUCKET_NAME+".storage.googleapis.com"
+ elif AWS_ENDPOINT_URL:
return f"{AWS_ENDPOINT_URL}/{BUCKET_NAME}"
else:
return "http://"+BUCKET_NAME+".s3.amazonaws.com"
@@ -157,7 +162,7 @@ def GetAllObjectsFromS3(url, folderName=""):
# Try get the entire persistence keys.
# S3 limitation to get only max 1000 keys. so work around using marker.
while True:
- response = requests.get(url, params={"prefix":prefix, "max-keys":1000, "marker": MARKER})
+ response = requests.get(url, params={"prefix":prefix, "list-type": 1, "max-keys":1000, "marker": MARKER})
tree = ET.fromstring(response.text)
print("[" + str(datetime.datetime.now()) + "] Files to be downloaded:")
lastkey = ''
diff --git a/scripts/download_static_DB.py b/scripts/download_static_DB.py
index d8db7d8527..0295617086 100644
--- a/scripts/download_static_DB.py
+++ b/scripts/download_static_DB.py
@@ -44,8 +44,13 @@
CREATED_FOLDER_LIST = []
AWS_ENDPOINT_URL=os.getenv("AWS_ENDPOINT_URL")
+def isGCP():
+ return AWS_ENDPOINT_URL and '.googleapis.com' in AWS_ENDPOINT_URL
+
def getURL():
- if AWS_ENDPOINT_URL:
+ if isGCP():
+ return "http://"+BUCKET_NAME+".storage.googleapis.com"
+ elif AWS_ENDPOINT_URL:
return f"{AWS_ENDPOINT_URL}/{BUCKET_NAME}"
else:
return "http://"+BUCKET_NAME+".s3.amazonaws.com"
@@ -169,25 +174,22 @@ def GetAllObjectsFromS3(url, folderName=""):
# Try get the entire persistence keys.
# S3 limitation to get only max 1000 keys. so work around using marker.
while True:
- response = requests.get(url, params={"prefix":prefix, "max-keys":1000, "marker": MARKER})
+ response = requests.get(url, params={"prefix":prefix, "list-type": 1, "max-keys":1000, "marker": MARKER})
tree = ET.fromstring(response.text)
- startInd = 5
- if(tree[startInd:] == []):
- print("Empty response")
- return False
print("[" + str(datetime.datetime.now()) + "] Files to be downloaded:")
lastkey = ''
- for key in tree[startInd:]:
- key_url = key[0].text
+ for key in tree.findall("{*}Contents"):
+ # skip compressed blockchain-data file i.e. testnet-name.tar.gz
+ key_url = key.find("{*}Key").text
if key_url.endswith("/"):
continue
list_of_keyurls.append(url+"/"+key_url)
print(key_url)
lastkey = key_url
- istruncated=tree[4].text
- if istruncated == 'true':
+ is_truncated = tree.find('{*}IsTruncated').text
+ if is_truncated == 'true':
MARKER=lastkey
- print(istruncated)
+ print(is_truncated)
else:
break
diff --git a/scripts/localdev.py b/scripts/localdev.py
index a57ecc338f..0922ce14bf 100755
--- a/scripts/localdev.py
+++ b/scripts/localdev.py
@@ -399,7 +399,7 @@ def grafana_down(config):
""" Let helm undeploy grafana """
helm_remove_repository(config, 'grafana')
-def prometheus_up(config, testnet_name, count = 23):
+def prometheus_up(config, testnet_name, count = 8):
""" Let helm deploy prometheus """
ips = []
while True:
@@ -729,18 +729,19 @@ def write_testnet_configuration(config, zilliqa_image, testnet_name, isolated_se
shutil.rmtree(instance_dir)
print(f"Generating testnet configuration .. ")
cmd = ["./bootstrap.py", testnet_name, "--clusters", "minikube", "--constants-from-file",
- os.path.join(ZILLIQA_DIR, "constants.xml"),
- "--image", zilliqa_image,
- "-n", "20",
- "-d", "5",
- "-l", "1",
- "--guard", "4/10",
- "--gentxn", "false",
- "--multiplier-fanout", "2",
- "--host-network", "false",
- "--https", "localdomain",
- "--seed-multiplier", "true",
- "--localstack", "true"]
+ os.path.join(ZILLIQA_DIR, "constants.xml"),
+ "--image", zilliqa_image,
+ "-n", "20",
+ "-d", "5",
+ "-l", "1",
+ "--guard", "4/10",
+ "--gentxn", "false",
+ "--multiplier-fanout", "1",
+ "--host-network", "false",
+ "--https", "localdomain",
+ "--seed-multiplier", "true",
+ "--skip-non-guard-ds", "true",
+ "--localstack", "true"]
cmd = cmd + ([ "--isolated-server-accounts", os.path.join(ZILLIQA_DIR, "isolated-server-accounts.json") ] if isolated_server_accounts else [])
cmd = cmd + [ "-f" ]
if persistence is not None and key_file is not None:
@@ -1352,7 +1353,8 @@ def cli(ctx):
You need the local-dev-minikube branch of testnet if it hasn't yet
been merged.
- You will need to have built scilla.
+ Your scilla repo must be checked out to release-v0.13.5 or a branch off
+ it.
localdev.py runs in stages:
setup - Sets up k8s (through minikube, and colima on OS X)
diff --git a/scripts/upload_incr_DB.py b/scripts/upload_incr_DB.py
index 01b8aec059..d1426cd9d3 100755
--- a/scripts/upload_incr_DB.py
+++ b/scripts/upload_incr_DB.py
@@ -54,8 +54,13 @@
std_handler.setFormatter(FORMATTER)
rootLogger.addHandler(std_handler)
+def isGCP():
+ return AWS_ENDPOINT_URL and '.googleapis.com' in AWS_ENDPOINT_URL
+
def awsS3Url():
- if AWS_ENDPOINT_URL:
+ if isGCP():
+ return "http://"+BUCKET_NAME+".storage.googleapis.com"
+ elif AWS_ENDPOINT_URL:
return f"{AWS_ENDPOINT_URL}/{BUCKET_NAME}"
else:
return "http://"+BUCKET_NAME+".s3.amazonaws.com"
diff --git a/src/cmd/CMakeLists.txt b/src/cmd/CMakeLists.txt
index a12d506186..9ca834164b 100644
--- a/src/cmd/CMakeLists.txt
+++ b/src/cmd/CMakeLists.txt
@@ -99,3 +99,19 @@ else()
# really be required and resolved by fixing the (circular) dependencies.
target_link_libraries(buildTxBlockHashesToNums PUBLIC "-Wl,--start-group" AccountData Persistence)
endif()
+
+add_executable(connectivity connectivity.cpp)
+add_custom_command(TARGET zilliqa
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:connectivity> ${CMAKE_BINARY_DIR}/tests/Zilliqa)
+target_include_directories(connectivity PUBLIC ${CMAKE_SOURCE_DIR}/src)
+target_link_libraries(connectivity Network)
+
+find_package(cpr REQUIRED)
+add_executable(asio_multiplier asio_multiplier.cpp)
+add_custom_command(TARGET zilliqa
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:asio_multiplier> ${CMAKE_BINARY_DIR}/tests/asio_multiplier)
+target_include_directories(asio_multiplier PUBLIC ${CMAKE_SOURCE_DIR}/src)
+target_link_libraries(asio_multiplier Network Boost::program_options cpr::cpr)
+
diff --git a/src/cmd/asio_multiplier.cpp b/src/cmd/asio_multiplier.cpp
new file mode 100644
index 0000000000..b0d81a430d
--- /dev/null
+++ b/src/cmd/asio_multiplier.cpp
@@ -0,0 +1,278 @@
+/*
+ * Copyright (C) 2023 Zilliqa
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+#include <atomic>
+#include <chrono>
+#include <filesystem>
+#include <iostream>
+#include <mutex>
+#include <set>
+#include <sstream>
+#include <thread>
+#include <vector>
+
+#include <arpa/inet.h>
+#include <boost/asio.hpp>
+#include <boost/asio/signal_set.hpp>
+#include <boost/beast/core.hpp>
+#include <boost/beast/http.hpp>
+#include <boost/program_options.hpp>
+#include <cpr/cpr.h>
+#include "libCrypto/Sha2.h"
+#include "libMetrics/Tracing.h"
+#include "libNetwork/P2P.h"
+#include "libUtils/DetachedFunction.h"
+#include "libUtils/IPConverter.h"
+#include "libUtils/Logger.h"
+#include "libZilliqa/Zilliqa.h"
+#include "libNetwork/P2PMessage.h"
+
+using namespace zil::p2p;
+std::chrono::high_resolution_clock::time_point startTime;
+
+#define PB_SUCCESS 0
+#define ERROR_IN_COMMAND_LINE -1
+#define ERROR_HARDWARE_SPEC_MISMATCH_EXCEPTION -2
+#define ERROR_UNHANDLED_EXCEPTION -3
+#define ERROR_IN_CONSTANTS -4
+
+using VectorOfPeer = std::vector<Peer>;
+
+namespace beast = boost::beast;
+namespace http = beast::http;
+
+namespace zil::multiplier::utils {
+
+std::vector<std::string> split(const std::string& s, char delimiter) {
+ std::vector<std::string> tokens;
+ std::string token;
+ std::istringstream tokenStream(s);
+ while (std::getline(tokenStream, token, delimiter)) {
+ tokens.push_back(token);
+ }
+ return tokens;
+}
+
+std::vector<std::string> removeEmptyAddr(
+ const std::vector<std::string>& addresses) {
+ std::vector<std::string> result;
+ for (const std::string& address : addresses) {
+ if (!address.empty()) {
+ result.push_back(address);
+ }
+ }
+ return result;
+}
+
+std::set<std::string> reportDifference(
+ const std::vector<std::string>& newAddresses,
+ const std::vector<std::string>& oldAddresses,
+ const std::set<std::string>& addressStore) {
+ std::set<std::string> difference;
+ for (const std::string& address : newAddresses) {
+ if (std::find(oldAddresses.begin(), oldAddresses.end(), address) ==
+ oldAddresses.end() &&
+ addressStore.find(address) == addressStore.end()) {
+ difference.insert(address);
+ }
+ }
+ return difference;
+}
+
+bool fetchDownstreams(const std::string downstreamURL,
+ std::vector<std::string>& mirrorAddresses,
+ std::set<std::string>& addressStore) {
+ cpr::Response r = cpr::Get(cpr::Url{downstreamURL});
+
+ if (r.status_code == 200) {
+ std::string contents = r.text;
+ std::vector<std::string> oldAddresses = mirrorAddresses;
+ std::vector<std::string> newAddresses = removeEmptyAddr(split(contents, '\n'));
+ std::set<std::string> diffAddresses = reportDifference(newAddresses, oldAddresses, addressStore);
+ for (const std::string& address : diffAddresses) {
+ mirrorAddresses.push_back(address);
+ }
+ } else {
+ LOG_GENERAL(INFO,"DownstreamURL " << downstreamURL
+ << " may not be available at this moment" );
+ return false;
+ }
+ return true;
+}
+
+}; // namespace zil::multiplier::utils
+
+class registeredPeers {
+ public:
+ const VectorOfPeer& getPeers() { return m_peers; }
+ void setPeers(std::vector<Peer> newPeers) { m_peers = newPeers; }
+ void addPeer(Peer newPeer) { m_peers.push_back(newPeer); }
+ void removePeer(Peer oldPeer) {
+ for (size_t i = 0; i < m_peers.size(); i++) {
+ if (m_peers[i].m_ipAddress == oldPeer.m_ipAddress &&
+ m_peers[i].m_listenPortHost == oldPeer.m_listenPortHost) {
+ m_peers.erase(m_peers.begin() + i);
+ }
+ }
+ }
+ void removePeer(int index) { m_peers.erase(m_peers.begin() + index); }
+ void clearPeers() { m_peers.clear(); }
+ int size() { return m_peers.size(); }
+ Peer getPeer(int index) { return m_peers[index]; }
+ void setPeer(int index, Peer newPeer) { m_peers[index] = newPeer; }
+ void printPeers() {
+ for (size_t i = 0; i < m_peers.size(); i++) {
+ std::cout << "Peer " << i << ": " << m_peers[i].m_ipAddress << std::endl;
+ }
+ }
+ std::vector m_peers;
+};
+
+void process_message(std::shared_ptr<Message> message,
+ registeredPeers& peers) {
+ LOG_MARKER();
+
+ if (message->msg.size() < 10) {
+ LOG_GENERAL(INFO, "Received message '"
+ << (char*)&message->msg.at(0) << "' at port "
+ << message->from.m_listenPortHost << " from address "
+ << message->from.m_ipAddress);
+ } else {
+ std::chrono::duration<double, std::milli> time_span =
+ std::chrono::high_resolution_clock::now() - startTime;
+ LOG_GENERAL(INFO, "Received " << message->msg.size() / (1024 * 1024)
+ << " MB message in " << time_span.count()
+ << " ms");
+ LOG_GENERAL(INFO, "Benchmark: " << (1000 * message->msg.size()) /
+ (time_span.count() * 1024 * 1024)
+ << " MBps");
+ }
+ zil::p2p::GetInstance().SendBroadcastMessage(peers.getPeers(), message->msg,false);
+}
+
+namespace po = boost::program_options;
+
+int main(int argc, char* argv[]) {
+ using namespace zil::multiplier::utils;
+ Peer my_network_info;
+
+ std::string url;
+ std::string logpath(std::filesystem::absolute("./").string());
+ registeredPeers our_peers{};
+ std::set<std::string> addressStore;
+ std::vector<std::string> mirrorAddresses;
+ int port;
+ std::atomic<bool> execution_continues{true };
+ std::mutex lock_addressStore;
+
+ po::options_description desc("Options");
+ desc.add_options()("help,h", "Print help messages")(
+ "listen,l", po::value(&port)->required()->default_value(30300),
+ "Specifies port to bind to")(
+ "url,s", po::value(&url)->required(),
+ "url of list of nodes to poll for connections")(
+ "version,v", "Displays the Zilliqa Multiplier version information");
+
+ po::variables_map vm;
+
+ try {
+ po::store(po::parse_command_line(argc, argv, desc), vm);
+
+ /** --help option
+ */
+ if (vm.count("help")) {
+ SWInfo::LogBrandBugReport();
+ return PB_SUCCESS;
+ }
+
+ if (vm.count("version")) {
+ SWInfo::LogBrandBugReport();
+ return PB_SUCCESS;
+ }
+
+ po::notify(vm);
+
+ if ((port < 0) || (port > 65535)) {
+ SWInfo::LogBrandBugReport();
+ LOG_GENERAL( INFO, "ERROR: Invalid port" );
+ return ERROR_IN_COMMAND_LINE;
+ }
+ if (url.empty()) {
+ SWInfo::LogBrandBugReport();
+ LOG_GENERAL( INFO, "ERROR: url empty" );
+ return ERROR_IN_COMMAND_LINE;
+ }
+ } catch (boost::program_options::required_option& e) {
+ SWInfo::LogBrandBugReport();
+ LOG_GENERAL( INFO, "ERROR: " << e.what() );
+ LOG_GENERAL( INFO, "ERROR: " << desc );
+ return ERROR_IN_COMMAND_LINE;
+ } catch (boost::program_options::error& e) {
+ SWInfo::LogBrandBugReport();
+ LOG_GENERAL( INFO, "ERROR: " << e.what() );
+ return ERROR_IN_COMMAND_LINE;
+ }
+
+ INIT_FILE_LOGGER("asio_multiplier", std::filesystem::current_path());
+ LOG_DISPLAY_LEVEL_ABOVE(INFO);
+
+
+ auto func = [&execution_continues, &our_peers,&port,&lock_addressStore]() mutable -> void {
+ boost::asio::io_context ctx(1);
+ boost::asio::signal_set sig(ctx, SIGINT, SIGTERM);
+ sig.async_wait([&](const boost::system::error_code&, int) {
+ ctx.stop();
+ execution_continues.store(false);
+ });
+ auto dispatcher = [&our_peers,&lock_addressStore](std::shared_ptr<Message> message) {
+ lock_addressStore.lock();
+ process_message(std::move(message), our_peers);
+ lock_addressStore.unlock();
+ };
+ zil::p2p::GetInstance().StartServer(ctx, port, 0, std::move(dispatcher));
+ ctx.run();
+ };
+
+ DetachedFunction(1, func);
+
+ while (execution_continues.load()) {
+ if (fetchDownstreams(url, mirrorAddresses, addressStore)) {
+ for (const std::string& address : mirrorAddresses) {
+ std::vector<std::string> address_pair;
+ address_pair = split(address, ':');
+ if (address_pair.size() != 2) {
+ LOG_GENERAL(INFO, "Invalid address: " << address );
+ continue;
+ }
+
+ if (addressStore.find(address) == addressStore.end()) {
+ addressStore.insert(address);
+ struct in_addr ip_addr {};
+ inet_pton(AF_INET, address_pair[0].c_str(), &ip_addr);
+ {
+ LOG_GENERAL( INFO, "Updating downstream Addresses: " );
+ lock_addressStore.lock();
+ our_peers.addPeer({ip_addr.s_addr, static_cast<uint32_t>(std::stoi(
+ address_pair[1]))});
+ lock_addressStore.unlock();
+ }
+ }
+ }
+ }
+ std::this_thread::sleep_for(std::chrono::seconds(5));
+ }
+ return 0;
+}
diff --git a/src/cmd/connectivity.cpp b/src/cmd/connectivity.cpp
new file mode 100644
index 0000000000..3d3baf332f
--- /dev/null
+++ b/src/cmd/connectivity.cpp
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2023 Zilliqa
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#include <chrono>
+
+#include <boost/asio.hpp>
+#include <boost/asio/steady_timer.hpp>
+#include <boost/system/error_code.hpp>
+
+#include "libCrypto/Sha2.h"
+#include "libNetwork/P2P.h"
+#include "libUtils/Logger.h"
+
+using namespace zil::p2p;
+
+namespace {
+
+using SteadyTimer = boost::asio::steady_timer;
+using ErrorCode = boost::system::error_code;
+
+using Time = std::chrono::milliseconds;
+
+// Initial delay to wait all other nodes are probably up
+static constexpr Time INITIAL_DELAY_TIME(15000);
+
+// Average delay between payload sending actions
+static constexpr size_t AVERAGE_DELAY_TIME(3333);
+static constexpr size_t DELAY_RANGE(2800);
+static_assert(AVERAGE_DELAY_TIME - DELAY_RANGE / 2 > 0);
+
+// Random payloads parametrized
+static constexpr size_t MIN_PAYLOAD_LENGTH = 3;
+static constexpr size_t MAX_PAYLOAD_LENGTH = 2023;
+
+// Check expirations rough period
+static constexpr Time CHECK_EXPIRATIONS_TIME(5000);
+
+// Check new peers rough period
+static constexpr Time CHECK_PEERS_TIME(15000);
+
+// Will warn if no ack during this time
+static constexpr Time DELAY_TIME(1000);
+
+// Will warn and delete the expected hash from the container after this period
+static constexpr Time FULL_EXPIRATION_TIME(300000);
+
+static constexpr uint16_t LISTEN_PORT = 40000;
+
+// Message types
+static constexpr uint8_t MSG_PAYLOAD = 1;
+static constexpr uint8_t MSG_ACK = 2;
+static constexpr uint8_t MSG_PEERS_REQUEST = 3;
+static constexpr uint8_t MSG_PEERS_RESPONSE = 4;
+
+zbytes HashPayload(const zbytes& payload) {
+ if (payload.size() < MIN_PAYLOAD_LENGTH) {
+ return {};
+ }
+ SHA256Calculator hasher;
+ hasher.Update(payload.data() + 1, payload.size() - 1);
+ auto hash = hasher.Finalize();
+ hash[0] = MSG_ACK;
+ return hash;
+}
+
+inline Time Clock() {
+ return std::chrono::duration_cast