diff --git a/.circleci/config.yml b/.circleci/config.yml
index 5a8df94cc6..936f5f2615 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,14 +1,17 @@
version: 2
jobs:
test-node10-0:
- working_directory: ~/ark-core
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
docker:
- image: 'circleci/node:10-browsers'
- image: 'postgres:alpine'
environment:
POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_development
- POSTGRES_USER: ark
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
steps:
- checkout
- run:
@@ -17,8 +20,8 @@ jobs:
sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
- run:
- name: Install xsel
- command: sudo apt-get install -q xsel
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
- run:
name: Generate cache key
command: >-
@@ -27,20 +30,18 @@ jobs:
- restore_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
- run:
- name: Install packages
- command: yarn
+ name: Install and build packages
+ command: yarn setup
- save_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
paths:
- ./packages/core/node_modules
- ./packages/core-api/node_modules
- ./packages/core-blockchain/node_modules
- - ./packages/core-config/node_modules
- ./packages/core-container/node_modules
- ./packages/core-database/node_modules
- ./packages/core-database-postgres/node_modules
- ./packages/core-debugger-cli/node_modules
- - ./packages/core-deployer/node_modules
- ./packages/core-elasticsearch/node_modules
- ./packages/core-error-tracker-bugsnag/node_modules
- ./packages/core-error-tracker-sentry/node_modules
@@ -59,26 +60,159 @@ jobs:
- ./packages/core-test-utils/node_modules
- ./packages/core-tester-cli/node_modules
- ./packages/core-transaction-pool/node_modules
- - ./packages/core-transaction-pool-mem/node_modules
- ./packages/core-utils/node_modules
- ./packages/core-vote-report/node_modules
- ./packages/core-webhooks/node_modules
- ./packages/crypto/node_modules
- ./node_modules
- run:
- name: Create .ark/database directory
- command: mkdir -p $HOME/.ark/database
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
- run:
- name: Test
- command: >
- ./node_modules/.bin/cross-env ARK_ENV=test ./node_modules/.bin/jest
- ./packages/core-webhooks/ ./packages/core-transaction-pool-mem/
- ./packages/core-test-utils/ ./packages/core-p2p/
- ./packages/core-json-rpc/ ./packages/core-http-utils/
- ./packages/core-event-emitter/ ./packages/core-elasticsearch/
- ./packages/core-database-postgres/ ./packages/core-config/
- ./packages/core/ --detectOpenHandles --runInBand --forceExit --ci
- --coverage | tee test_output.txt
+ name: core
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd ~/core/packages/core &&
+ yarn test:coverage
+ - run:
+ name: core-api
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-api && yarn test:coverage
+ - run:
+ name: core-blockchain
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-blockchain && yarn test:coverage
+ - run:
+ name: core-container
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-container && yarn test:coverage
+ - run:
+ name: core-database
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-database && yarn test:coverage
+ - run:
+ name: core-database-postgres
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-database-postgres && yarn test:coverage
+ - run:
+ name: core-debugger-cli
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-debugger-cli && yarn test:coverage
+ - run:
+ name: Last 1000 lines of test output
+ when: on_fail
+ command: tail -n 1000 test_output.txt
+ - run:
+ name: Codecov
+ command: ./node_modules/.bin/codecov
+ test-node11-0:
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
+ docker:
+ - image: 'circleci/node:11-browsers'
+ - image: 'postgres:alpine'
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
+ steps:
+ - checkout
+ - run:
+ name: Apt update
+ command: >-
+ sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
+ contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
+ - run:
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
+ - run:
+ name: Generate cache key
+ command: >-
+ find ./packages/ -name package.json -print0 | sort -z | xargs -r0
+ echo ./package.json | xargs md5sum | md5sum - > checksum.txt
+ - restore_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ - run:
+ name: Install and build packages
+ command: yarn setup
+ - save_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ paths:
+ - ./packages/core/node_modules
+ - ./packages/core-api/node_modules
+ - ./packages/core-blockchain/node_modules
+ - ./packages/core-container/node_modules
+ - ./packages/core-database/node_modules
+ - ./packages/core-database-postgres/node_modules
+ - ./packages/core-debugger-cli/node_modules
+ - ./packages/core-elasticsearch/node_modules
+ - ./packages/core-error-tracker-bugsnag/node_modules
+ - ./packages/core-error-tracker-sentry/node_modules
+ - ./packages/core-event-emitter/node_modules
+ - ./packages/core-forger/node_modules
+ - ./packages/core-graphql/node_modules
+ - ./packages/core-http-utils/node_modules
+ - ./packages/core-interfaces/node_modules
+ - ./packages/core-jest-matchers/node_modules
+ - ./packages/core-json-rpc/node_modules
+ - ./packages/core-logger/node_modules
+ - ./packages/core-logger-winston/node_modules
+ - ./packages/core-p2p/node_modules
+ - ./packages/core-snapshots/node_modules
+ - ./packages/core-snapshots-cli/node_modules
+ - ./packages/core-test-utils/node_modules
+ - ./packages/core-tester-cli/node_modules
+ - ./packages/core-transaction-pool/node_modules
+ - ./packages/core-utils/node_modules
+ - ./packages/core-vote-report/node_modules
+ - ./packages/core-webhooks/node_modules
+ - ./packages/crypto/node_modules
+ - ./node_modules
+ - run:
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
+ - run:
+ name: core
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd ~/core/packages/core &&
+ yarn test:coverage
+ - run:
+ name: core-api
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-api && yarn test:coverage
+ - run:
+ name: core-blockchain
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-blockchain && yarn test:coverage
+ - run:
+ name: core-container
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-container && yarn test:coverage
+ - run:
+ name: core-database
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-database && yarn test:coverage
+ - run:
+ name: core-database-postgres
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-database-postgres && yarn test:coverage
+ - run:
+ name: core-debugger-cli
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-debugger-cli && yarn test:coverage
- run:
name: Last 1000 lines of test output
when: on_fail
@@ -87,14 +221,17 @@ jobs:
name: Codecov
command: ./node_modules/.bin/codecov
test-node10-1:
- working_directory: ~/ark-core
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
docker:
- image: 'circleci/node:10-browsers'
- image: 'postgres:alpine'
environment:
POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_development
- POSTGRES_USER: ark
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
steps:
- checkout
- run:
@@ -103,8 +240,8 @@ jobs:
sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
- run:
- name: Install xsel
- command: sudo apt-get install -q xsel
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
- run:
name: Generate cache key
command: >-
@@ -113,20 +250,18 @@ jobs:
- restore_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
- run:
- name: Install packages
- command: yarn
+ name: Install and build packages
+ command: yarn setup
- save_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
paths:
- ./packages/core/node_modules
- ./packages/core-api/node_modules
- ./packages/core-blockchain/node_modules
- - ./packages/core-config/node_modules
- ./packages/core-container/node_modules
- ./packages/core-database/node_modules
- ./packages/core-database-postgres/node_modules
- ./packages/core-debugger-cli/node_modules
- - ./packages/core-deployer/node_modules
- ./packages/core-elasticsearch/node_modules
- ./packages/core-error-tracker-bugsnag/node_modules
- ./packages/core-error-tracker-sentry/node_modules
@@ -145,26 +280,59 @@ jobs:
- ./packages/core-test-utils/node_modules
- ./packages/core-tester-cli/node_modules
- ./packages/core-transaction-pool/node_modules
- - ./packages/core-transaction-pool-mem/node_modules
- ./packages/core-utils/node_modules
- ./packages/core-vote-report/node_modules
- ./packages/core-webhooks/node_modules
- ./packages/crypto/node_modules
- ./node_modules
- run:
- name: Create .ark/database directory
- command: mkdir -p $HOME/.ark/database
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
- run:
- name: Test
- command: >
- ./node_modules/.bin/cross-env ARK_ENV=test ./node_modules/.bin/jest
- ./packages/crypto/ ./packages/core-utils/
- ./packages/core-tester-cli/ ./packages/core-snapshots/
- ./packages/core-logger/ ./packages/core-interfaces/
- ./packages/core-forger/ ./packages/core-error-tracker-bugsnag/
- ./packages/core-debugger-cli/ ./packages/core-container/
- ./packages/core-api/ --detectOpenHandles --runInBand --forceExit
- --ci --coverage | tee test_output.txt
+ name: core-event-emitter
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-event-emitter && yarn test:coverage
+ - run:
+ name: core-forger
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-forger && yarn test:coverage
+ - run:
+ name: core-graphql
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-graphql && yarn test:coverage
+ - run:
+ name: core-http-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-http-utils && yarn test:coverage
+ - run:
+ name: core-jest-matchers
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-jest-matchers && yarn test:coverage
+ - run:
+ name: core-json-rpc
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-json-rpc && yarn test:coverage
+ - run:
+ name: core-logger
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-logger && yarn test:coverage
+ - run:
+ name: core-logger-winston
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-logger-winston && yarn test:coverage
+ - run:
+ name: core-p2p
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-p2p && yarn test:coverage
- run:
name: Last 1000 lines of test output
when: on_fail
@@ -173,14 +341,17 @@ jobs:
name: Codecov
command: ./node_modules/.bin/codecov
test-node10-2:
- working_directory: ~/ark-core
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
docker:
- image: 'circleci/node:10-browsers'
- image: 'postgres:alpine'
environment:
POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_development
- POSTGRES_USER: ark
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
steps:
- checkout
- run:
@@ -189,8 +360,8 @@ jobs:
sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
- run:
- name: Install xsel
- command: sudo apt-get install -q xsel
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
- run:
name: Generate cache key
command: >-
@@ -199,20 +370,18 @@ jobs:
- restore_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
- run:
- name: Install packages
- command: yarn
+ name: Install and build packages
+ command: yarn setup
- save_cache:
key: 'core-node10-{{ checksum "checksum.txt" }}-1'
paths:
- ./packages/core/node_modules
- ./packages/core-api/node_modules
- ./packages/core-blockchain/node_modules
- - ./packages/core-config/node_modules
- ./packages/core-container/node_modules
- ./packages/core-database/node_modules
- ./packages/core-database-postgres/node_modules
- ./packages/core-debugger-cli/node_modules
- - ./packages/core-deployer/node_modules
- ./packages/core-elasticsearch/node_modules
- ./packages/core-error-tracker-bugsnag/node_modules
- ./packages/core-error-tracker-sentry/node_modules
@@ -231,26 +400,289 @@ jobs:
- ./packages/core-test-utils/node_modules
- ./packages/core-tester-cli/node_modules
- ./packages/core-transaction-pool/node_modules
- - ./packages/core-transaction-pool-mem/node_modules
- ./packages/core-utils/node_modules
- ./packages/core-vote-report/node_modules
- ./packages/core-webhooks/node_modules
- ./packages/crypto/node_modules
- ./node_modules
- run:
- name: Create .ark/database directory
- command: mkdir -p $HOME/.ark/database
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
+ - run:
+ name: core-snapshots
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-snapshots && yarn test:coverage
+ - run:
+ name: core-test-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-test-utils && yarn test:coverage
+ - run:
+ name: core-tester-cli
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-tester-cli && yarn test:coverage
+ - run:
+ name: core-transaction-pool
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-transaction-pool && yarn test:coverage
+ - run:
+ name: core-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-utils && yarn test:coverage
+ - run:
+ name: core-vote-report
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-vote-report && yarn test:coverage
+ - run:
+ name: core-webhooks
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-webhooks && yarn test:coverage
+ - run:
+ name: crypto
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd ~/core/packages/crypto
+ && yarn test:coverage
+ - run:
+ name: Last 1000 lines of test output
+ when: on_fail
+ command: tail -n 1000 test_output.txt
+ - run:
+ name: Codecov
+ command: ./node_modules/.bin/codecov
+ test-node11-1:
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
+ docker:
+ - image: 'circleci/node:11-browsers'
+ - image: 'postgres:alpine'
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
+ steps:
+ - checkout
+ - run:
+ name: Apt update
+ command: >-
+ sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
+ contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
+ - run:
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
+ - run:
+ name: Generate cache key
+ command: >-
+ find ./packages/ -name package.json -print0 | sort -z | xargs -r0
+ echo ./package.json | xargs md5sum | md5sum - > checksum.txt
+ - restore_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ - run:
+ name: Install and build packages
+ command: yarn setup
+ - save_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ paths:
+ - ./packages/core/node_modules
+ - ./packages/core-api/node_modules
+ - ./packages/core-blockchain/node_modules
+ - ./packages/core-container/node_modules
+ - ./packages/core-database/node_modules
+ - ./packages/core-database-postgres/node_modules
+ - ./packages/core-debugger-cli/node_modules
+ - ./packages/core-elasticsearch/node_modules
+ - ./packages/core-error-tracker-bugsnag/node_modules
+ - ./packages/core-error-tracker-sentry/node_modules
+ - ./packages/core-event-emitter/node_modules
+ - ./packages/core-forger/node_modules
+ - ./packages/core-graphql/node_modules
+ - ./packages/core-http-utils/node_modules
+ - ./packages/core-interfaces/node_modules
+ - ./packages/core-jest-matchers/node_modules
+ - ./packages/core-json-rpc/node_modules
+ - ./packages/core-logger/node_modules
+ - ./packages/core-logger-winston/node_modules
+ - ./packages/core-p2p/node_modules
+ - ./packages/core-snapshots/node_modules
+ - ./packages/core-snapshots-cli/node_modules
+ - ./packages/core-test-utils/node_modules
+ - ./packages/core-tester-cli/node_modules
+ - ./packages/core-transaction-pool/node_modules
+ - ./packages/core-utils/node_modules
+ - ./packages/core-vote-report/node_modules
+ - ./packages/core-webhooks/node_modules
+ - ./packages/crypto/node_modules
+ - ./node_modules
+ - run:
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
+ - run:
+ name: core-event-emitter
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-event-emitter && yarn test:coverage
+ - run:
+ name: core-forger
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-forger && yarn test:coverage
+ - run:
+ name: core-graphql
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-graphql && yarn test:coverage
+ - run:
+ name: core-http-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-http-utils && yarn test:coverage
+ - run:
+ name: core-jest-matchers
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-jest-matchers && yarn test:coverage
- run:
- name: Test
- command: >
- ./node_modules/.bin/cross-env ARK_ENV=test ./node_modules/.bin/jest
- ./packages/core-vote-report/ ./packages/core-transaction-pool/
- ./packages/core-snapshots-cli/ ./packages/core-logger-winston/
- ./packages/core-jest-matchers/ ./packages/core-graphql/
- ./packages/core-error-tracker-sentry/ ./packages/core-deployer/
- ./packages/core-database/ ./packages/core-blockchain/
- --detectOpenHandles --runInBand --forceExit --ci --coverage | tee
- test_output.txt
+ name: core-json-rpc
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-json-rpc && yarn test:coverage
+ - run:
+ name: core-logger
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-logger && yarn test:coverage
+ - run:
+ name: core-logger-winston
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-logger-winston && yarn test:coverage
+ - run:
+ name: core-p2p
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-p2p && yarn test:coverage
+ - run:
+ name: Last 1000 lines of test output
+ when: on_fail
+ command: tail -n 1000 test_output.txt
+ - run:
+ name: Codecov
+ command: ./node_modules/.bin/codecov
+ test-node11-2:
+ working_directory: ~/core
+ environment:
+ CORE_DB_DATABASE: core_development
+ CORE_DB_USERNAME: core
+ docker:
+ - image: 'circleci/node:11-browsers'
+ - image: 'postgres:alpine'
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_development
+ POSTGRES_USER: core
+ steps:
+ - checkout
+ - run:
+ name: Apt update
+ command: >-
+ sudo sh -c 'echo "deb http://ftp.debian.org/debian stable main
+ contrib non-free" >> /etc/apt/sources.list' && sudo apt-get update
+ - run:
+ name: Install xsel & postgresql-client
+ command: sudo apt-get install -q xsel postgresql-client
+ - run:
+ name: Generate cache key
+ command: >-
+ find ./packages/ -name package.json -print0 | sort -z | xargs -r0
+ echo ./package.json | xargs md5sum | md5sum - > checksum.txt
+ - restore_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ - run:
+ name: Install and build packages
+ command: yarn setup
+ - save_cache:
+ key: 'core-node11-{{ checksum "checksum.txt" }}-1'
+ paths:
+ - ./packages/core/node_modules
+ - ./packages/core-api/node_modules
+ - ./packages/core-blockchain/node_modules
+ - ./packages/core-container/node_modules
+ - ./packages/core-database/node_modules
+ - ./packages/core-database-postgres/node_modules
+ - ./packages/core-debugger-cli/node_modules
+ - ./packages/core-elasticsearch/node_modules
+ - ./packages/core-error-tracker-bugsnag/node_modules
+ - ./packages/core-error-tracker-sentry/node_modules
+ - ./packages/core-event-emitter/node_modules
+ - ./packages/core-forger/node_modules
+ - ./packages/core-graphql/node_modules
+ - ./packages/core-http-utils/node_modules
+ - ./packages/core-interfaces/node_modules
+ - ./packages/core-jest-matchers/node_modules
+ - ./packages/core-json-rpc/node_modules
+ - ./packages/core-logger/node_modules
+ - ./packages/core-logger-winston/node_modules
+ - ./packages/core-p2p/node_modules
+ - ./packages/core-snapshots/node_modules
+ - ./packages/core-snapshots-cli/node_modules
+ - ./packages/core-test-utils/node_modules
+ - ./packages/core-tester-cli/node_modules
+ - ./packages/core-transaction-pool/node_modules
+ - ./packages/core-utils/node_modules
+ - ./packages/core-vote-report/node_modules
+ - ./packages/core-webhooks/node_modules
+ - ./packages/crypto/node_modules
+ - ./node_modules
+ - run:
+ name: Create .core/database directory
+ command: mkdir -p $HOME/.core/database
+ - run:
+ name: core-snapshots
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-snapshots && yarn test:coverage
+ - run:
+ name: core-test-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-test-utils && yarn test:coverage
+ - run:
+ name: core-tester-cli
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-tester-cli && yarn test:coverage
+ - run:
+ name: core-transaction-pool
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-transaction-pool && yarn test:coverage
+ - run:
+ name: core-utils
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-utils && yarn test:coverage
+ - run:
+ name: core-vote-report
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-vote-report && yarn test:coverage
+ - run:
+ name: core-webhooks
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd
+ ~/core/packages/core-webhooks && yarn test:coverage
+ - run:
+ name: crypto
+ command: >-
+ cd ~/core/.circleci && ./rebuild-db.sh && cd ~/core/packages/crypto
+ && yarn test:coverage
- run:
name: Last 1000 lines of test output
when: on_fail
@@ -265,3 +697,6 @@ workflows:
- test-node10-0
- test-node10-1
- test-node10-2
+ - test-node11-0
+ - test-node11-1
+ - test-node11-2
diff --git a/.circleci/configTemplate.json b/.circleci/configTemplate.json
index cc32aa212d..6d249c90b9 100644
--- a/.circleci/configTemplate.json
+++ b/.circleci/configTemplate.json
@@ -1,90 +1,177 @@
{
- "version": 2,
- "jobs": {
- "test-node10-0": {
- "working_directory": "~/ark-core",
- "docker": [
- {
- "image": "circleci/node:10-browsers"
+ "version": 2,
+ "jobs": {
+ "test-node10-0": {
+ "working_directory": "~/core",
+ "environment": {
+ "CORE_DB_DATABASE": "core_development",
+ "CORE_DB_USERNAME": "core"
+ },
+ "docker": [
+ {
+ "image": "circleci/node:10-browsers"
+ },
+ {
+ "image": "postgres:alpine",
+ "environment": {
+ "POSTGRES_PASSWORD": "password",
+ "POSTGRES_DB": "core_development",
+ "POSTGRES_USER": "core"
+ }
+ }
+ ],
+ "steps": [
+ "checkout",
+ {
+ "run": {
+ "name": "Apt update",
+ "command": "sudo sh -c 'echo \"deb http://ftp.debian.org/debian stable main contrib non-free\" >> /etc/apt/sources.list' && sudo apt-get update"
+ }
+ },
+ {
+ "run": {
+ "name": "Install xsel & postgresql-client",
+ "command": "sudo apt-get install -q xsel postgresql-client"
+ }
+ },
+ {
+ "run": {
+ "name": "Generate cache key",
+ "command": "find ./packages/ -name package.json -print0 | sort -z | xargs -r0 echo ./package.json | xargs md5sum | md5sum - > checksum.txt"
+ }
+ },
+ {
+ "restore_cache": {
+ "key": "core-node10-{{ checksum \"checksum.txt\" }}-1"
+ }
+ },
+ {
+ "run": {
+ "name": "Install and build packages",
+ "command": "yarn setup"
+ }
+ },
+ {
+ "save_cache": {
+ "key": "core-node10-{{ checksum \"checksum.txt\" }}-1",
+ "paths": []
+ }
+ },
+ {
+ "run": {
+ "name": "Create .core/database directory",
+ "command": "mkdir -p $HOME/.core/database"
+ }
+ },
+ {
+ "run": {
+ "name": "Test",
+ "command": ""
+ }
+ },
+ {
+ "run": {
+ "name": "Last 1000 lines of test output",
+ "when": "on_fail",
+ "command": "tail -n 1000 test_output.txt"
+ }
+ },
+ {
+ "run": {
+ "name": "Codecov",
+ "command": "./node_modules/.bin/codecov"
+ }
+ }
+ ]
},
- {
- "image": "postgres:alpine",
- "environment": {
- "POSTGRES_PASSWORD": "password",
- "POSTGRES_DB": "ark_development",
- "POSTGRES_USER": "ark"
- }
+ "test-node11-0": {
+ "working_directory": "~/core",
+ "environment": {
+ "CORE_DB_DATABASE": "core_development",
+ "CORE_DB_USERNAME": "core"
+ },
+ "docker": [
+ {
+ "image": "circleci/node:11-browsers"
+ },
+ {
+ "image": "postgres:alpine",
+ "environment": {
+ "POSTGRES_PASSWORD": "password",
+ "POSTGRES_DB": "core_development",
+ "POSTGRES_USER": "core"
+ }
+ }
+ ],
+ "steps": [
+ "checkout",
+ {
+ "run": {
+ "name": "Apt update",
+ "command": "sudo sh -c 'echo \"deb http://ftp.debian.org/debian stable main contrib non-free\" >> /etc/apt/sources.list' && sudo apt-get update"
+ }
+ },
+ {
+ "run": {
+ "name": "Install xsel & postgresql-client",
+ "command": "sudo apt-get install -q xsel postgresql-client"
+ }
+ },
+ {
+ "run": {
+ "name": "Generate cache key",
+ "command": "find ./packages/ -name package.json -print0 | sort -z | xargs -r0 echo ./package.json | xargs md5sum | md5sum - > checksum.txt"
+ }
+ },
+ {
+ "restore_cache": {
+ "key": "core-node11-{{ checksum \"checksum.txt\" }}-1"
+ }
+ },
+ {
+ "run": {
+ "name": "Install and build packages",
+ "command": "yarn setup"
+ }
+ },
+ {
+ "save_cache": {
+ "key": "core-node11-{{ checksum \"checksum.txt\" }}-1",
+ "paths": []
+ }
+ },
+ {
+ "run": {
+ "name": "Create .core/database directory",
+ "command": "mkdir -p $HOME/.core/database"
+ }
+ },
+ {
+ "run": {
+ "name": "Test",
+ "command": ""
+ }
+ },
+ {
+ "run": {
+ "name": "Last 1000 lines of test output",
+ "when": "on_fail",
+ "command": "tail -n 1000 test_output.txt"
+ }
+ },
+ {
+ "run": {
+ "name": "Codecov",
+ "command": "./node_modules/.bin/codecov"
+ }
+ }
+ ]
}
- ],
- "steps": [
- "checkout",
- {
- "run": {
- "name": "Apt update",
- "command": "sudo sh -c 'echo \"deb http://ftp.debian.org/debian stable main contrib non-free\" >> /etc/apt/sources.list' && sudo apt-get update"
- }
- },
- {
- "run": {
- "name": "Install xsel",
- "command": "sudo apt-get install -q xsel"
- }
- },
- {
- "run": {
- "name": "Generate cache key",
- "command": "find ./packages/ -name package.json -print0 | sort -z | xargs -r0 echo ./package.json | xargs md5sum | md5sum - > checksum.txt"
- }
- },
- {
- "restore_cache": {
- "key": "core-node10-{{ checksum \"checksum.txt\" }}-1"
- }
- },
- {
- "run": {
- "name": "Install packages",
- "command": "yarn"
- }
- },
- {
- "save_cache": {
- "key": "core-node10-{{ checksum \"checksum.txt\" }}-1",
- "paths": []
- }
- },
- {
- "run": {
- "name": "Create .ark/database directory",
- "command": "mkdir -p $HOME/.ark/database"
- }
- },
- {
- "run": {
- "name": "Test",
- "command": "./node_modules/.bin/cross-env ARK_ENV=test ./node_modules/.bin/jest {{TESTPATHS}} --detectOpenHandles --runInBand --forceExit --ci --coverage | tee test_output.txt\n"
- }
- },
- {
- "run": {
- "name": "Last 1000 lines of test output",
- "when": "on_fail",
- "command": "tail -n 1000 test_output.txt"
- }
- },
- {
- "run": {
- "name": "Codecov",
- "command": "./node_modules/.bin/codecov"
- }
+ },
+ "workflows": {
+ "version": 2,
+ "build_and_test": {
+ "jobs": []
}
- ]
- }
- },
- "workflows": {
- "version": 2,
- "build_and_test": {
- "jobs": []
}
- }
}
diff --git a/.circleci/generateConfig.js b/.circleci/generateConfig.js
index b729ba31e8..73cb30527d 100644
--- a/.circleci/generateConfig.js
+++ b/.circleci/generateConfig.js
@@ -1,90 +1,73 @@
-const yaml = require('js-yaml')
-const fs = require('fs')
-const path = require('path')
+const yaml = require("js-yaml");
+const fs = require("fs");
+const path = require("path");
+const chunk = require("lodash.chunk");
-const config = require('./configTemplate.json')
+const config = require("./configTemplate.json");
-generateConfig()
-
-function generateConfig() {
- fs.readdir('./packages', (err, packages) => genYaml({ packages }))
+function jason(value) {
+ return JSON.parse(JSON.stringify(value));
}
-function genYaml(options) {
- // save cache
- const saveCacheStep = config.jobs['test-node10-0'].steps.find(
- step => typeof step === 'object' && step.save_cache,
- )
- saveCacheStep.save_cache.paths = options.packages
- .map(package => `./packages/${package}/node_modules`)
- .concat('./node_modules')
-
- // test split
- const packagesSplit = splitPackagesByTestFiles(options.packages, 3)
-
- const jobs = [
- config.jobs['test-node10-0'],
- JSON.parse(JSON.stringify(config.jobs['test-node10-0'])),
- JSON.parse(JSON.stringify(config.jobs['test-node10-0'])),
- ]
-
- jobs.forEach((job, index) => {
- const testStep = job.steps.find(
- step => typeof step === 'object' && step.run && step.run.name === 'Test',
- )
- testStep.run.command = testStep.run.command.replace(
- '{{TESTPATHS}}',
- packagesSplit[index].map(package => `./packages/${package}/`).join(' '),
- )
-
- config.jobs[`test-node10-${index}`] = job
- config.workflows.build_and_test.jobs.push(`test-node10-${index}`)
- })
-
- fs.writeFile('.circleci/config.yml', yaml.safeDump(config), 'utf8', err => {
- if (err) console.error(err)
- })
-}
+fs.readdir("./packages", (_, packages) => {
+ // test split
+ const packagesSplit = chunk(packages.sort(), 10);
-function splitPackagesByTestFiles(packages, splitNumber) {
- /* distribute test packages by test files count : start by most files package,
- and distribute package by package in each _packagesSplit_ (not the most effective
- distribution but simple and enough for now) */
- const packagesWithCount = packages.map(package => ({
- package,
- count: countFiles(`packages/${package}/__tests__`, '.test.js'),
- }))
- const packagesSortedByCount = packagesWithCount.sort(
- (pkgA, pkgB) => pkgA.count > pkgB.count,
- )
-
- const packagesSplit = new Array(splitNumber)
- packagesSortedByCount.forEach(
- (pkg, index) =>
- (packagesSplit[index % splitNumber] = [pkg.package].concat(
- packagesSplit[index % splitNumber] || [],
- )),
- )
-
- return packagesSplit
-}
+  const resetSqlCommand = "cd ~/core/.circleci && ./rebuild-db.sh";
+
+  for (const name of Object.keys(config.jobs)) {
+ // save cache
+ const saveCacheStep = config.jobs[name].steps.find(step => typeof step === "object" && step.save_cache);
+ saveCacheStep.save_cache.paths = packages
+ .map(package => `./packages/${package}/node_modules`)
+ .concat("./node_modules");
+
+ const jobs = [
+ config.jobs[name],
+ jason(config.jobs[name]),
+ jason(config.jobs[name]),
+ ];
+
+ jobs.forEach((job, index) => {
+ const testStepIndex = job.steps.findIndex(
+ step => typeof step === "object" && step.run && step.run.name === "Test",
+ );
+
+ const pkgs = packagesSplit[index].map(package => `./packages/${package}/`);
-function countFiles(startPath, filter) {
- let count = 0
- if (!fs.existsSync(startPath)) {
- return
- }
-
- var files = fs.readdirSync(startPath)
- for (let i = 0; i < files.length; i++) {
- const filename = path.join(startPath, files[i])
- const stat = fs.lstatSync(filename)
- if (stat.isDirectory()) {
- count += countFiles(filename, filter)
- } else if (filename.indexOf(filter) >= 0) {
- count++
+ const steps = pkgs
+ .map(pkg => {
+ const name = path.basename(pkg);
+
+ return {
+ run: {
+ name,
+ command: `${resetSqlCommand} && cd ~/core/packages/${name} && yarn test:coverage`,
+ },
+ };
+ })
+ .filter(pkg => {
+ const {
+ scripts
+ } = require(path.resolve(__dirname, `../packages/${pkg.run.name}/package.json`));
+
+ return Object.keys(scripts).includes("test:coverage");
+ });
+
+ const stepLog = job.steps[9];
+ const stepCoverage = job.steps[10];
+
+      for (let i = 0; i < steps.length; i++) {
+ job.steps[testStepIndex + i] = steps[i];
+ }
+
+ job.steps.push(stepLog);
+ job.steps.push(stepCoverage);
+
+ config.jobs[name.slice(0, -1) + index] = job;
+ config.workflows.build_and_test.jobs.push(name.slice(0, -1) + index);
+ });
}
- }
- return count
-}
+ fs.writeFileSync(".circleci/config.yml", yaml.safeDump(config));
+});
diff --git a/.circleci/rebuild-db.sh b/.circleci/rebuild-db.sh
new file mode 100755
index 0000000000..3798325e18
--- /dev/null
+++ b/.circleci/rebuild-db.sh
@@ -0,0 +1,15 @@
+tables='rounds blocks transactions wallets'
+
+for table in $tables
+do
+ dropcmd=$(echo "drop table if exists ${table};")
+ psql -h localhost -U core -d core_development -c "${dropcmd}"
+done
+
+cd ../packages/core-database-postgres/src/migrations/
+
+for sqlFile in ./*.sql
+do
+ sqlcmd=$(cat $sqlFile | sed 's/${schema~}\.//g')
+ psql -h localhost -U core -d core_development -c "${sqlcmd}"
+done
\ No newline at end of file
diff --git a/.codecov.yml b/.codecov.yml
index f0d614a77b..3e621d21c5 100644
--- a/.codecov.yml
+++ b/.codecov.yml
@@ -1,3 +1,14 @@
ignore:
+ - "packages/**/src/defaults.ts"
+ - "packages/**/src/index.ts"
+ - "packages/**/src/plugin.ts"
+ - "packages/core-error-tracker-bugsnag/**/*"
+ - "packages/core-error-tracker-sentry/**/*"
+ - "packages/core-graphql/**/*"
+ - "packages/core-http-utils/**/*"
+ - "packages/core-logger-winston/src/formatter.ts"
+ - "packages/core-snapshots-cli/**/*"
+ - "packages/core-test-utils/**/*"
+ - "packages/core-test-utils/src/fixtures/**/*"
- "packages/core-tester-cli/**/*"
- - "packages/**/lib/index"
+ - "packages/core-webhooks/src/database/migrations/**/*"
diff --git a/.editorconfig b/.editorconfig
index 99580d06fe..cf80b4e9c2 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -5,5 +5,5 @@ charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
-indent_size = 2
+indent_size = 4
trim_trailing_whitespace = true
diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index a04bc1cdb1..0000000000
--- a/.eslintignore
+++ /dev/null
@@ -1,9 +0,0 @@
-/build/**
-/dist/**
-/packages/dist/**
-/.coverage/**
-/docs/**
-/tmp/**
-/fixtures/**
-/__fixtures__/**
-!.eslintrc.js
diff --git a/.eslintrc.json b/.eslintrc.json
deleted file mode 100644
index ab219a187a..0000000000
--- a/.eslintrc.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "env": {
- "commonjs": true,
- "node": true,
- "jest": true
- },
- "extends": ["@arkecosystem/eslint-config-base", "prettier"],
- "rules": {
- "class-methods-use-this": "off",
- "complexity": "off",
- "global-require": "off",
- "import/no-dynamic-require": "off",
- "no-restricted-syntax": "off",
- "no-console": [
- "error",
- { "allow": ["error", "info", "warn", "time", "timeEnd"] }
- ],
- "no-plusplus": "off",
- "no-continue": "off",
- "no-param-reassign": "off",
- "max-len": [
- "warn",
- {
- "code": 120,
- "ignoreTemplateLiterals": true,
- "ignoreRegExpLiterals": true
- }
- ],
- "import/no-extraneous-dependencies": "off",
- // TODO: fix later as they require a lot of changes
- "consistent-return": "off",
- "no-unused-expressions": "off",
- "no-underscore-dangle": "off",
- "no-unused-vars": "off",
- "prefer-destructuring": "off",
- "radix": "off"
- }
-}
diff --git a/.github/ISSUE_TEMPLATE/Documentation_issue.md b/.github/ISSUE_TEMPLATE/Documentation_issue.md
new file mode 100644
index 0000000000..089a660657
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/Documentation_issue.md
@@ -0,0 +1,8 @@
+---
+name: "Documentation Issue"
+about: "For documentation issues, see: https://github.com/ArkEcosystem/docs/issues"
+---
+
+The Ark Core documentation has its own dedicated repository. Please open your documentation-related issue at https://github.com/ArkEcosystem/docs/issues. **However, it's best to simply make a pull request to correct the issue you have found!**
+
+Thanks!
diff --git a/.github/ISSUE_TEMPLATE/Security_vulnerabilities.md b/.github/ISSUE_TEMPLATE/Security_vulnerabilities.md
new file mode 100644
index 0000000000..1f0c3c599b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/Security_vulnerabilities.md
@@ -0,0 +1,8 @@
+---
+name: "Security Vulnerabilities"
+about: "For reporting security-related issues, see: https://docs.ark.io/security/"
+---
+
+PLEASE DON'T DISCLOSE SECURITY-RELATED ISSUES PUBLICLY, SEE BELOW.
+
+If you discover a security vulnerability within Core, please send an e-mail to security@ark.io. All security vulnerabilities will be promptly addressed.
diff --git a/.github/ISSUE_TEMPLATE/Support_question.md b/.github/ISSUE_TEMPLATE/Support_question.md
new file mode 100644
index 0000000000..3fb7a681ac
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/Support_question.md
@@ -0,0 +1,8 @@
+---
+name: "Support Question"
+about: "This repository is only for reporting bugs or problems."
+---
+
+This repository is only for reporting bugs or issues. If you need support, please send an e-mail to support@ark.io.
+
+Thanks!
diff --git a/.gitignore b/.gitignore
index da756344b3..a470a2cc41 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@ docs
# Optional npm cache directory
.npm
+package-lock.json
# Mac OS X local settings
.DS_Store
@@ -60,4 +61,7 @@ packages/**/dist/
*.sqlite
# Random
-peers_backup.json
+docker/development
+
+#Webstorm/Intellij
+.idea
diff --git a/.lintstagedrc.json b/.lintstagedrc.json
index a8cd9eebff..9e4311289b 100644
--- a/.lintstagedrc.json
+++ b/.lintstagedrc.json
@@ -1,4 +1,4 @@
{
- "*.js": ["eslint --fix", "prettier --write", "git add"],
- "*.{json,md}": ["prettier --write", "git add"]
+ "*.ts": ["yarn lint", "prettier --write", "git add"],
+ "*.{json,md}": ["prettier --write", "git add"]
}
diff --git a/.prettierignore b/.prettierignore
index 6350e98682..ccd28dd23c 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -1 +1,3 @@
.coverage
+dist
+docs
diff --git a/.prettierrc.json b/.prettierrc.json
index 89a0a2b7f0..3149fccdfb 100644
--- a/.prettierrc.json
+++ b/.prettierrc.json
@@ -1,5 +1,7 @@
{
- "singleQuote": true,
- "trailingComma": "all",
- "semi": false
+ "printWidth": 120,
+ "singleQuote": false,
+ "tabWidth": 4,
+ "trailingComma": "all",
+ "useTabs": false
}
diff --git a/.snyk b/.snyk
index 384ff6609d..d57e0639a7 100644
--- a/.snyk
+++ b/.snyk
@@ -215,7 +215,7 @@ ignore:
reason: None given
expires: '2018-12-11T05:03:38.314Z'
'npm:chownr:20180731':
- - '@arkecosystem/core > @arkecosystem/core-transaction-pool-mem > better-sqlite3 > tar > chownr':
+ - '@arkecosystem/core > @arkecosystem/core-transaction-pool > better-sqlite3 > tar > chownr':
reason: None given
expires: '2018-12-11T05:03:38.314Z'
- '@arkecosystem/core > @arkecosystem/core-webhooks > sqlite3 > node-pre-gyp > tar > chownr':
diff --git a/.yarnrc b/.yarnrc
deleted file mode 100644
index 4f14322dc8..0000000000
--- a/.yarnrc
+++ /dev/null
@@ -1 +0,0 @@
---ignore-engines true
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000..e0f975c7b6
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,272 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+
+## [2.1.0] - 2019-02-11
+
+### Added
+
+- Added a `milestoneHash` identifier to use for peer banning ([#1837])
+- Added TypeScript declarations for `core-logger` ([#1833])
+- Added TypeScript declarations for `core-logger-winston` ([#1887])
+- Added TypeScript declarations for `core-container` ([#1891])
+- Added TypeScript declarations for `core-database` ([#1901], [#1905])
+- Added TypeScript declarations for `core-transaction-pool` ([#1906])
+- Added TypeScript declarations for `core-blockchain` ([#1943])
+- Added TypeScript declarations for `core-snapshots` ([#1947])
+- Added TypeScript declarations for `core-api` ([#1948])
+- Added TypeScript declarations for `crypto` ([#1917])
+- Added the `core-jest-matchers` package ([#1926])
+- Added the `core-interfaces` package ([#1924])
+- Return the transaction expiration time via API ([#1927])
+- Added the ability to disable the public API cache ([#1930])
+- Return the vote of a wallet via public API ([#2009])
+- Upgrade script for 2.1 ([#1999])
+- Installation script for deb/rpm distros ([#2016])
+- Case specific errors for `crypto` ([#2038])
+
+### Changed
+
+- Migrated from JavaScript to TypeScript ([#1625])
+- Moved the `peers.json` configuration into `core-p2p` ([#1625])
+- Merged `core-transaction-pool-mem` into `core-transaction-pool` ([#1625])
+- Use a faster alternative to derive an estimate ([#1655])
+- Reworked crypto configuration to make it simpler ([#1733])
+- Moved the dynamic fees configuration into `core-transaction-pool` ([#1733])
+- Periodically check for new peers instead of retrying until finding some ([#1738])
+- Adjusted some banning times for peers to make network recovery smoother ([#1730])
+- Simplified configuration by further separating network and core ([#1733])
+- Take the `minFeeBroadcast` value into account for fee statistics ([#1873])
+- Only allow vendor fields for type 0 and 6 transactions ([#1931])
+- Improved the network quorum details and feedback ([#1898])
+- Only return errors when broadcast and pool fees are too low ([#1940])
+- Improved performance of BIP38 ([#1941])
+- Cleaned up the logic of block processing ([#1953])
+- Cleaned up the logic of serialise/deserialise in crypto ([#1969])
+- Replaced all ARK naming with CORE ([#1970])
+- Use system paths for data and configuration ([#1987])
+- Increased the maximum transaction age to 6 hours ([#1996])
+- Replaced progress bars with logging to reduce noise ([#2044])
+- Replaced commander.js with @oclif in `core-debugger-cli` ([#2049])
+- Replaced commander.js with @oclif in `core-snapshots-cli` ([#2050])
+- Replaced commander.js with @oclif in `core-tester-cli` ([#2051])
+- Moved docker files from `docker/*` to `docker/development/*` ([#2053])
+- Moved the genesis blocks from the `core` configuration to the network configuration in `crypto` ([#2052])
+- Separate business-logic from data-layer logic ([#2055])
+
+### Fixed
+
+- Resolved an issue with the `resolveOptions` method that would result in options being resolved for plugins that are not registered in the container ([#1625])
+- Malformed condition for filtering peers ([#1689])
+- Use the correct pagination schema for the v2 public API ([#1717])
+- Ensure that delegate searches can handle undefined values ([#1831])
+- Mark semantically invalid versions as invalid overall ([#1836])
+- Ordering of delegates via public API ([#1731])
+- Handle webhooks that have no conditions ([#1869])
+- Validate the network byte on transactions ([#1853])
+- Use correct schemas for address, public key and username validation in the public API ([#1954])
+- Populate the last block of all delegates ([#1919])
+- Return the transaction forging timestamp instead of signing timestamp ([#1957])
+- Mark cold wallets as not found in the legacy API ([#1955])
+- A malformed condition that resulted in wrong peer lists ([#1939])
+- Properly verify block slot timestamps ([#1985])
+- Return fixed peer states for v1 and v2 API responses ([#2027])
+- Validate IP ranges to detect loopbacks ([#2045])
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-010.md ([#2046])
+- Check if the blockchain state storage is available before performing fork checks ([#2047])
+- Gracefully handle a corrupted cached `peers.json` file ([#2061])
+- Always sort transactions by sequence and the requested field to make API sorting deterministic ([#2058])
+- Disallow multiple registrations for same delegate ([#2080])
+
+### Removed
+
+- Removed the `transactionsFromIds` P2P endpoint ([#1911])
+- Removed the `validator` and `rules` from `@arkecosystem/crypto` ([#2021])
+- Ended support for the legacy multisignatures from the previous LISK fork ([#2057])
+
+## [2.0.19] - 2019-01-31
+
+### Fixed
+
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-009.md
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-010.md
+
+## [2.0.18] - 2019-01-28
+
+### Fixed
+
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-011.md
+
+## [2.0.17] - 2019-01-15
+
+### Fixed
+
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-008.md
+- https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-007.md
+
+## [2.0.16] - 2018-12-17
+
+### Fixed
+
+- Prevent the list of peers from becoming too short. This is related to the nodes running behind a firewall.
+
+Closed security vulnerabilities:
+
+- [CORE-SV-004](https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-004.md)
+- [CORE-SV-003](https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-003.md)
+- [CORE-SV-002](https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-002.md)
+- [CORE-SV-001](https://github.com/ArkEcosystem/security-vulnerabilities/blob/master/core/core-sv-001.md)
+
+## [2.0.15] - 2018-12-11
+
+### Fixed
+
+- Ensure no local peers are enlisted and that the IP of the TCP connection is used ([#1695])
+
+## [2.0.14] - 2018-12-10
+
+### Fixed
+
+- Reset last downloaded block when block is discarded ([#1692])
+
+## [2.0.13] - 2018-12-07
+
+### Fixed
+
+- Ensure safe integer range for block height lookups via API ([#1673])
+
+## [2.0.12] - 2018-12-06
+
+### Fixed
+
+- Perform second-signature checks in the `canApply` logic of multi-signatures ([#1658])
+- return the encoded WIF for BIP38 wallets instead of the encrypted WIF ([#1653])
+
+## [2.0.11] - 2018-12-05
+
+### Added
+
+- Store executed migrations in the database ([#1648])
+
+### Changed
+
+- Increase cache generation timeout and make it configurable ([#1645], [#1646])
+
+## [2.0.1] - 2018-12-05
+
+### Added
+
+- Retrieve blocks via height or ID per public API ([#1626])
+
+### Changed
+
+- Improved performance for block and transaction queries by adding more indices on critical columns ([#1636], [#1638], [#1634])
+
+### Fixed
+
+- Take milestones into account for supply calculations ([#1640])
+- Use the raw transaction data in `acceptChainedBlock` to avoid timestamp mismatches and second signature double spend errors ([#1564])
+- Return the correct peer count for the v2 public API ([#1563])
+
+## [2.0.0] - 2018-12-03
+
+- Initial Release
+
+[unreleased]: https://github.com/ArkEcosystem/core/compare/2.0.19...develop
+[2.1.0]: https://github.com/ArkEcosystem/core/compare/2.0.19...2.1.0
+[2.0.19]: https://github.com/ArkEcosystem/core/compare/2.0.18...2.0.19
+[2.0.18]: https://github.com/ArkEcosystem/core/compare/2.0.17...2.0.18
+[2.0.17]: https://github.com/ArkEcosystem/core/compare/2.0.16...2.0.17
+[2.0.16]: https://github.com/ArkEcosystem/core/compare/2.0.15...2.0.16
+[2.0.15]: https://github.com/ArkEcosystem/core/compare/2.0.14...2.0.15
+[2.0.14]: https://github.com/ArkEcosystem/core/compare/2.0.13...2.0.14
+[2.0.13]: https://github.com/ArkEcosystem/core/compare/2.0.12...2.0.13
+[2.0.12]: https://github.com/ArkEcosystem/core/compare/2.0.11...2.0.12
+[2.0.11]: https://github.com/ArkEcosystem/core/compare/2.0.1...2.0.11
+[2.0.1]: https://github.com/ArkEcosystem/core/compare/2.0.0...2.0.1
+[2.0.0]: https://github.com/ArkEcosystem/core/compare/0.1.1...2.0.0
+[#1563]: https://github.com/ArkEcosystem/core/pull/1563
+[#1564]: https://github.com/ArkEcosystem/core/pull/1564
+[#1625]: https://github.com/ArkEcosystem/core/pull/1625
+[#1626]: https://github.com/ArkEcosystem/core/pull/1626
+[#1634]: https://github.com/ArkEcosystem/core/pull/1634
+[#1636]: https://github.com/ArkEcosystem/core/pull/1636
+[#1638]: https://github.com/ArkEcosystem/core/pull/1638
+[#1638]: https://github.com/ArkEcosystem/core/pull/1638
+[#1640]: https://github.com/ArkEcosystem/core/pull/1640
+[#1645]: https://github.com/ArkEcosystem/core/pull/1645
+[#1646]: https://github.com/ArkEcosystem/core/pull/1646
+[#1648]: https://github.com/ArkEcosystem/core/pull/1648
+[#1653]: https://github.com/ArkEcosystem/core/pull/1653
+[#1655]: https://github.com/ArkEcosystem/core/pull/1655
+[#1658]: https://github.com/ArkEcosystem/core/pull/1658
+[#1673]: https://github.com/ArkEcosystem/core/pull/1673
+[#1689]: https://github.com/ArkEcosystem/core/pull/1689
+[#1692]: https://github.com/ArkEcosystem/core/pull/1692
+[#1695]: https://github.com/ArkEcosystem/core/pull/1695
+[#1717]: https://github.com/ArkEcosystem/core/pull/1717
+[#1730]: https://github.com/ArkEcosystem/core/pull/1730
+[#1731]: https://github.com/ArkEcosystem/core/pull/1731
+[#1732]: https://github.com/ArkEcosystem/core/pull/1732
+[#1733]: https://github.com/ArkEcosystem/core/pull/1733
+[#1738]: https://github.com/ArkEcosystem/core/pull/1738
+[#1831]: https://github.com/ArkEcosystem/core/pull/1831
+[#1833]: https://github.com/ArkEcosystem/core/pull/1833
+[#1836]: https://github.com/ArkEcosystem/core/pull/1836
+[#1837]: https://github.com/ArkEcosystem/core/pull/1837
+[#1853]: https://github.com/ArkEcosystem/core/pull/1853
+[#1869]: https://github.com/ArkEcosystem/core/pull/1869
+[#1873]: https://github.com/ArkEcosystem/core/pull/1873
+[#1887]: https://github.com/ArkEcosystem/core/pull/1887
+[#1891]: https://github.com/ArkEcosystem/core/pull/1891
+[#1898]: https://github.com/ArkEcosystem/core/pull/1898
+[#1901]: https://github.com/ArkEcosystem/core/pull/1901
+[#1905]: https://github.com/ArkEcosystem/core/pull/1905
+[#1906]: https://github.com/ArkEcosystem/core/pull/1906
+[#1911]: https://github.com/ArkEcosystem/core/pull/1911
+[#1917]: https://github.com/ArkEcosystem/core/pull/1917
+[#1919]: https://github.com/ArkEcosystem/core/pull/1919
+[#1924]: https://github.com/ArkEcosystem/core/pull/1924
+[#1926]: https://github.com/ArkEcosystem/core/pull/1926
+[#1927]: https://github.com/ArkEcosystem/core/pull/1927
+[#1930]: https://github.com/ArkEcosystem/core/pull/1930
+[#1931]: https://github.com/ArkEcosystem/core/pull/1931
+[#1939]: https://github.com/ArkEcosystem/core/pull/1939
+[#1940]: https://github.com/ArkEcosystem/core/pull/1940
+[#1941]: https://github.com/ArkEcosystem/core/pull/1941
+[#1943]: https://github.com/ArkEcosystem/core/pull/1943
+[#1947]: https://github.com/ArkEcosystem/core/pull/1947
+[#1948]: https://github.com/ArkEcosystem/core/pull/1948
+[#1953]: https://github.com/ArkEcosystem/core/pull/1953
+[#1954]: https://github.com/ArkEcosystem/core/pull/1954
+[#1955]: https://github.com/ArkEcosystem/core/pull/1955
+[#1957]: https://github.com/ArkEcosystem/core/pull/1957
+[#1969]: https://github.com/ArkEcosystem/core/pull/1969
+[#1970]: https://github.com/ArkEcosystem/core/pull/1970
+[#1985]: https://github.com/ArkEcosystem/core/pull/1985
+[#1987]: https://github.com/ArkEcosystem/core/pull/1987
+[#1996]: https://github.com/ArkEcosystem/core/pull/1996
+[#1999]: https://github.com/ArkEcosystem/core/pull/1999
+[#2009]: https://github.com/ArkEcosystem/core/pull/2009
+[#2016]: https://github.com/ArkEcosystem/core/pull/2016
+[#2021]: https://github.com/ArkEcosystem/core/pull/2021
+[#2038]: https://github.com/ArkEcosystem/core/pull/2038
+[#2044]: https://github.com/ArkEcosystem/core/pull/2044
+[#2045]: https://github.com/ArkEcosystem/core/pull/2045
+[#2046]: https://github.com/ArkEcosystem/core/pull/2046
+[#2047]: https://github.com/ArkEcosystem/core/pull/2047
+[#2049]: https://github.com/ArkEcosystem/core/pull/2049
+[#2050]: https://github.com/ArkEcosystem/core/pull/2050
+[#2051]: https://github.com/ArkEcosystem/core/pull/2051
+[#2052]: https://github.com/ArkEcosystem/core/pull/2052
+[#2053]: https://github.com/ArkEcosystem/core/pull/2053
+[#2055]: https://github.com/ArkEcosystem/core/pull/2055
+[#2057]: https://github.com/ArkEcosystem/core/pull/2057
+[#2058]: https://github.com/ArkEcosystem/core/pull/2058
+[#2061]: https://github.com/ArkEcosystem/core/pull/2061
+[#2080]: https://github.com/ArkEcosystem/core/pull/2080
diff --git a/README.md b/README.md
index 5ba201e18b..2865acf7e2 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,6 @@
-[](https://greenkeeper.io/)
[](https://circleci.com/gh/ArkEcosystem/core)
[](https://codecov.io/gh/arkecosystem/core)
[](https://opensource.org/licenses/MIT)
@@ -15,41 +14,50 @@ This repository contains all plugins that make up the Ark Core.
## Documentation
-- Development : https://docs.ark.io/guidebook/core/development.html
-- Docker : https://docs.ark.io/guidebook/core/docker.html
+- Development : https://docs.ark.io/guidebook/core/development.html
+- Docker : https://docs.ark.io/guidebook/core/docker.html
## API Documentation
-- API v1 : https://docs.ark.io/api/public/v1/
-- API v2 : https://docs.ark.io/api/public/v2/
+- API v1 : https://docs.ark.io/api/public/v1/
+- API v2 : https://docs.ark.io/api/public/v2/
## GitHub Development Bounty
-- Get involved with Ark development and start earning ARK : https://bounty.ark.io
+- Get involved with Ark development and start earning ARK : https://bounty.ark.io
## Core Packages
-| Package | Version | Description |
-| -------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------- |
-| **[core](/packages/core)** | [](https://www.npmjs.com/package/@arkecosystem/core) | **Includes all packages** |
-| [core-api](/packages/core-api) | [](https://www.npmjs.com/package/@arkecosystem/core-api) | Public API |
-| [core-blockchain](/packages/core-blockchain) | [](https://www.npmjs.com/package/@arkecosystem/core-blockchain) | Blockchain Management |
-| [core-config](/packages/core-config) | [](https://www.npmjs.com/package/@arkecosystem/core-config) | Configuration Loader |
-| [core-container](/packages/core-container) | [](https://www.npmjs.com/package/@arkecosystem/core-container) | Container Management |
-| [core-database](/packages/core-database) | [](https://www.npmjs.com/package/@arkecosystem/core-database) | Database Interface |
-| [core-deployer](/packages/core-deployer) | [](https://www.npmjs.com/package/@arkecosystem/core-deployer) | Deployer CLI |
-| [core-event-emitter](/packages/core-event-emitter) | [](https://www.npmjs.com/package/@arkecosystem/core-event-emitter) | Event Manager |
-| [core-forger](/packages/core-forger) | [](https://www.npmjs.com/package/@arkecosystem/core-forger) | Forger Manager |
-| [core-graphql](/packages/core-graphql) | [](https://www.npmjs.com/package/@arkecosystem/core-graphql) | GraphQL Provider |
-| [core-json-rpc](/packages/core-json-rpc) | [](https://www.npmjs.com/package/@arkecosystem/core-json-rpc) | JSON-RPC Server |
-| [core-logger](/packages/core-logger) | [](https://www.npmjs.com/package/@arkecosystem/core-logger) | Logger Manager |
-| [core-logger-winston](/packages/core-logger-winston) | [](https://www.npmjs.com/package/@arkecosystem/core-logger-winston) | Winston Logger Provider |
-| [core-p2p](/packages/core-p2p) | [](https://www.npmjs.com/package/@arkecosystem/core-p2p) | P2P API |
-| [test-utils](/packages/core-test-utils) | [](https://www.npmjs.com/package/@arkecosystem/core-test-utils) | Test Utilities |
-| [tester-cli](/packages/core-tester-cli) | [](https://www.npmjs.com/package/@arkecosystem/core-tester-cli) | Tester CLI |
-| [core-transaction-pool](/packages/core-transaction-pool) | [](https://www.npmjs.com/package/@arkecosystem/core-transaction-pool) | Transaction Pool Interface |
-| [core-webhooks](/packages/core-webhooks) | [](https://www.npmjs.com/package/@arkecosystem/core-webhooks) | Webhooks Manager |
-| [crypto](/packages/crypto) | [](https://www.npmjs.com/package/@arkecosystem/crypto) | Crypto Utilities |
+| Package | Version | Description |
+| ------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------ |
+| **[core](/packages/core)** | [](https://www.npmjs.com/package/@arkecosystem/core) | **Includes all packages** |
+| [core-api](/packages/core-api) | [](https://www.npmjs.com/package/@arkecosystem/core-api) | Public REST API |
+| [core-blockchain](/packages/core-blockchain)                       | [![npm](https://badge.fury.io/js/%40arkecosystem%2Fcore-blockchain.svg)](https://www.npmjs.com/package/@arkecosystem/core-blockchain)                    | Blockchain Management                |
+| [core-container](/packages/core-container)                         | [![npm](https://badge.fury.io/js/%40arkecosystem%2Fcore-container.svg)](https://www.npmjs.com/package/@arkecosystem/core-container)                      | Container Management                 |
+| [core-database](/packages/core-database) | [](https://www.npmjs.com/package/@arkecosystem/core-database) | Database Interface |
+| [core-database-postgres](/packages/core-database-postgres) | [](https://www.npmjs.com/package/@arkecosystem/core-database-postgres) | Database Implementation - PostgreSQL |
+| [core-debugger-cli](/packages/core-debugger-cli) | [](https://www.npmjs.com/package/@arkecosystem/core-debugger-cli) | Debugger CLI _(development only)_ |
+| [core-deployer](/packages/core-deployer) | [](https://www.npmjs.com/package/@arkecosystem/core-deployer) | Deployer CLI |
+| [core-elasticsearch](/packages/core-elasticsearch) | [](https://www.npmjs.com/package/@arkecosystem/core-elasticsearch) | Elasticsearch Server |
+| [core-error-tracker-bugsnag](/packages/core-error-tracker-bugsnag) | [](https://www.npmjs.com/package/@arkecosystem/core-error-tracker-bugsnag) | Error Tracking - Bugsnag |
+| [core-error-tracker-sentry](/packages/core-error-tracker-sentry) | [](https://www.npmjs.com/package/@arkecosystem/core-error-tracker-sentry) | Error Tracking - Sentry |
+| [core-event-emitter](/packages/core-event-emitter) | [](https://www.npmjs.com/package/@arkecosystem/core-event-emitter) | Event Emitter |
+| [core-forger](/packages/core-forger) | [](https://www.npmjs.com/package/@arkecosystem/core-forger) | Forger Manager |
+| [core-graphql](/packages/core-graphql) | [](https://www.npmjs.com/package/@arkecosystem/core-graphql) | GraphQL Server |
+| [core-http-utils](/packages/core-http-utils) | [](https://www.npmjs.com/package/@arkecosystem/core-http-utils) | HTTP Utilities |
+| [core-json-rpc](/packages/core-json-rpc) | [](https://www.npmjs.com/package/@arkecosystem/core-json-rpc) | JSON-RPC Server |
+| [core-logger](/packages/core-logger) | [](https://www.npmjs.com/package/@arkecosystem/core-logger) | Logger Interface |
+| [core-logger-winston](/packages/core-logger-winston) | [](https://www.npmjs.com/package/@arkecosystem/core-logger-winston) | Logger Implementation - Winston |
+| [core-p2p](/packages/core-p2p) | [](https://www.npmjs.com/package/@arkecosystem/core-p2p) | P2P Communication |
+| [core-snapshots](/packages/core-snapshots) | [](https://www.npmjs.com/package/@arkecosystem/core-snapshots) | Snapshot Manager |
+| [core-snapshots-cli](/packages/core-snapshots-cli) | [](https://www.npmjs.com/package/@arkecosystem/core-snapshots-cli) | Snapshot CLI |
+| [core-test-utils](/packages/core-test-utils) | [](https://www.npmjs.com/package/@arkecosystem/core-test-utils) | Test Utilities _(development only)_ |
+| [core-tester-cli](/packages/core-tester-cli)                       | [![npm](https://badge.fury.io/js/%40arkecosystem%2Fcore-tester-cli.svg)](https://www.npmjs.com/package/@arkecosystem/core-tester-cli)                    | Tester CLI _(development only)_      |
+| [core-transaction-pool](/packages/core-transaction-pool) | [](https://www.npmjs.com/package/@arkecosystem/core-transaction-pool) | Transaction Pool |
+| [core-utils](/packages/core-utils) | [](https://www.npmjs.com/package/@arkecosystem/core-utils) | Utilities |
+| [core-vote-report](/packages/core-vote-report) | [](https://www.npmjs.com/package/@arkecosystem/core-vote-report) | Vote Report |
+| [core-webhooks](/packages/core-webhooks) | [](https://www.npmjs.com/package/@arkecosystem/core-webhooks) | Webhook Server |
+| [crypto](/packages/crypto) | [](https://www.npmjs.com/package/@arkecosystem/crypto) | Cryptography |
## Security
@@ -57,11 +65,13 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [François-Xavier Thoorens](https://github.com/fix)
-- [Kristjan Košič](https://github.com/kristjank)
-- [Brian Faust](https://github.com/faustbrian)
-- [Alex Barnsley](https://github.com/alexbarnsley)
-- [All Contributors](../../contributors)
+- [All Contributors](../../contributors)
+- [Alex Barnsley](https://github.com/alexbarnsley)
+- [Brian Faust](https://github.com/faustbrian)
+- [François-Xavier Thoorens](https://github.com/fix)
+- [Joshua Noack](https://github.com/supaiku0)
+- [Kristjan Košič](https://github.com/kristjank)
+- [Vasil Dimov](https://github.com/vasild)
## License
diff --git a/benchmark/block/deserialize/0.js b/benchmark/block/deserialize/0.js
new file mode 100644
index 0000000000..8f9a9c7f50
--- /dev/null
+++ b/benchmark/block/deserialize/0.js
@@ -0,0 +1,9 @@
+const {
+ deserialize
+} = require('./methods')
+
+const data = require('../../helpers').getFixture('block/serialized/no-transactions.txt');
+
+exports['core'] = () => {
+ return deserialize(data);
+};
diff --git a/benchmark/block/deserialize/150.js b/benchmark/block/deserialize/150.js
new file mode 100644
index 0000000000..30a3017ff9
--- /dev/null
+++ b/benchmark/block/deserialize/150.js
@@ -0,0 +1,9 @@
+const {
+ deserialize
+} = require('./methods')
+
+const data = require('../../helpers').getFixture('block/serialized/transactions.txt');
+
+exports['core'] = () => {
+ return deserialize(data);
+};
diff --git a/benchmark/block/deserialize/methods.js b/benchmark/block/deserialize/methods.js
new file mode 100644
index 0000000000..07326048e0
--- /dev/null
+++ b/benchmark/block/deserialize/methods.js
@@ -0,0 +1,5 @@
+const { models } = require('@arkecosystem/crypto')
+
+exports.deserialize = data => {
+ return models.Block.deserialize(data)
+}
diff --git a/benchmark/block/serialize.js b/benchmark/block/serialize.js
new file mode 100644
index 0000000000..7360bfeb4b
--- /dev/null
+++ b/benchmark/block/serialize.js
@@ -0,0 +1,9 @@
+const {
+ models
+} = require('@arkecosystem/crypto')
+
+const data = require('../helpers').getJSONFixture('block/deserialized/no-transactions');
+
+exports['core'] = () => {
+ return models.Block.serialize(data);
+};
diff --git a/benchmark/block/serializeFull.js b/benchmark/block/serializeFull.js
new file mode 100644
index 0000000000..d8e5eba5cf
--- /dev/null
+++ b/benchmark/block/serializeFull.js
@@ -0,0 +1,9 @@
+const {
+ models
+} = require('@arkecosystem/crypto')
+
+const data = require('../helpers').getJSONFixture('block/deserialized/transactions');
+
+exports['core'] = () => {
+ return models.Block.serializeFull(data);
+};
diff --git a/benchmark/fixtures/block/deserialized/no-transactions.json b/benchmark/fixtures/block/deserialized/no-transactions.json
new file mode 100644
index 0000000000..792a633096
--- /dev/null
+++ b/benchmark/fixtures/block/deserialized/no-transactions.json
@@ -0,0 +1,17 @@
+{
+ "version": 0,
+ "timestamp": 58126418,
+ "height": 19,
+ "previousBlockHex": "a099b7651f0e5eb8",
+ "previousBlock": "11572482362445815480",
+ "numberOfTransactions": 150,
+ "totalAmount": "30000000000",
+ "totalFee": "1500000000",
+ "reward": "0",
+ "payloadLength": 4800,
+ "payloadHash": "e5a7e9b5a8a8e2f47f7d8a532e0e9c43d44052dc6c6339ad57246e9a339665e3",
+ "generatorPublicKey": "03a46f2547d20b47003c1c376788db5a54d67264df2ae914f70bf453b6a1fa1b3a",
+ "blockSignature": "304402204e31f1ae02cbcf2bb936e225f9f9db332ac275577b777a389b2d713e48b78c9002203f11c4ee0d30d2e10b2cb4a7fb59569e761571971ffe1be5abaa32fdc42a056b",
+ "idHex": "d9401ad36a03b8b4",
+ "id": "15654541800058894516"
+}
diff --git a/benchmark/fixtures/block/deserialized/transactions.json b/benchmark/fixtures/block/deserialized/transactions.json
new file mode 100644
index 0000000000..fb2c7b9e87
--- /dev/null
+++ b/benchmark/fixtures/block/deserialized/transactions.json
@@ -0,0 +1,4669 @@
+{
+ "version": 0,
+ "timestamp": 58126418,
+ "height": 19,
+ "previousBlockHex": "a099b7651f0e5eb8",
+ "previousBlock": "11572482362445815480",
+ "numberOfTransactions": 150,
+ "totalAmount": "30000000000",
+ "totalFee": "1500000000",
+ "reward": "0",
+ "payloadLength": 4800,
+ "payloadHash": "e5a7e9b5a8a8e2f47f7d8a532e0e9c43d44052dc6c6339ad57246e9a339665e3",
+ "generatorPublicKey": "03a46f2547d20b47003c1c376788db5a54d67264df2ae914f70bf453b6a1fa1b3a",
+ "blockSignature": "304402204e31f1ae02cbcf2bb936e225f9f9db332ac275577b777a389b2d713e48b78c9002203f11c4ee0d30d2e10b2cb4a7fb59569e761571971ffe1be5abaa32fdc42a056b",
+ "transactions": [
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b00000000000000001759e7dc56557733804418f0ea6fd3b2573a9aabdd3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427d",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "expiration": 0,
+ "recipientId": "APyFYXxXtUrvZFnEuwLopfst94GMY5Zkeq",
+ "signature": "3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427d",
+ "vendorField": "Transaction 7",
+ "id": "a4b0eed1247ae168a8e425e020edd26e7bb05260cd0c63e3c3df5d1ce02f63f8"
+ },
+ "verified": false,
+ "id": "a4b0eed1247ae168a8e425e020edd26e7bb05260cd0c63e3c3df5d1ce02f63f8",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APyFYXxXtUrvZFnEuwLopfst94GMY5Zkeq",
+ "type": 0,
+ "vendorField": "Transaction 7",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427d",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20343000c2eb0b00000000000000001779265225b00860251a567d176a8927c8416d27f7304502210090d88b95320b5e0d51eec36a007bfe1f0a95c7b2c4f9ad00e833bc9f2dd4160102203c4f24c4cb1f8faa60ab139a2245c977eb39b4b6e09ea1d575beede498cf0908",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "expiration": 0,
+ "recipientId": "ASpTKHsiVWoqSPqBnkde54c32kNqqETyxk",
+ "signature": "304502210090d88b95320b5e0d51eec36a007bfe1f0a95c7b2c4f9ad00e833bc9f2dd4160102203c4f24c4cb1f8faa60ab139a2245c977eb39b4b6e09ea1d575beede498cf0908",
+ "vendorField": "Transaction 40",
+ "id": "585ed6b426b5359ad219101223aaafdc0ef23695a71ab624ee1db1acd7ba9fd6"
+ },
+ "verified": false,
+ "id": "585ed6b426b5359ad219101223aaafdc0ef23695a71ab624ee1db1acd7ba9fd6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASpTKHsiVWoqSPqBnkde54c32kNqqETyxk",
+ "type": 0,
+ "vendorField": "Transaction 40",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210090d88b95320b5e0d51eec36a007bfe1f0a95c7b2c4f9ad00e833bc9f2dd4160102203c4f24c4cb1f8faa60ab139a2245c977eb39b4b6e09ea1d575beede498cf0908",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b00000000000000001740d8fa2bdb69bd24ff0b861241632fa355be348a3044022014e308901e42c8881964cb4abc5af1122e1be51cf39548fb0461f44cf26394410220484234dd58a91ffffd0439c158c47f08652f1c180148fc41caa62421df962dce",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "expiration": 0,
+ "recipientId": "AMgkpcndp2JHrqQEm3ftnBHrCaN59N4WLh",
+ "signature": "3044022014e308901e42c8881964cb4abc5af1122e1be51cf39548fb0461f44cf26394410220484234dd58a91ffffd0439c158c47f08652f1c180148fc41caa62421df962dce",
+ "vendorField": "Transaction 36",
+ "id": "d1b83f1abdf43adae989c06ff22f6ef0fc96f4a8477ec9c951ddf3d60be6bc69"
+ },
+ "verified": false,
+ "id": "d1b83f1abdf43adae989c06ff22f6ef0fc96f4a8477ec9c951ddf3d60be6bc69",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMgkpcndp2JHrqQEm3ftnBHrCaN59N4WLh",
+ "type": 0,
+ "vendorField": "Transaction 36",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022014e308901e42c8881964cb4abc5af1122e1be51cf39548fb0461f44cf26394410220484234dd58a91ffffd0439c158c47f08652f1c180148fc41caa62421df962dce",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017cce58b0a597c0eb2b10b68178e6bddd32e1e15193045022100c0cd610e0230d66200c15b6b4e1e34806ee17db7ec593b395f0b33f1544c3b8c0220211ce028811af38210c3f4768110df1173c373ed89574e3d0eee7275c9b660ca",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "expiration": 0,
+ "recipientId": "AaTGUZtFw5fkK9yndiSHoJaHYHLRP9NUdS",
+ "signature": "3045022100c0cd610e0230d66200c15b6b4e1e34806ee17db7ec593b395f0b33f1544c3b8c0220211ce028811af38210c3f4768110df1173c373ed89574e3d0eee7275c9b660ca",
+ "vendorField": "Transaction 18",
+ "id": "f393e06b672e11510086cf6a1f889d331cd84b400317652a28c6d98d572f32aa"
+ },
+ "verified": false,
+ "id": "f393e06b672e11510086cf6a1f889d331cd84b400317652a28c6d98d572f32aa",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AaTGUZtFw5fkK9yndiSHoJaHYHLRP9NUdS",
+ "type": 0,
+ "vendorField": "Transaction 18",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c0cd610e0230d66200c15b6b4e1e34806ee17db7ec593b395f0b33f1544c3b8c0220211ce028811af38210c3f4768110df1173c373ed89574e3d0eee7275c9b660ca",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b00000000000000001737cb57f4bf6e9f4e64fe975a2b26d8ad37b5859b304502210083bd4d5ac41a8072672573c2fe008d5a626306b7d5bb3b775e429bd50a3a51a20220422ee1674f478bcca69527ccd5347c711aa04026702eb258658935aeeb451621",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "expiration": 0,
+ "recipientId": "ALrtQcFHYk8SjXNqoQy5vq4CQn9dDZ32HV",
+ "signature": "304502210083bd4d5ac41a8072672573c2fe008d5a626306b7d5bb3b775e429bd50a3a51a20220422ee1674f478bcca69527ccd5347c711aa04026702eb258658935aeeb451621",
+ "vendorField": "Transaction 15",
+ "id": "0d7a15c9805853d25ea37362c03c18fb675fa0f24ec713c0066c477f29b37823"
+ },
+ "verified": false,
+ "id": "0d7a15c9805853d25ea37362c03c18fb675fa0f24ec713c0066c477f29b37823",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALrtQcFHYk8SjXNqoQy5vq4CQn9dDZ32HV",
+ "type": 0,
+ "vendorField": "Transaction 15",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210083bd4d5ac41a8072672573c2fe008d5a626306b7d5bb3b775e429bd50a3a51a20220422ee1674f478bcca69527ccd5347c711aa04026702eb258658935aeeb451621",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b0000000000000000174736fe7ebfdf155ab1eaa5da635391ec140ceff43045022100c0f7ddd725c781c57efcec81aa42872b1791320889b820d8db2da9172df6cc150220560872a2e522fe8851f9642e42551e72ebc8e0bfa0428cb535dc4285a62337d5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "expiration": 0,
+ "recipientId": "ANGRWBD9E58V4pz8XZPhYc6XMQ5TCpWxWv",
+ "signature": "3045022100c0f7ddd725c781c57efcec81aa42872b1791320889b820d8db2da9172df6cc150220560872a2e522fe8851f9642e42551e72ebc8e0bfa0428cb535dc4285a62337d5",
+ "vendorField": "Transaction 1",
+ "id": "67d71cc75ff85938faee0b14a653b060305dcc0f5113fe735f59fcf7bd86a0f5"
+ },
+ "verified": false,
+ "id": "67d71cc75ff85938faee0b14a653b060305dcc0f5113fe735f59fcf7bd86a0f5",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANGRWBD9E58V4pz8XZPhYc6XMQ5TCpWxWv",
+ "type": 0,
+ "vendorField": "Transaction 1",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c0f7ddd725c781c57efcec81aa42872b1791320889b820d8db2da9172df6cc150220560872a2e522fe8851f9642e42551e72ebc8e0bfa0428cb535dc4285a62337d5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b0000000000000000172f5a665ddf88e839f2be0898b0a00b30606988233045022100e768703636589e566a237a52b7ffc47f33a90b43378b72ed8c4d8a253c3beff102205cbd729b8b00e0d27c342e174b136f11152b52b8c243b0f0d16cbfe9408eb700",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "expiration": 0,
+ "recipientId": "AL6FhcBeyDW7dpX4PdRTQvjgLzPKBZa6zf",
+ "signature": "3045022100e768703636589e566a237a52b7ffc47f33a90b43378b72ed8c4d8a253c3beff102205cbd729b8b00e0d27c342e174b136f11152b52b8c243b0f0d16cbfe9408eb700",
+ "vendorField": "Transaction 17",
+ "id": "e5d725d3432d64b69bcb4d54e36939735165833864fc7559ee43387b09610cb9"
+ },
+ "verified": false,
+ "id": "e5d725d3432d64b69bcb4d54e36939735165833864fc7559ee43387b09610cb9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AL6FhcBeyDW7dpX4PdRTQvjgLzPKBZa6zf",
+ "type": 0,
+ "vendorField": "Transaction 17",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e768703636589e566a237a52b7ffc47f33a90b43378b72ed8c4d8a253c3beff102205cbd729b8b00e0d27c342e174b136f11152b52b8c243b0f0d16cbfe9408eb700",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017b384f7340916f6343bb04f6839faaa3743c2f28f304402206f203df1a332df9029eb362d75af589f391f301330ac4f9ac9311f4c46e26ac602201fc0ac2e97b2ed60737644f1906fb7265815db6a2013c3df674e8274a8352f0e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "expiration": 0,
+ "recipientId": "AY95tT5ZJruhxS2nozU3tJG2YgJTqHSDiC",
+ "signature": "304402206f203df1a332df9029eb362d75af589f391f301330ac4f9ac9311f4c46e26ac602201fc0ac2e97b2ed60737644f1906fb7265815db6a2013c3df674e8274a8352f0e",
+ "vendorField": "Transaction 13",
+ "id": "903e3476ce07fa27d0d2499c85d162f61d396b2c02413d97ee7a0c1ef0b95bb6"
+ },
+ "verified": false,
+ "id": "903e3476ce07fa27d0d2499c85d162f61d396b2c02413d97ee7a0c1ef0b95bb6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AY95tT5ZJruhxS2nozU3tJG2YgJTqHSDiC",
+ "type": 0,
+ "vendorField": "Transaction 13",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206f203df1a332df9029eb362d75af589f391f301330ac4f9ac9311f4c46e26ac602201fc0ac2e97b2ed60737644f1906fb7265815db6a2013c3df674e8274a8352f0e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b0000000000000000178c86c220a027fb8e55429eaa91c92fbe9fc978aa3044022061dd2066ec8fed52d6992b91f724fc4b10d5ee772d2bd2e7d980501d773c0c1a0220189932d1ea738fd54227ec0019ddd7446b216cb0d839f716a7899521d47ca15c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "expiration": 0,
+ "recipientId": "AUaugXUAx2hskhLAQ8kFkYxKTr3r8Gk1yu",
+ "signature": "3044022061dd2066ec8fed52d6992b91f724fc4b10d5ee772d2bd2e7d980501d773c0c1a0220189932d1ea738fd54227ec0019ddd7446b216cb0d839f716a7899521d47ca15c",
+ "vendorField": "Transaction 9",
+ "id": "f3aa47c5af1f5346f48579981b4264b920415c2b7671bf930167fc2f29aeb9c6"
+ },
+ "verified": false,
+ "id": "f3aa47c5af1f5346f48579981b4264b920415c2b7671bf930167fc2f29aeb9c6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AUaugXUAx2hskhLAQ8kFkYxKTr3r8Gk1yu",
+ "type": 0,
+ "vendorField": "Transaction 9",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022061dd2066ec8fed52d6992b91f724fc4b10d5ee772d2bd2e7d980501d773c0c1a0220189932d1ea738fd54227ec0019ddd7446b216cb0d839f716a7899521d47ca15c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b0000000000000000170e52656c664a3401457644d379eccf63219051ee304502210090364cc31769fe896d265797f1048147f374ce55d5b4544403459e95f66567cb0220246454cdf2961c4246b00a70f0c9e5f853f5e3ba34ada1f45ba5b40ad86a5c8e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "expiration": 0,
+ "recipientId": "AH5bpAtRB7yKYG31biiDCJVAQF5qwRyQfB",
+ "signature": "304502210090364cc31769fe896d265797f1048147f374ce55d5b4544403459e95f66567cb0220246454cdf2961c4246b00a70f0c9e5f853f5e3ba34ada1f45ba5b40ad86a5c8e",
+ "vendorField": "Transaction 26",
+ "id": "c32e5c1337ffde88d46b9e2403f3b2a647c40a4027610b3670525c54dc5e1497"
+ },
+ "verified": false,
+ "id": "c32e5c1337ffde88d46b9e2403f3b2a647c40a4027610b3670525c54dc5e1497",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AH5bpAtRB7yKYG31biiDCJVAQF5qwRyQfB",
+ "type": 0,
+ "vendorField": "Transaction 26",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210090364cc31769fe896d265797f1048147f374ce55d5b4544403459e95f66567cb0220246454cdf2961c4246b00a70f0c9e5f853f5e3ba34ada1f45ba5b40ad86a5c8e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017c277b97583e4e3709292528d50a988631d73bb27304402201955362c3285d9452ff7a212fdb963cc80346e1d1ec883cffba07e7fa53929d7022013cc3982a6c3fa482e1545b5a856b4589f2212782c7aa4efde6863d092971187",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "expiration": 0,
+ "recipientId": "AZW8AKAaGESzJCLVcVLaExR1ZXBNfsyJcK",
+ "signature": "304402201955362c3285d9452ff7a212fdb963cc80346e1d1ec883cffba07e7fa53929d7022013cc3982a6c3fa482e1545b5a856b4589f2212782c7aa4efde6863d092971187",
+ "vendorField": "Transaction 2",
+ "id": "9f4c4b1b020917cfe97bca0936d15f09c189a955c4072d4265a8cd7559c46000"
+ },
+ "verified": false,
+ "id": "9f4c4b1b020917cfe97bca0936d15f09c189a955c4072d4265a8cd7559c46000",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AZW8AKAaGESzJCLVcVLaExR1ZXBNfsyJcK",
+ "type": 0,
+ "vendorField": "Transaction 2",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402201955362c3285d9452ff7a212fdb963cc80346e1d1ec883cffba07e7fa53929d7022013cc3982a6c3fa482e1545b5a856b4589f2212782c7aa4efde6863d092971187",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b0000000000000000170a953805b93337eac6e84e7546ce431ffa11e071304502210089b1238ad440434ddadd661bbe1c9545990f9d7e18625ea6d1501608cb408168022002bd4e6022f29d967fa28b3030ad6f5a50d3d82b8a2d0f851b6c1d266745deef",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "expiration": 0,
+ "recipientId": "AGjqAKhY362t93gYQH5MxM7PQjyhEFxHzE",
+ "signature": "304502210089b1238ad440434ddadd661bbe1c9545990f9d7e18625ea6d1501608cb408168022002bd4e6022f29d967fa28b3030ad6f5a50d3d82b8a2d0f851b6c1d266745deef",
+ "vendorField": "Transaction 19",
+ "id": "0511ec2b530191df929f71f27b46d21463d3b3df9027acc384f440d8d82da4a6"
+ },
+ "verified": false,
+ "id": "0511ec2b530191df929f71f27b46d21463d3b3df9027acc384f440d8d82da4a6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGjqAKhY362t93gYQH5MxM7PQjyhEFxHzE",
+ "type": 0,
+ "vendorField": "Transaction 19",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210089b1238ad440434ddadd661bbe1c9545990f9d7e18625ea6d1501608cb408168022002bd4e6022f29d967fa28b3030ad6f5a50d3d82b8a2d0f851b6c1d266745deef",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000176b7f10a49c1e137a728a6952f0b5236b841d677d30440220779694b27ec4fc1861b071708caaa0f57975f1335e9cacd71d4e7880cd4b1fcb022006badd77518de6eca6d7149c240ca7d028063552f4653db46794e818b21e047a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "expiration": 0,
+ "recipientId": "ARaGAezxE446zDfh5HnXpViRXHd3kpzy1L",
+ "signature": "30440220779694b27ec4fc1861b071708caaa0f57975f1335e9cacd71d4e7880cd4b1fcb022006badd77518de6eca6d7149c240ca7d028063552f4653db46794e818b21e047a",
+ "vendorField": "Transaction 28",
+ "id": "49cd6e3eb00b7780710d58d6d653f3fabec38f8e91e1240c81e56110cb1bdd4d"
+ },
+ "verified": false,
+ "id": "49cd6e3eb00b7780710d58d6d653f3fabec38f8e91e1240c81e56110cb1bdd4d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ARaGAezxE446zDfh5HnXpViRXHd3kpzy1L",
+ "type": 0,
+ "vendorField": "Transaction 28",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220779694b27ec4fc1861b071708caaa0f57975f1335e9cacd71d4e7880cd4b1fcb022006badd77518de6eca6d7149c240ca7d028063552f4653db46794e818b21e047a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017fdebc4b239afc2e6903ed93105125aa256416a3130440220405044882c636f9cd266ba6dab3fb1044b7225e6c8c31823b1c07f643dc2246c02202bc752c7c077310a1a9b07431b32f6333492b6cd18f0eabdd6250fe827e8da74",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "expiration": 0,
+ "recipientId": "AevV3W8yKoC5ajqRG9PvPc6Ugg82Ne9kmV",
+ "signature": "30440220405044882c636f9cd266ba6dab3fb1044b7225e6c8c31823b1c07f643dc2246c02202bc752c7c077310a1a9b07431b32f6333492b6cd18f0eabdd6250fe827e8da74",
+ "vendorField": "Transaction 21",
+ "id": "6d1488d7c5439e10553cf48e1a3cb2be849cd2ab9ba7b482bb792ed6cf5618c5"
+ },
+ "verified": false,
+ "id": "6d1488d7c5439e10553cf48e1a3cb2be849cd2ab9ba7b482bb792ed6cf5618c5",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AevV3W8yKoC5ajqRG9PvPc6Ugg82Ne9kmV",
+ "type": 0,
+ "vendorField": "Transaction 21",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220405044882c636f9cd266ba6dab3fb1044b7225e6c8c31823b1c07f643dc2246c02202bc752c7c077310a1a9b07431b32f6333492b6cd18f0eabdd6250fe827e8da74",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b000000000000000017d5da08ba4319905bc4d2052d603e0ca776b73fb73045022100aceb5f53684a8c9c8339132f30369a82677cee0ed4485421a70a34016ac61a8d0220499ddca0d7a6180452c25f109f737a8d982674f0dd3f2d42a191c5790dfbf34e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "expiration": 0,
+ "recipientId": "AbGcmde2Xgar2sK9hJNNytmWRn4f7CixxX",
+ "signature": "3045022100aceb5f53684a8c9c8339132f30369a82677cee0ed4485421a70a34016ac61a8d0220499ddca0d7a6180452c25f109f737a8d982674f0dd3f2d42a191c5790dfbf34e",
+ "vendorField": "Transaction 3",
+ "id": "79154b4d3b3b64fb9506dda7eb5010b96c0ad531efaaf18f208abeeb70a9179e"
+ },
+ "verified": false,
+ "id": "79154b4d3b3b64fb9506dda7eb5010b96c0ad531efaaf18f208abeeb70a9179e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AbGcmde2Xgar2sK9hJNNytmWRn4f7CixxX",
+ "type": 0,
+ "vendorField": "Transaction 3",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100aceb5f53684a8c9c8339132f30369a82677cee0ed4485421a70a34016ac61a8d0220499ddca0d7a6180452c25f109f737a8d982674f0dd3f2d42a191c5790dfbf34e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b000000000000000017594547725060beb91e991aceb0cceeec84d4b7953045022100f966cc6d0d00932284e8088557cc113378847236cb22151b8bbd10bd9acfaa7602203bd4e93dcd72343a9dfe60b36d7e0deaaaffe7676ef6083e839cd2451aac7f36",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "expiration": 0,
+ "recipientId": "APutnAcu2uWwrTxR5JhY8FKYwFJvU14hLs",
+ "signature": "3045022100f966cc6d0d00932284e8088557cc113378847236cb22151b8bbd10bd9acfaa7602203bd4e93dcd72343a9dfe60b36d7e0deaaaffe7676ef6083e839cd2451aac7f36",
+ "vendorField": "Transaction 23",
+ "id": "52a06db107d480e222273d1c8430e711e4fbeec55524d463a22b41dd5b61dac4"
+ },
+ "verified": false,
+ "id": "52a06db107d480e222273d1c8430e711e4fbeec55524d463a22b41dd5b61dac4",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APutnAcu2uWwrTxR5JhY8FKYwFJvU14hLs",
+ "type": 0,
+ "vendorField": "Transaction 23",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f966cc6d0d00932284e8088557cc113378847236cb22151b8bbd10bd9acfaa7602203bd4e93dcd72343a9dfe60b36d7e0deaaaffe7676ef6083e839cd2451aac7f36",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b00000000000000001762ca3bd2c81b0ee0730b2edfb63dafc7510ffd703045022100cfdefde6923cd0c64d2048d6859dde6ec75b335883043cd547153df64d83e80c0220341247ce11b798fb90cc0a3f49e2413e826d8b020a0dbabfbf7104b8d4829b16",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "expiration": 0,
+ "recipientId": "AQnE8juEecusbwxDEqo1f7rdJo8bSsCTuU",
+ "signature": "3045022100cfdefde6923cd0c64d2048d6859dde6ec75b335883043cd547153df64d83e80c0220341247ce11b798fb90cc0a3f49e2413e826d8b020a0dbabfbf7104b8d4829b16",
+ "vendorField": "Transaction 32",
+ "id": "7426f4de247c99f1c86b50b562e81ebebe428df61a6be3082930a7dae741e6cf"
+ },
+ "verified": false,
+ "id": "7426f4de247c99f1c86b50b562e81ebebe428df61a6be3082930a7dae741e6cf",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AQnE8juEecusbwxDEqo1f7rdJo8bSsCTuU",
+ "type": 0,
+ "vendorField": "Transaction 32",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100cfdefde6923cd0c64d2048d6859dde6ec75b335883043cd547153df64d83e80c0220341247ce11b798fb90cc0a3f49e2413e826d8b020a0dbabfbf7104b8d4829b16",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b0000000000000000175099ae927c19d292911fcdc5427e5ffbe3fb104030440220177594e9b7d1966081acb422076bf4a585ff2e715667b048d74f63ca7c7ff8e502203b2505cec5776e092bcf6a56bab9fa8198d607f554f7794da9d51aa75282bdf2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "expiration": 0,
+ "recipientId": "AP83orNCj2iB7RnDFWqmcB5MtURZR1NgDQ",
+ "signature": "30440220177594e9b7d1966081acb422076bf4a585ff2e715667b048d74f63ca7c7ff8e502203b2505cec5776e092bcf6a56bab9fa8198d607f554f7794da9d51aa75282bdf2",
+ "vendorField": "Transaction 9",
+ "id": "8f6a9db646930fcd84ce680c0b557c981e69c9b3b130316a4d689e69c17cd19e"
+ },
+ "verified": false,
+ "id": "8f6a9db646930fcd84ce680c0b557c981e69c9b3b130316a4d689e69c17cd19e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AP83orNCj2iB7RnDFWqmcB5MtURZR1NgDQ",
+ "type": 0,
+ "vendorField": "Transaction 9",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220177594e9b7d1966081acb422076bf4a585ff2e715667b048d74f63ca7c7ff8e502203b2505cec5776e092bcf6a56bab9fa8198d607f554f7794da9d51aa75282bdf2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b0000000000000000174a0005e4612043b8a2f0b8d6a4fddf19a4c5a4103044022051cd54f7d947ef4f7f673ae2af5b6410c109a9fe788cd99c0273d0effdf6109702206705b4362c7fdbc57b10afac5e98ad274686de1d30ac3859d6cd73241e3c1851",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "expiration": 0,
+ "recipientId": "ANX9gHwmuvnvx8n8HTor42bB498xyiW6iY",
+ "signature": "3044022051cd54f7d947ef4f7f673ae2af5b6410c109a9fe788cd99c0273d0effdf6109702206705b4362c7fdbc57b10afac5e98ad274686de1d30ac3859d6cd73241e3c1851",
+ "vendorField": "Transaction 20",
+ "id": "3c6bfe5f3129853c71a8c3d4783c57962ee0e4a5dd89a5373d66d24dae54ac51"
+ },
+ "verified": false,
+ "id": "3c6bfe5f3129853c71a8c3d4783c57962ee0e4a5dd89a5373d66d24dae54ac51",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANX9gHwmuvnvx8n8HTor42bB498xyiW6iY",
+ "type": 0,
+ "vendorField": "Transaction 20",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022051cd54f7d947ef4f7f673ae2af5b6410c109a9fe788cd99c0273d0effdf6109702206705b4362c7fdbc57b10afac5e98ad274686de1d30ac3859d6cd73241e3c1851",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000177c6239437aa46aa5ef2075666ca4297eb9f8410f30440220185d5b31db25b2feb61564f36d57262e2c86826dd71d0100f8fe7e208aecfa2002200c38836a96b603cef11d506ae4fd6604b364197a342dd3ba6bf4944d1b1a7985",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "expiration": 0,
+ "recipientId": "AT7Z6zBK89EBgdy1YbokYP9L7qwrUSbKsa",
+ "signature": "30440220185d5b31db25b2feb61564f36d57262e2c86826dd71d0100f8fe7e208aecfa2002200c38836a96b603cef11d506ae4fd6604b364197a342dd3ba6bf4944d1b1a7985",
+ "vendorField": "Transaction 11",
+ "id": "d18814a5a84ae0a71f41f23178bba5945ccec1f0a9f5962b2367984d3e112d5f"
+ },
+ "verified": false,
+ "id": "d18814a5a84ae0a71f41f23178bba5945ccec1f0a9f5962b2367984d3e112d5f",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AT7Z6zBK89EBgdy1YbokYP9L7qwrUSbKsa",
+ "type": 0,
+ "vendorField": "Transaction 11",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220185d5b31db25b2feb61564f36d57262e2c86826dd71d0100f8fe7e208aecfa2002200c38836a96b603cef11d506ae4fd6604b364197a342dd3ba6bf4944d1b1a7985",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b0000000000000000177fe700020debbafcaf8f8c1a862289916a11fb693045022100b47e7694ac15badd2f31e1cdac3be26dc454902a0855d8c989246c31126631c5022049691f87c1c178db989a6d8c1f42e24ba4ae1e767163378afb9a028112c37500",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "expiration": 0,
+ "recipientId": "ATSAC2NA4ZHGrUjsg5swPZzKYPifeAuJK8",
+ "signature": "3045022100b47e7694ac15badd2f31e1cdac3be26dc454902a0855d8c989246c31126631c5022049691f87c1c178db989a6d8c1f42e24ba4ae1e767163378afb9a028112c37500",
+ "vendorField": "Transaction 22",
+ "id": "81b6af8f3051eb134c99f77eb53ce2df26d6af1de6b3153ab9ae6dab1443dc84"
+ },
+ "verified": false,
+ "id": "81b6af8f3051eb134c99f77eb53ce2df26d6af1de6b3153ab9ae6dab1443dc84",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATSAC2NA4ZHGrUjsg5swPZzKYPifeAuJK8",
+ "type": 0,
+ "vendorField": "Transaction 22",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b47e7694ac15badd2f31e1cdac3be26dc454902a0855d8c989246c31126631c5022049691f87c1c178db989a6d8c1f42e24ba4ae1e767163378afb9a028112c37500",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b0000000000000000174c2241fe6d8f9d5d11fad9462c878784095518df304402205f405a4cd637263fe0a168b9ceaa848c96db344ecd7b16ef0263e2db7d9c3020022063978a6a0b77c796a279e404d8d736e99810205517f797e5787f10db7ed469d9",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "expiration": 0,
+ "recipientId": "ANiS3MGiAka88wftSzEWpwqEaKd3u7EPm8",
+ "signature": "304402205f405a4cd637263fe0a168b9ceaa848c96db344ecd7b16ef0263e2db7d9c3020022063978a6a0b77c796a279e404d8d736e99810205517f797e5787f10db7ed469d9",
+ "vendorField": "Transaction 16",
+ "id": "75f000a3d5e31b1450da46631f8d6007fe30c93c8d2333ceec06003f7141aa4e"
+ },
+ "verified": false,
+ "id": "75f000a3d5e31b1450da46631f8d6007fe30c93c8d2333ceec06003f7141aa4e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANiS3MGiAka88wftSzEWpwqEaKd3u7EPm8",
+ "type": 0,
+ "vendorField": "Transaction 16",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402205f405a4cd637263fe0a168b9ceaa848c96db344ecd7b16ef0263e2db7d9c3020022063978a6a0b77c796a279e404d8d736e99810205517f797e5787f10db7ed469d9",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b00000000000000001711beac93f240795e4b480aebc0103bee86580ba33045022100b9c5d38b2eb6a7cd625ca8d4d9f9856a95516c9eb0971bb77ce71e6f2c3955b5022029d762f117a27b4ec61abd6b9f929e4b95bff88b687a8fbcd424d51a958bb7aa",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "expiration": 0,
+ "recipientId": "AHPhZ2icNbffxDy2WMSPA4iM3TK19ZWcn3",
+ "signature": "3045022100b9c5d38b2eb6a7cd625ca8d4d9f9856a95516c9eb0971bb77ce71e6f2c3955b5022029d762f117a27b4ec61abd6b9f929e4b95bff88b687a8fbcd424d51a958bb7aa",
+ "vendorField": "Transaction 35",
+ "id": "5562cee9bffef6452f6e1a610ce772ed3f2fff0315bc56874eb63c635e45fed2"
+ },
+ "verified": false,
+ "id": "5562cee9bffef6452f6e1a610ce772ed3f2fff0315bc56874eb63c635e45fed2",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AHPhZ2icNbffxDy2WMSPA4iM3TK19ZWcn3",
+ "type": 0,
+ "vendorField": "Transaction 35",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b9c5d38b2eb6a7cd625ca8d4d9f9856a95516c9eb0971bb77ce71e6f2c3955b5022029d762f117a27b4ec61abd6b9f929e4b95bff88b687a8fbcd424d51a958bb7aa",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017a4fda16aad9000bd4fadbecca4d3a0a2deb0103d3044022070700d8de3ecac8750cf3eb624972cb1b9395eb9cc06871c59392ffad2b879b80220161eaa96bb92ce95c345981866ca67dedd1df04a137e27b7cbbed5b236293539",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "expiration": 0,
+ "recipientId": "AWpGJWwKfyyEFL2mNNvSM8QiayjEvF6vrh",
+ "signature": "3044022070700d8de3ecac8750cf3eb624972cb1b9395eb9cc06871c59392ffad2b879b80220161eaa96bb92ce95c345981866ca67dedd1df04a137e27b7cbbed5b236293539",
+ "vendorField": "Transaction 30",
+ "id": "f5c2d1ad717255da737818b91dfb4c72096b8680948b2a1494afd7812e4dfe36"
+ },
+ "verified": false,
+ "id": "f5c2d1ad717255da737818b91dfb4c72096b8680948b2a1494afd7812e4dfe36",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AWpGJWwKfyyEFL2mNNvSM8QiayjEvF6vrh",
+ "type": 0,
+ "vendorField": "Transaction 30",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022070700d8de3ecac8750cf3eb624972cb1b9395eb9cc06871c59392ffad2b879b80220161eaa96bb92ce95c345981866ca67dedd1df04a137e27b7cbbed5b236293539",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017840121097caceef4c5a46fbe679ba6ecbc3bc05e30450221009b1ac5b9b02b87a1cec17e945e70ec1c1940b6d48d09679029dbef496f8d031c0220567914580441466a1339d92d99cf0893234ed6b5e7b9f00407e6e3bd972051b7",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "expiration": 0,
+ "recipientId": "ATorCGV5D9XdjDLLrgaNiJeTTnZ8mQ5da4",
+ "signature": "30450221009b1ac5b9b02b87a1cec17e945e70ec1c1940b6d48d09679029dbef496f8d031c0220567914580441466a1339d92d99cf0893234ed6b5e7b9f00407e6e3bd972051b7",
+ "vendorField": "Transaction 1",
+ "id": "a1bc69dd7a02161360d038d1f3ce6de02cabcb883d5d453251f35ee9c68e7542"
+ },
+ "verified": false,
+ "id": "a1bc69dd7a02161360d038d1f3ce6de02cabcb883d5d453251f35ee9c68e7542",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATorCGV5D9XdjDLLrgaNiJeTTnZ8mQ5da4",
+ "type": 0,
+ "vendorField": "Transaction 1",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30450221009b1ac5b9b02b87a1cec17e945e70ec1c1940b6d48d09679029dbef496f8d031c0220567914580441466a1339d92d99cf0893234ed6b5e7b9f00407e6e3bd972051b7",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017822df13e7b00e0f671a7f120a8f78bd78eb31adf3044022059f6d5aa1a5e59b9d5b378ed40ab3d109f175d97c8c2cc1e0fa98ff45ca93ef50220056f0a2fe0c3b6fafdb3eae2c49ae05ac6cb62a3b6b2571dd45d1774b90bf384",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "expiration": 0,
+ "recipientId": "ATeCXZrxPo1YzX7vexsDH86wHvuyRngphs",
+ "signature": "3044022059f6d5aa1a5e59b9d5b378ed40ab3d109f175d97c8c2cc1e0fa98ff45ca93ef50220056f0a2fe0c3b6fafdb3eae2c49ae05ac6cb62a3b6b2571dd45d1774b90bf384",
+ "vendorField": "Transaction 19",
+ "id": "c26bd597e820d7abfb5d8218f1738b82ec71f05f1eb50a3b66fc7cac8c90514f"
+ },
+ "verified": false,
+ "id": "c26bd597e820d7abfb5d8218f1738b82ec71f05f1eb50a3b66fc7cac8c90514f",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATeCXZrxPo1YzX7vexsDH86wHvuyRngphs",
+ "type": 0,
+ "vendorField": "Transaction 19",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022059f6d5aa1a5e59b9d5b378ed40ab3d109f175d97c8c2cc1e0fa98ff45ca93ef50220056f0a2fe0c3b6fafdb3eae2c49ae05ac6cb62a3b6b2571dd45d1774b90bf384",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b000000000000000017efad13da9347cbbc96683505e808c8ed14d46dc230440220538781f8c1a5587feda6b1333b86d979360a9f53cf93b29230c13416dad81634022019daed2374f9c6d767635413bba689ee3ca28b308c600f86d1a5fde4080164c7",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "expiration": 0,
+ "recipientId": "AddAUz62XrVRBUztnE5Z4cka99Azkdhz9u",
+ "signature": "30440220538781f8c1a5587feda6b1333b86d979360a9f53cf93b29230c13416dad81634022019daed2374f9c6d767635413bba689ee3ca28b308c600f86d1a5fde4080164c7",
+ "vendorField": "Transaction 22",
+ "id": "56f375bcd1d8f7c28574c571dce9604ddd18e34ff2b84eaefb61a84a972565d6"
+ },
+ "verified": false,
+ "id": "56f375bcd1d8f7c28574c571dce9604ddd18e34ff2b84eaefb61a84a972565d6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AddAUz62XrVRBUztnE5Z4cka99Azkdhz9u",
+ "type": 0,
+ "vendorField": "Transaction 22",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220538781f8c1a5587feda6b1333b86d979360a9f53cf93b29230c13416dad81634022019daed2374f9c6d767635413bba689ee3ca28b308c600f86d1a5fde4080164c7",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b0000000000000000171217f283de4bcf1b28e1e05c5320373f9cdc195630450221008fe76370d9968412848282580eb9a8c253d027fc4e610673857c15271246774b022003c249b93d51d344c2c92435c6adf782dff83da7a38b37445c7e0f007fdea786",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "expiration": 0,
+ "recipientId": "AHRYVqhuLo2SwR6YymAMAjQVEBDYDapRKV",
+ "signature": "30450221008fe76370d9968412848282580eb9a8c253d027fc4e610673857c15271246774b022003c249b93d51d344c2c92435c6adf782dff83da7a38b37445c7e0f007fdea786",
+ "vendorField": "Transaction 6",
+ "id": "09dc531495fcc3fc6cbcb2bebeb1e683026a030b6a74bb33f12edd84b78b8dbd"
+ },
+ "verified": false,
+ "id": "09dc531495fcc3fc6cbcb2bebeb1e683026a030b6a74bb33f12edd84b78b8dbd",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AHRYVqhuLo2SwR6YymAMAjQVEBDYDapRKV",
+ "type": 0,
+ "vendorField": "Transaction 6",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30450221008fe76370d9968412848282580eb9a8c253d027fc4e610673857c15271246774b022003c249b93d51d344c2c92435c6adf782dff83da7a38b37445c7e0f007fdea786",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b0000000000000000179a9e5009b0145145d954df555acf50b36b0469443045022100b3861e0c3333c7bef777d9b3342749cabeb006d898c028c39cc331f17d8e379702207eebde356b9f3f5dc5c2f9db0f46fb1fd2e350ec796d023a065c8dd23faf4bb1",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "expiration": 0,
+ "recipientId": "AVsRMo7Q1CH3ni1miN8bcxNEfD78PDqsaA",
+ "signature": "3045022100b3861e0c3333c7bef777d9b3342749cabeb006d898c028c39cc331f17d8e379702207eebde356b9f3f5dc5c2f9db0f46fb1fd2e350ec796d023a065c8dd23faf4bb1",
+ "vendorField": "Transaction 31",
+ "id": "9baff7e39223d0b95ae87569398eb4237ba6d8132ac29d2ae610358945dd74ec"
+ },
+ "verified": false,
+ "id": "9baff7e39223d0b95ae87569398eb4237ba6d8132ac29d2ae610358945dd74ec",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AVsRMo7Q1CH3ni1miN8bcxNEfD78PDqsaA",
+ "type": 0,
+ "vendorField": "Transaction 31",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b3861e0c3333c7bef777d9b3342749cabeb006d898c028c39cc331f17d8e379702207eebde356b9f3f5dc5c2f9db0f46fb1fd2e350ec796d023a065c8dd23faf4bb1",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017305bcd10e47d011cc8a30c8680a09bb192253ff23045022100cf711a506ab8e2abf5bf7323c5e5ac51776fc7778845dc7f9686286d7044f20302200bc0fc09b71567a83447983623f45266a68d8a6428dd3d18acc82a042945251f",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "expiration": 0,
+ "recipientId": "ALBa49tysfKQQuxUTY7CYGMGSqSznWJxE8",
+ "signature": "3045022100cf711a506ab8e2abf5bf7323c5e5ac51776fc7778845dc7f9686286d7044f20302200bc0fc09b71567a83447983623f45266a68d8a6428dd3d18acc82a042945251f",
+ "vendorField": "Transaction 10",
+ "id": "46e2f9f86f01ebeb373fd1b73a14f58ef2a067e279fe9886c4b2702b5bdb61eb"
+ },
+ "verified": false,
+ "id": "46e2f9f86f01ebeb373fd1b73a14f58ef2a067e279fe9886c4b2702b5bdb61eb",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALBa49tysfKQQuxUTY7CYGMGSqSznWJxE8",
+ "type": 0,
+ "vendorField": "Transaction 10",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100cf711a506ab8e2abf5bf7323c5e5ac51776fc7778845dc7f9686286d7044f20302200bc0fc09b71567a83447983623f45266a68d8a6428dd3d18acc82a042945251f",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b00000000000000001747ba8f15f9b7d8f3a4f7d450d5544b985692110c3045022100c1657829e9bf119301eb33228787b438aed6f3f33b61e3c174080dacfb9990ad022022759aaa39ee30254c272d8015b4fb33fd466e830f9c484e530f4169acc17b85",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "expiration": 0,
+ "recipientId": "ANK97TdSNRZ1jypr8s3NWZs4JsnqWTpiPi",
+ "signature": "3045022100c1657829e9bf119301eb33228787b438aed6f3f33b61e3c174080dacfb9990ad022022759aaa39ee30254c272d8015b4fb33fd466e830f9c484e530f4169acc17b85",
+ "vendorField": "Transaction 29",
+ "id": "c76840b93e2628d8660d39eeb512c19d814730b540bd8d94bda82c71d5c7703a"
+ },
+ "verified": false,
+ "id": "c76840b93e2628d8660d39eeb512c19d814730b540bd8d94bda82c71d5c7703a",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANK97TdSNRZ1jypr8s3NWZs4JsnqWTpiPi",
+ "type": 0,
+ "vendorField": "Transaction 29",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c1657829e9bf119301eb33228787b438aed6f3f33b61e3c174080dacfb9990ad022022759aaa39ee30254c272d8015b4fb33fd466e830f9c484e530f4169acc17b85",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b00000000000000001779e55b60c0e72d3d785bebbd33df99d6bea7af843044022052a46f491850b93b1520071b6f7b6525b97b604fa7e93f8172eeaf30baad247802203ceb22389721211041ccaaff83c9d0279c81e25afe632bb2e254f35e521cb94e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "expiration": 0,
+ "recipientId": "AStQAi7nWidJxZRUUGtLqUPJMu6KsmDVXk",
+ "signature": "3044022052a46f491850b93b1520071b6f7b6525b97b604fa7e93f8172eeaf30baad247802203ceb22389721211041ccaaff83c9d0279c81e25afe632bb2e254f35e521cb94e",
+ "vendorField": "Transaction 3",
+ "id": "e21215785d7334a5943fbfb18515dbf84968a69c8788d62eb4f50441faddcfef"
+ },
+ "verified": false,
+ "id": "e21215785d7334a5943fbfb18515dbf84968a69c8788d62eb4f50441faddcfef",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AStQAi7nWidJxZRUUGtLqUPJMu6KsmDVXk",
+ "type": 0,
+ "vendorField": "Transaction 3",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022052a46f491850b93b1520071b6f7b6525b97b604fa7e93f8172eeaf30baad247802203ceb22389721211041ccaaff83c9d0279c81e25afe632bb2e254f35e521cb94e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017cf09b439324369cd49096fd2746367081689895c3045022100b7e1e1bc99c5d3144bce62785d82189232bd332afb98732b51f80747dcd2701302207caf6782b25e97c47eb6ea1d436f84b528b3d8f3af7998625b74e68845de7f6a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "expiration": 0,
+ "recipientId": "Aaeb9TDVxUkQKfKVkSkDZqUfUxbXp4oakc",
+ "signature": "3045022100b7e1e1bc99c5d3144bce62785d82189232bd332afb98732b51f80747dcd2701302207caf6782b25e97c47eb6ea1d436f84b528b3d8f3af7998625b74e68845de7f6a",
+ "vendorField": "Transaction 21",
+ "id": "f1d7f48249c928d16aff4e23b16737004e9dfb5c5d53f7fa42a3bb6a3f97b78d"
+ },
+ "verified": false,
+ "id": "f1d7f48249c928d16aff4e23b16737004e9dfb5c5d53f7fa42a3bb6a3f97b78d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Aaeb9TDVxUkQKfKVkSkDZqUfUxbXp4oakc",
+ "type": 0,
+ "vendorField": "Transaction 21",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b7e1e1bc99c5d3144bce62785d82189232bd332afb98732b51f80747dcd2701302207caf6782b25e97c47eb6ea1d436f84b528b3d8f3af7998625b74e68845de7f6a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017ebb33c4df59859924201cbb64ced6bef3adbc5a43045022100f7a8046fd9560543185d153cc9ee7785a295b2b32357d635131cc242dc42749d02204fb0b2238ffca4fc4af42607f435ece3fa35ade4d6bfeff6ffeee817f5498412",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "expiration": 0,
+ "recipientId": "AdG9A5pFFrLhyZnwaJ7zNZpKHCwhVySGnx",
+ "signature": "3045022100f7a8046fd9560543185d153cc9ee7785a295b2b32357d635131cc242dc42749d02204fb0b2238ffca4fc4af42607f435ece3fa35ade4d6bfeff6ffeee817f5498412",
+ "vendorField": "Transaction 10",
+ "id": "bb2d7328394c1ecaee73984a73e726dc2803a2de06809f1a74fd2948354fe062"
+ },
+ "verified": false,
+ "id": "bb2d7328394c1ecaee73984a73e726dc2803a2de06809f1a74fd2948354fe062",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdG9A5pFFrLhyZnwaJ7zNZpKHCwhVySGnx",
+ "type": 0,
+ "vendorField": "Transaction 10",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f7a8046fd9560543185d153cc9ee7785a295b2b32357d635131cc242dc42749d02204fb0b2238ffca4fc4af42607f435ece3fa35ade4d6bfeff6ffeee817f5498412",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b000000000000000017ec4d31936cd27e442479e0b3fcb0085c66e150173045022100ec3d0752a914671cb143dc3c3020c47ccb72876b268c0ef1d4d82fc2ee3f000c02207e7e8bcee0a10e659e350af5ff00e5c937f9ed1f016004d9841d988fc162e237",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "expiration": 0,
+ "recipientId": "AdKKbHAczw24UU9VTeNByKo6eipUfbVvW7",
+ "signature": "3045022100ec3d0752a914671cb143dc3c3020c47ccb72876b268c0ef1d4d82fc2ee3f000c02207e7e8bcee0a10e659e350af5ff00e5c937f9ed1f016004d9841d988fc162e237",
+ "vendorField": "Transaction 3",
+ "id": "f49b25cb08cc808db7e93dcbb517fec13a96e98307255147d88fd594a2ddc022"
+ },
+ "verified": false,
+ "id": "f49b25cb08cc808db7e93dcbb517fec13a96e98307255147d88fd594a2ddc022",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdKKbHAczw24UU9VTeNByKo6eipUfbVvW7",
+ "type": 0,
+ "vendorField": "Transaction 3",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ec3d0752a914671cb143dc3c3020c47ccb72876b268c0ef1d4d82fc2ee3f000c02207e7e8bcee0a10e659e350af5ff00e5c937f9ed1f016004d9841d988fc162e237",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b00000000000000001784de56a2ae261edbf5713002f4a606030a5488a7304402206c8ffd3d3c28c0e2873e526b774705ae81fff2ee4db765966b6c7865ea223d590220225a29f3fa0f2adc23a6e1e16ee3cd5a48b5d8bd6c6baa49321b720d8646d35e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "expiration": 0,
+ "recipientId": "ATtRCAUYrimPzhXvwNHfpQErC3G9rv6HSE",
+ "signature": "304402206c8ffd3d3c28c0e2873e526b774705ae81fff2ee4db765966b6c7865ea223d590220225a29f3fa0f2adc23a6e1e16ee3cd5a48b5d8bd6c6baa49321b720d8646d35e",
+ "vendorField": "Transaction 5",
+ "id": "f8ec27684c01792d0ae427cc8590b996e7f6a7f8445e6e70bec7f63ac0e4b5d0"
+ },
+ "verified": false,
+ "id": "f8ec27684c01792d0ae427cc8590b996e7f6a7f8445e6e70bec7f63ac0e4b5d0",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATtRCAUYrimPzhXvwNHfpQErC3G9rv6HSE",
+ "type": 0,
+ "vendorField": "Transaction 5",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206c8ffd3d3c28c0e2873e526b774705ae81fff2ee4db765966b6c7865ea223d590220225a29f3fa0f2adc23a6e1e16ee3cd5a48b5d8bd6c6baa49321b720d8646d35e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b00000000000000001768889616a3bae1ed2ef024f0223ea0b605354ab130450221008e576c71758d736f6cd200ef8faf4f7c9c6419328e46402b4e01f390cf997a0f022068a6a539caf98a7e89ca703258b8bbbb0b70aebd3bc8bdc67d8a8705ae23b791",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "expiration": 0,
+ "recipientId": "ARJbYdVb91atBUMNah8XBfsbUfWjwnPW8d",
+ "signature": "30450221008e576c71758d736f6cd200ef8faf4f7c9c6419328e46402b4e01f390cf997a0f022068a6a539caf98a7e89ca703258b8bbbb0b70aebd3bc8bdc67d8a8705ae23b791",
+ "vendorField": "Transaction 34",
+ "id": "d7d564bad2a69a7d0f36d335ef8e3bd01e8eea42b6270e721feeeee6c8a39e07"
+ },
+ "verified": false,
+ "id": "d7d564bad2a69a7d0f36d335ef8e3bd01e8eea42b6270e721feeeee6c8a39e07",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ARJbYdVb91atBUMNah8XBfsbUfWjwnPW8d",
+ "type": 0,
+ "vendorField": "Transaction 34",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30450221008e576c71758d736f6cd200ef8faf4f7c9c6419328e46402b4e01f390cf997a0f022068a6a539caf98a7e89ca703258b8bbbb0b70aebd3bc8bdc67d8a8705ae23b791",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001759a274212ccfe83a17c09d8c13ff6645d4a1c5073045022100b082481a85c259ee986c56ce3c2c8ad768408df9232081664f1a37b65e8f0ff702203084e6e60d2fb70e02bb67f581f5c3bdf7e8d4cc3129d220ecca3a81d47b57a0",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "expiration": 0,
+ "recipientId": "APwpQ3hkivvRPeLBL64bYtQi7rKFmB69JP",
+ "signature": "3045022100b082481a85c259ee986c56ce3c2c8ad768408df9232081664f1a37b65e8f0ff702203084e6e60d2fb70e02bb67f581f5c3bdf7e8d4cc3129d220ecca3a81d47b57a0",
+ "vendorField": "Transaction 14",
+ "id": "58efe350ca24b4346545e1cb5e5ed413eb0a9316825509927305a5c9722d0637"
+ },
+ "verified": false,
+ "id": "58efe350ca24b4346545e1cb5e5ed413eb0a9316825509927305a5c9722d0637",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APwpQ3hkivvRPeLBL64bYtQi7rKFmB69JP",
+ "type": 0,
+ "vendorField": "Transaction 14",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b082481a85c259ee986c56ce3c2c8ad768408df9232081664f1a37b65e8f0ff702203084e6e60d2fb70e02bb67f581f5c3bdf7e8d4cc3129d220ecca3a81d47b57a0",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b0000000000000000176857f83702fef96513cf41985225b48e34436b80304502210095b3298dc543b24a40f72560170e2275ac832f58fcc67eea128f604a8a9af4e602203d653556edbd647b108c0e15254a7f4cd494c8d2f4baf7b8150d47c4a17fabb7",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "expiration": 0,
+ "recipientId": "ARHbJgZbBW7W6sxDhKsdHyrUMz3Kho3vtV",
+ "signature": "304502210095b3298dc543b24a40f72560170e2275ac832f58fcc67eea128f604a8a9af4e602203d653556edbd647b108c0e15254a7f4cd494c8d2f4baf7b8150d47c4a17fabb7",
+ "vendorField": "Transaction 3",
+ "id": "c0d0ea323459548205d559f75499583023a794d378a5977e1ffa2a483efc2aa1"
+ },
+ "verified": false,
+ "id": "c0d0ea323459548205d559f75499583023a794d378a5977e1ffa2a483efc2aa1",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ARHbJgZbBW7W6sxDhKsdHyrUMz3Kho3vtV",
+ "type": 0,
+ "vendorField": "Transaction 3",
+ "vendorFieldHex": "5472616e73616374696f6e2033",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210095b3298dc543b24a40f72560170e2275ac832f58fcc67eea128f604a8a9af4e602203d653556edbd647b108c0e15254a7f4cd494c8d2f4baf7b8150d47c4a17fabb7",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b00000000000000001732da82ee270f1e2c2fc7deb39e6e26871528dd74304402201feda5a75b2a8959d50e83765dea95f7fc9dcb446cac5bd7fae66b19cc1bf2cd0220397c484c6f76fdea8b284e65fb9c5795bb4ed9f98d7ddf079699b75ec27b341d",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "expiration": 0,
+ "recipientId": "ALQmCXmay1kLd1ojxrSXkT3hk2VPtuTCAH",
+ "signature": "304402201feda5a75b2a8959d50e83765dea95f7fc9dcb446cac5bd7fae66b19cc1bf2cd0220397c484c6f76fdea8b284e65fb9c5795bb4ed9f98d7ddf079699b75ec27b341d",
+ "vendorField": "Transaction 26",
+ "id": "6509579164c4079156085f5878810800b9bc8162e5979eff4e94997fb9145039"
+ },
+ "verified": false,
+ "id": "6509579164c4079156085f5878810800b9bc8162e5979eff4e94997fb9145039",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALQmCXmay1kLd1ojxrSXkT3hk2VPtuTCAH",
+ "type": 0,
+ "vendorField": "Transaction 26",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402201feda5a75b2a8959d50e83765dea95f7fc9dcb446cac5bd7fae66b19cc1bf2cd0220397c484c6f76fdea8b284e65fb9c5795bb4ed9f98d7ddf079699b75ec27b341d",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b000000000000000017556357283c7a590f3f75446a2c57730709abe737304402200d7ddb69d9c65602a44908106a18dd0d6edd3e1380a68b0a11238f1ecf1ab97502203ab0f152aab1edc581863328e07368553641488ac1db0eaedf8c3280e5d18bd5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "expiration": 0,
+ "recipientId": "APZN66kPF4qoYMvYrrsB58y2FjDYGbbfrr",
+ "signature": "304402200d7ddb69d9c65602a44908106a18dd0d6edd3e1380a68b0a11238f1ecf1ab97502203ab0f152aab1edc581863328e07368553641488ac1db0eaedf8c3280e5d18bd5",
+ "vendorField": "Transaction 9",
+ "id": "156d73a47ffd6364b0b9a5e628aa0775284af02ccb2ca25292247beaa43401d9"
+ },
+ "verified": false,
+ "id": "156d73a47ffd6364b0b9a5e628aa0775284af02ccb2ca25292247beaa43401d9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APZN66kPF4qoYMvYrrsB58y2FjDYGbbfrr",
+ "type": 0,
+ "vendorField": "Transaction 9",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402200d7ddb69d9c65602a44908106a18dd0d6edd3e1380a68b0a11238f1ecf1ab97502203ab0f152aab1edc581863328e07368553641488ac1db0eaedf8c3280e5d18bd5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b000000000000000017306330b1af7a053147d782c915501f46a42856703045022100c5409f6d6c89159b29fbc4a5d37f05c868ba78158f1ffab9f0bf10f1040500e4022011bfe8fbae70afcd5d13b24e96d7e077bf1bcae1054cb71a6b343108d2132d50",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "expiration": 0,
+ "recipientId": "ALBiuZbozcH3YEc9mbuCZEPj3DFTCiRDuv",
+ "signature": "3045022100c5409f6d6c89159b29fbc4a5d37f05c868ba78158f1ffab9f0bf10f1040500e4022011bfe8fbae70afcd5d13b24e96d7e077bf1bcae1054cb71a6b343108d2132d50",
+ "vendorField": "Transaction 7",
+ "id": "2a29c07a81f38d08626aeee58992d1ab30efb3aba93aa2898042759d0067493c"
+ },
+ "verified": false,
+ "id": "2a29c07a81f38d08626aeee58992d1ab30efb3aba93aa2898042759d0067493c",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALBiuZbozcH3YEc9mbuCZEPj3DFTCiRDuv",
+ "type": 0,
+ "vendorField": "Transaction 7",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c5409f6d6c89159b29fbc4a5d37f05c868ba78158f1ffab9f0bf10f1040500e4022011bfe8fbae70afcd5d13b24e96d7e077bf1bcae1054cb71a6b343108d2132d50",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b0000000000000000173b362003c69ae3902ff355ba3ab40fda6b6a20fe3045022100ceaa0614b91be2d58f959ddaab130b28247cc3bfea0e224e9e6e155198d48ad2022076fde90c50e6e2d420258fac8007f6f6986bb328ac5df947d6a3746499dc722a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "expiration": 0,
+ "recipientId": "AMAxMVSqGsifAEjcoZrpDBYRerH128jBkD",
+ "signature": "3045022100ceaa0614b91be2d58f959ddaab130b28247cc3bfea0e224e9e6e155198d48ad2022076fde90c50e6e2d420258fac8007f6f6986bb328ac5df947d6a3746499dc722a",
+ "vendorField": "Transaction 38",
+ "id": "70b6ca1cbc74bd45a0f5d2dd115e44f88f3a80ccd3db62861910cdc68220715c"
+ },
+ "verified": false,
+ "id": "70b6ca1cbc74bd45a0f5d2dd115e44f88f3a80ccd3db62861910cdc68220715c",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMAxMVSqGsifAEjcoZrpDBYRerH128jBkD",
+ "type": 0,
+ "vendorField": "Transaction 38",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ceaa0614b91be2d58f959ddaab130b28247cc3bfea0e224e9e6e155198d48ad2022076fde90c50e6e2d420258fac8007f6f6986bb328ac5df947d6a3746499dc722a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001754f899a104b8f9bf0baf55f4c75e3fcfc0f76670304402206c78646b85d465a51845908d8909275269ba13ffc735730173b0b7025f0affa3022068bb2b5ca22549c13dcd4b6a1bf41001b166f074181f104d44358bd7d4091248",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "expiration": 0,
+ "recipientId": "APXADehRbC7bh7VzPAz4CRwEHv7bFBSDjX",
+ "signature": "304402206c78646b85d465a51845908d8909275269ba13ffc735730173b0b7025f0affa3022068bb2b5ca22549c13dcd4b6a1bf41001b166f074181f104d44358bd7d4091248",
+ "vendorField": "Transaction 14",
+ "id": "8b725504cfedce616efc2cb16ccf983d93cadd519156ee6846665a56473a2e5a"
+ },
+ "verified": false,
+ "id": "8b725504cfedce616efc2cb16ccf983d93cadd519156ee6846665a56473a2e5a",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APXADehRbC7bh7VzPAz4CRwEHv7bFBSDjX",
+ "type": 0,
+ "vendorField": "Transaction 14",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206c78646b85d465a51845908d8909275269ba13ffc735730173b0b7025f0affa3022068bb2b5ca22549c13dcd4b6a1bf41001b166f074181f104d44358bd7d4091248",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017f0deb3cb96b71fd78fc597f46f72a082008f0a6e3045022100a3253f00c5384bf3f0a63197d389bd640243350f389b4fc6db38cebd4df11b0f0220222592faaff23d0d063ca7db91116525f9374102f69bd8cd9929f543ca8ee391",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "expiration": 0,
+ "recipientId": "AdjUcAcDkbjiA4H6YhgGd4mHx2AFdqx8dN",
+ "signature": "3045022100a3253f00c5384bf3f0a63197d389bd640243350f389b4fc6db38cebd4df11b0f0220222592faaff23d0d063ca7db91116525f9374102f69bd8cd9929f543ca8ee391",
+ "vendorField": "Transaction 1",
+ "id": "f90d4faf12215275914f04fc15e2a565ec2e1a9259b85edb177a7dc002530d92"
+ },
+ "verified": false,
+ "id": "f90d4faf12215275914f04fc15e2a565ec2e1a9259b85edb177a7dc002530d92",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdjUcAcDkbjiA4H6YhgGd4mHx2AFdqx8dN",
+ "type": 0,
+ "vendorField": "Transaction 1",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100a3253f00c5384bf3f0a63197d389bd640243350f389b4fc6db38cebd4df11b0f0220222592faaff23d0d063ca7db91116525f9374102f69bd8cd9929f543ca8ee391",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017ca534e2c38361fbbb25d59cea2e835b4fa954f023045022100fe7f435c46def0f2be3966ec5b54e612465e053ab052534035c8e756796548ad0220019c4bc353605ddd555673d490901063b0fdd571ba03068cdee77135f63cd30c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "expiration": 0,
+ "recipientId": "AaDfwQWRKRHDshBb7kakwEEMZvRS2rFgnk",
+ "signature": "3045022100fe7f435c46def0f2be3966ec5b54e612465e053ab052534035c8e756796548ad0220019c4bc353605ddd555673d490901063b0fdd571ba03068cdee77135f63cd30c",
+ "vendorField": "Transaction 24",
+ "id": "92565bd3fedc8f3a5d2a3df145f6ed5c6ef29b2318731e5f499ba1a928a3edb7"
+ },
+ "verified": false,
+ "id": "92565bd3fedc8f3a5d2a3df145f6ed5c6ef29b2318731e5f499ba1a928a3edb7",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AaDfwQWRKRHDshBb7kakwEEMZvRS2rFgnk",
+ "type": 0,
+ "vendorField": "Transaction 24",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100fe7f435c46def0f2be3966ec5b54e612465e053ab052534035c8e756796548ad0220019c4bc353605ddd555673d490901063b0fdd571ba03068cdee77135f63cd30c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b000000000000000017407c452cd3cb205bb8588c690866a777280a697c3045022100eefcc04db1af1cea3ffbeb1c2bc53a8e0726ce2c4c435e817786db7c6ef632ed02204dd9eac43ea01295a255aed970edbe118be1adb4d3521d12076eb81e77809110",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "expiration": 0,
+ "recipientId": "AMeqmDo3yPyUYiE64zYtmKX6hBfysXoSBh",
+ "signature": "3045022100eefcc04db1af1cea3ffbeb1c2bc53a8e0726ce2c4c435e817786db7c6ef632ed02204dd9eac43ea01295a255aed970edbe118be1adb4d3521d12076eb81e77809110",
+ "vendorField": "Transaction 31",
+ "id": "47c9604d80c43c08f9a1a7ea2310125e40583a129c51e626644f9b9857f86bdf"
+ },
+ "verified": false,
+ "id": "47c9604d80c43c08f9a1a7ea2310125e40583a129c51e626644f9b9857f86bdf",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMeqmDo3yPyUYiE64zYtmKX6hBfysXoSBh",
+ "type": 0,
+ "vendorField": "Transaction 31",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100eefcc04db1af1cea3ffbeb1c2bc53a8e0726ce2c4c435e817786db7c6ef632ed02204dd9eac43ea01295a255aed970edbe118be1adb4d3521d12076eb81e77809110",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017b5b7c3d4561bab29857aad0c20b356b429dc8ca83045022100f76c828f2837c9487a4b1bbd5756170479fa3a4ed48dca031cf3f620f62a99ee02204da13730862ccd2b389dee441a7086f0191282ef2c5e2a3d32290a15401b2a90",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "expiration": 0,
+ "recipientId": "AYLi6S4DsQMZZvVYxw5dTEBdseHCF3W8n1",
+ "signature": "3045022100f76c828f2837c9487a4b1bbd5756170479fa3a4ed48dca031cf3f620f62a99ee02204da13730862ccd2b389dee441a7086f0191282ef2c5e2a3d32290a15401b2a90",
+ "vendorField": "Transaction 27",
+ "id": "5ea41b33eb0455e55bacbec38898632da7ebb712a0168137550f86a6a3860a6e"
+ },
+ "verified": false,
+ "id": "5ea41b33eb0455e55bacbec38898632da7ebb712a0168137550f86a6a3860a6e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYLi6S4DsQMZZvVYxw5dTEBdseHCF3W8n1",
+ "type": 0,
+ "vendorField": "Transaction 27",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f76c828f2837c9487a4b1bbd5756170479fa3a4ed48dca031cf3f620f62a99ee02204da13730862ccd2b389dee441a7086f0191282ef2c5e2a3d32290a15401b2a90",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b000000000000000017711127026596a58864e8346d1b4b43265dd9be163045022100ada204d17b23ad4cc21a645e5cca0a472a2646526242cf8683236b9aaf85a21c0220348f7e2b164ca4c373557b4426c9953e7b14c4dd130c6857d8ee314db0a92305",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "expiration": 0,
+ "recipientId": "AS5iYwfzg9ZJskiz1BHePjHJFYH7Titbxu",
+ "signature": "3045022100ada204d17b23ad4cc21a645e5cca0a472a2646526242cf8683236b9aaf85a21c0220348f7e2b164ca4c373557b4426c9953e7b14c4dd130c6857d8ee314db0a92305",
+ "vendorField": "Transaction 23",
+ "id": "9c4fcbc20df740613ed300ec72ba350b14a50b5a9233f488764c2acdb617a90e"
+ },
+ "verified": false,
+ "id": "9c4fcbc20df740613ed300ec72ba350b14a50b5a9233f488764c2acdb617a90e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AS5iYwfzg9ZJskiz1BHePjHJFYH7Titbxu",
+ "type": 0,
+ "vendorField": "Transaction 23",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ada204d17b23ad4cc21a645e5cca0a472a2646526242cf8683236b9aaf85a21c0220348f7e2b164ca4c373557b4426c9953e7b14c4dd130c6857d8ee314db0a92305",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017dde2acf4d0798abf18ec499385dd96a948441e3f3044022041d3dc6d101ccaa3acb9622d9876e0ae610246147b19fc0831928933abb6ad5b022055a06fd34646626595f3b4bee99d49a44cc5dba02ede0f6ea35487a4f89511b3",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "expiration": 0,
+ "recipientId": "Ac16XbCJYi9DRn1zcxjfYsQ9ndg9N9idzE",
+ "signature": "3044022041d3dc6d101ccaa3acb9622d9876e0ae610246147b19fc0831928933abb6ad5b022055a06fd34646626595f3b4bee99d49a44cc5dba02ede0f6ea35487a4f89511b3",
+ "vendorField": "Transaction 18",
+ "id": "b6a3df24a42270f18ed26524c28154f17c28c5d012ed303e633cc0c820e91fe6"
+ },
+ "verified": false,
+ "id": "b6a3df24a42270f18ed26524c28154f17c28c5d012ed303e633cc0c820e91fe6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Ac16XbCJYi9DRn1zcxjfYsQ9ndg9N9idzE",
+ "type": 0,
+ "vendorField": "Transaction 18",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022041d3dc6d101ccaa3acb9622d9876e0ae610246147b19fc0831928933abb6ad5b022055a06fd34646626595f3b4bee99d49a44cc5dba02ede0f6ea35487a4f89511b3",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017a332076f2167b39601fc396d5813f1df168d5ecf3045022100bb096efaa786f0117b07303ead48985ec0ef4fc80de31fe8f456b3e412f5d8bb02200aff57978be91751b75dd0e5816c64b7248d7a5b3c8c3df00130a2c80608da10",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "expiration": 0,
+ "recipientId": "AWemiqyoSy1P2BD9u9LD7vZeibdTU12A2U",
+ "signature": "3045022100bb096efaa786f0117b07303ead48985ec0ef4fc80de31fe8f456b3e412f5d8bb02200aff57978be91751b75dd0e5816c64b7248d7a5b3c8c3df00130a2c80608da10",
+ "vendorField": "Transaction 19",
+ "id": "244da0db919c4a1ab4afa095b81066d95d0167a0786aaa552e3a49331ace4c5b"
+ },
+ "verified": false,
+ "id": "244da0db919c4a1ab4afa095b81066d95d0167a0786aaa552e3a49331ace4c5b",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AWemiqyoSy1P2BD9u9LD7vZeibdTU12A2U",
+ "type": 0,
+ "vendorField": "Transaction 19",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100bb096efaa786f0117b07303ead48985ec0ef4fc80de31fe8f456b3e412f5d8bb02200aff57978be91751b75dd0e5816c64b7248d7a5b3c8c3df00130a2c80608da10",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000170a30bc64993a755c45f212733eb031a4285d2d123045022100f4245b3606e03770b98a7fa39565d7509a76f810924055da9a6a600c6c01959a022050282b850eee19017814f547ea3587a033a0c93ff5d2bb9beb1cb67cba138646",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "expiration": 0,
+ "recipientId": "AGhkneUqnoVePbAvPBeucjVpCmzoV4fwMc",
+ "signature": "3045022100f4245b3606e03770b98a7fa39565d7509a76f810924055da9a6a600c6c01959a022050282b850eee19017814f547ea3587a033a0c93ff5d2bb9beb1cb67cba138646",
+ "vendorField": "Transaction 28",
+ "id": "53ef20d3d76f25e10104a32919f3777ec66a6dc4260782ab5b1e5d168dd468e0"
+ },
+ "verified": false,
+ "id": "53ef20d3d76f25e10104a32919f3777ec66a6dc4260782ab5b1e5d168dd468e0",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGhkneUqnoVePbAvPBeucjVpCmzoV4fwMc",
+ "type": 0,
+ "vendorField": "Transaction 28",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f4245b3606e03770b98a7fa39565d7509a76f810924055da9a6a600c6c01959a022050282b850eee19017814f547ea3587a033a0c93ff5d2bb9beb1cb67cba138646",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b000000000000000017e8cc0e60adc7d87069f3ee3ce27fab54e28b195b3045022100ef03265298c52635384e183286e4624414bd92fdaa0141d05526cb7869ce9e370220718af74f83c06e5fd13aab1ceb21f4440191275a308811ec7c2b707035a7324e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "expiration": 0,
+ "recipientId": "Aczns5EGokPonwGjWdjt2MtvzvLAsy66vB",
+ "signature": "3045022100ef03265298c52635384e183286e4624414bd92fdaa0141d05526cb7869ce9e370220718af74f83c06e5fd13aab1ceb21f4440191275a308811ec7c2b707035a7324e",
+ "vendorField": "Transaction 26",
+ "id": "ba1c5e5693587bf489784f8b5673becea7f0bba9004a11f532d994b56f7b501e"
+ },
+ "verified": false,
+ "id": "ba1c5e5693587bf489784f8b5673becea7f0bba9004a11f532d994b56f7b501e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Aczns5EGokPonwGjWdjt2MtvzvLAsy66vB",
+ "type": 0,
+ "vendorField": "Transaction 26",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ef03265298c52635384e183286e4624414bd92fdaa0141d05526cb7869ce9e370220718af74f83c06e5fd13aab1ceb21f4440191275a308811ec7c2b707035a7324e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017710ff266d4bc013a13c9fbebc80b0f3669740de53045022100e083480002bafe1234fb63485f21d1be94419ed69ed432ad182939a9a4fc4d780220367bb8bf10c3c5a8a514db19f0ac65cbd6a1791c8be4dbaf78e4ba32624ac4e1",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "expiration": 0,
+ "recipientId": "AS5h7BcxxTR9grNxUDAMwzdbsP5oFqGRaN",
+ "signature": "3045022100e083480002bafe1234fb63485f21d1be94419ed69ed432ad182939a9a4fc4d780220367bb8bf10c3c5a8a514db19f0ac65cbd6a1791c8be4dbaf78e4ba32624ac4e1",
+ "vendorField": "Transaction 25",
+ "id": "ba7ca935c4358cb5818e5f768df1821a570657c5a04994c24a32926015428afa"
+ },
+ "verified": false,
+ "id": "ba7ca935c4358cb5818e5f768df1821a570657c5a04994c24a32926015428afa",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AS5h7BcxxTR9grNxUDAMwzdbsP5oFqGRaN",
+ "type": 0,
+ "vendorField": "Transaction 25",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e083480002bafe1234fb63485f21d1be94419ed69ed432ad182939a9a4fc4d780220367bb8bf10c3c5a8a514db19f0ac65cbd6a1791c8be4dbaf78e4ba32624ac4e1",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017eeb555fc83eba31e50fc83abb167759703a1491f3045022100f0efae4a199bdaf36375e9904c5ef61d3b9128f3f0f00136762b8c877084a231022012bfb4eda5252c660c926b4a8881c0752bd658d0692701636430ed1bae39aa21",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "expiration": 0,
+ "recipientId": "AdY3hbJ4CKGC62v7D5QXvN5YuWZL2vDR1P",
+ "signature": "3045022100f0efae4a199bdaf36375e9904c5ef61d3b9128f3f0f00136762b8c877084a231022012bfb4eda5252c660c926b4a8881c0752bd658d0692701636430ed1bae39aa21",
+ "vendorField": "Transaction 25",
+ "id": "a9d38be7c04605dd4c42f7d764fa65505fc4e62c85032e904fe2f888e37b7c12"
+ },
+ "verified": false,
+ "id": "a9d38be7c04605dd4c42f7d764fa65505fc4e62c85032e904fe2f888e37b7c12",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdY3hbJ4CKGC62v7D5QXvN5YuWZL2vDR1P",
+ "type": 0,
+ "vendorField": "Transaction 25",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f0efae4a199bdaf36375e9904c5ef61d3b9128f3f0f00136762b8c877084a231022012bfb4eda5252c660c926b4a8881c0752bd658d0692701636430ed1bae39aa21",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b000000000000000017740cdd08268c4e41b44b6c58fa2b281d02ff9d833044022040f3c91ff3012f2f74d93c2dc22459dba4f5c598d74cecdcfc8ccb095bfad9d30220025d0a85f560c0ac71be17806c2758db315537bc9eda6dfc2ab5eb597e8e7a7a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "expiration": 0,
+ "recipientId": "ASMVSWsE8tEu8WDt24oTACXJcpxKwuLpKx",
+ "signature": "3044022040f3c91ff3012f2f74d93c2dc22459dba4f5c598d74cecdcfc8ccb095bfad9d30220025d0a85f560c0ac71be17806c2758db315537bc9eda6dfc2ab5eb597e8e7a7a",
+ "vendorField": "Transaction 36",
+ "id": "55cc3155086caa1b2efd11038dcc9c04cb17cb41c8952d5897687bd176a687b5"
+ },
+ "verified": false,
+ "id": "55cc3155086caa1b2efd11038dcc9c04cb17cb41c8952d5897687bd176a687b5",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASMVSWsE8tEu8WDt24oTACXJcpxKwuLpKx",
+ "type": 0,
+ "vendorField": "Transaction 36",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022040f3c91ff3012f2f74d93c2dc22459dba4f5c598d74cecdcfc8ccb095bfad9d30220025d0a85f560c0ac71be17806c2758db315537bc9eda6dfc2ab5eb597e8e7a7a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017cbb5cc21a6b935cc0ab0c8d27ce3aa73ac23fa503045022100ed3e51d4a1e313a45c423ca67ac851119ebff00f2fe7408f2f8276da09d0e18d02200e679fd84c0ea05bcde16e87752744db95c1d6623500dd6e8b3eac91850e72b6",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "expiration": 0,
+ "recipientId": "AaLzbwhXj2PESCwtYjBr9N1V2EkH4enX8y",
+ "signature": "3045022100ed3e51d4a1e313a45c423ca67ac851119ebff00f2fe7408f2f8276da09d0e18d02200e679fd84c0ea05bcde16e87752744db95c1d6623500dd6e8b3eac91850e72b6",
+ "vendorField": "Transaction 27",
+ "id": "9dd92590759a2770bbaa297e1ef1a4d7bd85d764532916ed60e7f7f130d1b91b"
+ },
+ "verified": false,
+ "id": "9dd92590759a2770bbaa297e1ef1a4d7bd85d764532916ed60e7f7f130d1b91b",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AaLzbwhXj2PESCwtYjBr9N1V2EkH4enX8y",
+ "type": 0,
+ "vendorField": "Transaction 27",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ed3e51d4a1e313a45c423ca67ac851119ebff00f2fe7408f2f8276da09d0e18d02200e679fd84c0ea05bcde16e87752744db95c1d6623500dd6e8b3eac91850e72b6",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b000000000000000017c798a993d4823466b864f36fb8ea9b30c51ea2403045022100f8ad6ad2a942b64efbfacce82b6491b1389404b9a6633f8e10e0201369d7bf0902207e1c519cd878036d16e57bdd1cfc4e95c706dda677b98db6334beba7a9a491c5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "expiration": 0,
+ "recipientId": "AZyEzqqqUbqgQhvqdKe9Ruq18ScyR95rUh",
+ "signature": "3045022100f8ad6ad2a942b64efbfacce82b6491b1389404b9a6633f8e10e0201369d7bf0902207e1c519cd878036d16e57bdd1cfc4e95c706dda677b98db6334beba7a9a491c5",
+ "vendorField": "Transaction 31",
+ "id": "da50f7ceda83c293913f5e9e55b032074eb5012f91427c32d9c3768711259c09"
+ },
+ "verified": false,
+ "id": "da50f7ceda83c293913f5e9e55b032074eb5012f91427c32d9c3768711259c09",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AZyEzqqqUbqgQhvqdKe9Ruq18ScyR95rUh",
+ "type": 0,
+ "vendorField": "Transaction 31",
+ "vendorFieldHex": "5472616e73616374696f6e203331",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f8ad6ad2a942b64efbfacce82b6491b1389404b9a6633f8e10e0201369d7bf0902207e1c519cd878036d16e57bdd1cfc4e95c706dda677b98db6334beba7a9a491c5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b0000000000000000170e98048c8bb91aa761e72235da3de66efdfdbd963045022100b4b976f7303b3d4638a2432d9ecc99c2ea790761274c613f4730991ff1c40cf302203a749033f2dfba987a103e27d55d8280b8e55f0fc121204eae576ef72ea7687f",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "expiration": 0,
+ "recipientId": "AH73DZeigYYRwR4msu9AEyuNpPWGit35gz",
+ "signature": "3045022100b4b976f7303b3d4638a2432d9ecc99c2ea790761274c613f4730991ff1c40cf302203a749033f2dfba987a103e27d55d8280b8e55f0fc121204eae576ef72ea7687f",
+ "vendorField": "Transaction 5",
+ "id": "db77b5b2488e5208c861b12224adc1d66b1bfb6754badc66e0d5dd49122edff2"
+ },
+ "verified": false,
+ "id": "db77b5b2488e5208c861b12224adc1d66b1bfb6754badc66e0d5dd49122edff2",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AH73DZeigYYRwR4msu9AEyuNpPWGit35gz",
+ "type": 0,
+ "vendorField": "Transaction 5",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b4b976f7303b3d4638a2432d9ecc99c2ea790761274c613f4730991ff1c40cf302203a749033f2dfba987a103e27d55d8280b8e55f0fc121204eae576ef72ea7687f",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b0000000000000000170a654cb7b1392a92a702d532d73557cd81c4ec7430440220533598e3d736748f22a7f96e362b3f33c6ddccddcdd46acebce7ef598864b284022063ed82a97e87173926cf48e69cd1656dfdbcd69c1d5dde0e31b2d7a63828fd6f",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "expiration": 0,
+ "recipientId": "AGiqkqmYooLZ9oMEiqusKFLaxS8W5H6mRk",
+ "signature": "30440220533598e3d736748f22a7f96e362b3f33c6ddccddcdd46acebce7ef598864b284022063ed82a97e87173926cf48e69cd1656dfdbcd69c1d5dde0e31b2d7a63828fd6f",
+ "vendorField": "Transaction 2",
+ "id": "1a701201d3bfbcde486e4eae9458e7e23f66ea665db7beb4c4f560488d910f41"
+ },
+ "verified": false,
+ "id": "1a701201d3bfbcde486e4eae9458e7e23f66ea665db7beb4c4f560488d910f41",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGiqkqmYooLZ9oMEiqusKFLaxS8W5H6mRk",
+ "type": 0,
+ "vendorField": "Transaction 2",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220533598e3d736748f22a7f96e362b3f33c6ddccddcdd46acebce7ef598864b284022063ed82a97e87173926cf48e69cd1656dfdbcd69c1d5dde0e31b2d7a63828fd6f",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017b527062800be5242c6dd288e5fc842d323b880df3045022100e03cd37d0dca988195191175306878832915fac89923ea3e6c4db35e424e8b2f02202fac9bfa81869d2bd6bf4e239a58aee6aab5eb795f06e6a20bb41a9ea692eb72",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "expiration": 0,
+ "recipientId": "AYHihf7pU41bhvqnJ3TGqeWzb2LM31ux7W",
+ "signature": "3045022100e03cd37d0dca988195191175306878832915fac89923ea3e6c4db35e424e8b2f02202fac9bfa81869d2bd6bf4e239a58aee6aab5eb795f06e6a20bb41a9ea692eb72",
+ "vendorField": "Transaction 2",
+ "id": "def30141e316c3d27a8f8d7dc35513ed0c88f58409bef0ae51d234e392544e80"
+ },
+ "verified": false,
+ "id": "def30141e316c3d27a8f8d7dc35513ed0c88f58409bef0ae51d234e392544e80",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYHihf7pU41bhvqnJ3TGqeWzb2LM31ux7W",
+ "type": 0,
+ "vendorField": "Transaction 2",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e03cd37d0dca988195191175306878832915fac89923ea3e6c4db35e424e8b2f02202fac9bfa81869d2bd6bf4e239a58aee6aab5eb795f06e6a20bb41a9ea692eb72",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b000000000000000017e2ede64fdaadc3535d06d208b820edcb175162a63044022024c14e8b3d656e736890766706713f1224c58a461de599201a2cccb586cad23f02202d6fa8ae3ae24f494037827d1e27511073422496b244a25ce610348f52e7c3ff",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "expiration": 0,
+ "recipientId": "AcTmMQj6cvfWCAppoCTxEHsou1jZGBXxBv",
+ "signature": "3044022024c14e8b3d656e736890766706713f1224c58a461de599201a2cccb586cad23f02202d6fa8ae3ae24f494037827d1e27511073422496b244a25ce610348f52e7c3ff",
+ "vendorField": "Transaction 29",
+ "id": "d0558448ca870e6d57d1badeaa7573988bbe3aa0f4f3e2d563eb5cb14fffb5e9"
+ },
+ "verified": false,
+ "id": "d0558448ca870e6d57d1badeaa7573988bbe3aa0f4f3e2d563eb5cb14fffb5e9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AcTmMQj6cvfWCAppoCTxEHsou1jZGBXxBv",
+ "type": 0,
+ "vendorField": "Transaction 29",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022024c14e8b3d656e736890766706713f1224c58a461de599201a2cccb586cad23f02202d6fa8ae3ae24f494037827d1e27511073422496b244a25ce610348f52e7c3ff",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b000000000000000017424f8c905f2c4533176b88af2cd8cbd16bcb1d873045022100adb464c5e55b27e3575b3c0d721b69fb99f13299cd6b3191d16fa6b298683b5a02200b920190f7b37551c217ff94570c0b3eebf572a64765f7212ec324f9a2034ed5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "expiration": 0,
+ "recipientId": "AMpVYKrowfTSQsTY8A24PkcNZeh9FHxN9C",
+ "signature": "3045022100adb464c5e55b27e3575b3c0d721b69fb99f13299cd6b3191d16fa6b298683b5a02200b920190f7b37551c217ff94570c0b3eebf572a64765f7212ec324f9a2034ed5",
+ "vendorField": "Transaction 33",
+ "id": "bd0a417ee133d277354dabec0d1cd59db0e1f53aab3a9c197f7403fda378f3d5"
+ },
+ "verified": false,
+ "id": "bd0a417ee133d277354dabec0d1cd59db0e1f53aab3a9c197f7403fda378f3d5",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMpVYKrowfTSQsTY8A24PkcNZeh9FHxN9C",
+ "type": 0,
+ "vendorField": "Transaction 33",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100adb464c5e55b27e3575b3c0d721b69fb99f13299cd6b3191d16fa6b298683b5a02200b920190f7b37551c217ff94570c0b3eebf572a64765f7212ec324f9a2034ed5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b000000000000000017d3fece02925920575ef90eb229aa631707084bd73045022100b632230b766f89e91e24519a93446f7c8a7c6e68e34c7ca00e1ee41b945d78b302207401fd3c6be212dba6763564f5d25865168b8ff54fe1dc52f5d5b12479f3ded4",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "expiration": 0,
+ "recipientId": "Ab6oU7ky1rVN1LdTFvSph9tDVChErPobBt",
+ "signature": "3045022100b632230b766f89e91e24519a93446f7c8a7c6e68e34c7ca00e1ee41b945d78b302207401fd3c6be212dba6763564f5d25865168b8ff54fe1dc52f5d5b12479f3ded4",
+ "vendorField": "Transaction 14",
+ "id": "ea5f93dc1a8ecef5ca71d8450dbcfb3ba8e55829ba6dd494ee40b844c9ad5f68"
+ },
+ "verified": false,
+ "id": "ea5f93dc1a8ecef5ca71d8450dbcfb3ba8e55829ba6dd494ee40b844c9ad5f68",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Ab6oU7ky1rVN1LdTFvSph9tDVChErPobBt",
+ "type": 0,
+ "vendorField": "Transaction 14",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b632230b766f89e91e24519a93446f7c8a7c6e68e34c7ca00e1ee41b945d78b302207401fd3c6be212dba6763564f5d25865168b8ff54fe1dc52f5d5b12479f3ded4",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b000000000000000017737f4c47c74919c2aa05befa3a17145334fc0b2330440220142eb40c16bbf50d63ee294a9cd87945e7a942c2c3cf852443be583fba20542e022053050ef63794d52a261e531f16cb3f01a118619fd1c4763bd93928eee7fa036b",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "expiration": 0,
+ "recipientId": "ASJZrNp1N14Mwj9NGYc8z9qFtiB9CPVcQU",
+ "signature": "30440220142eb40c16bbf50d63ee294a9cd87945e7a942c2c3cf852443be583fba20542e022053050ef63794d52a261e531f16cb3f01a118619fd1c4763bd93928eee7fa036b",
+ "vendorField": "Transaction 32",
+ "id": "644acb276c22a56cdcb86f10870194a37d9a4fc13d20a6d67594bfcfb49ab7bb"
+ },
+ "verified": false,
+ "id": "644acb276c22a56cdcb86f10870194a37d9a4fc13d20a6d67594bfcfb49ab7bb",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASJZrNp1N14Mwj9NGYc8z9qFtiB9CPVcQU",
+ "type": 0,
+ "vendorField": "Transaction 32",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220142eb40c16bbf50d63ee294a9cd87945e7a942c2c3cf852443be583fba20542e022053050ef63794d52a261e531f16cb3f01a118619fd1c4763bd93928eee7fa036b",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b0000000000000000179d2c9324e662ac5cf922b9f5a0bdf806baf737273045022100c3ba0ffcbfa9db3bcef1595e5836b501de96ce676f0aa33fe885d0721292d93502205cfbf32420b07708340c3ed60ffd29a22cd37b3e55abbfcd2aaa118d48e30347",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "expiration": 0,
+ "recipientId": "AW6w8j3yW3Gtn6ApFxCHu8fkcaTpVzG9E4",
+ "signature": "3045022100c3ba0ffcbfa9db3bcef1595e5836b501de96ce676f0aa33fe885d0721292d93502205cfbf32420b07708340c3ed60ffd29a22cd37b3e55abbfcd2aaa118d48e30347",
+ "vendorField": "Transaction 23",
+ "id": "0cf9d9ecaac1c3c423539194f58a0a53585d87ea4f583552fba58fee13da3c11"
+ },
+ "verified": false,
+ "id": "0cf9d9ecaac1c3c423539194f58a0a53585d87ea4f583552fba58fee13da3c11",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AW6w8j3yW3Gtn6ApFxCHu8fkcaTpVzG9E4",
+ "type": 0,
+ "vendorField": "Transaction 23",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c3ba0ffcbfa9db3bcef1595e5836b501de96ce676f0aa33fe885d0721292d93502205cfbf32420b07708340c3ed60ffd29a22cd37b3e55abbfcd2aaa118d48e30347",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000174d48fbcb340a2691556715b1995e51c360adffe73044022019c6fb89c93e746bf5ec3a2eb1bca072da112663427f2db5778d1f9a7e4b7b22022018f48b791669852e13837236d5eddcfc61fc575c8fd15ceb8dc54304b1edb1a2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "expiration": 0,
+ "recipientId": "ANpX7FqMm9ZfwYJFeFhFEAmryXu5vTaUNF",
+ "signature": "3044022019c6fb89c93e746bf5ec3a2eb1bca072da112663427f2db5778d1f9a7e4b7b22022018f48b791669852e13837236d5eddcfc61fc575c8fd15ceb8dc54304b1edb1a2",
+ "vendorField": "Transaction 8",
+ "id": "f6a8bc081c1f8ff8012f3b447f425aab78eb71079b177ac724505cf265ee1fc0"
+ },
+ "verified": false,
+ "id": "f6a8bc081c1f8ff8012f3b447f425aab78eb71079b177ac724505cf265ee1fc0",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANpX7FqMm9ZfwYJFeFhFEAmryXu5vTaUNF",
+ "type": 0,
+ "vendorField": "Transaction 8",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022019c6fb89c93e746bf5ec3a2eb1bca072da112663427f2db5778d1f9a7e4b7b22022018f48b791669852e13837236d5eddcfc61fc575c8fd15ceb8dc54304b1edb1a2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b000000000000000017d190f1e81c972becdaa6d8ac66628d1b5a8e82de304402205547a585292262f56a6534c99685e1786b13812773d132476d09c19cad545a5002204af895618f5cf873edfda3f1a20f46ff8eb40ba1027021f5670b112a1a8d4b1e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "expiration": 0,
+ "recipientId": "AasxWYQykcpFSRoVEnU1n7ZSbT8MtP4juo",
+ "signature": "304402205547a585292262f56a6534c99685e1786b13812773d132476d09c19cad545a5002204af895618f5cf873edfda3f1a20f46ff8eb40ba1027021f5670b112a1a8d4b1e",
+ "vendorField": "Transaction 17",
+ "id": "5330521c33ac88a7232e9d9fe111d87f0f02d7e5f92b6acb4253f2a847e9886d"
+ },
+ "verified": false,
+ "id": "5330521c33ac88a7232e9d9fe111d87f0f02d7e5f92b6acb4253f2a847e9886d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AasxWYQykcpFSRoVEnU1n7ZSbT8MtP4juo",
+ "type": 0,
+ "vendorField": "Transaction 17",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402205547a585292262f56a6534c99685e1786b13812773d132476d09c19cad545a5002204af895618f5cf873edfda3f1a20f46ff8eb40ba1027021f5670b112a1a8d4b1e",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b000000000000000017d25825969c0d9d28ffd5bb1b3478901080edb0de3044022026416f54ae43a24634b705374dfe8cc1abace3c3013ba51eb350d0a02bde75440220726b910bf7304342b5eac57cd19c494cb2ef04b3ae0eba0bed266a26b9348c24",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "expiration": 0,
+ "recipientId": "Aax59LCQ2Eeiix3KbbJaBcLiK51w7iy6R4",
+ "signature": "3044022026416f54ae43a24634b705374dfe8cc1abace3c3013ba51eb350d0a02bde75440220726b910bf7304342b5eac57cd19c494cb2ef04b3ae0eba0bed266a26b9348c24",
+ "vendorField": "Transaction 6",
+ "id": "40a66e28f1a9a9fd902c7990b9765662c2442d2ef6ca85ac9ab36baaca488669"
+ },
+ "verified": false,
+ "id": "40a66e28f1a9a9fd902c7990b9765662c2442d2ef6ca85ac9ab36baaca488669",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Aax59LCQ2Eeiix3KbbJaBcLiK51w7iy6R4",
+ "type": 0,
+ "vendorField": "Transaction 6",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022026416f54ae43a24634b705374dfe8cc1abace3c3013ba51eb350d0a02bde75440220726b910bf7304342b5eac57cd19c494cb2ef04b3ae0eba0bed266a26b9348c24",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017bbc327b6644dc42d55a8e8f32ffeabac37f742c63045022100f92ba535acd2d4cb2aa14103ad2b2f894cc78167899d16061a85e215f5ec213802206ce4ad145df8663dc16bfa18bc8397daef98c3d2b05f02a1a1630f2ba59157fd",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "expiration": 0,
+ "recipientId": "AYtfnzAzEYxK8DKUmZcBwT61jtmPpwFjH8",
+ "signature": "3045022100f92ba535acd2d4cb2aa14103ad2b2f894cc78167899d16061a85e215f5ec213802206ce4ad145df8663dc16bfa18bc8397daef98c3d2b05f02a1a1630f2ba59157fd",
+ "vendorField": "Transaction 16",
+ "id": "49defe929cedf5bac4e47964c97d61a763c8f0b1103e5b1aaa6bbd2b5b2a3d03"
+ },
+ "verified": false,
+ "id": "49defe929cedf5bac4e47964c97d61a763c8f0b1103e5b1aaa6bbd2b5b2a3d03",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYtfnzAzEYxK8DKUmZcBwT61jtmPpwFjH8",
+ "type": 0,
+ "vendorField": "Transaction 16",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f92ba535acd2d4cb2aa14103ad2b2f894cc78167899d16061a85e215f5ec213802206ce4ad145df8663dc16bfa18bc8397daef98c3d2b05f02a1a1630f2ba59157fd",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20343000c2eb0b000000000000000017abe730919c31b471ac0dbf9b6f12bad34416f8bb3045022100f9400dabee7c2749136995628b0813cfab1dd763bd7450717cc0af2bb11810560220683de15f7d713affe5e6de9e3321a6248481246a571ec9f96e648e2f1a0ee24a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "expiration": 0,
+ "recipientId": "AXSp9eMQ9GDQPfwG8b94QHWEUPs2CnVh8g",
+ "signature": "3045022100f9400dabee7c2749136995628b0813cfab1dd763bd7450717cc0af2bb11810560220683de15f7d713affe5e6de9e3321a6248481246a571ec9f96e648e2f1a0ee24a",
+ "vendorField": "Transaction 40",
+ "id": "0d9ce68fd7204c0e870cbc6ea16c54bbd33f173834efbdb5dc10492afef52230"
+ },
+ "verified": false,
+ "id": "0d9ce68fd7204c0e870cbc6ea16c54bbd33f173834efbdb5dc10492afef52230",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AXSp9eMQ9GDQPfwG8b94QHWEUPs2CnVh8g",
+ "type": 0,
+ "vendorField": "Transaction 40",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f9400dabee7c2749136995628b0813cfab1dd763bd7450717cc0af2bb11810560220683de15f7d713affe5e6de9e3321a6248481246a571ec9f96e648e2f1a0ee24a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b000000000000000017527ce40209b488f22a14f1ea2880bbe37c15e5963044022005cb3b17865f2ef8580115395aa593abf9b074a70aa2859c17f5fa1f2b5629fd0220512306967c8f5e33eed952bb3d26de6f451b2c3954198e8db1bb5cb045521c50",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "expiration": 0,
+ "recipientId": "APJ2fnPPkt1F2kCVjg6q3eu8VrcQKxhRMm",
+ "signature": "3044022005cb3b17865f2ef8580115395aa593abf9b074a70aa2859c17f5fa1f2b5629fd0220512306967c8f5e33eed952bb3d26de6f451b2c3954198e8db1bb5cb045521c50",
+ "vendorField": "Transaction 28",
+ "id": "d1e887f6b3f3203f9b32203120606fd935452c140721d094308cb0800d3c95a3"
+ },
+ "verified": false,
+ "id": "d1e887f6b3f3203f9b32203120606fd935452c140721d094308cb0800d3c95a3",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APJ2fnPPkt1F2kCVjg6q3eu8VrcQKxhRMm",
+ "type": 0,
+ "vendorField": "Transaction 28",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022005cb3b17865f2ef8580115395aa593abf9b074a70aa2859c17f5fa1f2b5629fd0220512306967c8f5e33eed952bb3d26de6f451b2c3954198e8db1bb5cb045521c50",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017e9beb02e59ebcf1e87f5b2990cd5fc8a523ed152304402200746188cd79a980f63a0011884ea6d34d4af34e533ab0bb18e622bc4b1b1b1b102201de516bd3458ada944d0d46eb2295873e5ab5f929c2fb728fb45ad81a53bbc6f",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "expiration": 0,
+ "recipientId": "Ad5oXSgxt7eVHjmFTMud9KtFpXhDyXiD7F",
+ "signature": "304402200746188cd79a980f63a0011884ea6d34d4af34e533ab0bb18e622bc4b1b1b1b102201de516bd3458ada944d0d46eb2295873e5ab5f929c2fb728fb45ad81a53bbc6f",
+ "vendorField": "Transaction 21",
+ "id": "35486bab705e07df164eeb939de1d6e771c42d5bb20854f10cf95a6a6a4af764"
+ },
+ "verified": false,
+ "id": "35486bab705e07df164eeb939de1d6e771c42d5bb20854f10cf95a6a6a4af764",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Ad5oXSgxt7eVHjmFTMud9KtFpXhDyXiD7F",
+ "type": 0,
+ "vendorField": "Transaction 21",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402200746188cd79a980f63a0011884ea6d34d4af34e533ab0bb18e622bc4b1b1b1b102201de516bd3458ada944d0d46eb2295873e5ab5f929c2fb728fb45ad81a53bbc6f",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000173a8aefd16949d537bbc3fe0a4535920af7501aab3044022010cd7a8ce57ce47221da59de3f75e571fe56e86626d9017ca6b80a0ef3a3508b0220078ee6036fcd468eebadd85b0e0a0a6f8dbb952bdc24feef3ca4decf1e058155",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "expiration": 0,
+ "recipientId": "AM7RH827Rv9Y4VnzCwsQjf3BW1PpcAwaSL",
+ "signature": "3044022010cd7a8ce57ce47221da59de3f75e571fe56e86626d9017ca6b80a0ef3a3508b0220078ee6036fcd468eebadd85b0e0a0a6f8dbb952bdc24feef3ca4decf1e058155",
+ "vendorField": "Transaction 11",
+ "id": "a30d605b13c94574c88954ab7c8f3526a19eb47b3b8b271c32fa67154eba41f5"
+ },
+ "verified": false,
+ "id": "a30d605b13c94574c88954ab7c8f3526a19eb47b3b8b271c32fa67154eba41f5",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AM7RH827Rv9Y4VnzCwsQjf3BW1PpcAwaSL",
+ "type": 0,
+ "vendorField": "Transaction 11",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022010cd7a8ce57ce47221da59de3f75e571fe56e86626d9017ca6b80a0ef3a3508b0220078ee6036fcd468eebadd85b0e0a0a6f8dbb952bdc24feef3ca4decf1e058155",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017fb982d2e09e48f462322061b520f5c4c95567c673045022100b2e14e6c3b68c1d819c8da960c6d47b25bdd25c4856c3e1bd718265b292080c5022040dabf8f95d8e98c962d1733d10b8cfa6749da5b2dd1a521b7778a0306f05d9c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "expiration": 0,
+ "recipientId": "AeiBZ4A1HDwLkDf2kKoBjpaTRH7BZapZ6F",
+ "signature": "3045022100b2e14e6c3b68c1d819c8da960c6d47b25bdd25c4856c3e1bd718265b292080c5022040dabf8f95d8e98c962d1733d10b8cfa6749da5b2dd1a521b7778a0306f05d9c",
+ "vendorField": "Transaction 12",
+ "id": "3050cde59690739f92f84b1f93a5f5bb25723f342fe6eccb0cb42558e80c1025"
+ },
+ "verified": false,
+ "id": "3050cde59690739f92f84b1f93a5f5bb25723f342fe6eccb0cb42558e80c1025",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AeiBZ4A1HDwLkDf2kKoBjpaTRH7BZapZ6F",
+ "type": 0,
+ "vendorField": "Transaction 12",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b2e14e6c3b68c1d819c8da960c6d47b25bdd25c4856c3e1bd718265b292080c5022040dabf8f95d8e98c962d1733d10b8cfa6749da5b2dd1a521b7778a0306f05d9c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017a045594b433e0b2c1f50dd607688a182d624d5a8304402206bae61e2254f416d8be818e16ad5eb508f2f5dadd9ecbca1ffe8ae427721974902204be5a5e1e9103898eb8e8922391fdfaa5c9d49c0015d28d2b7c462fb8f0838cc",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "expiration": 0,
+ "recipientId": "AWPJqdXeNXZ6xE2VTx9aym5sSt8dYAUdo7",
+ "signature": "304402206bae61e2254f416d8be818e16ad5eb508f2f5dadd9ecbca1ffe8ae427721974902204be5a5e1e9103898eb8e8922391fdfaa5c9d49c0015d28d2b7c462fb8f0838cc",
+ "vendorField": "Transaction 24",
+ "id": "3978312d18ab98a7f381b338961c591db5c3e2b648fd023c0c739d7de7d8f204"
+ },
+ "verified": false,
+ "id": "3978312d18ab98a7f381b338961c591db5c3e2b648fd023c0c739d7de7d8f204",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AWPJqdXeNXZ6xE2VTx9aym5sSt8dYAUdo7",
+ "type": 0,
+ "vendorField": "Transaction 24",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206bae61e2254f416d8be818e16ad5eb508f2f5dadd9ecbca1ffe8ae427721974902204be5a5e1e9103898eb8e8922391fdfaa5c9d49c0015d28d2b7c462fb8f0838cc",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017ae968f9f5c4b0427d350ec8502fb3867c52e79903044022009959bb250bab7e1fd893fe7fd841fb17431892df577953bafb31f16ab20babc02205859d9bef6e191aff9c75f799fd7bdae6d8646ecfadd3d8b29a7c13471dfc3e2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "expiration": 0,
+ "recipientId": "AXh1b3HhCcZs91mFA1H7KrBW9UXPUftGqg",
+ "signature": "3044022009959bb250bab7e1fd893fe7fd841fb17431892df577953bafb31f16ab20babc02205859d9bef6e191aff9c75f799fd7bdae6d8646ecfadd3d8b29a7c13471dfc3e2",
+ "vendorField": "Transaction 20",
+ "id": "8e07995a33f7c57997beb07887aa91c1e7e386c75ce865be31fd4e84dd0f6cd3"
+ },
+ "verified": false,
+ "id": "8e07995a33f7c57997beb07887aa91c1e7e386c75ce865be31fd4e84dd0f6cd3",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AXh1b3HhCcZs91mFA1H7KrBW9UXPUftGqg",
+ "type": 0,
+ "vendorField": "Transaction 20",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022009959bb250bab7e1fd893fe7fd841fb17431892df577953bafb31f16ab20babc02205859d9bef6e191aff9c75f799fd7bdae6d8646ecfadd3d8b29a7c13471dfc3e2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b000000000000000017d0c946c5e6e51a6c879677042422799a80e6e5fd304402204bdcd333bc3b41557b7b805a89f1e8b77fdb6d499736078ccb48c58f6431621e02202b727f6b6708d9459556c02950a350eb75e804bfdce9fcffb67c8cc95a8e532c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "expiration": 0,
+ "recipientId": "AaoqKLEhoHYZAm5tvxo9tEEVUmA8P98tEc",
+ "signature": "304402204bdcd333bc3b41557b7b805a89f1e8b77fdb6d499736078ccb48c58f6431621e02202b727f6b6708d9459556c02950a350eb75e804bfdce9fcffb67c8cc95a8e532c",
+ "vendorField": "Transaction 22",
+ "id": "b5cd93b7e0b6e659ebf10c0a39237e59a585e18f3c83e77e6f652dd4f73e234d"
+ },
+ "verified": false,
+ "id": "b5cd93b7e0b6e659ebf10c0a39237e59a585e18f3c83e77e6f652dd4f73e234d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AaoqKLEhoHYZAm5tvxo9tEEVUmA8P98tEc",
+ "type": 0,
+ "vendorField": "Transaction 22",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402204bdcd333bc3b41557b7b805a89f1e8b77fdb6d499736078ccb48c58f6431621e02202b727f6b6708d9459556c02950a350eb75e804bfdce9fcffb67c8cc95a8e532c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017bdf12bd72e628fa60082963673586e9b96a8725c3045022100ebf7c3d8abefd8fe062fe52d1a226179fe2165f013099b46ba3724c345d51ed202203d8444c63f2d7a0f36bacb1643b33cbd47b02d800d0dd233fbb6e64385779380",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "expiration": 0,
+ "recipientId": "AZ6CGdJHTdskyuchUvj9N6WodAPc5LE5V2",
+ "signature": "3045022100ebf7c3d8abefd8fe062fe52d1a226179fe2165f013099b46ba3724c345d51ed202203d8444c63f2d7a0f36bacb1643b33cbd47b02d800d0dd233fbb6e64385779380",
+ "vendorField": "Transaction 24",
+ "id": "459051ee27928fe97d06ab88850f794a60bf8e8cbcb688bb794d38f8e24d428f"
+ },
+ "verified": false,
+ "id": "459051ee27928fe97d06ab88850f794a60bf8e8cbcb688bb794d38f8e24d428f",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AZ6CGdJHTdskyuchUvj9N6WodAPc5LE5V2",
+ "type": 0,
+ "vendorField": "Transaction 24",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ebf7c3d8abefd8fe062fe52d1a226179fe2165f013099b46ba3724c345d51ed202203d8444c63f2d7a0f36bacb1643b33cbd47b02d800d0dd233fbb6e64385779380",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b0000000000000000177cb1f05f837c26723f4a3412fb2353226b245a7f3045022100e7b66efac64def1c8001cfb6b68e2b16f85c0f20684a760bc93aae385126d25702200253492c751f8c704eb042192cd524791ead3b5eda6e9afd0e71c58576127fb2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "expiration": 0,
+ "recipientId": "AT9CbhbmQRYPmuLe2d6aCVLzTFciN6SWFd",
+ "signature": "3045022100e7b66efac64def1c8001cfb6b68e2b16f85c0f20684a760bc93aae385126d25702200253492c751f8c704eb042192cd524791ead3b5eda6e9afd0e71c58576127fb2",
+ "vendorField": "Transaction 12",
+ "id": "ed5002c4698331b4b3c0a56d19f74dfaf2156acb20457cb3ead7ec3ead3c64e7"
+ },
+ "verified": false,
+ "id": "ed5002c4698331b4b3c0a56d19f74dfaf2156acb20457cb3ead7ec3ead3c64e7",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AT9CbhbmQRYPmuLe2d6aCVLzTFciN6SWFd",
+ "type": 0,
+ "vendorField": "Transaction 12",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e7b66efac64def1c8001cfb6b68e2b16f85c0f20684a760bc93aae385126d25702200253492c751f8c704eb042192cd524791ead3b5eda6e9afd0e71c58576127fb2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017ef886d3c11609a79d95ae6f1ec733c0dcd9a6ebe3045022100f5396a022fccf2e70b2eb3873271e9021dfbd5effd706997f704e24ad722d6f502201343d51f913ecbc9a9ea6be1a97e5531f98107132a9ffe6a0e5b860d4333d039",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "expiration": 0,
+ "recipientId": "AdcQaSQrQskuN7yrKFcPhV9kkwpCBSgc5z",
+ "signature": "3045022100f5396a022fccf2e70b2eb3873271e9021dfbd5effd706997f704e24ad722d6f502201343d51f913ecbc9a9ea6be1a97e5531f98107132a9ffe6a0e5b860d4333d039",
+ "vendorField": "Transaction 30",
+ "id": "9844e01a0333c7996aa842b0e328a3b76035cf3103416e32116713aadc656f46"
+ },
+ "verified": false,
+ "id": "9844e01a0333c7996aa842b0e328a3b76035cf3103416e32116713aadc656f46",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdcQaSQrQskuN7yrKFcPhV9kkwpCBSgc5z",
+ "type": 0,
+ "vendorField": "Transaction 30",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f5396a022fccf2e70b2eb3873271e9021dfbd5effd706997f704e24ad722d6f502201343d51f913ecbc9a9ea6be1a97e5531f98107132a9ffe6a0e5b860d4333d039",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b000000000000000017841a086dcdb01b5a3688648e467449ff4ccc71473045022100e9221f75315a65c2b7fadba4dd5f668df15f339061b8e862e926fee8d7990e44022005f3a33f1a680fb9822c5a0ccdaee45a8db4c3f2fabe5b81ccd4c0e93ea8daa6",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "expiration": 0,
+ "recipientId": "ATpN2csdQKh7TqLYNWuwdVTYrVVWSeabtC",
+ "signature": "3045022100e9221f75315a65c2b7fadba4dd5f668df15f339061b8e862e926fee8d7990e44022005f3a33f1a680fb9822c5a0ccdaee45a8db4c3f2fabe5b81ccd4c0e93ea8daa6",
+ "vendorField": "Transaction 15",
+ "id": "72a1d155ec6b8463c423489cd14996967eec26fd08cd4c35ff67a742b00c7c12"
+ },
+ "verified": false,
+ "id": "72a1d155ec6b8463c423489cd14996967eec26fd08cd4c35ff67a742b00c7c12",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATpN2csdQKh7TqLYNWuwdVTYrVVWSeabtC",
+ "type": 0,
+ "vendorField": "Transaction 15",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e9221f75315a65c2b7fadba4dd5f668df15f339061b8e862e926fee8d7990e44022005f3a33f1a680fb9822c5a0ccdaee45a8db4c3f2fabe5b81ccd4c0e93ea8daa6",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017559f8ef0575e8ade7a3b7040f02f54652e95bc943045022100c21caaecd718c72077f0d09e8ce04293d1ac27473c0545f3e3b1c8d0283b99ab022000c1a1a65dec757dfae0bc41d220a142aeb9ee311f14cbebe6bf977a0cfbe53b",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "expiration": 0,
+ "recipientId": "APacE7VarcoLLYwDDnApGiT3Zgm8uZizos",
+ "signature": "3045022100c21caaecd718c72077f0d09e8ce04293d1ac27473c0545f3e3b1c8d0283b99ab022000c1a1a65dec757dfae0bc41d220a142aeb9ee311f14cbebe6bf977a0cfbe53b",
+ "vendorField": "Transaction 19",
+ "id": "363f20cbd56703fbd06227d0103eb0db40ae109c5f2c877fd8a215f572b9d626"
+ },
+ "verified": false,
+ "id": "363f20cbd56703fbd06227d0103eb0db40ae109c5f2c877fd8a215f572b9d626",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APacE7VarcoLLYwDDnApGiT3Zgm8uZizos",
+ "type": 0,
+ "vendorField": "Transaction 19",
+ "vendorFieldHex": "5472616e73616374696f6e203139",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c21caaecd718c72077f0d09e8ce04293d1ac27473c0545f3e3b1c8d0283b99ab022000c1a1a65dec757dfae0bc41d220a142aeb9ee311f14cbebe6bf977a0cfbe53b",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b00000000000000001797982b30f6b666ec86b0e2fdc3b08a3b7479cbf0304402202e93971c4bef3ec7a4117177ad3be74b0067dc863d136d14e50e43feb62a6515022044bca36c4e4df2d40bb1f610eaeb0ce814acc0acbeb4c46c876a10d533071619",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "expiration": 0,
+ "recipientId": "AVbRyLEdaiKoiYEojRAt3xzP3bXxRwEQKG",
+ "signature": "304402202e93971c4bef3ec7a4117177ad3be74b0067dc863d136d14e50e43feb62a6515022044bca36c4e4df2d40bb1f610eaeb0ce814acc0acbeb4c46c876a10d533071619",
+ "vendorField": "Transaction 38",
+ "id": "0b95146bd6388cc6b678402ff4db5ac0a104647b4bee8762e0c846c3d5f57d98"
+ },
+ "verified": false,
+ "id": "0b95146bd6388cc6b678402ff4db5ac0a104647b4bee8762e0c846c3d5f57d98",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AVbRyLEdaiKoiYEojRAt3xzP3bXxRwEQKG",
+ "type": 0,
+ "vendorField": "Transaction 38",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402202e93971c4bef3ec7a4117177ad3be74b0067dc863d136d14e50e43feb62a6515022044bca36c4e4df2d40bb1f610eaeb0ce814acc0acbeb4c46c876a10d533071619",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b000000000000000017b406dcd28615b6787189c1c4e9f6e3348037d8b03045022100d09d4720d0517c00672733b12f328a6a2d98d7ab150bdd810c82df4a0e021df7022066aa615c7b9e481ad31c06c77cb6f3c92018c6ed8c120b2cf6e14320679b5738",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "expiration": 0,
+ "recipientId": "AYBmVrBPAFVqpy63Yrov7Sg3TiMR8qnY6k",
+ "signature": "3045022100d09d4720d0517c00672733b12f328a6a2d98d7ab150bdd810c82df4a0e021df7022066aa615c7b9e481ad31c06c77cb6f3c92018c6ed8c120b2cf6e14320679b5738",
+ "vendorField": "Transaction 32",
+ "id": "d765387d4dc54a6362643beaae925500ec0a27a8c06ebf2f61a60e26500bc9c9"
+ },
+ "verified": false,
+ "id": "d765387d4dc54a6362643beaae925500ec0a27a8c06ebf2f61a60e26500bc9c9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYBmVrBPAFVqpy63Yrov7Sg3TiMR8qnY6k",
+ "type": 0,
+ "vendorField": "Transaction 32",
+ "vendorFieldHex": "5472616e73616374696f6e203332",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100d09d4720d0517c00672733b12f328a6a2d98d7ab150bdd810c82df4a0e021df7022066aa615c7b9e481ad31c06c77cb6f3c92018c6ed8c120b2cf6e14320679b5738",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b00000000000000001798c09cff1628733ff3baca4b61e838abd4fd17663044022056d79e8035d942eaf1c5de8722904e4ebdf4dc05a72ed6bb21fd16dc6808044d02202ec527272752ce4d75886dd0e39690e6bbe2a652db51addd4279dfd149f4dbe4",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "expiration": 0,
+ "recipientId": "AVhZ6fHoZAc3vdBLor1izJFFxRJaBMj7xy",
+ "signature": "3044022056d79e8035d942eaf1c5de8722904e4ebdf4dc05a72ed6bb21fd16dc6808044d02202ec527272752ce4d75886dd0e39690e6bbe2a652db51addd4279dfd149f4dbe4",
+ "vendorField": "Transaction 17",
+ "id": "f080cb1b2b377d7722a0b84628da10422bc7d35485979038b5352de2a743d27e"
+ },
+ "verified": false,
+ "id": "f080cb1b2b377d7722a0b84628da10422bc7d35485979038b5352de2a743d27e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AVhZ6fHoZAc3vdBLor1izJFFxRJaBMj7xy",
+ "type": 0,
+ "vendorField": "Transaction 17",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022056d79e8035d942eaf1c5de8722904e4ebdf4dc05a72ed6bb21fd16dc6808044d02202ec527272752ce4d75886dd0e39690e6bbe2a652db51addd4279dfd149f4dbe4",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b0000000000000000177a10018a8e0b53ef2ba10a669892a7ed8120e4d43045022100dd67d44ee667c3223ca757fed91fa2cbe1cb9cb42109fabce6eb83dcfd7d8747022045811f62696dc8c6af6554f55d8a9c5f2bd3f80271897c18dfdf03f60349036c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "expiration": 0,
+ "recipientId": "ASuHG25ddYuaaF6hACzHf3mAKd4b66kjfm",
+ "signature": "3045022100dd67d44ee667c3223ca757fed91fa2cbe1cb9cb42109fabce6eb83dcfd7d8747022045811f62696dc8c6af6554f55d8a9c5f2bd3f80271897c18dfdf03f60349036c",
+ "vendorField": "Transaction 25",
+ "id": "2e0fa5c36c09d4eda9cfa53ce69ddcc6e4d2d113f9562c0a68c90163b54625a8"
+ },
+ "verified": false,
+ "id": "2e0fa5c36c09d4eda9cfa53ce69ddcc6e4d2d113f9562c0a68c90163b54625a8",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASuHG25ddYuaaF6hACzHf3mAKd4b66kjfm",
+ "type": 0,
+ "vendorField": "Transaction 25",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100dd67d44ee667c3223ca757fed91fa2cbe1cb9cb42109fabce6eb83dcfd7d8747022045811f62696dc8c6af6554f55d8a9c5f2bd3f80271897c18dfdf03f60349036c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000177b5daa22d0d47c8aabd77e099011a5c5de5ec0d130440220348a8a248a43b290989893e559fe0a041d77357a62edc4dc990555da00fa817202206aa92a6d3b06695e7600550540204c7b8abedf0b686449a07a4e6a70c1fac395",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "expiration": 0,
+ "recipientId": "AT2Ay2KRUTjvBm7p3NbWWyQhhU4P1hH2Ns",
+ "signature": "30440220348a8a248a43b290989893e559fe0a041d77357a62edc4dc990555da00fa817202206aa92a6d3b06695e7600550540204c7b8abedf0b686449a07a4e6a70c1fac395",
+ "vendorField": "Transaction 11",
+ "id": "da77cf38c9507f29b4bee0e29b6fa8b49fcd636fd5261cf773efc1c1332f8ad3"
+ },
+ "verified": false,
+ "id": "da77cf38c9507f29b4bee0e29b6fa8b49fcd636fd5261cf773efc1c1332f8ad3",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AT2Ay2KRUTjvBm7p3NbWWyQhhU4P1hH2Ns",
+ "type": 0,
+ "vendorField": "Transaction 11",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220348a8a248a43b290989893e559fe0a041d77357a62edc4dc990555da00fa817202206aa92a6d3b06695e7600550540204c7b8abedf0b686449a07a4e6a70c1fac395",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b0000000000000000179c89afefd44cae11fa228af7cfe1470208eeef4e304502210099c1981e0e8f968bc115039c025eeb94d371b241a19d98002821f10002c8aeab022046e9aed58e820cf371e194d408d01090bb7b867ac918d47015225ab62157e462",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "expiration": 0,
+ "recipientId": "AW3a16sCCRn2LSnGnWZtQCPUnZMsdE6uVU",
+ "signature": "304502210099c1981e0e8f968bc115039c025eeb94d371b241a19d98002821f10002c8aeab022046e9aed58e820cf371e194d408d01090bb7b867ac918d47015225ab62157e462",
+ "vendorField": "Transaction 7",
+ "id": "f0971e62d506eefbced73550c73dd21423bfc9d9d76ae49ad7b6a52f0d894e74"
+ },
+ "verified": false,
+ "id": "f0971e62d506eefbced73550c73dd21423bfc9d9d76ae49ad7b6a52f0d894e74",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AW3a16sCCRn2LSnGnWZtQCPUnZMsdE6uVU",
+ "type": 0,
+ "vendorField": "Transaction 7",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210099c1981e0e8f968bc115039c025eeb94d371b241a19d98002821f10002c8aeab022046e9aed58e820cf371e194d408d01090bb7b867ac918d47015225ab62157e462",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017fe49b6f1b2bacbbafe7263bf3f4c66c2332e71bb304402206b86a185dce4e776bb5f9f95d4b5cec4eba62873fe88ccdb864946fbbe60cc7902200a821a5bb1d0f2d983f91d282ce60826b7c616005ff6db0d4dbe3ea11ddf8ee2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "expiration": 0,
+ "recipientId": "AexRb1FqLzBZPGVsLWSYZ1haE5NqUZuNA6",
+ "signature": "304402206b86a185dce4e776bb5f9f95d4b5cec4eba62873fe88ccdb864946fbbe60cc7902200a821a5bb1d0f2d983f91d282ce60826b7c616005ff6db0d4dbe3ea11ddf8ee2",
+ "vendorField": "Transaction 2",
+ "id": "3a7f71110427141aec4353f9ce6ec167ae9d7d7df0b020474b59876792577b30"
+ },
+ "verified": false,
+ "id": "3a7f71110427141aec4353f9ce6ec167ae9d7d7df0b020474b59876792577b30",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AexRb1FqLzBZPGVsLWSYZ1haE5NqUZuNA6",
+ "type": 0,
+ "vendorField": "Transaction 2",
+ "vendorFieldHex": "5472616e73616374696f6e2032",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206b86a185dce4e776bb5f9f95d4b5cec4eba62873fe88ccdb864946fbbe60cc7902200a821a5bb1d0f2d983f91d282ce60826b7c616005ff6db0d4dbe3ea11ddf8ee2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b000000000000000017bb95af90f1587796c7d844c28d016ecb96cf7b323045022100cd1b94922995ba4e16935f2380234ce7b6da26fcbab29f3e957aeb8ba8e8d87d022071757f37077132b0db0f217b657791ebb971fbaa7ff95e183ba3cb5858d525f4",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "expiration": 0,
+ "recipientId": "AYsjKisSCaZ1rVMo8ciezerDdWrZ8RYVfc",
+ "signature": "3045022100cd1b94922995ba4e16935f2380234ce7b6da26fcbab29f3e957aeb8ba8e8d87d022071757f37077132b0db0f217b657791ebb971fbaa7ff95e183ba3cb5858d525f4",
+ "vendorField": "Transaction 37",
+ "id": "9042462c36186b082e1f503081c0695c9f9d49a7dac64cf2804097812f769b03"
+ },
+ "verified": false,
+ "id": "9042462c36186b082e1f503081c0695c9f9d49a7dac64cf2804097812f769b03",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYsjKisSCaZ1rVMo8ciezerDdWrZ8RYVfc",
+ "type": 0,
+ "vendorField": "Transaction 37",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100cd1b94922995ba4e16935f2380234ce7b6da26fcbab29f3e957aeb8ba8e8d87d022071757f37077132b0db0f217b657791ebb971fbaa7ff95e183ba3cb5858d525f4",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b0000000000000000170e2059d5f792f009d8457f67d29d72ebd5cd8e35304402200a1537addb2f2f2e434a8c6e4ab32acc59f7653226218b7fe9bd5f82f98778b202205c77341d3ffc0e049aac2a476ee978acca2829d111552645797613f3ca616a4b",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "expiration": 0,
+ "recipientId": "AH4ZrxwiuaL9gWxTguvCtfgQdYWHPegK2B",
+ "signature": "304402200a1537addb2f2f2e434a8c6e4ab32acc59f7653226218b7fe9bd5f82f98778b202205c77341d3ffc0e049aac2a476ee978acca2829d111552645797613f3ca616a4b",
+ "vendorField": "Transaction 33",
+ "id": "c75d1303e71739561e9515e3d6ecb4e9f7627968c5b16b54bb03ef3933da224d"
+ },
+ "verified": false,
+ "id": "c75d1303e71739561e9515e3d6ecb4e9f7627968c5b16b54bb03ef3933da224d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AH4ZrxwiuaL9gWxTguvCtfgQdYWHPegK2B",
+ "type": 0,
+ "vendorField": "Transaction 33",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402200a1537addb2f2f2e434a8c6e4ab32acc59f7653226218b7fe9bd5f82f98778b202205c77341d3ffc0e049aac2a476ee978acca2829d111552645797613f3ca616a4b",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b00000000000000001756cf720576a7f775bd1d4ac4f74bb17e45138740304402206af353161fc543cfab5d90a58dba1438571506cad30a5c78e49cd2f28ebd542002206ab2833769213fa2603ceb14e1bf88a1647dfa4a28e5459fbc47c829adc34026",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "expiration": 0,
+ "recipientId": "APgtGZ7i6cju7pAw5z6Wa3igbXyTxvmCym",
+ "signature": "304402206af353161fc543cfab5d90a58dba1438571506cad30a5c78e49cd2f28ebd542002206ab2833769213fa2603ceb14e1bf88a1647dfa4a28e5459fbc47c829adc34026",
+ "vendorField": "Transaction 38",
+ "id": "2b8dcee1e8bbb486509b85350bebd747e6e837e29ea674581aa529299d74fac6"
+ },
+ "verified": false,
+ "id": "2b8dcee1e8bbb486509b85350bebd747e6e837e29ea674581aa529299d74fac6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APgtGZ7i6cju7pAw5z6Wa3igbXyTxvmCym",
+ "type": 0,
+ "vendorField": "Transaction 38",
+ "vendorFieldHex": "5472616e73616374696f6e203338",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402206af353161fc543cfab5d90a58dba1438571506cad30a5c78e49cd2f28ebd542002206ab2833769213fa2603ceb14e1bf88a1647dfa4a28e5459fbc47c829adc34026",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017b858992acb67521bcafc16e7d4d1e93005c380443045022100ef53b0b4dee8630494f812ef6056c5a5821eb2478a0cabf55039d35db9ac3838022061845377b9eee8d4c20cf1a99c22f424a9ea21d842bc85cdaa8c4f4ba05b1f35",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "expiration": 0,
+ "recipientId": "AYac7iLi12Dsgi1J9cT5bfDp22ruSDnVdZ",
+ "signature": "3045022100ef53b0b4dee8630494f812ef6056c5a5821eb2478a0cabf55039d35db9ac3838022061845377b9eee8d4c20cf1a99c22f424a9ea21d842bc85cdaa8c4f4ba05b1f35",
+ "vendorField": "Transaction 12",
+ "id": "a1588a7cf0a141a10e0c75b3b586b7b3137aec5781a904d2d2a749f5b6f98447"
+ },
+ "verified": false,
+ "id": "a1588a7cf0a141a10e0c75b3b586b7b3137aec5781a904d2d2a749f5b6f98447",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYac7iLi12Dsgi1J9cT5bfDp22ruSDnVdZ",
+ "type": 0,
+ "vendorField": "Transaction 12",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ef53b0b4dee8630494f812ef6056c5a5821eb2478a0cabf55039d35db9ac3838022061845377b9eee8d4c20cf1a99c22f424a9ea21d842bc85cdaa8c4f4ba05b1f35",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b0000000000000000175060e8c493e5772f02b9e88e3973ae1ad35c4b6d3043021f0ef0634992f4b2ad6b038e58d247ef4476a1eda7f2ce48ae48e24a1d3bb54902203a769d1199251a4df6d83d0db299e3bd0c1363c51fa3e9f545dd5c13d730bbd1",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "expiration": 0,
+ "recipientId": "AP6soDZoHGm5j48dqsSeJkn8KEnPi5HB9e",
+ "signature": "3043021f0ef0634992f4b2ad6b038e58d247ef4476a1eda7f2ce48ae48e24a1d3bb54902203a769d1199251a4df6d83d0db299e3bd0c1363c51fa3e9f545dd5c13d730bbd1",
+ "vendorField": "Transaction 27",
+ "id": "bb0dcfd1488eaed1a2c99d776cb8fbed8b56d7bff33ffd479299761ff945580d"
+ },
+ "verified": false,
+ "id": "bb0dcfd1488eaed1a2c99d776cb8fbed8b56d7bff33ffd479299761ff945580d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AP6soDZoHGm5j48dqsSeJkn8KEnPi5HB9e",
+ "type": 0,
+ "vendorField": "Transaction 27",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3043021f0ef0634992f4b2ad6b038e58d247ef4476a1eda7f2ce48ae48e24a1d3bb54902203a769d1199251a4df6d83d0db299e3bd0c1363c51fa3e9f545dd5c13d730bbd1",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017229c0d0469156c68ff3bedef21be4a5a652549f630440220435a475d12adb82f40a30520bfde3072e9a8fbf016529faacfde40dea4c7603102202d7d784c34f089ce93af3a902936de4960005d17a9d64f096a87364d4cd98625",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "expiration": 0,
+ "recipientId": "AJvsZd83zVB3S1JWjKg3bpHjuGNSxSHzo3",
+ "signature": "30440220435a475d12adb82f40a30520bfde3072e9a8fbf016529faacfde40dea4c7603102202d7d784c34f089ce93af3a902936de4960005d17a9d64f096a87364d4cd98625",
+ "vendorField": "Transaction 25",
+ "id": "04b65d78d21317a83e4f6b14ec7bcdcbd51090ccf76f0a4d6e53b1e65d4ef5cf"
+ },
+ "verified": false,
+ "id": "04b65d78d21317a83e4f6b14ec7bcdcbd51090ccf76f0a4d6e53b1e65d4ef5cf",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AJvsZd83zVB3S1JWjKg3bpHjuGNSxSHzo3",
+ "type": 0,
+ "vendorField": "Transaction 25",
+ "vendorFieldHex": "5472616e73616374696f6e203235",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220435a475d12adb82f40a30520bfde3072e9a8fbf016529faacfde40dea4c7603102202d7d784c34f089ce93af3a902936de4960005d17a9d64f096a87364d4cd98625",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b000000000000000017e7f5bc11a5b2237519bf751220a5968483b3dfc93045022100ce0abaa1e6656daf0cab3652b8f9355633ac16beb264dff7f076c9760b4259d3022070a870ab901efa8fa4e54b9c11fe201716ad922aa73e69cc9b7c88b71acc73ee",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "expiration": 0,
+ "recipientId": "AcvN7kfFEbBX9uBRi9dqmToM94W7Bs2oVV",
+ "signature": "3045022100ce0abaa1e6656daf0cab3652b8f9355633ac16beb264dff7f076c9760b4259d3022070a870ab901efa8fa4e54b9c11fe201716ad922aa73e69cc9b7c88b71acc73ee",
+ "vendorField": "Transaction 7",
+ "id": "823ee127cfec5125bd6a32bacfbfc231f25b2bb294962f353a39ffe52bf2a2e9"
+ },
+ "verified": false,
+ "id": "823ee127cfec5125bd6a32bacfbfc231f25b2bb294962f353a39ffe52bf2a2e9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AcvN7kfFEbBX9uBRi9dqmToM94W7Bs2oVV",
+ "type": 0,
+ "vendorField": "Transaction 7",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ce0abaa1e6656daf0cab3652b8f9355633ac16beb264dff7f076c9760b4259d3022070a870ab901efa8fa4e54b9c11fe201716ad922aa73e69cc9b7c88b71acc73ee",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b000000000000000017f0157c4c48288c93e420b6932ee7c10c119d7d983045022100c33a6b77cd9cd63bc5257537dc90f0506ecee8fd0ad34facfed9132c54b9c17902205546cc10e43e60167964f37881412c1f4e742be6f1218749e63e1bdbd5be5023",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "expiration": 0,
+ "recipientId": "AdfKZNwNHLMw12KhtfSrWpSaRAXbTqJgwC",
+ "signature": "3045022100c33a6b77cd9cd63bc5257537dc90f0506ecee8fd0ad34facfed9132c54b9c17902205546cc10e43e60167964f37881412c1f4e742be6f1218749e63e1bdbd5be5023",
+ "vendorField": "Transaction 39",
+ "id": "5b9a04a10927103d683007a7ad99692684bcd07c8f149adb0a24c0852cf7b3d0"
+ },
+ "verified": false,
+ "id": "5b9a04a10927103d683007a7ad99692684bcd07c8f149adb0a24c0852cf7b3d0",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AdfKZNwNHLMw12KhtfSrWpSaRAXbTqJgwC",
+ "type": 0,
+ "vendorField": "Transaction 39",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c33a6b77cd9cd63bc5257537dc90f0506ecee8fd0ad34facfed9132c54b9c17902205546cc10e43e60167964f37881412c1f4e742be6f1218749e63e1bdbd5be5023",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b0000000000000000177f106853bea88b3b4ca2daaa2e9677ffb019d274304402205d25a0dbb4bd258e2392bf3ac4faedea4e6d24bdbbcdae653641db5eb532cf9e02200a38c7ada6731f470861b929d5f15f9c63a56a1985345b9d86ae005acf13b073",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "expiration": 0,
+ "recipientId": "ATMj7skUvdwcJ7c1s8yjbBu8ZQ8o5BdRyU",
+ "signature": "304402205d25a0dbb4bd258e2392bf3ac4faedea4e6d24bdbbcdae653641db5eb532cf9e02200a38c7ada6731f470861b929d5f15f9c63a56a1985345b9d86ae005acf13b073",
+ "vendorField": "Transaction 26",
+ "id": "53c85885bc55b42afb7e8768f53b3981d711a88419649a930cbb28aa0fc74faa"
+ },
+ "verified": false,
+ "id": "53c85885bc55b42afb7e8768f53b3981d711a88419649a930cbb28aa0fc74faa",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ATMj7skUvdwcJ7c1s8yjbBu8ZQ8o5BdRyU",
+ "type": 0,
+ "vendorField": "Transaction 26",
+ "vendorFieldHex": "5472616e73616374696f6e203236",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402205d25a0dbb4bd258e2392bf3ac4faedea4e6d24bdbbcdae653641db5eb532cf9e02200a38c7ada6731f470861b929d5f15f9c63a56a1985345b9d86ae005acf13b073",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b0000000000000000172ff8cc458a6815c2c15687aadc6351bd3dabb1993045022100d2aa5946b05215cd1cca45fefe390ec78081406d6b8f13e724cff599f046d6f1022035a2e6ff995bac39bae2dae1232f7ea2d0ba5897b1fe0ffccf1a0c1172d89308",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "expiration": 0,
+ "recipientId": "AL9XTJEn4nMjvwX9oWqCh7oLxCn1Z25xAY",
+ "signature": "3045022100d2aa5946b05215cd1cca45fefe390ec78081406d6b8f13e724cff599f046d6f1022035a2e6ff995bac39bae2dae1232f7ea2d0ba5897b1fe0ffccf1a0c1172d89308",
+ "vendorField": "Transaction 15",
+ "id": "07aa07921ae7251b6c54abd394439031eed86272509fcecc1f9bde91d25460e8"
+ },
+ "verified": false,
+ "id": "07aa07921ae7251b6c54abd394439031eed86272509fcecc1f9bde91d25460e8",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AL9XTJEn4nMjvwX9oWqCh7oLxCn1Z25xAY",
+ "type": 0,
+ "vendorField": "Transaction 15",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100d2aa5946b05215cd1cca45fefe390ec78081406d6b8f13e724cff599f046d6f1022035a2e6ff995bac39bae2dae1232f7ea2d0ba5897b1fe0ffccf1a0c1172d89308",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b0000000000000000174e4248b0e82e17bce3d97fd49c9b9190e252b62d3045022100e88aaf7ef7fce75a0554d61932375ec058d4f0e35979bc2d3114e8f1c9d08040022051dd6c51e46258ce1b5c6c57e84194325629213b7f9bcabb879d51ac037aa6b5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "expiration": 0,
+ "recipientId": "ANufkx6DZAWtpEVfnhYrRdtW5T5brh4ro1",
+ "signature": "3045022100e88aaf7ef7fce75a0554d61932375ec058d4f0e35979bc2d3114e8f1c9d08040022051dd6c51e46258ce1b5c6c57e84194325629213b7f9bcabb879d51ac037aa6b5",
+ "vendorField": "Transaction 15",
+ "id": "3e9f6aff929dfc4ceaad1ec02cfdc7991c0409082278b5724b1344dee50f0912"
+ },
+ "verified": false,
+ "id": "3e9f6aff929dfc4ceaad1ec02cfdc7991c0409082278b5724b1344dee50f0912",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ANufkx6DZAWtpEVfnhYrRdtW5T5brh4ro1",
+ "type": 0,
+ "vendorField": "Transaction 15",
+ "vendorFieldHex": "5472616e73616374696f6e203135",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e88aaf7ef7fce75a0554d61932375ec058d4f0e35979bc2d3114e8f1c9d08040022051dd6c51e46258ce1b5c6c57e84194325629213b7f9bcabb879d51ac037aa6b5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b000000000000000017d320a267170f665d111adaa1960d95f9708e7d073045022100accbc39d4f4e1e863962caf8e6cc548dea20ee1afa9e6e1806e067da9ce4d3d202200a1a38c83468157af05584a85ccfebb9e4c551c2237befbfd54a51fdcc96b168",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "expiration": 0,
+ "recipientId": "Ab2DKT8n2i87bXtwnGedLhfFCW83qWwWHf",
+ "signature": "3045022100accbc39d4f4e1e863962caf8e6cc548dea20ee1afa9e6e1806e067da9ce4d3d202200a1a38c83468157af05584a85ccfebb9e4c551c2237befbfd54a51fdcc96b168",
+ "vendorField": "Transaction 35",
+ "id": "807d8c88ae5f5addc24abfbea35c14d4e8c1a7557ba094dd5c63a8c4cbf97517"
+ },
+ "verified": false,
+ "id": "807d8c88ae5f5addc24abfbea35c14d4e8c1a7557ba094dd5c63a8c4cbf97517",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Ab2DKT8n2i87bXtwnGedLhfFCW83qWwWHf",
+ "type": 0,
+ "vendorField": "Transaction 35",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100accbc39d4f4e1e863962caf8e6cc548dea20ee1afa9e6e1806e067da9ce4d3d202200a1a38c83468157af05584a85ccfebb9e4c551c2237befbfd54a51fdcc96b168",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b00000000000000001768c6c5f98b69bd4d2f43ccd0d7842d24d0ddf23e3045022100c7c0193c159cbde8a09dead106919732a3b4e30a5cf51617314133454c65e1ef022074b355a40e59075104f694c19d785c2b8ccd3c1ffc0ee17fc8a2c919bbdc3ef2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "expiration": 0,
+ "recipientId": "ARKt3Tmo23EYt7cTkrZiaoW9CZ5Z7mKcHm",
+ "signature": "3045022100c7c0193c159cbde8a09dead106919732a3b4e30a5cf51617314133454c65e1ef022074b355a40e59075104f694c19d785c2b8ccd3c1ffc0ee17fc8a2c919bbdc3ef2",
+ "vendorField": "Transaction 18",
+ "id": "86fea37d45a620a5920849271c85f1cba4525f93c2f18785986f8d93e2c33152"
+ },
+ "verified": false,
+ "id": "86fea37d45a620a5920849271c85f1cba4525f93c2f18785986f8d93e2c33152",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ARKt3Tmo23EYt7cTkrZiaoW9CZ5Z7mKcHm",
+ "type": 0,
+ "vendorField": "Transaction 18",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c7c0193c159cbde8a09dead106919732a3b4e30a5cf51617314133454c65e1ef022074b355a40e59075104f694c19d785c2b8ccd3c1ffc0ee17fc8a2c919bbdc3ef2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20343000c2eb0b00000000000000001730cc52afbe325a52f2bf8983937210c52650a5ed3045022100eaec6482374438ccc03a12bfc06b8ada1c4f2769d1b8451eb643213be07f25f802207bf68341be74bc813eb1ad74a0b56d16e27cb5954a61aa971296f81823d65d6b",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "expiration": 0,
+ "recipientId": "ALDtrKKeA8bgmiJ8obDPkEvjfWT1AUhf1Q",
+ "signature": "3045022100eaec6482374438ccc03a12bfc06b8ada1c4f2769d1b8451eb643213be07f25f802207bf68341be74bc813eb1ad74a0b56d16e27cb5954a61aa971296f81823d65d6b",
+ "vendorField": "Transaction 40",
+ "id": "7f1dd47f8316475a37888d72537091ab3529778127d8752f2689d585ecad248f"
+ },
+ "verified": false,
+ "id": "7f1dd47f8316475a37888d72537091ab3529778127d8752f2689d585ecad248f",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALDtrKKeA8bgmiJ8obDPkEvjfWT1AUhf1Q",
+ "type": 0,
+ "vendorField": "Transaction 40",
+ "vendorFieldHex": "5472616e73616374696f6e203430",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100eaec6482374438ccc03a12bfc06b8ada1c4f2769d1b8451eb643213be07f25f802207bf68341be74bc813eb1ad74a0b56d16e27cb5954a61aa971296f81823d65d6b",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017042b228f903384321ef7e2e38f413a9049052170304502210090998a023faa9149d75cb51f38f98adf38e39cbb5dc15cdfcc0eb5beff9c2eec022034f33fa9ccb43a93965804e455e08adb9f00910063cd5a87ee70ef376a4c5db7",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "expiration": 0,
+ "recipientId": "AG9v2LZyt8f6dAjb72z1MCoy1MVQs4gP3C",
+ "signature": "304502210090998a023faa9149d75cb51f38f98adf38e39cbb5dc15cdfcc0eb5beff9c2eec022034f33fa9ccb43a93965804e455e08adb9f00910063cd5a87ee70ef376a4c5db7",
+ "vendorField": "Transaction 30",
+ "id": "050f1ee15b00a7cd89a5c7faf7af6c3522d3aa7413eadf272eb17611613ef242"
+ },
+ "verified": false,
+ "id": "050f1ee15b00a7cd89a5c7faf7af6c3522d3aa7413eadf272eb17611613ef242",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AG9v2LZyt8f6dAjb72z1MCoy1MVQs4gP3C",
+ "type": 0,
+ "vendorField": "Transaction 30",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210090998a023faa9149d75cb51f38f98adf38e39cbb5dc15cdfcc0eb5beff9c2eec022034f33fa9ccb43a93965804e455e08adb9f00910063cd5a87ee70ef376a4c5db7",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b0000000000000000175a821c2ba6fd549322c6a7994ac6fe5a9417f9b930440220494a3190e1fc982d01badc71006b776df163583ae1ac365d7dc4cda0a061373a02201aa1f3bb1d2ba405750c585784caf24f25b83f57fc11e0f675d12d2fe3d666de",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "expiration": 0,
+ "recipientId": "AQ2SKy3vxivCKiELzHJPdnmEcm5VYFdFcw",
+ "signature": "30440220494a3190e1fc982d01badc71006b776df163583ae1ac365d7dc4cda0a061373a02201aa1f3bb1d2ba405750c585784caf24f25b83f57fc11e0f675d12d2fe3d666de",
+ "vendorField": "Transaction 21",
+ "id": "e2d37d92b291a8286fe0fd345d1235769a083b627b0927adbe6044491101a5e0"
+ },
+ "verified": false,
+ "id": "e2d37d92b291a8286fe0fd345d1235769a083b627b0927adbe6044491101a5e0",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AQ2SKy3vxivCKiELzHJPdnmEcm5VYFdFcw",
+ "type": 0,
+ "vendorField": "Transaction 21",
+ "vendorFieldHex": "5472616e73616374696f6e203231",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220494a3190e1fc982d01badc71006b776df163583ae1ac365d7dc4cda0a061373a02201aa1f3bb1d2ba405750c585784caf24f25b83f57fc11e0f675d12d2fe3d666de",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b00000000000000001759854238ed033c028098256d30c4bd03e090f233304502210080d7de0342c4855c0c1de956e716623341fce1b4856a1c6e00ebba7bdeee4b6d0220172e8ec11914e21ed369fad2615dfa95ce09b3eec8f3a03e453b4050b7baefd6",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "expiration": 0,
+ "recipientId": "APwDRYfREvgg1ymd53WDd4VtLBw2gEx1fY",
+ "signature": "304502210080d7de0342c4855c0c1de956e716623341fce1b4856a1c6e00ebba7bdeee4b6d0220172e8ec11914e21ed369fad2615dfa95ce09b3eec8f3a03e453b4050b7baefd6",
+ "vendorField": "Transaction 35",
+ "id": "78f40efe33dd9aeab22596360a534f4cae1fca08342540f3666e1a66ee40fc59"
+ },
+ "verified": false,
+ "id": "78f40efe33dd9aeab22596360a534f4cae1fca08342540f3666e1a66ee40fc59",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APwDRYfREvgg1ymd53WDd4VtLBw2gEx1fY",
+ "type": 0,
+ "vendorField": "Transaction 35",
+ "vendorFieldHex": "5472616e73616374696f6e203335",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210080d7de0342c4855c0c1de956e716623341fce1b4856a1c6e00ebba7bdeee4b6d0220172e8ec11914e21ed369fad2615dfa95ce09b3eec8f3a03e453b4050b7baefd6",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017927ac44a3d0a64edebbfe4dafc8510001eff90a530440220257e5ed2aaea59a5f1a4ae74acc59dd1c68a821ecd9eb506ae7fbb01cfe6511f02203eadfccae466fda0800b0cd401284f79016742f94cb6b8e23766e625e7634855",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "expiration": 0,
+ "recipientId": "AV8PNVLjPSfuS8xijSPsxHFQ57HQpBUV67",
+ "signature": "30440220257e5ed2aaea59a5f1a4ae74acc59dd1c68a821ecd9eb506ae7fbb01cfe6511f02203eadfccae466fda0800b0cd401284f79016742f94cb6b8e23766e625e7634855",
+ "vendorField": "Transaction 13",
+ "id": "2695b3c15c822b4d76c551db00bf12f0994bb51ac8228429596d7658ad5cacdf"
+ },
+ "verified": false,
+ "id": "2695b3c15c822b4d76c551db00bf12f0994bb51ac8228429596d7658ad5cacdf",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AV8PNVLjPSfuS8xijSPsxHFQ57HQpBUV67",
+ "type": 0,
+ "vendorField": "Transaction 13",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220257e5ed2aaea59a5f1a4ae74acc59dd1c68a821ecd9eb506ae7fbb01cfe6511f02203eadfccae466fda0800b0cd401284f79016742f94cb6b8e23766e625e7634855",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017bf515df18a873bf775604cf142690c5c9e2216af304502210089973d46853f64b22a8e5ea082741ce61a5b65ea627c2e7d65c6b68761ea9307022064f1232ac967e636f86ba2a1cb84b898e6ef433fc201b893914ec350ae5424c9",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "expiration": 0,
+ "recipientId": "AZDUBcZSWweTX73zhckwqSzNSfSo92THV3",
+ "signature": "304502210089973d46853f64b22a8e5ea082741ce61a5b65ea627c2e7d65c6b68761ea9307022064f1232ac967e636f86ba2a1cb84b898e6ef433fc201b893914ec350ae5424c9",
+ "vendorField": "Transaction 18",
+ "id": "ae70da6e453d96ea9d51be065112cdec453657ab46f87ea98ca60aaf93c961f9"
+ },
+ "verified": false,
+ "id": "ae70da6e453d96ea9d51be065112cdec453657ab46f87ea98ca60aaf93c961f9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AZDUBcZSWweTX73zhckwqSzNSfSo92THV3",
+ "type": 0,
+ "vendorField": "Transaction 18",
+ "vendorFieldHex": "5472616e73616374696f6e203138",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210089973d46853f64b22a8e5ea082741ce61a5b65ea627c2e7d65c6b68761ea9307022064f1232ac967e636f86ba2a1cb84b898e6ef433fc201b893914ec350ae5424c9",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000179a245b5d5ace31e5432c7cf0e0ef83ddce7143a33045022100d399c822102d6ef40c2e9d4117055604358ef4db80943936c99c3dbe2fe0c9e0022076522ed280a8b48dcf2e1050ada11da7d0ba410cd405cc3575886e524bf74b3b",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "expiration": 0,
+ "recipientId": "AVpuGAKLThfwEWPrpDD5oMG4HeyC9fRLG8",
+ "signature": "3045022100d399c822102d6ef40c2e9d4117055604358ef4db80943936c99c3dbe2fe0c9e0022076522ed280a8b48dcf2e1050ada11da7d0ba410cd405cc3575886e524bf74b3b",
+ "vendorField": "Transaction 4",
+ "id": "8ac3927c38bcc0dc237f1241aa03663cb09f0adcb28645e26093a8219e3247cb"
+ },
+ "verified": false,
+ "id": "8ac3927c38bcc0dc237f1241aa03663cb09f0adcb28645e26093a8219e3247cb",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AVpuGAKLThfwEWPrpDD5oMG4HeyC9fRLG8",
+ "type": 0,
+ "vendorField": "Transaction 4",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100d399c822102d6ef40c2e9d4117055604358ef4db80943936c99c3dbe2fe0c9e0022076522ed280a8b48dcf2e1050ada11da7d0ba410cd405cc3575886e524bf74b3b",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b0000000000000000170ce3d695aa0015271cadb4ae7cabde52e7bf881b3045022100f4160aad6909f8159bb2ba3b677a8d4ccdafd769fb7fb193cf45ef5c24e90b6702206f9c06f30f72624b140d0e79994d655095b9acfc633d65662ef982b52b3554df",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "expiration": 0,
+ "recipientId": "AGx2hH3XAaqmm26HfKCN3BbPSafuWdhoFA",
+ "signature": "3045022100f4160aad6909f8159bb2ba3b677a8d4ccdafd769fb7fb193cf45ef5c24e90b6702206f9c06f30f72624b140d0e79994d655095b9acfc633d65662ef982b52b3554df",
+ "vendorField": "Transaction 39",
+ "id": "4d210f7ba219a822077319a43ed31173675617b5fe5ecbccfde23acbdf5724ec"
+ },
+ "verified": false,
+ "id": "4d210f7ba219a822077319a43ed31173675617b5fe5ecbccfde23acbdf5724ec",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGx2hH3XAaqmm26HfKCN3BbPSafuWdhoFA",
+ "type": 0,
+ "vendorField": "Transaction 39",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f4160aad6909f8159bb2ba3b677a8d4ccdafd769fb7fb193cf45ef5c24e90b6702206f9c06f30f72624b140d0e79994d655095b9acfc633d65662ef982b52b3554df",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b000000000000000017b6f21649c7b1b1b9975312888b2809b028cbb8353045022100f21501ede148ae4b1825747d5a35b2f212392ed213a3de1353cc554cf9fb450002203540a0542bdc74befa5e7737af0a76fd511fcc1d663f3c0ab21e1ca9bdfe69d2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "expiration": 0,
+ "recipientId": "AYTCdv41dTobzqLctawRcoG4h1v8wcaNya",
+ "signature": "3045022100f21501ede148ae4b1825747d5a35b2f212392ed213a3de1353cc554cf9fb450002203540a0542bdc74befa5e7737af0a76fd511fcc1d663f3c0ab21e1ca9bdfe69d2",
+ "vendorField": "Transaction 37",
+ "id": "4d69c457fcc88f8d068a41070e0298771f4ff9c9adb58fbc4ab20910ff8baa4e"
+ },
+ "verified": false,
+ "id": "4d69c457fcc88f8d068a41070e0298771f4ff9c9adb58fbc4ab20910ff8baa4e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYTCdv41dTobzqLctawRcoG4h1v8wcaNya",
+ "type": 0,
+ "vendorField": "Transaction 37",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f21501ede148ae4b1825747d5a35b2f212392ed213a3de1353cc554cf9fb450002203540a0542bdc74befa5e7737af0a76fd511fcc1d663f3c0ab21e1ca9bdfe69d2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b0000000000000000173894e92a8d11945bfb593acdb9e619c22c989d2d3045022100d6ba427e0f702f74d0e7ce778354f5ff0376c775e53991bf9f4057957feb2b7902207ef90dfb96a4c8c5a296ec89589e631913145e9736251a82ea93c48fc765dc3d",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "expiration": 0,
+ "recipientId": "ALw3skAPFsg8v5JCQSeaBQ3gUfXqJBmKZF",
+ "signature": "3045022100d6ba427e0f702f74d0e7ce778354f5ff0376c775e53991bf9f4057957feb2b7902207ef90dfb96a4c8c5a296ec89589e631913145e9736251a82ea93c48fc765dc3d",
+ "vendorField": "Transaction 22",
+ "id": "6d5dac8209b4bb2d7f7230ff06d04950d66585dad88fff1f37cc3876d9cfc71b"
+ },
+ "verified": false,
+ "id": "6d5dac8209b4bb2d7f7230ff06d04950d66585dad88fff1f37cc3876d9cfc71b",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ALw3skAPFsg8v5JCQSeaBQ3gUfXqJBmKZF",
+ "type": 0,
+ "vendorField": "Transaction 22",
+ "vendorFieldHex": "5472616e73616374696f6e203232",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100d6ba427e0f702f74d0e7ce778354f5ff0376c775e53991bf9f4057957feb2b7902207ef90dfb96a4c8c5a296ec89589e631913145e9736251a82ea93c48fc765dc3d",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b00000000000000001779389f9aefdd03d9d3788bb08f1e5d8b6567aa1c3045022100e921d8a535a948010be53a418b14537894e5cd59f4798c28afb3a28b1eef3a6702201933fc9972f945e6a75b98bad43ab4c1d5aa50108ecfb0fc6d4d98692cea74c2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "expiration": 0,
+ "recipientId": "ASpqEybey5oEhiFkLuZw2j83n3gDRJRqQv",
+ "signature": "3045022100e921d8a535a948010be53a418b14537894e5cd59f4798c28afb3a28b1eef3a6702201933fc9972f945e6a75b98bad43ab4c1d5aa50108ecfb0fc6d4d98692cea74c2",
+ "vendorField": "Transaction 37",
+ "id": "fc9d4fb1d1130b060ddc08a7e1188b288cbe82d2af2ba2c3fad73f28c851c411"
+ },
+ "verified": false,
+ "id": "fc9d4fb1d1130b060ddc08a7e1188b288cbe82d2af2ba2c3fad73f28c851c411",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASpqEybey5oEhiFkLuZw2j83n3gDRJRqQv",
+ "type": 0,
+ "vendorField": "Transaction 37",
+ "vendorFieldHex": "5472616e73616374696f6e203337",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e921d8a535a948010be53a418b14537894e5cd59f4798c28afb3a28b1eef3a6702201933fc9972f945e6a75b98bad43ab4c1d5aa50108ecfb0fc6d4d98692cea74c2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b0000000000000000171785c644be770b6b42ba6d168e8bd5dbe1067cf63044022069b32ab58125181997f13326bd5bbd8a51c05ce6ca1cb3e05d211aabcdf094c202203ff626f8686e5eaadfbb3f0b0fd3c2d97a27da67df60d449bdfd1a7111691998",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "expiration": 0,
+ "recipientId": "AHvFSiz8JHpEtRMLxckmaM5cCZyzrWXvKd",
+ "signature": "3044022069b32ab58125181997f13326bd5bbd8a51c05ce6ca1cb3e05d211aabcdf094c202203ff626f8686e5eaadfbb3f0b0fd3c2d97a27da67df60d449bdfd1a7111691998",
+ "vendorField": "Transaction 29",
+ "id": "a79c95b5373df15f4168dffe812bf47ce65dbf4a7787d912b967e33fa8596d6d"
+ },
+ "verified": false,
+ "id": "a79c95b5373df15f4168dffe812bf47ce65dbf4a7787d912b967e33fa8596d6d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AHvFSiz8JHpEtRMLxckmaM5cCZyzrWXvKd",
+ "type": 0,
+ "vendorField": "Transaction 29",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022069b32ab58125181997f13326bd5bbd8a51c05ce6ca1cb3e05d211aabcdf094c202203ff626f8686e5eaadfbb3f0b0fd3c2d97a27da67df60d449bdfd1a7111691998",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017067c78e18d6221da34c66c5b99c49b6acdf7d6a03044022046e3fe677c86fe992839c0b4b6c8d52ed9b0468d746144f090a3210c3b22839a02200edefed7a6009d614c52d0258f137a9469e1b945cc44a9a966074039037b6050",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "expiration": 0,
+ "recipientId": "AGNAp8RYuM5EaT44P2r3JnfAqF5gLqWwSR",
+ "signature": "3044022046e3fe677c86fe992839c0b4b6c8d52ed9b0468d746144f090a3210c3b22839a02200edefed7a6009d614c52d0258f137a9469e1b945cc44a9a966074039037b6050",
+ "vendorField": "Transaction 16",
+ "id": "2f493c6bacc89027ac4d4c81f0d72678e992782a4ab5778ebfcc7b0bf5747bc1"
+ },
+ "verified": false,
+ "id": "2f493c6bacc89027ac4d4c81f0d72678e992782a4ab5778ebfcc7b0bf5747bc1",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGNAp8RYuM5EaT44P2r3JnfAqF5gLqWwSR",
+ "type": 0,
+ "vendorField": "Transaction 16",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022046e3fe677c86fe992839c0b4b6c8d52ed9b0468d746144f090a3210c3b22839a02200edefed7a6009d614c52d0258f137a9469e1b945cc44a9a966074039037b6050",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b000000000000000017da2752064521eadb7cac06cc69a82ba14bd78abe3045022100991d550584f937ab579d30b9836d2ddb9d07a53b62088b9ec7d812e831eea3270220130e6d8efe60fec26dd14dc6190c1f68bd37b044c5e48669da7d129671d2fbe9",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "expiration": 0,
+ "recipientId": "AbfN4M6HPEYzii1jGGyp28U6dFZnjRX3tU",
+ "signature": "3045022100991d550584f937ab579d30b9836d2ddb9d07a53b62088b9ec7d812e831eea3270220130e6d8efe60fec26dd14dc6190c1f68bd37b044c5e48669da7d129671d2fbe9",
+ "vendorField": "Transaction 9",
+ "id": "3faa245ee252b5d55c5e78ca10972aa88ae814d2f2fa4dfecbee07769f9731da"
+ },
+ "verified": false,
+ "id": "3faa245ee252b5d55c5e78ca10972aa88ae814d2f2fa4dfecbee07769f9731da",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AbfN4M6HPEYzii1jGGyp28U6dFZnjRX3tU",
+ "type": 0,
+ "vendorField": "Transaction 9",
+ "vendorFieldHex": "5472616e73616374696f6e2039",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100991d550584f937ab579d30b9836d2ddb9d07a53b62088b9ec7d812e831eea3270220130e6d8efe60fec26dd14dc6190c1f68bd37b044c5e48669da7d129671d2fbe9",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b000000000000000017dce1e04722ba02e42d74bd57303917048a3b9a0c304402203d37d32b85c84245f3cf89d3e3187c61eb28f9140cf07914891da61d58f8268b02203e68eec8d8c2c2e06261e5a39d0a58fafd90c46123f7e09b28c6ba88a489ff24",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "expiration": 0,
+ "recipientId": "Abuntr48GXDBuFRUTepCySB4YUdkTJmyBB",
+ "signature": "304402203d37d32b85c84245f3cf89d3e3187c61eb28f9140cf07914891da61d58f8268b02203e68eec8d8c2c2e06261e5a39d0a58fafd90c46123f7e09b28c6ba88a489ff24",
+ "vendorField": "Transaction 34",
+ "id": "2c0286e103c53db7932ba9c407fe5844a7ae390cef6e21279012fb9e7c77378d"
+ },
+ "verified": false,
+ "id": "2c0286e103c53db7932ba9c407fe5844a7ae390cef6e21279012fb9e7c77378d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Abuntr48GXDBuFRUTepCySB4YUdkTJmyBB",
+ "type": 0,
+ "vendorField": "Transaction 34",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402203d37d32b85c84245f3cf89d3e3187c61eb28f9140cf07914891da61d58f8268b02203e68eec8d8c2c2e06261e5a39d0a58fafd90c46123f7e09b28c6ba88a489ff24",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017fbe158a34cc27bb228e101f482ed036f96b824743045022100c1fbc4813b8370edb55d74cff95931f9e5d2e44587c362a184e0a60861aaaf94022075e2c4fabcbeeb54f0ac98f3197bb7aa064e5609a4f7004a51b180030d98ec78",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "expiration": 0,
+ "recipientId": "AejhCyk3z4zFaSQQqtv8qy6zdm6hhEmCLj",
+ "signature": "3045022100c1fbc4813b8370edb55d74cff95931f9e5d2e44587c362a184e0a60861aaaf94022075e2c4fabcbeeb54f0ac98f3197bb7aa064e5609a4f7004a51b180030d98ec78",
+ "vendorField": "Transaction 20",
+ "id": "2f8dbafc501d882fe41752ac497dc9ad164c5a909c3eb5b827f8c4f0ef3e46d6"
+ },
+ "verified": false,
+ "id": "2f8dbafc501d882fe41752ac497dc9ad164c5a909c3eb5b827f8c4f0ef3e46d6",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AejhCyk3z4zFaSQQqtv8qy6zdm6hhEmCLj",
+ "type": 0,
+ "vendorField": "Transaction 20",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c1fbc4813b8370edb55d74cff95931f9e5d2e44587c362a184e0a60861aaaf94022075e2c4fabcbeeb54f0ac98f3197bb7aa064e5609a4f7004a51b180030d98ec78",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000171e0f108e44028a522525837008ba0dbc307e5a1d3045022100ddb81efcdf3bd77b70c048f3d8cf9752bf1bcef935c73e5f9a92066a4f2c5c18022034a7cd1b0d4a62e88a1d289244462794648d0643015c3ae2fd6c3e92bf9269a8",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "expiration": 0,
+ "recipientId": "AJWoxxvEdq5d8QJAnBwACKiQb7YNbgrjJG",
+ "signature": "3045022100ddb81efcdf3bd77b70c048f3d8cf9752bf1bcef935c73e5f9a92066a4f2c5c18022034a7cd1b0d4a62e88a1d289244462794648d0643015c3ae2fd6c3e92bf9269a8",
+ "vendorField": "Transaction 4",
+ "id": "31135b9668f682268a53f6591c6ec7857dea4d6ed5d3149ea79c47132f7dfc41"
+ },
+ "verified": false,
+ "id": "31135b9668f682268a53f6591c6ec7857dea4d6ed5d3149ea79c47132f7dfc41",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AJWoxxvEdq5d8QJAnBwACKiQb7YNbgrjJG",
+ "type": 0,
+ "vendorField": "Transaction 4",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ddb81efcdf3bd77b70c048f3d8cf9752bf1bcef935c73e5f9a92066a4f2c5c18022034a7cd1b0d4a62e88a1d289244462794648d0643015c3ae2fd6c3e92bf9269a8",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017ba531b66b0cdecdbc3cf271612d42265b1d3a5503044022048e574b2570893304e81957458715508df9160125def85414f5120bb8d5120610220737e6c0775977e19ff310a5480a0a402fc6b6ae6868cdd551d2b30244b15285a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "expiration": 0,
+ "recipientId": "AYm4tZFX4HWiwCrsXRRxTj4reW3svsEmXr",
+ "signature": "3044022048e574b2570893304e81957458715508df9160125def85414f5120bb8d5120610220737e6c0775977e19ff310a5480a0a402fc6b6ae6868cdd551d2b30244b15285a",
+ "vendorField": "Transaction 12",
+ "id": "fee20788502217f052eb27d20c9686364fef47f43f8838941e43448559d0df3b"
+ },
+ "verified": false,
+ "id": "fee20788502217f052eb27d20c9686364fef47f43f8838941e43448559d0df3b",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AYm4tZFX4HWiwCrsXRRxTj4reW3svsEmXr",
+ "type": 0,
+ "vendorField": "Transaction 12",
+ "vendorFieldHex": "5472616e73616374696f6e203132",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022048e574b2570893304e81957458715508df9160125def85414f5120bb8d5120610220737e6c0775977e19ff310a5480a0a402fc6b6ae6868cdd551d2b30244b15285a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017257049419ec6524607a68b33a0a36ca96ac30d5d30440220106fcf7ed726167522846df88446c32ba548a325254710ee229de51b6f1dda6b022054bb708966d6978b949cbc5697d3fa653281d16c7881443377d27307ace5bccc",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "expiration": 0,
+ "recipientId": "AKBqANhvyzT6x99rfK1FPqk54uZ7cBz8Qa",
+ "signature": "30440220106fcf7ed726167522846df88446c32ba548a325254710ee229de51b6f1dda6b022054bb708966d6978b949cbc5697d3fa653281d16c7881443377d27307ace5bccc",
+ "vendorField": "Transaction 27",
+ "id": "49fc8777ddc49162a2b4badf83a18e559851ad0430b6e7a922f16ae897f4e829"
+ },
+ "verified": false,
+ "id": "49fc8777ddc49162a2b4badf83a18e559851ad0430b6e7a922f16ae897f4e829",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AKBqANhvyzT6x99rfK1FPqk54uZ7cBz8Qa",
+ "type": 0,
+ "vendorField": "Transaction 27",
+ "vendorFieldHex": "5472616e73616374696f6e203237",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220106fcf7ed726167522846df88446c32ba548a325254710ee229de51b6f1dda6b022054bb708966d6978b949cbc5697d3fa653281d16c7881443377d27307ace5bccc",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b0000000000000000172c04bc6a74f4c3dd84a9a58123906e30786c68ca3045022100f348c5c1571df94075fe18c72472afff2a8163e6af45cd6bfc4c55bc7af895b302200140edc244687b44fdc245bc0a18edc09352a953f6ce7ff659f8935508027e93",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "expiration": 0,
+ "recipientId": "AKnd414ZAXQM5HX1pGKt45M6AuX9EY92d9",
+ "signature": "3045022100f348c5c1571df94075fe18c72472afff2a8163e6af45cd6bfc4c55bc7af895b302200140edc244687b44fdc245bc0a18edc09352a953f6ce7ff659f8935508027e93",
+ "vendorField": "Transaction 29",
+ "id": "add61d18b7230d3b8c5ccb9f9f170bfb5f9cd9b41bb57e03c7cd481c23336133"
+ },
+ "verified": false,
+ "id": "add61d18b7230d3b8c5ccb9f9f170bfb5f9cd9b41bb57e03c7cd481c23336133",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AKnd414ZAXQM5HX1pGKt45M6AuX9EY92d9",
+ "type": 0,
+ "vendorField": "Transaction 29",
+ "vendorFieldHex": "5472616e73616374696f6e203239",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f348c5c1571df94075fe18c72472afff2a8163e6af45cd6bfc4c55bc7af895b302200140edc244687b44fdc245bc0a18edc09352a953f6ce7ff659f8935508027e93",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b0000000000000000174222f8f0ed1b116bd939ba2d602e0ece7b3f49b43045022100ec192e32473517250c25ce2dfb79082f34c63017ba2532a6e56406dad64dd8cc02207c5f0d699c9b76987e06fff3ad9788dbab9019653f0ae15555763b4e22bb02b4",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "expiration": 0,
+ "recipientId": "AMoa95wFinKdBnrYCSqHc7RWqWNAuVGS3d",
+ "signature": "3045022100ec192e32473517250c25ce2dfb79082f34c63017ba2532a6e56406dad64dd8cc02207c5f0d699c9b76987e06fff3ad9788dbab9019653f0ae15555763b4e22bb02b4",
+ "vendorField": "Transaction 17",
+ "id": "8473fd6929f72f146933a2b4e4d5e10e9eeaf2e720a1ba12095579483164e51e"
+ },
+ "verified": false,
+ "id": "8473fd6929f72f146933a2b4e4d5e10e9eeaf2e720a1ba12095579483164e51e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMoa95wFinKdBnrYCSqHc7RWqWNAuVGS3d",
+ "type": 0,
+ "vendorField": "Transaction 17",
+ "vendorFieldHex": "5472616e73616374696f6e203137",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ec192e32473517250c25ce2dfb79082f34c63017ba2532a6e56406dad64dd8cc02207c5f0d699c9b76987e06fff3ad9788dbab9019653f0ae15555763b4e22bb02b4",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b0000000000000000171b272cd1e6b978ae9fb815b40329988d2dc113e93044022059870246ff0cf10fd681b29f392c8d046d8344838222f0b53b96f2b436e73c42022070b12ca14c2dee42057a4bb5986461d415502f08abfded6ce85abecdaf851615",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "expiration": 0,
+ "recipientId": "AJFSpcKsjiK2WqspHpU6uiCSD5zGUAoMTQ",
+ "signature": "3044022059870246ff0cf10fd681b29f392c8d046d8344838222f0b53b96f2b436e73c42022070b12ca14c2dee42057a4bb5986461d415502f08abfded6ce85abecdaf851615",
+ "vendorField": "Transaction 23",
+ "id": "9c0afc8a3c5e80a93157df60c77191606dc4479e7de9cae4a215e94cbfd0ed30"
+ },
+ "verified": false,
+ "id": "9c0afc8a3c5e80a93157df60c77191606dc4479e7de9cae4a215e94cbfd0ed30",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AJFSpcKsjiK2WqspHpU6uiCSD5zGUAoMTQ",
+ "type": 0,
+ "vendorField": "Transaction 23",
+ "vendorFieldHex": "5472616e73616374696f6e203233",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022059870246ff0cf10fd681b29f392c8d046d8344838222f0b53b96f2b436e73c42022070b12ca14c2dee42057a4bb5986461d415502f08abfded6ce85abecdaf851615",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017afb8147376f4e4b1c486e12385d9f5516d32785930440220105f2ab1c8a75d33f29ffd4ceddfd3b175a39cd141a976645faafee06a0708e50220178e2dd98924ed91d1691a3d747fcf8233edc33e98272a24b17b77602dd0fcee",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "expiration": 0,
+ "recipientId": "AXnzRARyqdectrBNQSzLQXyCXxDRzHPfBm",
+ "signature": "30440220105f2ab1c8a75d33f29ffd4ceddfd3b175a39cd141a976645faafee06a0708e50220178e2dd98924ed91d1691a3d747fcf8233edc33e98272a24b17b77602dd0fcee",
+ "vendorField": "Transaction 13",
+ "id": "5b4f5596695cebb5e1b9a24ed449bcfcce356f2aa1b6bf9e1d380b8048e8ad5d"
+ },
+ "verified": false,
+ "id": "5b4f5596695cebb5e1b9a24ed449bcfcce356f2aa1b6bf9e1d380b8048e8ad5d",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AXnzRARyqdectrBNQSzLQXyCXxDRzHPfBm",
+ "type": 0,
+ "vendorField": "Transaction 13",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220105f2ab1c8a75d33f29ffd4ceddfd3b175a39cd141a976645faafee06a0708e50220178e2dd98924ed91d1691a3d747fcf8233edc33e98272a24b17b77602dd0fcee",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b0000000000000000171b21d99d4c50d090209cfb717e7e3171445a61e83044022070d12ef771ea3708d2145923d4c0b4c1eb31f8b3719a30cb9fd32566ded28eea02201a744120749b42c39479ad088a462f6f9fec468d85cdc9cf83e5cedc90940832",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "expiration": 0,
+ "recipientId": "AJFLScxy42BZy1dGPr8DRvnQui3suX93o7",
+ "signature": "3044022070d12ef771ea3708d2145923d4c0b4c1eb31f8b3719a30cb9fd32566ded28eea02201a744120749b42c39479ad088a462f6f9fec468d85cdc9cf83e5cedc90940832",
+ "vendorField": "Transaction 6",
+ "id": "27cf3c5737d61c7c84bfff48c4822da9e31220c320d3cc1d177821c066282423"
+ },
+ "verified": false,
+ "id": "27cf3c5737d61c7c84bfff48c4822da9e31220c320d3cc1d177821c066282423",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AJFLScxy42BZy1dGPr8DRvnQui3suX93o7",
+ "type": 0,
+ "vendorField": "Transaction 6",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022070d12ef771ea3708d2145923d4c0b4c1eb31f8b3719a30cb9fd32566ded28eea02201a744120749b42c39479ad088a462f6f9fec468d85cdc9cf83e5cedc90940832",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b000000000000000017263931c7543c6fc43bc3312498b241707bca3c043045022100cf63f159f40a6f75ddf5c4b206acce3813d94ab1cdee8c910dede9657d50afe60220724da18645f4ee23be41b697690e55a5e82a2ce1ed3808e104f0902de2c01c67",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "expiration": 0,
+ "recipientId": "AKFyqjAWiyWLHnNm7ZgRkabq5LUgimHUuf",
+ "signature": "3045022100cf63f159f40a6f75ddf5c4b206acce3813d94ab1cdee8c910dede9657d50afe60220724da18645f4ee23be41b697690e55a5e82a2ce1ed3808e104f0902de2c01c67",
+ "vendorField": "Transaction 34",
+ "id": "12fa416fc5d1d6dfb141d6052537b0e37462a8fd0301847dc20bfe64e5c7f33a"
+ },
+ "verified": false,
+ "id": "12fa416fc5d1d6dfb141d6052537b0e37462a8fd0301847dc20bfe64e5c7f33a",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AKFyqjAWiyWLHnNm7ZgRkabq5LUgimHUuf",
+ "type": 0,
+ "vendorField": "Transaction 34",
+ "vendorFieldHex": "5472616e73616374696f6e203334",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100cf63f159f40a6f75ddf5c4b206acce3813d94ab1cdee8c910dede9657d50afe60220724da18645f4ee23be41b697690e55a5e82a2ce1ed3808e104f0902de2c01c67",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b0000000000000000173e1b19bf4d66515f03c6f15e868001ab959b4f403045022100e5936646e020734f85e7601323f4be503f67da4a31f05210210a4d02808af39602205db4b337e1d883b1312bc03ca0195fc77a74f860ceb7867e197d9647f5d26d33",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "expiration": 0,
+ "recipientId": "AMSG1NaLZswewAjDLbHEQz2haZ3wGhf3ZB",
+ "signature": "3045022100e5936646e020734f85e7601323f4be503f67da4a31f05210210a4d02808af39602205db4b337e1d883b1312bc03ca0195fc77a74f860ceb7867e197d9647f5d26d33",
+ "vendorField": "Transaction 5",
+ "id": "3edd9cd75a9c4da0bd0829d9a15bfa93f9e4cc832f09b430474574ae28bfa659"
+ },
+ "verified": false,
+ "id": "3edd9cd75a9c4da0bd0829d9a15bfa93f9e4cc832f09b430474574ae28bfa659",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AMSG1NaLZswewAjDLbHEQz2haZ3wGhf3ZB",
+ "type": 0,
+ "vendorField": "Transaction 5",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e5936646e020734f85e7601323f4be503f67da4a31f05210210a4d02808af39602205db4b337e1d883b1312bc03ca0195fc77a74f860ceb7867e197d9647f5d26d33",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000170dcd69f016fb9efd5f9ae47d578ed0d04bd58b01304402205e0fe17e03c3666f986dd243bc057dff4b190f14ba1cb7e19804e4c1042deec0022059cf68c76c5b2931b83e9bd3f75cfd75267b8f35aa957cf63cce3b2226665707",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "expiration": 0,
+ "recipientId": "AH2rWPqT9WeFvwrUM56EeLY7Ew2eTQTr5r",
+ "signature": "304402205e0fe17e03c3666f986dd243bc057dff4b190f14ba1cb7e19804e4c1042deec0022059cf68c76c5b2931b83e9bd3f75cfd75267b8f35aa957cf63cce3b2226665707",
+ "vendorField": "Transaction 4",
+ "id": "34d18166b8fd36708268bc343c378fd3e1d72ec4c89563078b9616386e8c7763"
+ },
+ "verified": false,
+ "id": "34d18166b8fd36708268bc343c378fd3e1d72ec4c89563078b9616386e8c7763",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AH2rWPqT9WeFvwrUM56EeLY7Ew2eTQTr5r",
+ "type": 0,
+ "vendorField": "Transaction 4",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402205e0fe17e03c3666f986dd243bc057dff4b190f14ba1cb7e19804e4c1042deec0022059cf68c76c5b2931b83e9bd3f75cfd75267b8f35aa957cf63cce3b2226665707",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b00000000000000001705902ff13f07ff4a607ca76390eca20df01539183045022100967f65a4252ef1e2f1f69b754cc8f3b19046af3c7efae86baf81a85031311709022075f0ebbdf75c720696c925bee4f6161bf11a279bf7e9aaaf67234a53dcd614c2",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "expiration": 0,
+ "recipientId": "AGHHkmKccHarK97aMYHvhFMpDTwX5cuJpG",
+ "signature": "3045022100967f65a4252ef1e2f1f69b754cc8f3b19046af3c7efae86baf81a85031311709022075f0ebbdf75c720696c925bee4f6161bf11a279bf7e9aaaf67234a53dcd614c2",
+ "vendorField": "Transaction 24",
+ "id": "2614740b90b174b59976ba912df669f2f0be380831c9973dcb73972cc04674b7"
+ },
+ "verified": false,
+ "id": "2614740b90b174b59976ba912df669f2f0be380831c9973dcb73972cc04674b7",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AGHHkmKccHarK97aMYHvhFMpDTwX5cuJpG",
+ "type": 0,
+ "vendorField": "Transaction 24",
+ "vendorFieldHex": "5472616e73616374696f6e203234",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100967f65a4252ef1e2f1f69b754cc8f3b19046af3c7efae86baf81a85031311709022075f0ebbdf75c720696c925bee4f6161bf11a279bf7e9aaaf67234a53dcd614c2",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000175e0496aeecc3ed4be7f98c8679efc4b3d9a398fb3045022100f5bf4a65a49d712c81d8ceb9180a17527852ae393f3fbee6f556f695ea1be8100220332353e697d4916247ec68f40764c5b6411a45d0b3db65d19b699b92e5fa4fb7",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "expiration": 0,
+ "recipientId": "AQLzfMPcSMoVzSuTtqsmmuMM43ocf4sz1c",
+ "signature": "3045022100f5bf4a65a49d712c81d8ceb9180a17527852ae393f3fbee6f556f695ea1be8100220332353e697d4916247ec68f40764c5b6411a45d0b3db65d19b699b92e5fa4fb7",
+ "vendorField": "Transaction 8",
+ "id": "f68eb2d1d4527e80901cd19b31c87f44845fd9659141c07278859911d0555f0a"
+ },
+ "verified": false,
+ "id": "f68eb2d1d4527e80901cd19b31c87f44845fd9659141c07278859911d0555f0a",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AQLzfMPcSMoVzSuTtqsmmuMM43ocf4sz1c",
+ "type": 0,
+ "vendorField": "Transaction 8",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f5bf4a65a49d712c81d8ceb9180a17527852ae393f3fbee6f556f695ea1be8100220332353e697d4916247ec68f40764c5b6411a45d0b3db65d19b699b92e5fa4fb7",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b000000000000000017af608296a6d81f700f792a6ac9da2d170749c40e3044022049637b4877124be3de9ba317970c35fe2f16dce72fe3f5752c1df56b1c14bc1c0220765bd7c7245298676e9943b7443548d723c60def0b7217fa6d991d5866406932",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "expiration": 0,
+ "recipientId": "AXmBWi4TBzmAZgdmRZdLT7sk2VJZtHtvUP",
+ "signature": "3044022049637b4877124be3de9ba317970c35fe2f16dce72fe3f5752c1df56b1c14bc1c0220765bd7c7245298676e9943b7443548d723c60def0b7217fa6d991d5866406932",
+ "vendorField": "Transaction 8",
+ "id": "53dc7dcf859b985e43a81d1813a1d54937de846d935d333500d7e5fc5c3bb548"
+ },
+ "verified": false,
+ "id": "53dc7dcf859b985e43a81d1813a1d54937de846d935d333500d7e5fc5c3bb548",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AXmBWi4TBzmAZgdmRZdLT7sk2VJZtHtvUP",
+ "type": 0,
+ "vendorField": "Transaction 8",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022049637b4877124be3de9ba317970c35fe2f16dce72fe3f5752c1df56b1c14bc1c0220765bd7c7245298676e9943b7443548d723c60def0b7217fa6d991d5866406932",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b00000000000000001755d63414206f2d2acfb498cb2c2c6f0b9b84b97e3045022100ff478f5c5d86597ebc267e5e819782a5316e79987df57d154046c9c7b742d2e602206a0092b9a8709c3fd1f6168e1cba5bb8e84789140111addd99a3cff0c8dc663d",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "expiration": 0,
+ "recipientId": "APbjgvGm593CBT9qVzvVUUqpeHXsPoep1M",
+ "signature": "3045022100ff478f5c5d86597ebc267e5e819782a5316e79987df57d154046c9c7b742d2e602206a0092b9a8709c3fd1f6168e1cba5bb8e84789140111addd99a3cff0c8dc663d",
+ "vendorField": "Transaction 30",
+ "id": "d8e05bf677daeff8b4b0a54e08b3e03656baed782379a15bf9c2514c7c2e7012"
+ },
+ "verified": false,
+ "id": "d8e05bf677daeff8b4b0a54e08b3e03656baed782379a15bf9c2514c7c2e7012",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APbjgvGm593CBT9qVzvVUUqpeHXsPoep1M",
+ "type": 0,
+ "vendorField": "Transaction 30",
+ "vendorFieldHex": "5472616e73616374696f6e203330",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100ff478f5c5d86597ebc267e5e819782a5316e79987df57d154046c9c7b742d2e602206a0092b9a8709c3fd1f6168e1cba5bb8e84789140111addd99a3cff0c8dc663d",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001723f80cdfc6e8737f06a9c02d6ca93594388767f83044022055b7ca7c018bf871e7200ebef17ea826b817305e942b3403ab57a1d2f138b75f02202e8a79588f7802aed93470cce823a5f764715b23ecd84fe531d68019b5792bfb",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "expiration": 0,
+ "recipientId": "AK44T3mwTGAduzFB3V61sVUwoZiXsypLob",
+ "signature": "3044022055b7ca7c018bf871e7200ebef17ea826b817305e942b3403ab57a1d2f138b75f02202e8a79588f7802aed93470cce823a5f764715b23ecd84fe531d68019b5792bfb",
+ "vendorField": "Transaction 14",
+ "id": "fd678492c1a5ee208ab815e8bb3868aa296cd1c0f574635205969249b42eff1b"
+ },
+ "verified": false,
+ "id": "fd678492c1a5ee208ab815e8bb3868aa296cd1c0f574635205969249b42eff1b",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AK44T3mwTGAduzFB3V61sVUwoZiXsypLob",
+ "type": 0,
+ "vendorField": "Transaction 14",
+ "vendorFieldHex": "5472616e73616374696f6e203134",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022055b7ca7c018bf871e7200ebef17ea826b817305e942b3403ab57a1d2f138b75f02202e8a79588f7802aed93470cce823a5f764715b23ecd84fe531d68019b5792bfb",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b00000000000000001773938849133e49752e6322fb06520820eb82ab5230440220546f1457a0e381e332b2ae915a4505a86cb15816cd2cb143411a433847c17aed02202e05926f9270e36d9d9371531d68d365de3ed3ccbb7a2331c8a3215a318c451a",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "expiration": 0,
+ "recipientId": "ASJz6Hcak9JeMpCKgiQPVf3U4b8UnZ79Rq",
+ "signature": "30440220546f1457a0e381e332b2ae915a4505a86cb15816cd2cb143411a433847c17aed02202e05926f9270e36d9d9371531d68d365de3ed3ccbb7a2331c8a3215a318c451a",
+ "vendorField": "Transaction 11",
+ "id": "0f9e0912bc9c1c845a116281be23b3aaab97ebed8b172de4aa4321a6e8203505"
+ },
+ "verified": false,
+ "id": "0f9e0912bc9c1c845a116281be23b3aaab97ebed8b172de4aa4321a6e8203505",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ASJz6Hcak9JeMpCKgiQPVf3U4b8UnZ79Rq",
+ "type": 0,
+ "vendorField": "Transaction 11",
+ "vendorFieldHex": "5472616e73616374696f6e203131",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220546f1457a0e381e332b2ae915a4505a86cb15816cd2cb143411a433847c17aed02202e05926f9270e36d9d9371531d68d365de3ed3ccbb7a2331c8a3215a318c451a",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b0000000000000000173abaff16c478b48ec930bc54dcb2a85767ae61833045022100b9db8445e61c6645aae74c4ee5c536ae0bfe18f63dbc5316085f895db7ae8706022029bbda8d6f0609bdb34eca51a050327a86120733cca1df7eee94f598d0baa7f5",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "expiration": 0,
+ "recipientId": "AM8QrN87nGuYB1Jwni8WdSceLdvNTjCPoC",
+ "signature": "3045022100b9db8445e61c6645aae74c4ee5c536ae0bfe18f63dbc5316085f895db7ae8706022029bbda8d6f0609bdb34eca51a050327a86120733cca1df7eee94f598d0baa7f5",
+ "vendorField": "Transaction 36",
+ "id": "1eb72cbf3f052aa1e13f6b5838b22a05685ffc99f5c19f5051da2e63be88befe"
+ },
+ "verified": false,
+ "id": "1eb72cbf3f052aa1e13f6b5838b22a05685ffc99f5c19f5051da2e63be88befe",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AM8QrN87nGuYB1Jwni8WdSceLdvNTjCPoC",
+ "type": 0,
+ "vendorField": "Transaction 36",
+ "vendorFieldHex": "5472616e73616374696f6e203336",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100b9db8445e61c6645aae74c4ee5c536ae0bfe18f63dbc5316085f895db7ae8706022029bbda8d6f0609bdb34eca51a050327a86120733cca1df7eee94f598d0baa7f5",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017236b377b2f235de51e10128a7a5c5cad4c6e1dd130440220186bc71fdfaedc0c31285917f74997cfd66ea4a72e690d480d5f8ef5e7a1d1a20220710c383ea6e8b4f64ed2eaffae33a1f0bafff1fb1c578a1b369dd8151ef3f0bc",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "expiration": 0,
+ "recipientId": "AK19jm62mDF8b62XtszDf1sqGeVkGmp7CE",
+ "signature": "30440220186bc71fdfaedc0c31285917f74997cfd66ea4a72e690d480d5f8ef5e7a1d1a20220710c383ea6e8b4f64ed2eaffae33a1f0bafff1fb1c578a1b369dd8151ef3f0bc",
+ "vendorField": "Transaction 10",
+ "id": "e75b3d52fcf745c1b64f6fc2863b673bbe61d687ed0ca7f68c7e88ceb6fae8f9"
+ },
+ "verified": false,
+ "id": "e75b3d52fcf745c1b64f6fc2863b673bbe61d687ed0ca7f68c7e88ceb6fae8f9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AK19jm62mDF8b62XtszDf1sqGeVkGmp7CE",
+ "type": 0,
+ "vendorField": "Transaction 10",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30440220186bc71fdfaedc0c31285917f74997cfd66ea4a72e690d480d5f8ef5e7a1d1a20220710c383ea6e8b4f64ed2eaffae33a1f0bafff1fb1c578a1b369dd8151ef3f0bc",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017b35eba0e5bbe1a15c6e2504779938e94572bc7e1304402204f1f1527f8d037d69ff3de5766b719b4b84f29d5a79ff390877a84bb9856d5da022008656e66808082b8dad2ef6d2168e031f04db664eaa3bed647395b42ffc9f759",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "expiration": 0,
+ "recipientId": "AY8J5ZuVC4p5QdEsuZSLHKjXsGGvvvr2mE",
+ "signature": "304402204f1f1527f8d037d69ff3de5766b719b4b84f29d5a79ff390877a84bb9856d5da022008656e66808082b8dad2ef6d2168e031f04db664eaa3bed647395b42ffc9f759",
+ "vendorField": "Transaction 20",
+ "id": "a67eea8b6191b8d35cbab1dc45d9a6795f86c0299557764a7d3ed7794b64d699"
+ },
+ "verified": false,
+ "id": "a67eea8b6191b8d35cbab1dc45d9a6795f86c0299557764a7d3ed7794b64d699",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AY8J5ZuVC4p5QdEsuZSLHKjXsGGvvvr2mE",
+ "type": 0,
+ "vendorField": "Transaction 20",
+ "vendorFieldHex": "5472616e73616374696f6e203230",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402204f1f1527f8d037d69ff3de5766b719b4b84f29d5a79ff390877a84bb9856d5da022008656e66808082b8dad2ef6d2168e031f04db664eaa3bed647395b42ffc9f759",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000179c375cc35993d1b822494819e4e36f0035d2fc9b3045022100eacd0a48fcee232b4592465351612f802e31e0aec809a04aedb1ffc77b67970a02207ddeb454d49fcd5dd03eabcb28fd6a54356ee8ce513ee4eef6c3dad2bcc035d1",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "expiration": 0,
+ "recipientId": "AW1sP4tJNL5PdXGFrnxvTzDhgVkXYtEKWW",
+ "signature": "3045022100eacd0a48fcee232b4592465351612f802e31e0aec809a04aedb1ffc77b67970a02207ddeb454d49fcd5dd03eabcb28fd6a54356ee8ce513ee4eef6c3dad2bcc035d1",
+ "vendorField": "Transaction 8",
+ "id": "17f6fe38989462776bfee0b769db550913f6f18f5d7a0901998fcd3203f2b224"
+ },
+ "verified": false,
+ "id": "17f6fe38989462776bfee0b769db550913f6f18f5d7a0901998fcd3203f2b224",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AW1sP4tJNL5PdXGFrnxvTzDhgVkXYtEKWW",
+ "type": 0,
+ "vendorField": "Transaction 8",
+ "vendorFieldHex": "5472616e73616374696f6e2038",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100eacd0a48fcee232b4592465351612f802e31e0aec809a04aedb1ffc77b67970a02207ddeb454d49fcd5dd03eabcb28fd6a54356ee8ce513ee4eef6c3dad2bcc035d1",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b000000000000000017db12692f9dc88fe4d380436e4a3824076a4f843f30450221008aaa31bd67638a4896b251f1e34148abcb9834c8324ad2aea68b85747525a98502203021aae408cd18175eb04e45a8bcedc5fffaa6e6effdb1c11a336976be49f240",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "expiration": 0,
+ "recipientId": "AbkDgihWECuCuz5hNmag2w25JQ9E1FGaNX",
+ "signature": "30450221008aaa31bd67638a4896b251f1e34148abcb9834c8324ad2aea68b85747525a98502203021aae408cd18175eb04e45a8bcedc5fffaa6e6effdb1c11a336976be49f240",
+ "vendorField": "Transaction 33",
+ "id": "0058abf9805e5e8ea182830c638704da731da75407c3b676dc56ac37c97de148"
+ },
+ "verified": false,
+ "id": "0058abf9805e5e8ea182830c638704da731da75407c3b676dc56ac37c97de148",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AbkDgihWECuCuz5hNmag2w25JQ9E1FGaNX",
+ "type": 0,
+ "vendorField": "Transaction 33",
+ "vendorFieldHex": "5472616e73616374696f6e203333",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "30450221008aaa31bd67638a4896b251f1e34148abcb9834c8324ad2aea68b85747525a98502203021aae408cd18175eb04e45a8bcedc5fffaa6e6effdb1c11a336976be49f240",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000176d9b88a2a27357f2d7eeae78b386d2edf8df3b84304402200b48815ef066a0eac8a79ec4d0fc4f22b105eff437163af0573683d879b5a93802200bf1f2c2977b4065f5522bb8b917141cb82806446e57593d14dd9d632e0f7013",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "expiration": 0,
+ "recipientId": "ARmRd5edg5V3mRas7gLMyfsvtSoXacZPjo",
+ "signature": "304402200b48815ef066a0eac8a79ec4d0fc4f22b105eff437163af0573683d879b5a93802200bf1f2c2977b4065f5522bb8b917141cb82806446e57593d14dd9d632e0f7013",
+ "vendorField": "Transaction 4",
+ "id": "a2da696115bf832cc4ed9458ff608ce10f339876f77d04ea28bbb22eea10aec2"
+ },
+ "verified": false,
+ "id": "a2da696115bf832cc4ed9458ff608ce10f339876f77d04ea28bbb22eea10aec2",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "ARmRd5edg5V3mRas7gLMyfsvtSoXacZPjo",
+ "type": 0,
+ "vendorField": "Transaction 4",
+ "vendorFieldHex": "5472616e73616374696f6e2034",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402200b48815ef066a0eac8a79ec4d0fc4f22b105eff437163af0573683d879b5a93802200bf1f2c2977b4065f5522bb8b917141cb82806446e57593d14dd9d632e0f7013",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b0000000000000000176358f2ca1a4c272cce0eedb92f3bf8b837fd8cf03044022070c9c7d962d31b5189bf1747aa6e06fd35ae00e0b12efa6899173b3ea5e343180220146328f4a1ed1548a5a95ca8489af16a095cf0095924084f5a508c9e010d9e86",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "expiration": 0,
+ "recipientId": "AQqB6jSG9a4xKwSFCnwY2yRJJAfYAjyvZA",
+ "signature": "3044022070c9c7d962d31b5189bf1747aa6e06fd35ae00e0b12efa6899173b3ea5e343180220146328f4a1ed1548a5a95ca8489af16a095cf0095924084f5a508c9e010d9e86",
+ "vendorField": "Transaction 10",
+ "id": "3a125825e9e154c2cb546f5aadabd72e2a15cb75e6746d9edd425ff4c921bc51"
+ },
+ "verified": false,
+ "id": "3a125825e9e154c2cb546f5aadabd72e2a15cb75e6746d9edd425ff4c921bc51",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AQqB6jSG9a4xKwSFCnwY2yRJJAfYAjyvZA",
+ "type": 0,
+ "vendorField": "Transaction 10",
+ "vendorFieldHex": "5472616e73616374696f6e203130",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3044022070c9c7d962d31b5189bf1747aa6e06fd35ae00e0b12efa6899173b3ea5e343180220146328f4a1ed1548a5a95ca8489af16a095cf0095924084f5a508c9e010d9e86",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b0000000000000000171b96e2b2a75c0ae6c7fa5626e3d79c20175f65913045022100c8ec04674367b578456e43088752a04b0f59de4bbda197ae38a94a1ad0db39f2022011bedc437d4a025b2eb0a0df29d61b55aa2d4e49a0ab01dd48e050833c153d15",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "expiration": 0,
+ "recipientId": "AJHkePW35duSTiqbVxcFbtTvvnQr2PBgYK",
+ "signature": "3045022100c8ec04674367b578456e43088752a04b0f59de4bbda197ae38a94a1ad0db39f2022011bedc437d4a025b2eb0a0df29d61b55aa2d4e49a0ab01dd48e050833c153d15",
+ "vendorField": "Transaction 39",
+ "id": "9d1d3da7979adffb6a7d6dd4e4d6a4863ae412c1e909595bca2bb6dee68b812c"
+ },
+ "verified": false,
+ "id": "9d1d3da7979adffb6a7d6dd4e4d6a4863ae412c1e909595bca2bb6dee68b812c",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AJHkePW35duSTiqbVxcFbtTvvnQr2PBgYK",
+ "type": 0,
+ "vendorField": "Transaction 39",
+ "vendorFieldHex": "5472616e73616374696f6e203339",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100c8ec04674367b578456e43088752a04b0f59de4bbda197ae38a94a1ad0db39f2022011bedc437d4a025b2eb0a0df29d61b55aa2d4e49a0ab01dd48e050833c153d15",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017f8c689bc5455e9c7add499f969c3979f6cf1e6c8304502210096e14fce152f9f532f1836d260f535c3316fa8ab5e1bdf1e4c1275fcea5444dd02203be1c81790b12abacd63b89b3fe11cd664a2c9a3519eac0303e3d091dfaac54c",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "expiration": 0,
+ "recipientId": "AeTH4jfbYikFC52t4DaDM2sBF83d5AaDM1",
+ "signature": "304502210096e14fce152f9f532f1836d260f535c3316fa8ab5e1bdf1e4c1275fcea5444dd02203be1c81790b12abacd63b89b3fe11cd664a2c9a3519eac0303e3d091dfaac54c",
+ "vendorField": "Transaction 13",
+ "id": "14d7359e411e9c9eac713973be00b3bb4e2960648b67f0e6f0dc469a25a7c6d9"
+ },
+ "verified": false,
+ "id": "14d7359e411e9c9eac713973be00b3bb4e2960648b67f0e6f0dc469a25a7c6d9",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AeTH4jfbYikFC52t4DaDM2sBF83d5AaDM1",
+ "type": 0,
+ "vendorField": "Transaction 13",
+ "vendorFieldHex": "5472616e73616374696f6e203133",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210096e14fce152f9f532f1836d260f535c3316fa8ab5e1bdf1e4c1275fcea5444dd02203be1c81790b12abacd63b89b3fe11cd664a2c9a3519eac0303e3d091dfaac54c",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000172fe38431dd11fa9a38bac772feeeb266a13ddb0b3045022100e67098e07447a4f767c9309ba315e89b332cabee4259887111745e83f9cdc4cd0220531cb5f0372caac1ed702394f9722dcf1106f115dc772272fb470db885eb49eb",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "expiration": 0,
+ "recipientId": "AL95xdVWbvjmU7132YdCBKfLUC9CVGqjDL",
+ "signature": "3045022100e67098e07447a4f767c9309ba315e89b332cabee4259887111745e83f9cdc4cd0220531cb5f0372caac1ed702394f9722dcf1106f115dc772272fb470db885eb49eb",
+ "vendorField": "Transaction 28",
+ "id": "82001d46768ed6b5b456159f6af708140711135403d57119aa931c1683b2a26e"
+ },
+ "verified": false,
+ "id": "82001d46768ed6b5b456159f6af708140711135403d57119aa931c1683b2a26e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AL95xdVWbvjmU7132YdCBKfLUC9CVGqjDL",
+ "type": 0,
+ "vendorField": "Transaction 28",
+ "vendorFieldHex": "5472616e73616374696f6e203238",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100e67098e07447a4f767c9309ba315e89b332cabee4259887111745e83f9cdc4cd0220531cb5f0372caac1ed702394f9722dcf1106f115dc772272fb470db885eb49eb",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b000000000000000017fee784411bbe0538f549eee19cf6593ee1d0378f304402202fd4dd9351912093bf7289fc6a2204cedea60dee16bf6c169f82fd919e94326402207d2f7ffa0cd4e559e39421fec818020859e080d66d4511f6b86af8922c4bbfcf",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "expiration": 0,
+ "recipientId": "Af1gdHBt627NsJXZUwW3Dn8gWQxnmeGj1F",
+ "signature": "304402202fd4dd9351912093bf7289fc6a2204cedea60dee16bf6c169f82fd919e94326402207d2f7ffa0cd4e559e39421fec818020859e080d66d4511f6b86af8922c4bbfcf",
+ "vendorField": "Transaction 6",
+ "id": "4003f9ce2b82fe56150892d2592de29b7878342dd92fcf431f4fc4c261f57156"
+ },
+ "verified": false,
+ "id": "4003f9ce2b82fe56150892d2592de29b7878342dd92fcf431f4fc4c261f57156",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "Af1gdHBt627NsJXZUwW3Dn8gWQxnmeGj1F",
+ "type": 0,
+ "vendorField": "Transaction 6",
+ "vendorFieldHex": "5472616e73616374696f6e2036",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304402202fd4dd9351912093bf7289fc6a2204cedea60dee16bf6c169f82fd919e94326402207d2f7ffa0cd4e559e39421fec818020859e080d66d4511f6b86af8922c4bbfcf",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017910efbc8d502d7a24a13533a6de93b41c5c74f4c3045022100f1fdf68f3d428be0f9dabd8cf20aabd4bc576ab63e666c339d7d17597f8b023b02203854f0db9240abf17d2f245478ad287d8514c1670fe2604fd7772adc23fae095",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "expiration": 0,
+ "recipientId": "AUzsaPT53NvnhgUwJmKmy7RKWkD5RrZQ1t",
+ "signature": "3045022100f1fdf68f3d428be0f9dabd8cf20aabd4bc576ab63e666c339d7d17597f8b023b02203854f0db9240abf17d2f245478ad287d8514c1670fe2604fd7772adc23fae095",
+ "vendorField": "Transaction 16",
+ "id": "f7b2236a623feff92b3dbb7728c2d15c120693a16007cf3087d288b7beb80797"
+ },
+ "verified": false,
+ "id": "f7b2236a623feff92b3dbb7728c2d15c120693a16007cf3087d288b7beb80797",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AUzsaPT53NvnhgUwJmKmy7RKWkD5RrZQ1t",
+ "type": 0,
+ "vendorField": "Transaction 16",
+ "vendorFieldHex": "5472616e73616374696f6e203136",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100f1fdf68f3d428be0f9dabd8cf20aabd4bc576ab63e666c339d7d17597f8b023b02203854f0db9240abf17d2f245478ad287d8514c1670fe2604fd7772adc23fae095",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b00000000000000001753fdee9530e0d82aa9fcb0d8a378bc0647584765304502210081b45f9b0bdf11c4e0944eef58a960b99ed022643b2f3d18dba890c1f02d4364022050eeca2c2e24440f6f18fec7f019dd27c54f7cb04b152a1a75b324816a859c79",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "expiration": 0,
+ "recipientId": "APRyvvTGyM5yRRzmTDjVEHhP7xMsEPyNrV",
+ "signature": "304502210081b45f9b0bdf11c4e0944eef58a960b99ed022643b2f3d18dba890c1f02d4364022050eeca2c2e24440f6f18fec7f019dd27c54f7cb04b152a1a75b324816a859c79",
+ "vendorField": "Transaction 5",
+ "id": "374d96b38602581d6074ba4822f8b19264236a255453d9c7ac37f4cd5eab1b3e"
+ },
+ "verified": false,
+ "id": "374d96b38602581d6074ba4822f8b19264236a255453d9c7ac37f4cd5eab1b3e",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "APRyvvTGyM5yRRzmTDjVEHhP7xMsEPyNrV",
+ "type": 0,
+ "vendorField": "Transaction 5",
+ "vendorFieldHex": "5472616e73616374696f6e2035",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "304502210081b45f9b0bdf11c4e0944eef58a960b99ed022643b2f3d18dba890c1f02d4364022050eeca2c2e24440f6f18fec7f019dd27c54f7cb04b152a1a75b324816a859c79",
+ "expiration": 0
+ },
+ {
+ "serialized": "ff01170052f0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017751964a7f75cd482dcc3838024c6dfa7e05f2eb83045022100af03a6ea5d5d9a997465d16580a7e59fe6bc1a4cdf85304f0fd14b76ea83d88e02206be74c7f22acbfb8496bb10c60024dfbe0924de7e26b912735826c8a90908c4e",
+ "data": {
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "expiration": 0,
+ "recipientId": "AST38HtwFJrJctcmFRP8Rqyy1eVZ4sVYBL",
+ "signature": "3045022100af03a6ea5d5d9a997465d16580a7e59fe6bc1a4cdf85304f0fd14b76ea83d88e02206be74c7f22acbfb8496bb10c60024dfbe0924de7e26b912735826c8a90908c4e",
+ "vendorField": "Transaction 1",
+ "id": "804b281bf92f80cba9fa7098d729d3a04371f98be93c39d8b54ecf3ce3bdd842"
+ },
+ "verified": false,
+ "id": "804b281bf92f80cba9fa7098d729d3a04371f98be93c39d8b54ecf3ce3bdd842",
+ "version": 1,
+ "timestamp": 58126418,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "recipientId": "AST38HtwFJrJctcmFRP8Rqyy1eVZ4sVYBL",
+ "type": 0,
+ "vendorField": "Transaction 1",
+ "vendorFieldHex": "5472616e73616374696f6e2031",
+ "amount": "200000000",
+ "fee": "10000000",
+ "signature": "3045022100af03a6ea5d5d9a997465d16580a7e59fe6bc1a4cdf85304f0fd14b76ea83d88e02206be74c7f22acbfb8496bb10c60024dfbe0924de7e26b912735826c8a90908c4e",
+ "expiration": 0
+ }
+ ],
+ "idHex": "d9401ad36a03b8b4",
+ "id": "15654541800058894516"
+}
diff --git a/benchmark/fixtures/block/serialized/no-transactions.txt b/benchmark/fixtures/block/serialized/no-transactions.txt
new file mode 100644
index 0000000000..508d427dca
--- /dev/null
+++ b/benchmark/fixtures/block/serialized/no-transactions.txt
@@ -0,0 +1 @@
+0000000052f0760313000000a099b7651f0e5eb89600000000ac23fc06000000002f6859000000000000000000000000c0120000e5a7e9b5a8a8e2f47f7d8a532e0e9c43d44052dc6c6339ad57246e9a339665e303a46f2547d20b47003c1c376788db5a54d67264df2ae914f70bf453b6a1fa1b3a304402204e31f1ae02cbcf2bb936e225f9f9db332ac275577b777a389b2d713e48b78c9002203f11c4ee0d30d2e10b2cb4a7fb59569e761571971ffe1be5abaa32fdc42a056b
diff --git a/benchmark/fixtures/block/serialized/transactions.txt b/benchmark/fixtures/block/serialized/transactions.txt
new file mode 100644
index 0000000000..689bed3e80
--- /dev/null
+++ b/benchmark/fixtures/block/serialized/transactions.txt
@@ -0,0 +1 @@
+0000000052f0760313000000a099b7651f0e5eb89600000000ac23fc06000000002f6859000000000000000000000000c0120000e5a7e9b5a8a8e2f47f7d8a532e0e9c43d44052dc6c6339ad57246e9a339665e303a46f2547d20b47003c1c376788db5a54d67264df2ae914f70bf453b6a1fa1b3a304402204e31f1ae02cbcf2bb936e225f9f9db332ac275577b777a389b2d713e48b78c9002203f11c4ee0d30d2e10b2cb4a7fb59569e761571971ffe1be5abaa32fdc42a056ba7000000a8000000a7000000a8000000a8000000a7000000a8000000a7000000a6000000a8000000a6000000a8000000a7000000a7000000a7000000a8000000a8000000a6000000a7000000a7000000a8000000a7000000a8000000a7000000a7000000a7000000a7000000a7000000a8000000a8000000a8000000a6000000a8000000a8000000a7000000a6000000a8000000a8000000a7000000a7000000a6000000a7000000a8000000a7000000a7000000a8000000a8000000a8000000a8000000a7000000a8000000a8000000a8000000a8000000a8000000a7000000a8000000a8000000a7000000a6000000a7000000a7000000a8000000a8000000a7000000a8000000a6000000a7000000a6000000a8000000a8000000a7000000a7000000a7000000a8000000a7000000a7000000a7000000a8000000a8000000a8000000a8000000a8000000a7000000a8000000a7000000a8000000a7000000a7000000a6000000a8000000a7000000a7000000a8000000a6000000a7000000a7000000a8000000a7000000a8000000a8000000a8000000a8000000a8000000a8000000a7000000a8000000a7000000a8000000a7000000a8000000a8000000a8000000a8000000a7000000a7000000a7000000a7000000a8000000a7000000a7000000a7000000a8000000a8000000a7000000a7000000a6000000a8000000a7000000a6000000a8000000a7000000a6000000a8000000a7000000a7000000a8000000a7000000a7000000a7000000a8000000a6000000a7000000a8000000a8000000a8000000a6000000a8000000a7000000a7000000ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b00000000000000001759e7dc56557733804418f0ea6fd3b2573a9aabdd3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427dff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357809698000
00000000e5472616e73616374696f6e20343000c2eb0b00000000000000001779265225b00860251a567d176a8927c8416d27f7304502210090d88b95320b5e0d51eec36a007bfe1f0a95c7b2c4f9ad00e833bc9f2dd4160102203c4f24c4cb1f8faa60ab139a2245c977eb39b4b6e09ea1d575beede498cf0908ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b00000000000000001740d8fa2bdb69bd24ff0b861241632fa355be348a3044022014e308901e42c8881964cb4abc5af1122e1be51cf39548fb0461f44cf26394410220484234dd58a91ffffd0439c158c47f08652f1c180148fc41caa62421df962dceff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017cce58b0a597c0eb2b10b68178e6bddd32e1e15193045022100c0cd610e0230d66200c15b6b4e1e34806ee17db7ec593b395f0b33f1544c3b8c0220211ce028811af38210c3f4768110df1173c373ed89574e3d0eee7275c9b660caff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b00000000000000001737cb57f4bf6e9f4e64fe975a2b26d8ad37b5859b304502210083bd4d5ac41a8072672573c2fe008d5a626306b7d5bb3b775e429bd50a3a51a20220422ee1674f478bcca69527ccd5347c711aa04026702eb258658935aeeb451621ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b0000000000000000174736fe7ebfdf155ab1eaa5da635391ec140ceff43045022100c0f7ddd725c781c57efcec81aa42872b1791320889b820d8db2da9172df6cc150220560872a2e522fe8851f9642e42551e72ebc8e0bfa0428cb535dc4285a62337d5ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b0000000000000000172f5a665ddf88e839f2be0898b0a00b30606988233045022100e768703636589e566a237a52b7ffc47f33a90b43378b72ed8c4d8a253c3beff102205cbd729b8b00e0d27c342e174b136f11152b52b8c243b0f0d16cbfe9408eb700ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c
35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017b384f7340916f6343bb04f6839faaa3743c2f28f304402206f203df1a332df9029eb362d75af589f391f301330ac4f9ac9311f4c46e26ac602201fc0ac2e97b2ed60737644f1906fb7265815db6a2013c3df674e8274a8352f0eff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b0000000000000000178c86c220a027fb8e55429eaa91c92fbe9fc978aa3044022061dd2066ec8fed52d6992b91f724fc4b10d5ee772d2bd2e7d980501d773c0c1a0220189932d1ea738fd54227ec0019ddd7446b216cb0d839f716a7899521d47ca15cff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b0000000000000000170e52656c664a3401457644d379eccf63219051ee304502210090364cc31769fe896d265797f1048147f374ce55d5b4544403459e95f66567cb0220246454cdf2961c4246b00a70f0c9e5f853f5e3ba34ada1f45ba5b40ad86a5c8eff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017c277b97583e4e3709292528d50a988631d73bb27304402201955362c3285d9452ff7a212fdb963cc80346e1d1ec883cffba07e7fa53929d7022013cc3982a6c3fa482e1545b5a856b4589f2212782c7aa4efde6863d092971187ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b0000000000000000170a953805b93337eac6e84e7546ce431ffa11e071304502210089b1238ad440434ddadd661bbe1c9545990f9d7e18625ea6d1501608cb408168022002bd4e6022f29d967fa28b3030ad6f5a50d3d82b8a2d0f851b6c1d266745deefff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000176b7f10a49c1e137a728a6952f0b5236b841d677d30440220779694b27ec4fc1861b071708caaa0f57975f1335e9cacd71d4e7880cd4b1fcb022006badd77518de6eca6d7149c240ca7d028063552f4653db46794e818b21e047aff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7
bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017fdebc4b239afc2e6903ed93105125aa256416a3130440220405044882c636f9cd266ba6dab3fb1044b7225e6c8c31823b1c07f643dc2246c02202bc752c7c077310a1a9b07431b32f6333492b6cd18f0eabdd6250fe827e8da74ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b000000000000000017d5da08ba4319905bc4d2052d603e0ca776b73fb73045022100aceb5f53684a8c9c8339132f30369a82677cee0ed4485421a70a34016ac61a8d0220499ddca0d7a6180452c25f109f737a8d982674f0dd3f2d42a191c5790dfbf34eff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b000000000000000017594547725060beb91e991aceb0cceeec84d4b7953045022100f966cc6d0d00932284e8088557cc113378847236cb22151b8bbd10bd9acfaa7602203bd4e93dcd72343a9dfe60b36d7e0deaaaffe7676ef6083e839cd2451aac7f36ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b00000000000000001762ca3bd2c81b0ee0730b2edfb63dafc7510ffd703045022100cfdefde6923cd0c64d2048d6859dde6ec75b335883043cd547153df64d83e80c0220341247ce11b798fb90cc0a3f49e2413e826d8b020a0dbabfbf7104b8d4829b16ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b0000000000000000175099ae927c19d292911fcdc5427e5ffbe3fb104030440220177594e9b7d1966081acb422076bf4a585ff2e715667b048d74f63ca7c7ff8e502203b2505cec5776e092bcf6a56bab9fa8198d607f554f7794da9d51aa75282bdf2ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b0000000000000000174a0005e4612043b8a2f0b8d6a4fddf19a4c5a4103044022051cd54f7d947ef4f7f673ae2af5b6410c109a9fe788cd99c0273d0effdf6109702206705b4362c7fdbc57b10afac5e98ad274686de1d30ac3859d6cd73241e3c1851ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d28650319
9157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000177c6239437aa46aa5ef2075666ca4297eb9f8410f30440220185d5b31db25b2feb61564f36d57262e2c86826dd71d0100f8fe7e208aecfa2002200c38836a96b603cef11d506ae4fd6604b364197a342dd3ba6bf4944d1b1a7985ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b0000000000000000177fe700020debbafcaf8f8c1a862289916a11fb693045022100b47e7694ac15badd2f31e1cdac3be26dc454902a0855d8c989246c31126631c5022049691f87c1c178db989a6d8c1f42e24ba4ae1e767163378afb9a028112c37500ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b0000000000000000174c2241fe6d8f9d5d11fad9462c878784095518df304402205f405a4cd637263fe0a168b9ceaa848c96db344ecd7b16ef0263e2db7d9c3020022063978a6a0b77c796a279e404d8d736e99810205517f797e5787f10db7ed469d9ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b00000000000000001711beac93f240795e4b480aebc0103bee86580ba33045022100b9c5d38b2eb6a7cd625ca8d4d9f9856a95516c9eb0971bb77ce71e6f2c3955b5022029d762f117a27b4ec61abd6b9f929e4b95bff88b687a8fbcd424d51a958bb7aaff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017a4fda16aad9000bd4fadbecca4d3a0a2deb0103d3044022070700d8de3ecac8750cf3eb624972cb1b9395eb9cc06871c59392ffad2b879b80220161eaa96bb92ce95c345981866ca67dedd1df04a137e27b7cbbed5b236293539ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017840121097caceef4c5a46fbe679ba6ecbc3bc05e30450221009b1ac5b9b02b87a1cec17e945e70ec1c1940b6d48d09679029dbef496f8d031c0220567914580441466a1339d92d99cf0893234ed6b5e7b9f00407e6e3bd972051b7ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d
286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017822df13e7b00e0f671a7f120a8f78bd78eb31adf3044022059f6d5aa1a5e59b9d5b378ed40ab3d109f175d97c8c2cc1e0fa98ff45ca93ef50220056f0a2fe0c3b6fafdb3eae2c49ae05ac6cb62a3b6b2571dd45d1774b90bf384ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b000000000000000017efad13da9347cbbc96683505e808c8ed14d46dc230440220538781f8c1a5587feda6b1333b86d979360a9f53cf93b29230c13416dad81634022019daed2374f9c6d767635413bba689ee3ca28b308c600f86d1a5fde4080164c7ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b0000000000000000171217f283de4bcf1b28e1e05c5320373f9cdc195630450221008fe76370d9968412848282580eb9a8c253d027fc4e610673857c15271246774b022003c249b93d51d344c2c92435c6adf782dff83da7a38b37445c7e0f007fdea786ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b0000000000000000179a9e5009b0145145d954df555acf50b36b0469443045022100b3861e0c3333c7bef777d9b3342749cabeb006d898c028c39cc331f17d8e379702207eebde356b9f3f5dc5c2f9db0f46fb1fd2e350ec796d023a065c8dd23faf4bb1ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017305bcd10e47d011cc8a30c8680a09bb192253ff23045022100cf711a506ab8e2abf5bf7323c5e5ac51776fc7778845dc7f9686286d7044f20302200bc0fc09b71567a83447983623f45266a68d8a6428dd3d18acc82a042945251fff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b00000000000000001747ba8f15f9b7d8f3a4f7d450d5544b985692110c3045022100c1657829e9bf119301eb33228787b438aed6f3f33b61e3c174080dacfb9990ad022022759aaa39ee30254c272d8015b4fb33fd466e830f9c484e530f4169acc17b85ff0117004bf0760303d7dfe44e771039334f4712fb95ad35525
4f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b00000000000000001779e55b60c0e72d3d785bebbd33df99d6bea7af843044022052a46f491850b93b1520071b6f7b6525b97b604fa7e93f8172eeaf30baad247802203ceb22389721211041ccaaff83c9d0279c81e25afe632bb2e254f35e521cb94eff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017cf09b439324369cd49096fd2746367081689895c3045022100b7e1e1bc99c5d3144bce62785d82189232bd332afb98732b51f80747dcd2701302207caf6782b25e97c47eb6ea1d436f84b528b3d8f3af7998625b74e68845de7f6aff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017ebb33c4df59859924201cbb64ced6bef3adbc5a43045022100f7a8046fd9560543185d153cc9ee7785a295b2b32357d635131cc242dc42749d02204fb0b2238ffca4fc4af42607f435ece3fa35ade4d6bfeff6ffeee817f5498412ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b000000000000000017ec4d31936cd27e442479e0b3fcb0085c66e150173045022100ec3d0752a914671cb143dc3c3020c47ccb72876b268c0ef1d4d82fc2ee3f000c02207e7e8bcee0a10e659e350af5ff00e5c937f9ed1f016004d9841d988fc162e237ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b00000000000000001784de56a2ae261edbf5713002f4a606030a5488a7304402206c8ffd3d3c28c0e2873e526b774705ae81fff2ee4db765966b6c7865ea223d590220225a29f3fa0f2adc23a6e1e16ee3cd5a48b5d8bd6c6baa49321b720d8646d35eff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b00000000000000001768889616a3bae1ed2ef024f0223ea0b605354ab130450221008e576c71758d736f6cd200ef8faf4f7c9c6419328e46402b4e01f390cf997a0f022068a6a539caf98a7e89ca703258b8bbbb0b70aebd3bc8bdc67d8a8705ae23b791ff0117004bf0760303d7dfe44e771039334f4712fb95a
d355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001759a274212ccfe83a17c09d8c13ff6645d4a1c5073045022100b082481a85c259ee986c56ce3c2c8ad768408df9232081664f1a37b65e8f0ff702203084e6e60d2fb70e02bb67f581f5c3bdf7e8d4cc3129d220ecca3a81d47b57a0ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203300c2eb0b0000000000000000176857f83702fef96513cf41985225b48e34436b80304502210095b3298dc543b24a40f72560170e2275ac832f58fcc67eea128f604a8a9af4e602203d653556edbd647b108c0e15254a7f4cd494c8d2f4baf7b8150d47c4a17fabb7ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b00000000000000001732da82ee270f1e2c2fc7deb39e6e26871528dd74304402201feda5a75b2a8959d50e83765dea95f7fc9dcb446cac5bd7fae66b19cc1bf2cd0220397c484c6f76fdea8b284e65fb9c5795bb4ed9f98d7ddf079699b75ec27b341dff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b000000000000000017556357283c7a590f3f75446a2c57730709abe737304402200d7ddb69d9c65602a44908106a18dd0d6edd3e1380a68b0a11238f1ecf1ab97502203ab0f152aab1edc581863328e07368553641488ac1db0eaedf8c3280e5d18bd5ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b000000000000000017306330b1af7a053147d782c915501f46a42856703045022100c5409f6d6c89159b29fbc4a5d37f05c868ba78158f1ffab9f0bf10f1040500e4022011bfe8fbae70afcd5d13b24e96d7e077bf1bcae1054cb71a6b343108d2132d50ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b0000000000000000173b362003c69ae3902ff355ba3ab40fda6b6a20fe3045022100ceaa0614b91be2d58f959ddaab130b28247cc3bfea0e224e9e6e155198d48ad2022076fde90c50e6e2d420258fac8007f6f6986bb328ac5df947d6a3746499dc722aff0117004cf0760303d7dfe44e771039334f471
2fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001754f899a104b8f9bf0baf55f4c75e3fcfc0f76670304402206c78646b85d465a51845908d8909275269ba13ffc735730173b0b7025f0affa3022068bb2b5ca22549c13dcd4b6a1bf41001b166f074181f104d44358bd7d4091248ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017f0deb3cb96b71fd78fc597f46f72a082008f0a6e3045022100a3253f00c5384bf3f0a63197d389bd640243350f389b4fc6db38cebd4df11b0f0220222592faaff23d0d063ca7db91116525f9374102f69bd8cd9929f543ca8ee391ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017ca534e2c38361fbbb25d59cea2e835b4fa954f023045022100fe7f435c46def0f2be3966ec5b54e612465e053ab052534035c8e756796548ad0220019c4bc353605ddd555673d490901063b0fdd571ba03068cdee77135f63cd30cff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b000000000000000017407c452cd3cb205bb8588c690866a777280a697c3045022100eefcc04db1af1cea3ffbeb1c2bc53a8e0726ce2c4c435e817786db7c6ef632ed02204dd9eac43ea01295a255aed970edbe118be1adb4d3521d12076eb81e77809110ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017b5b7c3d4561bab29857aad0c20b356b429dc8ca83045022100f76c828f2837c9487a4b1bbd5756170479fa3a4ed48dca031cf3f620f62a99ee02204da13730862ccd2b389dee441a7086f0191282ef2c5e2a3d32290a15401b2a90ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b000000000000000017711127026596a58864e8346d1b4b43265dd9be163045022100ada204d17b23ad4cc21a645e5cca0a472a2646526242cf8683236b9aaf85a21c0220348f7e2b164ca4c373557b4426c9953e7b14c4dd130c6857d8ee314db0a92305ff0117004af0760303d7dfe44e7
71039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017dde2acf4d0798abf18ec499385dd96a948441e3f3044022041d3dc6d101ccaa3acb9622d9876e0ae610246147b19fc0831928933abb6ad5b022055a06fd34646626595f3b4bee99d49a44cc5dba02ede0f6ea35487a4f89511b3ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017a332076f2167b39601fc396d5813f1df168d5ecf3045022100bb096efaa786f0117b07303ead48985ec0ef4fc80de31fe8f456b3e412f5d8bb02200aff57978be91751b75dd0e5816c64b7248d7a5b3c8c3df00130a2c80608da10ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000170a30bc64993a755c45f212733eb031a4285d2d123045022100f4245b3606e03770b98a7fa39565d7509a76f810924055da9a6a600c6c01959a022050282b850eee19017814f547ea3587a033a0c93ff5d2bb9beb1cb67cba138646ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b000000000000000017e8cc0e60adc7d87069f3ee3ce27fab54e28b195b3045022100ef03265298c52635384e183286e4624414bd92fdaa0141d05526cb7869ce9e370220718af74f83c06e5fd13aab1ceb21f4440191275a308811ec7c2b707035a7324eff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017710ff266d4bc013a13c9fbebc80b0f3669740de53045022100e083480002bafe1234fb63485f21d1be94419ed69ed432ad182939a9a4fc4d780220367bb8bf10c3c5a8a514db19f0ac65cbd6a1791c8be4dbaf78e4ba32624ac4e1ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017eeb555fc83eba31e50fc83abb167759703a1491f3045022100f0efae4a199bdaf36375e9904c5ef61d3b9128f3f0f00136762b8c877084a231022012bfb4eda5252c660c926b4a8881c0752bd658d0692701636430ed1bae39aa21ff0117004bf07
60303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b000000000000000017740cdd08268c4e41b44b6c58fa2b281d02ff9d833044022040f3c91ff3012f2f74d93c2dc22459dba4f5c598d74cecdcfc8ccb095bfad9d30220025d0a85f560c0ac71be17806c2758db315537bc9eda6dfc2ab5eb597e8e7a7aff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017cbb5cc21a6b935cc0ab0c8d27ce3aa73ac23fa503045022100ed3e51d4a1e313a45c423ca67ac851119ebff00f2fe7408f2f8276da09d0e18d02200e679fd84c0ea05bcde16e87752744db95c1d6623500dd6e8b3eac91850e72b6ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333100c2eb0b000000000000000017c798a993d4823466b864f36fb8ea9b30c51ea2403045022100f8ad6ad2a942b64efbfacce82b6491b1389404b9a6633f8e10e0201369d7bf0902207e1c519cd878036d16e57bdd1cfc4e95c706dda677b98db6334beba7a9a491c5ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b0000000000000000170e98048c8bb91aa761e72235da3de66efdfdbd963045022100b4b976f7303b3d4638a2432d9ecc99c2ea790761274c613f4730991ff1c40cf302203a749033f2dfba987a103e27d55d8280b8e55f0fc121204eae576ef72ea7687fff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b0000000000000000170a654cb7b1392a92a702d532d73557cd81c4ec7430440220533598e3d736748f22a7f96e362b3f33c6ddccddcdd46acebce7ef598864b284022063ed82a97e87173926cf48e69cd1656dfdbcd69c1d5dde0e31b2d7a63828fd6fff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017b527062800be5242c6dd288e5fc842d323b880df3045022100e03cd37d0dca988195191175306878832915fac89923ea3e6c4db35e424e8b2f02202fac9bfa81869d2bd6bf4e239a58aee6aab5eb795f06e6a20bb41a9ea692eb72ff01170
04bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b000000000000000017e2ede64fdaadc3535d06d208b820edcb175162a63044022024c14e8b3d656e736890766706713f1224c58a461de599201a2cccb586cad23f02202d6fa8ae3ae24f494037827d1e27511073422496b244a25ce610348f52e7c3ffff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b000000000000000017424f8c905f2c4533176b88af2cd8cbd16bcb1d873045022100adb464c5e55b27e3575b3c0d721b69fb99f13299cd6b3191d16fa6b298683b5a02200b920190f7b37551c217ff94570c0b3eebf572a64765f7212ec324f9a2034ed5ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b000000000000000017d3fece02925920575ef90eb229aa631707084bd73045022100b632230b766f89e91e24519a93446f7c8a7c6e68e34c7ca00e1ee41b945d78b302207401fd3c6be212dba6763564f5d25865168b8ff54fe1dc52f5d5b12479f3ded4ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b000000000000000017737f4c47c74919c2aa05befa3a17145334fc0b2330440220142eb40c16bbf50d63ee294a9cd87945e7a942c2c3cf852443be583fba20542e022053050ef63794d52a261e531f16cb3f01a118619fd1c4763bd93928eee7fa036bff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b0000000000000000179d2c9324e662ac5cf922b9f5a0bdf806baf737273045022100c3ba0ffcbfa9db3bcef1595e5836b501de96ce676f0aa33fe885d0721292d93502205cfbf32420b07708340c3ed60ffd29a22cd37b3e55abbfcd2aaa118d48e30347ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000174d48fbcb340a2691556715b1995e51c360adffe73044022019c6fb89c93e746bf5ec3a2eb1bca072da112663427f2db5778d1f9a7e4b7b22022018f48b791669852e13837236d5eddcfc61fc575c8fd15ceb8dc54304b1edb1a
2ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b000000000000000017d190f1e81c972becdaa6d8ac66628d1b5a8e82de304402205547a585292262f56a6534c99685e1786b13812773d132476d09c19cad545a5002204af895618f5cf873edfda3f1a20f46ff8eb40ba1027021f5670b112a1a8d4b1eff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b000000000000000017d25825969c0d9d28ffd5bb1b3478901080edb0de3044022026416f54ae43a24634b705374dfe8cc1abace3c3013ba51eb350d0a02bde75440220726b910bf7304342b5eac57cd19c494cb2ef04b3ae0eba0bed266a26b9348c24ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017bbc327b6644dc42d55a8e8f32ffeabac37f742c63045022100f92ba535acd2d4cb2aa14103ad2b2f894cc78167899d16061a85e215f5ec213802206ce4ad145df8663dc16bfa18bc8397daef98c3d2b05f02a1a1630f2ba59157fdff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20343000c2eb0b000000000000000017abe730919c31b471ac0dbf9b6f12bad34416f8bb3045022100f9400dabee7c2749136995628b0813cfab1dd763bd7450717cc0af2bb11810560220683de15f7d713affe5e6de9e3321a6248481246a571ec9f96e648e2f1a0ee24aff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b000000000000000017527ce40209b488f22a14f1ea2880bbe37c15e5963044022005cb3b17865f2ef8580115395aa593abf9b074a70aa2859c17f5fa1f2b5629fd0220512306967c8f5e33eed952bb3d26de6f451b2c3954198e8db1bb5cb045521c50ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b000000000000000017e9beb02e59ebcf1e87f5b2990cd5fc8a523ed152304402200746188cd79a980f63a0011884ea6d34d4af34e533ab0bb18e622bc4b1b1b1b102201de516bd3458ada944d0d46eb2295873e5ab5f929c2fb728fb45ad81a
53bbc6fff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000173a8aefd16949d537bbc3fe0a4535920af7501aab3044022010cd7a8ce57ce47221da59de3f75e571fe56e86626d9017ca6b80a0ef3a3508b0220078ee6036fcd468eebadd85b0e0a0a6f8dbb952bdc24feef3ca4decf1e058155ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017fb982d2e09e48f462322061b520f5c4c95567c673045022100b2e14e6c3b68c1d819c8da960c6d47b25bdd25c4856c3e1bd718265b292080c5022040dabf8f95d8e98c962d1733d10b8cfa6749da5b2dd1a521b7778a0306f05d9cff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017a045594b433e0b2c1f50dd607688a182d624d5a8304402206bae61e2254f416d8be818e16ad5eb508f2f5dadd9ecbca1ffe8ae427721974902204be5a5e1e9103898eb8e8922391fdfaa5c9d49c0015d28d2b7c462fb8f0838ccff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017ae968f9f5c4b0427d350ec8502fb3867c52e79903044022009959bb250bab7e1fd893fe7fd841fb17431892df577953bafb31f16ab20babc02205859d9bef6e191aff9c75f799fd7bdae6d8646ecfadd3d8b29a7c13471dfc3e2ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b000000000000000017d0c946c5e6e51a6c879677042422799a80e6e5fd304402204bdcd333bc3b41557b7b805a89f1e8b77fdb6d499736078ccb48c58f6431621e02202b727f6b6708d9459556c02950a350eb75e804bfdce9fcffb67c8cc95a8e532cff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b000000000000000017bdf12bd72e628fa60082963673586e9b96a8725c3045022100ebf7c3d8abefd8fe062fe52d1a226179fe2165f013099b46ba3724c345d51ed202203d8444c63f2d7a0f36bacb1643b33cbd47b02d800d0dd233f
bb6e64385779380ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b0000000000000000177cb1f05f837c26723f4a3412fb2353226b245a7f3045022100e7b66efac64def1c8001cfb6b68e2b16f85c0f20684a760bc93aae385126d25702200253492c751f8c704eb042192cd524791ead3b5eda6e9afd0e71c58576127fb2ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017ef886d3c11609a79d95ae6f1ec733c0dcd9a6ebe3045022100f5396a022fccf2e70b2eb3873271e9021dfbd5effd706997f704e24ad722d6f502201343d51f913ecbc9a9ea6be1a97e5531f98107132a9ffe6a0e5b860d4333d039ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b000000000000000017841a086dcdb01b5a3688648e467449ff4ccc71473045022100e9221f75315a65c2b7fadba4dd5f668df15f339061b8e862e926fee8d7990e44022005f3a33f1a680fb9822c5a0ccdaee45a8db4c3f2fabe5b81ccd4c0e93ea8daa6ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313900c2eb0b000000000000000017559f8ef0575e8ade7a3b7040f02f54652e95bc943045022100c21caaecd718c72077f0d09e8ce04293d1ac27473c0545f3e3b1c8d0283b99ab022000c1a1a65dec757dfae0bc41d220a142aeb9ee311f14cbebe6bf977a0cfbe53bff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b00000000000000001797982b30f6b666ec86b0e2fdc3b08a3b7479cbf0304402202e93971c4bef3ec7a4117177ad3be74b0067dc863d136d14e50e43feb62a6515022044bca36c4e4df2d40bb1f610eaeb0ce814acc0acbeb4c46c876a10d533071619ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333200c2eb0b000000000000000017b406dcd28615b6787189c1c4e9f6e3348037d8b03045022100d09d4720d0517c00672733b12f328a6a2d98d7ab150bdd810c82df4a0e021df7022066aa615c7b9e481ad31c06c77cb6f3c9201
8c6ed8c120b2cf6e14320679b5738ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b00000000000000001798c09cff1628733ff3baca4b61e838abd4fd17663044022056d79e8035d942eaf1c5de8722904e4ebdf4dc05a72ed6bb21fd16dc6808044d02202ec527272752ce4d75886dd0e39690e6bbe2a652db51addd4279dfd149f4dbe4ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b0000000000000000177a10018a8e0b53ef2ba10a669892a7ed8120e4d43045022100dd67d44ee667c3223ca757fed91fa2cbe1cb9cb42109fabce6eb83dcfd7d8747022045811f62696dc8c6af6554f55d8a9c5f2bd3f80271897c18dfdf03f60349036cff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b0000000000000000177b5daa22d0d47c8aabd77e099011a5c5de5ec0d130440220348a8a248a43b290989893e559fe0a041d77357a62edc4dc990555da00fa817202206aa92a6d3b06695e7600550540204c7b8abedf0b686449a07a4e6a70c1fac395ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b0000000000000000179c89afefd44cae11fa228af7cfe1470208eeef4e304502210099c1981e0e8f968bc115039c025eeb94d371b241a19d98002821f10002c8aeab022046e9aed58e820cf371e194d408d01090bb7b867ac918d47015225ab62157e462ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203200c2eb0b000000000000000017fe49b6f1b2bacbbafe7263bf3f4c66c2332e71bb304402206b86a185dce4e776bb5f9f95d4b5cec4eba62873fe88ccdb864946fbbe60cc7902200a821a5bb1d0f2d983f91d282ce60826b7c616005ff6db0d4dbe3ea11ddf8ee2ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b000000000000000017bb95af90f1587796c7d844c28d016ecb96cf7b323045022100cd1b94922995ba4e16935f2380234ce7b6da26fcbab29f3e957aeb8ba8e8d87d022071757f37077132b0db0f217b65779
1ebb971fbaa7ff95e183ba3cb5858d525f4ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b0000000000000000170e2059d5f792f009d8457f67d29d72ebd5cd8e35304402200a1537addb2f2f2e434a8c6e4ab32acc59f7653226218b7fe9bd5f82f98778b202205c77341d3ffc0e049aac2a476ee978acca2829d111552645797613f3ca616a4bff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333800c2eb0b00000000000000001756cf720576a7f775bd1d4ac4f74bb17e45138740304402206af353161fc543cfab5d90a58dba1438571506cad30a5c78e49cd2f28ebd542002206ab2833769213fa2603ceb14e1bf88a1647dfa4a28e5459fbc47c829adc34026ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017b858992acb67521bcafc16e7d4d1e93005c380443045022100ef53b0b4dee8630494f812ef6056c5a5821eb2478a0cabf55039d35db9ac3838022061845377b9eee8d4c20cf1a99c22f424a9ea21d842bc85cdaa8c4f4ba05b1f35ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b0000000000000000175060e8c493e5772f02b9e88e3973ae1ad35c4b6d3043021f0ef0634992f4b2ad6b038e58d247ef4476a1eda7f2ce48ae48e24a1d3bb54902203a769d1199251a4df6d83d0db299e3bd0c1363c51fa3e9f545dd5c13d730bbd1ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323500c2eb0b000000000000000017229c0d0469156c68ff3bedef21be4a5a652549f630440220435a475d12adb82f40a30520bfde3072e9a8fbf016529faacfde40dea4c7603102202d7d784c34f089ce93af3a902936de4960005d17a9d64f096a87364d4cd98625ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b000000000000000017e7f5bc11a5b2237519bf751220a5968483b3dfc93045022100ce0abaa1e6656daf0cab3652b8f9355633ac16beb264dff7f076c9760b4259d3022070a870ab901efa8fa4e54b9c1
1fe201716ad922aa73e69cc9b7c88b71acc73eeff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b000000000000000017f0157c4c48288c93e420b6932ee7c10c119d7d983045022100c33a6b77cd9cd63bc5257537dc90f0506ecee8fd0ad34facfed9132c54b9c17902205546cc10e43e60167964f37881412c1f4e742be6f1218749e63e1bdbd5be5023ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323600c2eb0b0000000000000000177f106853bea88b3b4ca2daaa2e9677ffb019d274304402205d25a0dbb4bd258e2392bf3ac4faedea4e6d24bdbbcdae653641db5eb532cf9e02200a38c7ada6731f470861b929d5f15f9c63a56a1985345b9d86ae005acf13b073ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b0000000000000000172ff8cc458a6815c2c15687aadc6351bd3dabb1993045022100d2aa5946b05215cd1cca45fefe390ec78081406d6b8f13e724cff599f046d6f1022035a2e6ff995bac39bae2dae1232f7ea2d0ba5897b1fe0ffccf1a0c1172d89308ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313500c2eb0b0000000000000000174e4248b0e82e17bce3d97fd49c9b9190e252b62d3045022100e88aaf7ef7fce75a0554d61932375ec058d4f0e35979bc2d3114e8f1c9d08040022051dd6c51e46258ce1b5c6c57e84194325629213b7f9bcabb879d51ac037aa6b5ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b000000000000000017d320a267170f665d111adaa1960d95f9708e7d073045022100accbc39d4f4e1e863962caf8e6cc548dea20ee1afa9e6e1806e067da9ce4d3d202200a1a38c83468157af05584a85ccfebb9e4c551c2237befbfd54a51fdcc96b168ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b00000000000000001768c6c5f98b69bd4d2f43ccd0d7842d24d0ddf23e3045022100c7c0193c159cbde8a09dead106919732a3b4e30a5cf51617314133454c65e1ef022074b355a40e5
9075104f694c19d785c2b8ccd3c1ffc0ee17fc8a2c919bbdc3ef2ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20343000c2eb0b00000000000000001730cc52afbe325a52f2bf8983937210c52650a5ed3045022100eaec6482374438ccc03a12bfc06b8ada1c4f2769d1b8451eb643213be07f25f802207bf68341be74bc813eb1ad74a0b56d16e27cb5954a61aa971296f81823d65d6bff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b000000000000000017042b228f903384321ef7e2e38f413a9049052170304502210090998a023faa9149d75cb51f38f98adf38e39cbb5dc15cdfcc0eb5beff9c2eec022034f33fa9ccb43a93965804e455e08adb9f00910063cd5a87ee70ef376a4c5db7ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323100c2eb0b0000000000000000175a821c2ba6fd549322c6a7994ac6fe5a9417f9b930440220494a3190e1fc982d01badc71006b776df163583ae1ac365d7dc4cda0a061373a02201aa1f3bb1d2ba405750c585784caf24f25b83f57fc11e0f675d12d2fe3d666deff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333500c2eb0b00000000000000001759854238ed033c028098256d30c4bd03e090f233304502210080d7de0342c4855c0c1de956e716623341fce1b4856a1c6e00ebba7bdeee4b6d0220172e8ec11914e21ed369fad2615dfa95ce09b3eec8f3a03e453b4050b7baefd6ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017927ac44a3d0a64edebbfe4dafc8510001eff90a530440220257e5ed2aaea59a5f1a4ae74acc59dd1c68a821ecd9eb506ae7fbb01cfe6511f02203eadfccae466fda0800b0cd401284f79016742f94cb6b8e23766e625e7634855ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313800c2eb0b000000000000000017bf515df18a873bf775604cf142690c5c9e2216af304502210089973d46853f64b22a8e5ea082741ce61a5b65ea627c2e7d65c6b68761ea9307022
064f1232ac967e636f86ba2a1cb84b898e6ef433fc201b893914ec350ae5424c9ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000179a245b5d5ace31e5432c7cf0e0ef83ddce7143a33045022100d399c822102d6ef40c2e9d4117055604358ef4db80943936c99c3dbe2fe0c9e0022076522ed280a8b48dcf2e1050ada11da7d0ba410cd405cc3575886e524bf74b3bff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b0000000000000000170ce3d695aa0015271cadb4ae7cabde52e7bf881b3045022100f4160aad6909f8159bb2ba3b677a8d4ccdafd769fb7fb193cf45ef5c24e90b6702206f9c06f30f72624b140d0e79994d655095b9acfc633d65662ef982b52b3554dfff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b000000000000000017b6f21649c7b1b1b9975312888b2809b028cbb8353045022100f21501ede148ae4b1825747d5a35b2f212392ed213a3de1353cc554cf9fb450002203540a0542bdc74befa5e7737af0a76fd511fcc1d663f3c0ab21e1ca9bdfe69d2ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323200c2eb0b0000000000000000173894e92a8d11945bfb593acdb9e619c22c989d2d3045022100d6ba427e0f702f74d0e7ce778354f5ff0376c775e53991bf9f4057957feb2b7902207ef90dfb96a4c8c5a296ec89589e631913145e9736251a82ea93c48fc765dc3dff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333700c2eb0b00000000000000001779389f9aefdd03d9d3788bb08f1e5d8b6567aa1c3045022100e921d8a535a948010be53a418b14537894e5cd59f4798c28afb3a28b1eef3a6702201933fc9972f945e6a75b98bad43ab4c1d5aa50108ecfb0fc6d4d98692cea74c2ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b0000000000000000171785c644be770b6b42ba6d168e8bd5dbe1067cf63044022069b32ab58125181997f13326bd5bbd8a51c05ce6ca1cb3e05d211aa
bcdf094c202203ff626f8686e5eaadfbb3f0b0fd3c2d97a27da67df60d449bdfd1a7111691998ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017067c78e18d6221da34c66c5b99c49b6acdf7d6a03044022046e3fe677c86fe992839c0b4b6c8d52ed9b0468d746144f090a3210c3b22839a02200edefed7a6009d614c52d0258f137a9469e1b945cc44a9a966074039037b6050ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203900c2eb0b000000000000000017da2752064521eadb7cac06cc69a82ba14bd78abe3045022100991d550584f937ab579d30b9836d2ddb9d07a53b62088b9ec7d812e831eea3270220130e6d8efe60fec26dd14dc6190c1f68bd37b044c5e48669da7d129671d2fbe9ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b000000000000000017dce1e04722ba02e42d74bd57303917048a3b9a0c304402203d37d32b85c84245f3cf89d3e3187c61eb28f9140cf07914891da61d58f8268b02203e68eec8d8c2c2e06261e5a39d0a58fafd90c46123f7e09b28c6ba88a489ff24ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017fbe158a34cc27bb228e101f482ed036f96b824743045022100c1fbc4813b8370edb55d74cff95931f9e5d2e44587c362a184e0a60861aaaf94022075e2c4fabcbeeb54f0ac98f3197bb7aa064e5609a4f7004a51b180030d98ec78ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000171e0f108e44028a522525837008ba0dbc307e5a1d3045022100ddb81efcdf3bd77b70c048f3d8cf9752bf1bcef935c73e5f9a92066a4f2c5c18022034a7cd1b0d4a62e88a1d289244462794648d0643015c3ae2fd6c3e92bf9269a8ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313200c2eb0b000000000000000017ba531b66b0cdecdbc3cf271612d42265b1d3a5503044022048e574b2570893304e81957458715508df9160125def85414
f5120bb8d5120610220737e6c0775977e19ff310a5480a0a402fc6b6ae6868cdd551d2b30244b15285aff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323700c2eb0b000000000000000017257049419ec6524607a68b33a0a36ca96ac30d5d30440220106fcf7ed726167522846df88446c32ba548a325254710ee229de51b6f1dda6b022054bb708966d6978b949cbc5697d3fa653281d16c7881443377d27307ace5bcccff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323900c2eb0b0000000000000000172c04bc6a74f4c3dd84a9a58123906e30786c68ca3045022100f348c5c1571df94075fe18c72472afff2a8163e6af45cd6bfc4c55bc7af895b302200140edc244687b44fdc245bc0a18edc09352a953f6ce7ff659f8935508027e93ff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313700c2eb0b0000000000000000174222f8f0ed1b116bd939ba2d602e0ece7b3f49b43045022100ec192e32473517250c25ce2dfb79082f34c63017ba2532a6e56406dad64dd8cc02207c5f0d699c9b76987e06fff3ad9788dbab9019653f0ae15555763b4e22bb02b4ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323300c2eb0b0000000000000000171b272cd1e6b978ae9fb815b40329988d2dc113e93044022059870246ff0cf10fd681b29f392c8d046d8344838222f0b53b96f2b436e73c42022070b12ca14c2dee42057a4bb5986461d415502f08abfded6ce85abecdaf851615ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017afb8147376f4e4b1c486e12385d9f5516d32785930440220105f2ab1c8a75d33f29ffd4ceddfd3b175a39cd141a976645faafee06a0708e50220178e2dd98924ed91d1691a3d747fcf8233edc33e98272a24b17b77602dd0fceeff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b0000000000000000171b21d99d4c50d090209cfb717e7e3171445a61e83044022070d12ef771ea3708d2145923d4c0b4c1eb31f8b3719
a30cb9fd32566ded28eea02201a744120749b42c39479ad088a462f6f9fec468d85cdc9cf83e5cedc90940832ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333400c2eb0b000000000000000017263931c7543c6fc43bc3312498b241707bca3c043045022100cf63f159f40a6f75ddf5c4b206acce3813d94ab1cdee8c910dede9657d50afe60220724da18645f4ee23be41b697690e55a5e82a2ce1ed3808e104f0902de2c01c67ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b0000000000000000173e1b19bf4d66515f03c6f15e868001ab959b4f403045022100e5936646e020734f85e7601323f4be503f67da4a31f05210210a4d02808af39602205db4b337e1d883b1312bc03ca0195fc77a74f860ceb7867e197d9647f5d26d33ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000170dcd69f016fb9efd5f9ae47d578ed0d04bd58b01304402205e0fe17e03c3666f986dd243bc057dff4b190f14ba1cb7e19804e4c1042deec0022059cf68c76c5b2931b83e9bd3f75cfd75267b8f35aa957cf63cce3b2226665707ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323400c2eb0b00000000000000001705902ff13f07ff4a607ca76390eca20df01539183045022100967f65a4252ef1e2f1f69b754cc8f3b19046af3c7efae86baf81a85031311709022075f0ebbdf75c720696c925bee4f6161bf11a279bf7e9aaaf67234a53dcd614c2ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000175e0496aeecc3ed4be7f98c8679efc4b3d9a398fb3045022100f5bf4a65a49d712c81d8ceb9180a17527852ae393f3fbee6f556f695ea1be8100220332353e697d4916247ec68f40764c5b6411a45d0b3db65d19b699b92e5fa4fb7ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b000000000000000017af608296a6d81f700f792a6ac9da2d170749c40e3044022049637b4877124be3de9ba317970c35fe2f16dce
72fe3f5752c1df56b1c14bc1c0220765bd7c7245298676e9943b7443548d723c60def0b7217fa6d991d5866406932ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333000c2eb0b00000000000000001755d63414206f2d2acfb498cb2c2c6f0b9b84b97e3045022100ff478f5c5d86597ebc267e5e819782a5316e79987df57d154046c9c7b742d2e602206a0092b9a8709c3fd1f6168e1cba5bb8e84789140111addd99a3cff0c8dc663dff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313400c2eb0b00000000000000001723f80cdfc6e8737f06a9c02d6ca93594388767f83044022055b7ca7c018bf871e7200ebef17ea826b817305e942b3403ab57a1d2f138b75f02202e8a79588f7802aed93470cce823a5f764715b23ecd84fe531d68019b5792bfbff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313100c2eb0b00000000000000001773938849133e49752e6322fb06520820eb82ab5230440220546f1457a0e381e332b2ae915a4505a86cb15816cd2cb143411a433847c17aed02202e05926f9270e36d9d9371531d68d365de3ed3ccbb7a2331c8a3215a318c451aff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333600c2eb0b0000000000000000173abaff16c478b48ec930bc54dcb2a85767ae61833045022100b9db8445e61c6645aae74c4ee5c536ae0bfe18f63dbc5316085f895db7ae8706022029bbda8d6f0609bdb34eca51a050327a86120733cca1df7eee94f598d0baa7f5ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b000000000000000017236b377b2f235de51e10128a7a5c5cad4c6e1dd130440220186bc71fdfaedc0c31285917f74997cfd66ea4a72e690d480d5f8ef5e7a1d1a20220710c383ea6e8b4f64ed2eaffae33a1f0bafff1fb1c578a1b369dd8151ef3f0bcff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323000c2eb0b000000000000000017b35eba0e5bbe1a15c6e2504779938e94572bc7e1304402204f1f1527f8d037d69ff3de5766b719b
4b84f29d5a79ff390877a84bb9856d5da022008656e66808082b8dad2ef6d2168e031f04db664eaa3bed647395b42ffc9f759ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203800c2eb0b0000000000000000179c375cc35993d1b822494819e4e36f0035d2fc9b3045022100eacd0a48fcee232b4592465351612f802e31e0aec809a04aedb1ffc77b67970a02207ddeb454d49fcd5dd03eabcb28fd6a54356ee8ce513ee4eef6c3dad2bcc035d1ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333300c2eb0b000000000000000017db12692f9dc88fe4d380436e4a3824076a4f843f30450221008aaa31bd67638a4896b251f1e34148abcb9834c8324ad2aea68b85747525a98502203021aae408cd18175eb04e45a8bcedc5fffaa6e6effdb1c11a336976be49f240ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203400c2eb0b0000000000000000176d9b88a2a27357f2d7eeae78b386d2edf8df3b84304402200b48815ef066a0eac8a79ec4d0fc4f22b105eff437163af0573683d879b5a93802200bf1f2c2977b4065f5522bb8b917141cb82806446e57593d14dd9d632e0f7013ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313000c2eb0b0000000000000000176358f2ca1a4c272cce0eedb92f3bf8b837fd8cf03044022070c9c7d962d31b5189bf1747aa6e06fd35ae00e0b12efa6899173b3ea5e343180220146328f4a1ed1548a5a95ca8489af16a095cf0095924084f5a508c9e010d9e86ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20333900c2eb0b0000000000000000171b96e2b2a75c0ae6c7fa5626e3d79c20175f65913045022100c8ec04674367b578456e43088752a04b0f59de4bbda197ae38a94a1ad0db39f2022011bedc437d4a025b2eb0a0df29d61b55aa2d4e49a0ab01dd48e050833c153d15ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313300c2eb0b000000000000000017f8c689bc5455e9c7add499f969c3979f6cf1e6c8304502210096e14fce152f9f532f1836d
260f535c3316fa8ab5e1bdf1e4c1275fcea5444dd02203be1c81790b12abacd63b89b3fe11cd664a2c9a3519eac0303e3d091dfaac54cff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20323800c2eb0b0000000000000000172fe38431dd11fa9a38bac772feeeb266a13ddb0b3045022100e67098e07447a4f767c9309ba315e89b332cabee4259887111745e83f9cdc4cd0220531cb5f0372caac1ed702394f9722dcf1106f115dc772272fb470db885eb49ebff0117004cf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203600c2eb0b000000000000000017fee784411bbe0538f549eee19cf6593ee1d0378f304402202fd4dd9351912093bf7289fc6a2204cedea60dee16bf6c169f82fd919e94326402207d2f7ffa0cd4e559e39421fec818020859e080d66d4511f6b86af8922c4bbfcfff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000e5472616e73616374696f6e20313600c2eb0b000000000000000017910efbc8d502d7a24a13533a6de93b41c5c74f4c3045022100f1fdf68f3d428be0f9dabd8cf20aabd4bc576ab63e666c339d7d17597f8b023b02203854f0db9240abf17d2f245478ad287d8514c1670fe2604fd7772adc23fae095ff0117004bf0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203500c2eb0b00000000000000001753fdee9530e0d82aa9fcb0d8a378bc0647584765304502210081b45f9b0bdf11c4e0944eef58a960b99ed022643b2f3d18dba890c1f02d4364022050eeca2c2e24440f6f18fec7f019dd27c54f7cb04b152a1a75b324816a859c79ff0117004af0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203100c2eb0b000000000000000017751964a7f75cd482dcc3838024c6dfa7e05f2eb83045022100af03a6ea5d5d9a997465d16580a7e59fe6bc1a4cdf85304f0fd14b76ea83d88e02206be74c7f22acbfb8496bb10c60024dfbe0924de7e26b912735826c8a90908c4e
diff --git a/benchmark/fixtures/transaction/deserialized/0.json b/benchmark/fixtures/transaction/deserialized/0.json
new file mode 100644
index 0000000000..0f938cc53a
--- /dev/null
+++ b/benchmark/fixtures/transaction/deserialized/0.json
@@ -0,0 +1,15 @@
+{
+ "version": 1,
+ "network": 23,
+ "type": 0,
+ "timestamp": 58126413,
+ "senderPublicKey": "03d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c357",
+ "fee": "10000000",
+ "amount": "200000000",
+ "vendorFieldHex": "5472616e73616374696f6e2037",
+ "expiration": 0,
+ "recipientId": "APyFYXxXtUrvZFnEuwLopfst94GMY5Zkeq",
+ "signature": "3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427d",
+ "vendorField": "Transaction 7",
+ "id": "00d2025f7914a8e794bdaea404a579840cf71402cef312d2080c7ecd86177e5f"
+}
diff --git a/benchmark/fixtures/transaction/serialized/0.txt b/benchmark/fixtures/transaction/serialized/0.txt
new file mode 100644
index 0000000000..899198688c
--- /dev/null
+++ b/benchmark/fixtures/transaction/serialized/0.txt
@@ -0,0 +1 @@
+ff0117004df0760303d7dfe44e771039334f4712fb95ad355254f674c8f5d286503199157b7bf7c35780969800000000000d5472616e73616374696f6e203700c2eb0b00000000000000001759e7dc56557733804418f0ea6fd3b2573a9aabdd3045022100bac5b7699748a891b39ff5439e16ea1a694e93954b248be6b8082da01e5386310220129eb06a58b9f80d36ea3cdc903e6cc0240bbe1d371339ffe15c87742af1427d
diff --git a/benchmark/helpers.js b/benchmark/helpers.js
new file mode 100644
index 0000000000..c0dbb56700
--- /dev/null
+++ b/benchmark/helpers.js
@@ -0,0 +1,8 @@
+const { readFileSync } = require("fs");
+const { resolve } = require("path");
+
+exports.createBlocks = count => new Array(count).fill(require("./fixtures/block"));
+
+exports.getFixture = value => readFileSync(resolve(__dirname, `./fixtures/${value}`)).toString().trim();
+
+exports.getJSONFixture = value => require(resolve(__dirname, `./fixtures/${value}`));
diff --git a/benchmark/index.js b/benchmark/index.js
new file mode 100644
index 0000000000..bf217a53e4
--- /dev/null
+++ b/benchmark/index.js
@@ -0,0 +1,12 @@
+const { benchmarker } = require('@faustbrian/benchmarker');
+
+benchmarker('core', [
+ { name: 'Block.serialize (0 transactions)', scenarios: require('./block/serialize') },
+ { name: 'Block.serialize (150 transactions)', scenarios: require('./block/serializeFull') },
+
+ { name: 'Block.deserialize (0 transactions)', scenarios: require('./block/deserialize/0') },
+ { name: 'Block.deserialize (150 transactions)', scenarios: require('./block/deserialize/150') },
+
+ { name: 'Transaction.serialize (Type 0)', scenarios: require('./transaction/serialize/0') },
+ { name: 'Transaction.deserialize (Type 0)', scenarios: require('./transaction/deserialize/0') },
+], { hideSummary: true });
diff --git a/benchmark/transaction/deserialize/0.js b/benchmark/transaction/deserialize/0.js
new file mode 100644
index 0000000000..5a01d18c2b
--- /dev/null
+++ b/benchmark/transaction/deserialize/0.js
@@ -0,0 +1,9 @@
+const {
+ deserialize
+} = require('./methods')
+
+const data = require('../../helpers').getFixture('transaction/serialized/0.txt');
+
+exports['core'] = () => {
+ return deserialize(data);
+};
diff --git a/benchmark/transaction/deserialize/methods.js b/benchmark/transaction/deserialize/methods.js
new file mode 100644
index 0000000000..d01249d1b3
--- /dev/null
+++ b/benchmark/transaction/deserialize/methods.js
@@ -0,0 +1,7 @@
+const {
+ models
+} = require('@arkecosystem/crypto')
+
+exports.deserialize = data => {
+ return models.Transaction.deserialize(data)
+}
diff --git a/benchmark/transaction/serialize/0.js b/benchmark/transaction/serialize/0.js
new file mode 100644
index 0000000000..761e1c1b31
--- /dev/null
+++ b/benchmark/transaction/serialize/0.js
@@ -0,0 +1,9 @@
+const {
+ models
+} = require('@arkecosystem/crypto')
+
+const data = require('../../helpers').getJSONFixture('transaction/deserialized/0');
+
+exports['core'] = () => {
+ return models.Transaction.serialize(data);
+};
diff --git a/docker/development/docker-compose.yml b/docker/development/docker-compose.yml
deleted file mode 100644
index de8fa8f6e6..0000000000
--- a/docker/development/docker-compose.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# For running some services on development without tainting your system
-#
-version: '2'
-services:
-
- postgres:
- image: "postgres:alpine"
- container_name: ark-development-postgres
- ports:
- - '127.0.0.1:5432:5432'
- volumes:
- - 'postgres:/var/lib/postgresql/data'
- environment:
- POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_development
- POSTGRES_USER: ark
-
-volumes:
- postgres:
diff --git a/docker/development/purge.sh b/docker/development/purge.sh
deleted file mode 100755
index 9849a43d90..0000000000
--- a/docker/development/purge.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env sh
-
-docker stop ark-development-postgres
-docker rm -v ark-development-postgres
-docker volume rm development_postgres
-docker network rm development_default
diff --git a/docker/devnet/Dockerfile b/docker/devnet/Dockerfile
deleted file mode 100644
index 46d929718d..0000000000
--- a/docker/devnet/Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM node:10
-
-WORKDIR /ark-core
-
-COPY entrypoint.sh /
-
-RUN apt-get update && \
- apt-get -y install --no-install-recommends \
- build-essential \
- jq \
- iptables \
- python \
- vim && \
- rm -rf /var/lib/apt/lists/*
-
-EXPOSE 4002 4003
diff --git a/docker/devnet/docker-compose.yml b/docker/devnet/docker-compose.yml
deleted file mode 100644
index fdc00da3bf..0000000000
--- a/docker/devnet/docker-compose.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-version: '2'
-services:
-
- postgres:
- image: "postgres:alpine"
- container_name: ark-devnet-postgres
- ports:
- - '127.0.0.1:5432:5432'
- volumes:
- - 'postgres:/var/lib/postgresql/data'
- environment:
- POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_devnet
- POSTGRES_USER: ark
-
- ark-core:
- build: .
- image: ark-core
- container_name: ark-devnet-core
- ports:
- - "4002:4002"
- - "4003:4003"
- volumes:
- - ark-core:/ark-core
- tty: true
- privileged: true
- links:
- - postgres
- depends_on:
- - postgres
- command:
- - /bin/sh
- - -c
- - |
- /entrypoint.sh
-
-volumes:
- postgres:
- ark-core:
- driver_opts:
- type: none
- device: $PWD/../../
- o: bind
diff --git a/docker/devnet/entrypoint.sh b/docker/devnet/entrypoint.sh
deleted file mode 100755
index f67d2d61e4..0000000000
--- a/docker/devnet/entrypoint.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-
-sysctl -w net.ipv4.conf.all.route_localnet=1
-
-POSTGRES=$(ping -c 1 ark-devnet-postgres | awk -F'[()]' '/PING/{print $2}')
-CORE=$(ping -c 1 ark-devnet-core | awk -F'[()]' '/PING/{print $2}')
-
-iptables -I OUTPUT -t nat -o lo -d localhost -p tcp --dport 5432 -j DNAT --to-destination ${POSTGRES}:5432
-iptables -I POSTROUTING -t nat -p tcp --dport 5432 -d ${POSTGRES} -j SNAT --to ${CORE}
-
-cd /ark-core
-rm -rf node_modules package-lock.json > /dev/null 2>&1
-rm -rf packages/core/node_modules packages/core/package-lock.json 2>&1
-npm --quiet install lerna -g && npm --quiet install -g nodemon
-lerna bootstrap
-
-bash
diff --git a/docker/devnet/restore.sh b/docker/devnet/restore.sh
deleted file mode 100755
index ef8a357f39..0000000000
--- a/docker/devnet/restore.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-DOCKER_DB_NAME="$(docker-compose ps -q postgres)"
-DB_HOSTNAME=postgres
-DB_USER=ark
-LOCAL_DUMP_PATH="snapshot.dump"
-
-docker-compose up -d postgres
-docker exec -i "${DOCKER_DB_NAME}" pg_restore -C --clean --no-acl --no-owner -U "${DB_USER}" -d "${DB_HOSTNAME}" < "${LOCAL_DUMP_PATH}"
-docker-compose stop postgres
diff --git a/docker/mainnet/docker-compose.yml b/docker/mainnet/docker-compose.yml
deleted file mode 100644
index 213e11d607..0000000000
--- a/docker/mainnet/docker-compose.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# For running some services on development without tainting your system
-#
-version: '2'
-services:
-
- postgres:
- image: "postgres:alpine"
- container_name: ark-mainnet-postgres
- ports:
- - '127.0.0.1:5432:5432'
- volumes:
- - 'postgres:/var/lib/postgresql/data'
- environment:
- POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_mainnet
- POSTGRES_USER: ark
-
-volumes:
- postgres:
diff --git a/docker/production/Dockerfile b/docker/production/Dockerfile
new file mode 100644
index 0000000000..bdaca70ffc
--- /dev/null
+++ b/docker/production/Dockerfile
@@ -0,0 +1,18 @@
+FROM node:10-alpine
+
+WORKDIR /home/node/core
+
+ADD docker/production/entrypoint.sh /entrypoint.sh
+
+COPY ./ /home/node/core
+
+RUN apk add --no-cache --virtual .build-deps make gcc g++ python git \
+ && apk add --no-cache bash sudo git openntpd openssl \
+ && npm i pm2 -g --loglevel notice \
+ && yarn setup \
+ && yarn cache clean \
+ && apk del .build-deps \
+ && echo 'node ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
+
+USER node
+ENTRYPOINT ["bash", "-c", "/entrypoint.sh \"$@\"", "--"]
diff --git a/docker/LICENSE b/docker/production/LICENSE
similarity index 100%
rename from docker/LICENSE
rename to docker/production/LICENSE
diff --git a/docker/production/devnet/devnet.env b/docker/production/devnet/devnet.env
new file mode 100644
index 0000000000..786aa41cb4
--- /dev/null
+++ b/docker/production/devnet/devnet.env
@@ -0,0 +1,20 @@
+#MODE must be one of: relay or forger
+#relay: start a relay node only
+#forger: start relay and forger
+MODE=relay
+NETWORK=devnet
+#Core variables
+CORE_DB_HOST=postgres-devnet
+CORE_DB_USERNAME=node
+CORE_DB_PASSWORD=password
+CORE_DB_DATABASE=core_devnet
+CORE_P2P_HOST=0.0.0.0
+CORE_P2P_PORT=4002
+CORE_API_HOST=0.0.0.0
+CORE_API_PORT=4003
+CORE_WEBHOOKS_HOST=0.0.0.0
+CORE_WEBHOOKS_PORT=4004
+CORE_GRAPHQL_HOST=0.0.0.0
+CORE_GRAPHQL_PORT=4005
+CORE_JSON_RPC_HOST=0.0.0.0
+CORE_JSON_RPC_PORT=8080
diff --git a/docker/production/devnet/docker-compose-build.yml b/docker/production/devnet/docker-compose-build.yml
new file mode 100644
index 0000000000..f3fba47443
--- /dev/null
+++ b/docker/production/devnet/docker-compose-build.yml
@@ -0,0 +1,54 @@
+version: '2'
+services:
+ postgres:
+ image: "postgres:alpine"
+ container_name: postgres-devnet
+ restart: always
+ ports:
+ - '127.0.0.1:5432:5432'
+ volumes:
+ - 'postgres:/var/lib/postgresql/data'
+ networks:
+ - core
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_devnet
+ POSTGRES_USER: node
+
+ core:
+ build:
+ context: ../../../
+ dockerfile: docker/production/Dockerfile
+ image: core
+ container_name: core-devnet
+ restart: always
+ ports:
+ - "4002:4002"
+ - "4003:4003"
+ - "127.0.0.1:4004:4004"
+ - "127.0.0.1:4005:4005"
+ - "127.0.0.1:8080:8080"
+ cap_add:
+ - SYS_NICE
+ - SYS_RESOURCE
+ - SYS_TIME
+ volumes:
+ - ~/.config/ark-core:/home/node/.config/ark-core
+ - ~/.local/share/ark-core:/home/node/.local/share/ark-core
+ - ~/.local/state/ark-core:/home/node/.local/state/ark-core
+ - /etc/timezone:/etc/timezone:ro
+ - /etc/localtime:/etc/localtime:ro
+ - ./enc:/run/secrets
+ networks:
+ - core
+ env_file: ./devnet.env
+ tty: true
+ links:
+ - postgres
+ depends_on:
+ - postgres
+volumes:
+ postgres:
+ core:
+networks:
+ core:
diff --git a/docker/production/devnet/docker-compose.yml b/docker/production/devnet/docker-compose.yml
new file mode 100644
index 0000000000..04d68c4122
--- /dev/null
+++ b/docker/production/devnet/docker-compose.yml
@@ -0,0 +1,51 @@
+version: '2'
+services:
+ postgres:
+ image: "postgres:alpine"
+ container_name: postgres-devnet
+ restart: always
+ ports:
+ - '127.0.0.1:5432:5432'
+ volumes:
+ - 'postgres:/var/lib/postgresql/data'
+ networks:
+ - core
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_devnet
+ POSTGRES_USER: node
+
+ core:
+ image: arkecosystem/core:devnet
+ container_name: core-devnet
+ restart: always
+ ports:
+ - "4002:4002"
+ - "4003:4003"
+ - "127.0.0.1:4004:4004"
+ - "127.0.0.1:4005:4005"
+ - "127.0.0.1:8080:8080"
+ cap_add:
+ - SYS_NICE
+ - SYS_RESOURCE
+ - SYS_TIME
+ volumes:
+ - ~/.config/ark-core:/home/node/.config/ark-core
+ - ~/.local/share/ark-core:/home/node/.local/share/ark-core
+ - ~/.local/state/ark-core:/home/node/.local/state/ark-core
+ - /etc/timezone:/etc/timezone:ro
+ - /etc/localtime:/etc/localtime:ro
+ - ./enc:/run/secrets
+ networks:
+ - core
+ env_file: ./devnet.env
+ tty: true
+ links:
+ - postgres
+ depends_on:
+ - postgres
+volumes:
+ postgres:
+ core:
+networks:
+ core:
diff --git a/docker/production/devnet/enc.sh b/docker/production/devnet/enc.sh
new file mode 100755
index 0000000000..e510b293d4
--- /dev/null
+++ b/docker/production/devnet/enc.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+##########################################################
+# #
+# This script encrypts your forging secret and password. #
+# #
+##########################################################
+
+type openssl >/dev/null 2>&1 || { echo >&2 "OpenSSL missing. Please install and run the script again."; exit 1; }
+
+yellow=$(tput setaf 3)
+green=$(tput setaf 2)
+lila=$(tput setaf 4)
+bold=$(tput bold)
+reset=$(tput sgr0)
+
+warning ()
+{
+ echo " ${yellow}==>${reset}${bold} $1${reset}"
+}
+
+success ()
+{
+ echo " ${green}==>${reset}${bold} $1${reset}"
+}
+
+read -sp "Please enter your delegate secret: " inputSecret
+echo
+
+while true; do
+ read -sp "Please enter your password: " inputPass
+ echo
+ read -sp "Please enter password again: " inputPassA
+ echo
+ [ "${inputPass}" = "${inputPassA}" ] && break
+ echo "Passwords do not match! Please try again."
+done
+
+SECRET="${inputSecret}"
+BIP38="${inputPass}"
+
+rm -rf enc > /dev/null 2>&1
+mkdir enc; cd enc
+
+warning "Encrypting ..."
+
+openssl genrsa -out secret.key 2048
+openssl rsa -in secret.key -out secret.pub -outform PEM -pubout
+echo "${SECRET}" | openssl rsautl -encrypt -inkey secret.pub -pubin -out secret.dat
+
+openssl genrsa -out bip.key 2048
+openssl rsa -in bip.key -out bip.pub -outform PEM -pubout
+echo "${BIP38}" | openssl rsautl -encrypt -inkey bip.pub -pubin -out bip.dat
+
+success "Done! Created folder $(echo "${lila}enc${reset}") with all certificates and keys inside."
+success "You are now ready to run your docker $(echo "${yellow}forger${reset}")."
+
diff --git a/docker/devnet/purge_all.sh b/docker/production/devnet/purge_all.sh
similarity index 100%
rename from docker/devnet/purge_all.sh
rename to docker/production/devnet/purge_all.sh
diff --git a/docker/production/entrypoint.sh b/docker/production/entrypoint.sh
new file mode 100755
index 0000000000..86d7ff9593
--- /dev/null
+++ b/docker/production/entrypoint.sh
@@ -0,0 +1,66 @@
+#!/usr/bin/env bash
+sudo /usr/sbin/ntpd -s
+
+sudo rm -rf /home/node/.config/ark-core/*
+sudo rm -rf /home/node/.local/state/ark-core/*
+sudo chown node:node -R /home/node
+cp -r /home/node/core/packages/core/src/config/$NETWORK /home/node/.config/ark-core/$NETWORK
+cd /home/node/core/packages/core
+chmod +x ./dist/index.js
+
+CONFIG=/home/node/.config/ark-core/$NETWORK
+SECRET=`openssl rsautl -decrypt -inkey /run/secrets/secret.key -in /run/secrets/secret.dat`
+CORE_FORGER_PASSWORD=`openssl rsautl -decrypt -inkey /run/secrets/bip.key -in /run/secrets/bip.dat`
+
+#startup functions
+
+config_plain ()
+{
+ ./dist/index.js forger-plain --config $CONFIG --secret "$SECRET"
+}
+
+config_bip ()
+{
+ ./dist/index.js forger-bip38 --config $CONFIG --network $NETWORK --secret "$SECRET" --password "$CORE_FORGER_PASSWORD"
+}
+
+start_relay ()
+{
+ pm2 --name 'ark-core' --no-daemon start ./dist/index.js -- relay --config $CONFIG --network $NETWORK
+}
+
+start_forger ()
+{
+ pm2 --name 'ark-core' --no-daemon start ./dist/index.js -- start --config $CONFIG --network $NETWORK
+}
+
+start_bip ()
+{
+ pm2 --name 'ark-core' --no-daemon start ./dist/index.js -- start --config $CONFIG --network $NETWORK
+}
+
+#configure
+if [ -n "$SECRET" ] && [ -n "$CORE_FORGER_PASSWORD" ]; then
+ config_bip
+elif [ "$MODE" = "forger" ] && [ -z "$SECRET" ] && [ -z "$CORE_FORGER_PASSWORD" ]; then
+ echo "set SECRET and/or CORE_FORGER_PASSWORD if you want to run a forger"
+ exit
+elif [ -n "$SECRET" ]; then
+ config_plain
+fi
+
+#relay
+if [[ "$MODE" = "relay" ]]; then
+ start_relay
+fi
+
+#forging
+if [ "$MODE" = "forger" ] && [ -n "$SECRET" ] && [ -n "$CORE_FORGER_PASSWORD" ]; then
+ export CORE_FORGER_BIP38=$(grep bip38 /home/node/.config/ark-core/$NETWORK/delegates.json | awk '{print $2}' | tr -d '",') && export CORE_FORGER_PASSWORD && start_bip && sudo rm -rf /run/secrets/*
+elif [ "$MODE" = "forger" ] && [ -z "$SECRET" ] && [ -z "$CORE_FORGER_PASSWORD" ]; then
+ echo "set SECRET and/or CORE_FORGER_PASSWORD if you want to run a forger"
+ exit
+elif [ "$MODE" = "forger" ] && [ -n "$SECRET" ] && [ -z "$CORE_FORGER_PASSWORD" ]; then
+ start_forger
+fi
+
diff --git a/docker/production/mainnet/docker-compose-build.yml b/docker/production/mainnet/docker-compose-build.yml
new file mode 100644
index 0000000000..a184308630
--- /dev/null
+++ b/docker/production/mainnet/docker-compose-build.yml
@@ -0,0 +1,54 @@
+version: '2'
+services:
+ postgres:
+ image: "postgres:alpine"
+ container_name: postgres-mainnet
+ restart: always
+ ports:
+ - '127.0.0.1:5432:5432'
+ volumes:
+ - 'postgres:/var/lib/postgresql/data'
+ networks:
+ - core
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_mainnet
+ POSTGRES_USER: node
+
+ core:
+ build:
+ context: ../../../
+ dockerfile: docker/production/Dockerfile
+ image: core
+ container_name: core-mainnet
+ restart: always
+ ports:
+ - "4001:4001"
+ - "4003:4003"
+ - "127.0.0.1:4004:4004"
+ - "127.0.0.1:4005:4005"
+ - "127.0.0.1:8080:8080"
+ cap_add:
+ - SYS_NICE
+ - SYS_RESOURCE
+ - SYS_TIME
+ volumes:
+ - ~/.config/ark-core:/home/node/.config/ark-core
+ - ~/.local/share/ark-core:/home/node/.local/share/ark-core
+ - ~/.local/state/ark-core:/home/node/.local/state/ark-core
+ - /etc/timezone:/etc/timezone:ro
+ - /etc/localtime:/etc/localtime:ro
+ - ./enc:/run/secrets
+ networks:
+ - core
+ env_file: ./mainnet.env
+ tty: true
+ links:
+ - postgres
+ depends_on:
+ - postgres
+volumes:
+ postgres:
+ core:
+networks:
+ core:
diff --git a/docker/production/mainnet/docker-compose.yml b/docker/production/mainnet/docker-compose.yml
new file mode 100644
index 0000000000..d308f961e3
--- /dev/null
+++ b/docker/production/mainnet/docker-compose.yml
@@ -0,0 +1,51 @@
+version: '2'
+services:
+ postgres:
+ image: "postgres:alpine"
+ container_name: postgres-mainnet
+ restart: always
+ ports:
+ - '127.0.0.1:5432:5432'
+ volumes:
+ - 'postgres:/var/lib/postgresql/data'
+ networks:
+ - core
+ environment:
+ POSTGRES_PASSWORD: password
+ POSTGRES_DB: core_mainnet
+ POSTGRES_USER: node
+
+ core:
+ image: arkecosystem/core
+ container_name: core-mainnet
+ restart: always
+ ports:
+ - "4001:4001"
+ - "4003:4003"
+ - "127.0.0.1:4004:4004"
+ - "127.0.0.1:4005:4005"
+ - "127.0.0.1:8080:8080"
+ cap_add:
+ - SYS_NICE
+ - SYS_RESOURCE
+ - SYS_TIME
+ volumes:
+ - ~/.config/ark-core:/home/node/.config/ark-core
+ - ~/.local/share/ark-core:/home/node/.local/share/ark-core
+ - ~/.local/state/ark-core:/home/node/.local/state/ark-core
+ - /etc/timezone:/etc/timezone:ro
+ - /etc/localtime:/etc/localtime:ro
+ - ./enc:/run/secrets
+ networks:
+ - core
+ env_file: ./mainnet.env
+ tty: true
+ links:
+ - postgres
+ depends_on:
+ - postgres
+volumes:
+ postgres:
+ core:
+networks:
+ core:
diff --git a/docker/production/mainnet/enc.sh b/docker/production/mainnet/enc.sh
new file mode 100755
index 0000000000..e510b293d4
--- /dev/null
+++ b/docker/production/mainnet/enc.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+##########################################################
+# #
+# This script encrypts your forging secret and password. #
+# #
+##########################################################
+
+type openssl >/dev/null 2>&1 || { echo >&2 "OpenSSL missing. Please install and run the script again."; exit 1; }
+
+yellow=$(tput setaf 3)
+green=$(tput setaf 2)
+lila=$(tput setaf 4)
+bold=$(tput bold)
+reset=$(tput sgr0)
+
+warning ()
+{
+ echo " ${yellow}==>${reset}${bold} $1${reset}"
+}
+
+success ()
+{
+ echo " ${green}==>${reset}${bold} $1${reset}"
+}
+
+read -sp "Please enter your delegate secret: " inputSecret
+echo
+
+while true; do
+ read -sp "Please enter your password: " inputPass
+ echo
+ read -sp "Please enter password again: " inputPassA
+ echo
+ [ "${inputPass}" = "${inputPassA}" ] && break
+ echo "Passwords do not match! Please try again."
+done
+
+SECRET="${inputSecret}"
+BIP38="${inputPass}"
+
+rm -rf enc > /dev/null 2>&1
+mkdir enc; cd enc
+
+warning "Encrypting ..."
+
+openssl genrsa -out secret.key 2048
+openssl rsa -in secret.key -out secret.pub -outform PEM -pubout
+echo "${SECRET}" | openssl rsautl -encrypt -inkey secret.pub -pubin -out secret.dat
+
+openssl genrsa -out bip.key 2048
+openssl rsa -in bip.key -out bip.pub -outform PEM -pubout
+echo "${BIP38}" | openssl rsautl -encrypt -inkey bip.pub -pubin -out bip.dat
+
+success "Done! Created folder $(echo "${lila}enc${reset}") with all certificates and keys inside."
+success "You are now ready to run your docker $(echo "${yellow}forger${reset}")."
+
diff --git a/docker/production/mainnet/mainnet.env b/docker/production/mainnet/mainnet.env
new file mode 100644
index 0000000000..a02e965319
--- /dev/null
+++ b/docker/production/mainnet/mainnet.env
@@ -0,0 +1,20 @@
+#MODE must be one of: relay or forger
+#relay: start a relay node only
+#forger: start relay and forger
+MODE=relay
+NETWORK=mainnet
+#Core variables
+CORE_DB_HOST=postgres-mainnet
+CORE_DB_USERNAME=node
+CORE_DB_PASSWORD=password
+CORE_DB_DATABASE=core_mainnet
+CORE_P2P_HOST=0.0.0.0
+CORE_P2P_PORT=4001
+CORE_API_HOST=0.0.0.0
+CORE_API_PORT=4003
+CORE_WEBHOOKS_HOST=0.0.0.0
+CORE_WEBHOOKS_PORT=4004
+CORE_GRAPHQL_HOST=0.0.0.0
+CORE_GRAPHQL_PORT=4005
+CORE_JSON_RPC_HOST=0.0.0.0
+CORE_JSON_RPC_PORT=8080
diff --git a/docker/production/mainnet/purge_all.sh b/docker/production/mainnet/purge_all.sh
new file mode 100755
index 0000000000..7be429bc03
--- /dev/null
+++ b/docker/production/mainnet/purge_all.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+docker stop $(docker ps -aq)
+docker rm $(docker ps -aq)
+docker rmi $(docker images -q)
+docker volume prune -f
+docker network prune -f
diff --git a/docker/production/purge_all.sh b/docker/production/purge_all.sh
new file mode 100755
index 0000000000..1fe1329a7d
--- /dev/null
+++ b/docker/production/purge_all.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+docker stop $(docker ps -aq)
+docker rm $(docker ps -aq)
+docker rmi $(docker images -q)
+docker volume prune -f
+docker network prune -f
diff --git a/docker/testnet/Dockerfile b/docker/testnet/Dockerfile
deleted file mode 100644
index e47f4a08ea..0000000000
--- a/docker/testnet/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-FROM node:10
-
-WORKDIR /ark-core
-
-COPY entrypoint.sh /
-
-RUN apt-get update && \
- apt-get -y install --no-install-recommends \
- build-essential \
- jq \
- iptables \
- python \
- vim && \
- rm -rf /var/lib/apt/lists/*
-
-EXPOSE 4000 4003
-
diff --git a/docker/testnet/docker-compose.yml b/docker/testnet/docker-compose.yml
deleted file mode 100644
index 9d42e85a7c..0000000000
--- a/docker/testnet/docker-compose.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-version: '2'
-services:
-
- postgres:
- image: "postgres:alpine"
- container_name: ark-testnet-postgres
- ports:
- - '127.0.0.1:5432:5432'
- volumes:
- - 'postgres:/var/lib/postgresql/data'
- environment:
- POSTGRES_PASSWORD: password
- POSTGRES_DB: ark_testnet
- POSTGRES_USER: ark
-
- ark-core:
- build: .
- image: ark-core
- container_name: ark-testnet-core
- ports:
- - "4000:4000"
- - "4003:4003"
- volumes:
- - ark-core:/ark-core
- tty: true
- privileged: true
- links:
- - postgres
- depends_on:
- - postgres
- command:
- - /bin/sh
- - -c
- - |
- /entrypoint.sh
-
-volumes:
- postgres:
- ark-core:
- driver_opts:
- type: none
- device: $PWD/../../
- o: bind
diff --git a/docker/testnet/entrypoint.sh b/docker/testnet/entrypoint.sh
deleted file mode 100755
index 0fcd93ea11..0000000000
--- a/docker/testnet/entrypoint.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-
-sysctl -w net.ipv4.conf.all.route_localnet=1
-
-POSTGRES=$(ping -c 1 ark-testnet-postgres | awk -F'[()]' '/PING/{print $2}')
-CORE=$(ping -c 1 ark-testnet-core | awk -F'[()]' '/PING/{print $2}')
-
-iptables -I OUTPUT -t nat -o lo -d localhost -p tcp --dport 5432 -j DNAT --to-destination ${POSTGRES}:5432
-iptables -I POSTROUTING -t nat -p tcp --dport 5432 -d ${POSTGRES} -j SNAT --to ${CORE}
-
-cd /ark-core
-rm -rf node_modules package-lock.json > /dev/null 2>&1
-rm -rf packages/core/node_modules packages/core/package-lock.json 2>&1
-npm --quiet install lerna -g && npm --quiet install -g nodemon
-lerna bootstrap
-
-bash
diff --git a/docker/testnet/restore.sh b/docker/testnet/restore.sh
deleted file mode 100755
index ef8a357f39..0000000000
--- a/docker/testnet/restore.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-DOCKER_DB_NAME="$(docker-compose ps -q postgres)"
-DB_HOSTNAME=postgres
-DB_USER=ark
-LOCAL_DUMP_PATH="snapshot.dump"
-
-docker-compose up -d postgres
-docker exec -i "${DOCKER_DB_NAME}" pg_restore -C --clean --no-acl --no-owner -U "${DB_USER}" -d "${DB_HOSTNAME}" < "${LOCAL_DUMP_PATH}"
-docker-compose stop postgres
diff --git a/greenkeeper.json b/greenkeeper.json
index 21f3d27557..fb58a5347c 100644
--- a/greenkeeper.json
+++ b/greenkeeper.json
@@ -1,39 +1,37 @@
{
- "groups": {
- "default": {
- "packages": [
- "package.json",
- "packages/core-api/package.json",
- "packages/core-blockchain/package.json",
- "packages/core-config/package.json",
- "packages/core-container/package.json",
- "packages/core-database-postgres/package.json",
- "packages/core-database/package.json",
- "packages/core-debugger-cli/package.json",
- "packages/core-deployer/package.json",
- "packages/core-elasticsearch/package.json",
- "packages/core-error-tracker-bugsnag/package.json",
- "packages/core-error-tracker-sentry/package.json",
- "packages/core-event-emitter/package.json",
- "packages/core-forger/package.json",
- "packages/core-graphql/package.json",
- "packages/core-http-utils/package.json",
- "packages/core-json-rpc/package.json",
- "packages/core-logger-winston/package.json",
- "packages/core-logger/package.json",
- "packages/core-p2p/package.json",
- "packages/core-snapshots-cli/package.json",
- "packages/core-snapshots/package.json",
- "packages/core-test-utils/package.json",
- "packages/core-tester-cli/package.json",
- "packages/core-transaction-pool-mem/package.json",
- "packages/core-transaction-pool/package.json",
- "packages/core-utils/package.json",
- "packages/core-vote-report/package.json",
- "packages/core-webhooks/package.json",
- "packages/core/package.json",
- "packages/crypto/package.json"
- ]
+ "groups": {
+ "default": {
+ "packages": [
+ "package.json",
+ "packages/core-api/package.json",
+ "packages/core-blockchain/package.json",
+ "packages/core-container/package.json",
+ "packages/core-database-postgres/package.json",
+ "packages/core-database/package.json",
+ "packages/core-debugger-cli/package.json",
+ "packages/core-deployer/package.json",
+ "packages/core-elasticsearch/package.json",
+ "packages/core-error-tracker-bugsnag/package.json",
+ "packages/core-error-tracker-sentry/package.json",
+ "packages/core-event-emitter/package.json",
+ "packages/core-forger/package.json",
+ "packages/core-graphql/package.json",
+ "packages/core-http-utils/package.json",
+ "packages/core-json-rpc/package.json",
+ "packages/core-logger-winston/package.json",
+ "packages/core-logger/package.json",
+ "packages/core-p2p/package.json",
+ "packages/core-snapshots-cli/package.json",
+ "packages/core-snapshots/package.json",
+ "packages/core-test-utils/package.json",
+ "packages/core-tester-cli/package.json",
+ "packages/core-transaction-pool/package.json",
+ "packages/core-utils/package.json",
+ "packages/core-vote-report/package.json",
+ "packages/core-webhooks/package.json",
+ "packages/core/package.json",
+ "packages/crypto/package.json"
+ ]
+ }
}
- }
}
diff --git a/install.sh b/install.sh
new file mode 100644
index 0000000000..c8134c72bd
--- /dev/null
+++ b/install.sh
@@ -0,0 +1,364 @@
+#!/usr/bin/env bash
+
+# -----------------------------------
+# TYPOGRAPHY
+# -----------------------------------
+
+red=$(tput setaf 1)
+green=$(tput setaf 2)
+yellow=$(tput setaf 3)
+lila=$(tput setaf 4)
+pink=$(tput setaf 5)
+blue=$(tput setaf 6)
+white=$(tput setaf 7)
+black=$(tput setaf 8)
+
+bg_red=$(tput setab 1)
+bg_green=$(tput setab 2)
+bg_yellow=$(tput setab 3)
+bg_lila=$(tput setab 4)
+bg_pink=$(tput setab 5)
+bg_blue=$(tput setab 6)
+bg_white=$(tput setab 7)
+bg_black=$(tput setab 8)
+
+bold=$(tput bold)
+reset=$(tput sgr0)
+
+# Indicators
+heading ()
+{
+ echo " ${lila}==>${reset}${bold} $1${reset}"
+}
+
+success ()
+{
+ echo " ${green}==>${reset}${bold} $1${reset}"
+}
+
+info ()
+{
+ echo " ${blue}==>${reset}${bold} $1${reset}"
+}
+
+warning ()
+{
+ echo " ${yellow}==>${reset}${bold} $1${reset}"
+}
+
+error ()
+{
+ echo " ${red}==>${reset}${bold} $1${reset}"
+}
+
+# Colored Text
+text_red ()
+{
+ echo "${red}$1${reset}"
+}
+
+text_green ()
+{
+ echo "${green}$1${reset}"
+}
+
+text_yellow ()
+{
+ echo "${yellow}$1${reset}"
+}
+
+text_lila ()
+{
+ echo "${lila}$1${reset}"
+}
+
+text_pink ()
+{
+ echo "${pink}$1${reset}"
+}
+
+text_blue ()
+{
+ echo "${blue}$1${reset}"
+}
+
+text_white ()
+{
+ echo "${white}$1${reset}"
+}
+
+text_black ()
+{
+ echo "${black}$1${reset}"
+}
+
+# Styles
+text_bold ()
+{
+ echo "${bold}"
+}
+
+text_reset ()
+{
+ echo "${reset}"
+}
+
+# Helpers
+divider ()
+{
+ text_lila " ==============================================================="
+}
+
+paragraph ()
+{
+ text_white "$1" | fold -w67 | paste -sd'\n' -
+}
+
+# Detect pkg type
+DEB=$(command -v apt-get)
+RPM=$(command -v yum)
+
+# Detect SystemV / SystemD
+SYS=$([[ -L "/sbin/init" ]] && echo 'SystemD' || echo 'SystemV')
+
+if [[ ! -z $DEB ]]; then
+ success "Running install for Debian derivate"
+elif [[ ! -z $RPM ]]; then
+ success "Running install for RedHat derivate"
+else
+ heading "Not supported system"
+ exit 1;
+fi
+
+if [[ $(locale -a | grep ^en_US.UTF-8) ]] || [[ $(locale -a | grep ^en_US.utf8) ]]; then
+ if ! grep -qE "(en_US.UTF-8)" "$HOME/.bashrc"; then
+ # Setting the bashrc locale
+ echo "export LC_ALL=en_US.UTF-8" >> "$HOME/.bashrc"
+ echo "export LANG=en_US.UTF-8" >> "$HOME/.bashrc"
+ echo "export LANGUAGE=en_US.UTF-8" >> "$HOME/.bashrc"
+
+ # Setting the current shell locale
+ export LC_ALL="en_US.UTF-8"
+ export LANG="en_US.UTF-8"
+ export LANGUAGE="en_US.UTF-8"
+ fi
+else
+ # Install en_US.UTF-8 Locale
+ if [[ ! -z $DEB ]]; then
+ sudo locale-gen en_US.UTF-8
+ sudo update-locale LANG=en_US.UTF-8
+ elif [[ ! -z $RPM ]]; then
+ sudo localedef -c -i en_US -f UTF-8 en_US.UTF-8
+ fi
+
+ # Setting the current shell locale
+ export LC_ALL="en_US.UTF-8"
+ export LANG="en_US.UTF-8"
+ export LANGUAGE="en_US.UTF-8"
+
+ # Setting the bashrc locale
+ echo "export LC_ALL=en_US.UTF-8" >> "$HOME/.bashrc"
+ echo "export LANG=en_US.UTF-8" >> "$HOME/.bashrc"
+ echo "export LANGUAGE=en_US.UTF-8" >> "$HOME/.bashrc"
+fi
+
+heading "Installing system dependencies..."
+
+if [[ ! -z $DEB ]]; then
+ sudo apt-get update
+ sudo apt-get install -y git curl apt-transport-https update-notifier
+elif [[ ! -z $RPM ]]; then
+ sudo yum update -y
+ sudo yum install git curl epel-release -y
+fi
+
+success "Installed system dependencies!"
+
+heading "Installing node.js & npm..."
+
+sudo rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/{npm*,node*,man1/node*}
+sudo rm -rf ~/{.npm,.forever,.node*,.cache,.nvm}
+
+if [[ ! -z $DEB ]]; then
+ sudo wget --quiet -O - https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
+ (echo "deb https://deb.nodesource.com/node_10.x $(lsb_release -s -c) main" | sudo tee /etc/apt/sources.list.d/nodesource.list)
+ sudo apt-get update
+ sudo apt-get install nodejs -y
+elif [[ ! -z $RPM ]]; then
+ sudo yum install gcc-c++ make -y
+ curl -sL https://rpm.nodesource.com/setup_10.x | sudo -E bash - > /dev/null 2>&1
+fi
+
+success "Installed node.js & npm!"
+
+heading "Installing Yarn..."
+
+if [[ ! -z $DEB ]]; then
+ curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
+ (echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list)
+
+ sudo apt-get update
+ sudo apt-get install -y yarn
+elif [[ ! -z $RPM ]]; then
+ curl -sL https://dl.yarnpkg.com/rpm/yarn.repo | sudo tee /etc/yum.repos.d/yarn.repo
+ sudo yum install yarn -y
+fi
+
+success "Installed Yarn!"
+
+heading "Installing PM2..."
+
+sudo yarn global add pm2
+pm2 install pm2-logrotate
+pm2 set pm2-logrotate:max_size 500M
+pm2 set pm2-logrotate:compress true
+pm2 set pm2-logrotate:retain 7
+
+success "Installed PM2!"
+
+heading "Installing program dependencies..."
+
+if [[ ! -z $DEB ]]; then
+ sudo apt-get install build-essential libcairo2-dev pkg-config libtool autoconf automake python libpq-dev jq -y
+elif [[ ! -z $RPM ]]; then
+ sudo yum groupinstall "Development Tools" -y -q
+ sudo yum install postgresql-devel jq -y -q
+fi
+
+success "Installed program dependencies!"
+
+heading "Installing PostgreSQL..."
+
+if [[ ! -z $DEB ]]; then
+ sudo apt-get update
+ sudo apt-get install postgresql postgresql-contrib -y
+elif [[ ! -z $RPM ]]; then
+ sudo yum install postgresql-server postgresql-contrib -y
+
+ if [[ "$SYS" == "SystemV" ]]; then
+ sudo service postgresql initdb
+ sudo service postgresql start
+ else
+ sudo postgresql-setup initdb
+ sudo systemctl start postgresql
+ fi
+fi
+
+success "Installed PostgreSQL!"
+
+heading "Installing NTP..."
+
+sudo timedatectl set-ntp off > /dev/null 2>&1 # disable the default systemd timesyncd service
+
+if [[ ! -z $DEB ]]; then
+ sudo apt-get install ntp -yyq
+elif [[ ! -z $RPM ]]; then
+ sudo yum install ntp -y -q
+fi
+
+sudo ntpd -gq
+
+success "Installed NTP!"
+
+heading "Installing system updates..."
+
+if [[ ! -z $DEB ]]; then
+ sudo apt-get update
+ sudo apt-get upgrade -yqq
+ sudo apt-get dist-upgrade -yq
+ sudo apt-get autoremove -yyq
+ sudo apt-get autoclean -yq
+elif [[ ! -z $RPM ]]; then
+ sudo yum update
+ sudo yum clean
+fi
+
+success "Installed system updates!"
+
+heading "Installing Ark Core..."
+
+cd "$HOME"
+
+if [ -d "ark-core" ]; then
+ heading "Removing existing folder..."
+ rm -rf ark-core
+fi
+
+git clone https://github.com/ArkEcosystem/core.git ~/ark-core
+cd ark-core
+yarn setup
+
+success "Installed Ark Core!"
+
+# setup configuration
+read -p "Would you like to configure the core? [y/N]: " choice
+
+if [[ "$choice" =~ ^(yes|y|Y) ]]; then
+ info "Which network would you like to configure?"
+
+ validNetworks=("mainnet" "devnet" "testnet")
+
+ select opt in "${validNetworks[@]}"; do
+ case "$opt" in
+ "mainnet")
+ mkdir -p "${HOME}/.config/ark-core/mainnet"
+ cp -rf "${HOME}/ark-core/packages/core/src/config/mainnet/." "${HOME}/.config/ark-core/mainnet"
+ break
+ ;;
+ "devnet")
+ mkdir -p "${HOME}/.config/ark-core/devnet"
+ cp -rf "${HOME}/ark-core/packages/core/src/config/devnet/." "${HOME}/.config/ark-core/devnet"
+ break
+ ;;
+ "testnet")
+ mkdir -p "${HOME}/.config/ark-core/testnet"
+ cp -rf "${HOME}/ark-core/packages/core/src/config/testnet/." "${HOME}/.config/ark-core/testnet"
+ break
+ ;;
+ *)
+ echo "Invalid option $REPLY"
+ ;;
+ esac
+ done
+fi
+
+# setup postgres username, password and database
+read -p "Would you like to configure the database? [y/N]: " choice
+
+if [[ "$choice" =~ ^(yes|y|Y) ]]; then
+ read -p "Enter the database username: " databaseUsername
+ read -p "Enter the database password: " databasePassword
+ read -p "Enter the database name: " databaseName
+
+ userExists=$(sudo -i -u postgres psql -c "SELECT * FROM pg_user WHERE usename = '${databaseUsername}'" | grep -c "1 row")
+ databaseExists=$(sudo -i -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname = '${databaseName}'")
+
+ if [[ $userExists == 1 ]]; then
+ read -p "The database user ${databaseUsername} already exists, do you want to overwrite it? [y/N]: " choice
+
+ if [[ "$choice" =~ ^(yes|y|Y) ]]; then
+ if [[ $databaseExists == 1 ]]; then
+ sudo -i -u postgres psql -c "ALTER DATABASE ${databaseName} OWNER TO postgres;"
+ fi
+ sudo -i -u postgres psql -c "DROP USER ${databaseUsername}"
+ sudo -i -u postgres psql -c "CREATE USER ${databaseUsername} WITH PASSWORD '${databasePassword}' CREATEDB;"
+ elif [[ "$choice" =~ ^(no|n|N) ]]; then
+ :
+ fi
+ else
+ sudo -i -u postgres psql -c "CREATE USER ${databaseUsername} WITH PASSWORD '${databasePassword}' CREATEDB;"
+ fi
+
+ if [[ $databaseExists == 1 ]]; then
+ read -p "The database ${databaseName} already exists, do you want to overwrite it? [y/N]: " choice
+
+ if [[ "$choice" =~ ^(yes|y|Y) ]]; then
+ sudo -i -u postgres psql -c "DROP DATABASE ${databaseName};"
+ sudo -i -u postgres psql -c "CREATE DATABASE ${databaseName} WITH OWNER ${databaseUsername};"
+ elif [[ "$choice" =~ ^(no|n|N) ]]; then
+ sudo -i -u postgres psql -c "ALTER DATABASE ${databaseName} OWNER TO ${databaseUsername};"
+ fi
+ else
+ sudo -i -u postgres psql -c "CREATE DATABASE ${databaseName} WITH OWNER ${databaseUsername};"
+ fi
+fi
diff --git a/jest-preset.json b/jest-preset.json
new file mode 100644
index 0000000000..7396af0c7c
--- /dev/null
+++ b/jest-preset.json
@@ -0,0 +1,15 @@
+{
+ "testEnvironment": "node",
+ "bail": false,
+ "verbose": true,
+ "transform": {
+ "^.+\\.tsx?$": "ts-jest"
+ },
+ "testMatch": ["**/*.test.ts"],
+ "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"],
+ "collectCoverage": false,
+ "coverageDirectory": "/.coverage",
+ "collectCoverageFrom": ["src/**/*.ts", "!**/node_modules/**"],
+ "watchman": false,
+ "setupTestFrameworkScriptFile": "jest-extended"
+}
diff --git a/jest.config.js b/jest.config.js
deleted file mode 100644
index febc94cc95..0000000000
--- a/jest.config.js
+++ /dev/null
@@ -1,11 +0,0 @@
-module.exports = {
- bail: false,
- verbose: true,
- testEnvironment: 'node',
- testMatch: ['**/packages/**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['packages/**/lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/lerna.json b/lerna.json
index 6fa44c3962..f06def6804 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,7 +1,7 @@
{
- "lerna": "2.10.0",
- "packages": ["packages/*", "plugins/*"],
- "npmClient": "yarn",
- "useWorkspaces": true,
- "version": "independent"
+ "lerna": "3.5.0",
+ "npmClient": "yarn",
+ "packages": ["packages/*", "plugins/*"],
+ "useWorkspaces": true,
+ "version": "2.1.0"
}
diff --git a/package.json b/package.json
index e54155c725..52c50538c9 100644
--- a/package.json
+++ b/package.json
@@ -1,60 +1,91 @@
{
- "private": true,
- "scripts": {
- "bootstrap": "lerna bootstrap",
- "clean": "lerna clean",
- "commit": "git-cz",
- "lint": "lerna run lint",
- "prepare": "lerna run prepare",
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:force-exit": "cross-env ARK_ENV=test jest --runInBand --forceExit",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "format": "prettier --write \"./*.{js,json,md}\" \"./packages/**/*.{js,json,md}\"",
- "snyk": "./node_modules/.bin/snyk protect"
- },
- "devDependencies": {
- "@arkecosystem/eslint-config-base": "^0.1.0",
- "@babel/core": "^7.1.6",
- "@babel/preset-env": "^7.1.6",
- "axios": "^0.18.0",
- "babel-loader": "^8.0.4",
- "body-parser": "^1.18.3",
- "codecov": "^3.1.0",
- "cross-env": "^5.2.0",
- "docdash": "^1.0.0",
- "eslint": "^5.9.0",
- "eslint-config-airbnb-base": "^13.1.0",
- "eslint-config-prettier": "^3.3.0",
- "eslint-plugin-import": "^2.14.0",
- "eslint-plugin-jest": "^22.1.0",
- "eslint-plugin-node": "^8.0.0",
- "eslint-plugin-promise": "^4.0.1",
- "express": "^4.16.4",
- "husky": "^1.2.0",
- "jest": "^23.6.0",
- "jest-extended": "^0.11.0",
- "js-yaml": "^3.12.0",
- "lerna": "^3.5.0",
- "lint-staged": "^8.1.0",
- "npm-check-updates": "^2.15.0",
- "prettier": "^1.15.2",
- "regenerator-runtime": "^0.13.1",
- "request-promise": "^4.2.2",
- "rimraf": "^2.6.2",
- "snyk": "^1.116.0",
- "uuid": "^3.3.2",
- "webpack": "^4.26.1",
- "webpack-cli": "^3.1.2",
- "webpack-merge": "^4.1.4",
- "webpack-node-externals": "^1.7.2"
- },
- "workspaces": [
- "packages/*",
- "plugins/*"
- ],
- "husky": {
- "hooks": {
- "pre-commit": "lint-staged && ./scripts/pre-commit.sh"
+ "private": true,
+ "name": "core",
+ "description": "The packages that make up the Ark Core",
+ "scripts": {
+ "lerna": "./node_modules/lerna/cli.js",
+ "setup": "yarn && yarn clean && yarn bootstrap && yarn build",
+ "bootstrap": "yarn lerna bootstrap",
+ "clean": "yarn lerna clean --yes",
+ "build": "yarn lerna run build",
+ "lint": "yarn lerna run lint",
+ "format": "yarn lint && yarn prettier",
+ "prettier": "prettier --write \"./*.{ts,js,json,md}\" \"./packages/**/*.{ts,js,json,md}\"",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "snyk": "./node_modules/.bin/snyk protect",
+ "upgrade": "cross-env-shell ./scripts/upgrade.sh",
+ "version": "cross-env-shell ./scripts/version.sh",
+ "release": "cross-env-shell ./scripts/release.sh",
+ "updates": "yarn lerna run updates",
+ "docker": "node ./scripts/docker/generate-docker.js",
+ "bench": "node benchmark/index.js"
+ },
+ "devDependencies": {
+ "@babel/core": "^7.2.2",
+ "@babel/preset-env": "^7.2.0",
+ "@faustbrian/benchmarker": "^0.1.2",
+ "@sindresorhus/tsconfig": "^0.1.1",
+ "@types/babel__core": "^7.0.4",
+ "@types/body-parser": "^1.17.0",
+ "@types/express": "^4.16.0",
+ "@types/jest": "^23.3.10",
+ "@types/js-yaml": "^3.11.4",
+ "@types/node": "^10.12.17",
+ "@types/prettier": "^1.15.2",
+ "@types/pretty-ms": "^4.0.0",
+ "@types/request-promise": "^4.1.42",
+ "@types/rimraf": "^2.0.2",
+ "@types/uuid": "^3.4.4",
+ "@types/webpack": "^4.4.23",
+ "@types/webpack-merge": "^4.1.3",
+ "@types/webpack-node-externals": "^1.6.3",
+ "axios": "^0.18.0",
+ "babel-loader": "^8.0.4",
+ "body-parser": "^1.18.3",
+ "codecov": "^3.1.0",
+ "cross-env": "^5.2.0",
+ "del-cli": "^1.1.0",
+ "docdash": "^1.0.1",
+ "express": "^4.16.4",
+ "husky": "^1.3.0",
+ "jest": "^23.6.0",
+ "jest-extended": "^0.11.0",
+ "js-yaml": "^3.12.0",
+ "lerna": "^3.6.0",
+ "lint-staged": "^8.1.0",
+ "npm-check-updates": "^2.15.0",
+ "prettier": "^1.15.3",
+ "prompts": "^2.0.1",
+ "regenerator-runtime": "^0.13.1",
+ "request-promise": "^4.2.2",
+ "rimraf": "^2.6.2",
+ "snyk": "^1.118.0",
+ "ts-jest": "^23.10.5",
+ "tslint": "^5.12.0",
+ "tslint-config-prettier": "^1.17.0",
+ "typedoc": "^0.13.0",
+ "typescript": "^3.2.2",
+ "uuid": "^3.3.2",
+ "webpack": "^4.27.1",
+ "webpack-cli": "^3.1.2",
+ "webpack-merge": "^4.1.5",
+ "webpack-node-externals": "^1.7.2"
+ },
+ "workspaces": [
+ "packages/*",
+ "plugins/*"
+ ],
+ "husky": {
+ "hooks": {
+ "pre-commit": "lint-staged && cross-env-shell ./scripts/pre-commit.sh"
+ }
+ },
+ "jest": {
+ "preset": "./jest-preset.json",
+ "collectCoverageFrom": [
+ "packages/**/src/**/*.ts",
+ "!**/node_modules/**"
+ ]
}
- }
}
diff --git a/packages/core-api/CHANGELOG.md b/packages/core-api/CHANGELOG.md
deleted file mode 100644
index 1e4a71ec9d..0000000000
--- a/packages/core-api/CHANGELOG.md
+++ /dev/null
@@ -1,105 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.14 - 2018-12-07
-
-### Fixed
-
-- Ensure safe integer range for block height lookups
-
-## 0.2.13 - 2018-12-06
-
-### Fixed
-
-- Perform second-signature checks in the `canApply` logic of multi-signatures
-
-## 0.2.12 - 2018-12-05
-
-### Changed
-
-- Increase cache generation timeout and make it configurable
-
-## 0.2.11 - 2018-12-05
-
-### Fixed
-
-- Take milestones into account for supply calculations
-
-## 0.2.1 - 2018-12-04
-
-### Added
-
-- Allow block display via height in v2 API
-
-### Fixed
-
-- Return the correct total count for `/api/v2/peers`
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- Return forged rewards and fees via v2 API
-- Return error feedback for transaction posting via v2 API
-- Cache block heights to reduce database load
-- Implement database repositories
-- Limit the number of transactions per request if posting
-- `ownerId` property for transaction searches
-- Blockchains endpoint to provide information like supply
-- Allow registration of additional plugins
-- Run HTTP & HTTPS server at the same time
-- Validate transaction payloads
-- Implement server side caching via server methods
-
-### Fixed
-
-- Ensure order parameters are treated as lower-case and properly formatted
-- Handle trailing slashes to avoid v1 issues
-
-### Changed
-
-- Use the IANA format for the API vendor in the `Accept` header
-- Use the official `hapi-api-version` dependency
-- Return ports as integers
-- Improved some error messages
-- Return broadcast IDs for improved feedback
-- Sort peers by latency
-- Stricter validation of parameters
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-- Return a `type` and `message` property for transaction errors
-- Only allow JSON requests to the API
-
-### Removed
-
-- All `redis` integrations and dependencies
-
-### Fixed
-
-- Return the delegate list in the v1 format with correct limits
-- Add the missing `vendorField` property to transactions
-- Broken search in the v2 API for blocks and transactions
-- Various search, sort and pagination issues
-- Failing search because of unknown parameters
-- Properly handle CORS headers
-- Race condition that would result in duplicate transactions in the transaction pool
-- Fixed the value returned by `unconfirmedBalance`
-- Various inconsistencies of string/integer values in the v1 API
-- Various inconsistencies of property names in the v1 API
-- Various validation schemas
-- Added missing `orderBy` property for block transaction sorting
-- Crashes caused by bad sorting handling
-- Properly return the total forged and total amount of transactions that was forged
-- Allow an offset of 0 as default
-- Sorting of transactions & wallets
-
-## 0.1.1 - 2018-06-14
-
-### Added
-
-- initial release
diff --git a/packages/core-api/LICENSE b/packages/core-api/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-api/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-api/README.md b/packages/core-api/README.md
index 4f63fccdf8..39b72d5bd4 100644
--- a/packages/core-api/README.md
+++ b/packages/core-api/README.md
@@ -14,9 +14,10 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Kristjan Košič](https://github.com/kristjank)
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [Joshua Noack](https://github.com/supaiku0)
+- [Kristjan Košič](https://github.com/kristjank)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-api/__tests__/__support__/setup.js b/packages/core-api/__tests__/__support__/setup.js
deleted file mode 100644
index fd3bad69ac..0000000000
--- a/packages/core-api/__tests__/__support__/setup.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const appHelper = require('@arkecosystem/core-test-utils/lib/helpers/container')
-
-const activeDelegates = require('@arkecosystem/core-test-utils/fixtures/testnet/delegates')
-const generateRound = require('./utils/generate-round')
-
-const round = generateRound(
- activeDelegates.map(delegate => delegate.publicKey),
- 1,
-)
-
-exports.setUp = async () => {
- jest.setTimeout(60000)
-
- await appHelper.setUp({})
-
- const connection = app.resolvePlugin('database')
- await connection.db.rounds.truncate()
- await connection.buildWallets(1)
- await connection.saveWallets(true)
- await connection.saveRound(round)
-}
-
-exports.tearDown = async () => {
- await app.tearDown()
-}
diff --git a/packages/core-api/__tests__/__support__/setup.ts b/packages/core-api/__tests__/__support__/setup.ts
new file mode 100644
index 0000000000..d990a0a17e
--- /dev/null
+++ b/packages/core-api/__tests__/__support__/setup.ts
@@ -0,0 +1,64 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import delay from "delay";
+import { registerWithContainer, setUpContainer } from "../../../core-test-utils/src/helpers/container";
+import { plugin } from "../../src/plugin";
+
+import { delegates } from "../../../core-test-utils/src/fixtures";
+import { generateRound } from "./utils/generate-round";
+
+import { queries } from "../../../core-database-postgres/src/queries";
+
+const round = generateRound(delegates.map(delegate => delegate.publicKey), 1);
+
+const options = {
+ enabled: true,
+ host: "0.0.0.0",
+ port: 4003,
+ whitelist: ["*"],
+};
+
+async function setUp() {
+ jest.setTimeout(60000);
+
+ await setUpContainer({
+ exclude: [
+ "@arkecosystem/core-webhooks",
+ "@arkecosystem/core-graphql",
+ "@arkecosystem/core-forger",
+ "@arkecosystem/core-json-rpc",
+ "@arkecosystem/core-api",
+ ],
+ });
+
+ const databaseService = app.resolvePlugin("database");
+ await databaseService.connection.roundsRepository.truncate();
+ await databaseService.buildWallets(1);
+ await databaseService.saveWallets(true);
+ await databaseService.saveRound(round);
+
+ await registerWithContainer(plugin, options);
+ await delay(1000); // give some more time for api server to be up
+}
+
+async function tearDown() {
+ await app.tearDown();
+
+ await plugin.deregister(app, options);
+}
+
+async function calculateRanks() {
+ const databaseService = app.resolvePlugin("database");
+
+ const rows = await (databaseService.connection as any).query.manyOrNone(queries.spv.delegatesRanks);
+
+ rows.forEach((delegate, i) => {
+ const wallet = databaseService.walletManager.findByPublicKey(delegate.publicKey);
+ wallet.missedBlocks = +delegate.missedBlocks;
+ (wallet as any).rate = i + 1;
+
+ databaseService.walletManager.reindex(wallet);
+ });
+}
+
+export { calculateRanks, setUp, tearDown };
diff --git a/packages/core-api/__tests__/__support__/utils/generate-round.js b/packages/core-api/__tests__/__support__/utils/generate-round.js
deleted file mode 100644
index 69eb78bb8b..0000000000
--- a/packages/core-api/__tests__/__support__/utils/generate-round.js
+++ /dev/null
@@ -1,7 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-
-module.exports = (delegates, round) => delegates.map(delegate => ({
- round,
- publicKey: delegate,
- voteBalance: bignumify('245098000000000'),
-}))
diff --git a/packages/core-api/__tests__/__support__/utils/generate-round.ts b/packages/core-api/__tests__/__support__/utils/generate-round.ts
new file mode 100644
index 0000000000..588e26d944
--- /dev/null
+++ b/packages/core-api/__tests__/__support__/utils/generate-round.ts
@@ -0,0 +1,9 @@
+import { bignumify } from "@arkecosystem/core-utils";
+
+export function generateRound(delegates, round) {
+ return delegates.map(delegate => ({
+ round,
+ publicKey: delegate,
+ voteBalance: bignumify("245098000000000"),
+ }));
+}
diff --git a/packages/core-api/__tests__/repositories/transactions.test.js b/packages/core-api/__tests__/repositories/transactions.test.js
deleted file mode 100644
index f4ee83fc5d..0000000000
--- a/packages/core-api/__tests__/repositories/transactions.test.js
+++ /dev/null
@@ -1,160 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const { crypto } = require('@arkecosystem/crypto')
-const app = require('../__support__/setup')
-
-let genesisBlock
-let genesisTransaction
-let repository
-
-beforeAll(async () => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json')
- genesisTransaction = genesisBlock.transactions[0]
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-beforeEach(async () => {
- repository = require('../../lib/repositories/transactions')
-})
-
-describe('Transaction Repository', () => {
- describe('search', () => {
- const expectSearch = async (params, expected) => {
- // await connection.saveBlock(genesisBlock)
-
- const transactions = await repository.search(params)
- expect(transactions).toBeObject()
-
- expect(transactions.count).toBeNumber()
-
- expect(transactions.rows).toBeArray()
- expect(transactions.rows).not.toBeEmpty()
- transactions.rows.forEach(transaction => {
- expect(transaction).toContainKeys([
- 'id',
- 'version',
- 'sequence',
- 'timestamp',
- 'type',
- 'amount',
- 'fee',
- 'serialized',
- 'blockId',
- 'senderPublicKey',
- 'vendorFieldHex',
- 'block',
- ])
- })
-
- expect(transactions.count).toBe(expected)
- }
-
- it('should be a function', () => {
- expect(repository.search).toBeFunction()
- })
-
- it('should search transactions by the specified `id`', async () => {
- await expectSearch({ id: genesisTransaction.id }, 1)
- })
-
- it('should search transactions by the specified `blockId`', async () => {
- await expectSearch({ blockId: genesisTransaction.blockId }, 153)
- })
-
- it('should search transactions by the specified `type`', async () => {
- await expectSearch({ type: genesisTransaction.type }, 51)
- })
-
- it('should search transactions by the specified `version`', async () => {
- await expectSearch({ version: genesisTransaction.version }, 153)
- })
-
- it('should search transactions by the specified `senderPublicKey`', async () => {
- await expectSearch(
- { senderPublicKey: genesisTransaction.senderPublicKey },
- 51,
- )
- })
-
- it('should search transactions by the specified `senderId`', async () => {
- const senderId = crypto.getAddress(genesisTransaction.senderPublicKey, 23)
- await expectSearch({ senderId }, 51)
- })
-
- it('should search transactions by the specified `recipientId`', async () => {
- await expectSearch({ recipientId: genesisTransaction.recipientId }, 2)
- })
-
- it('should search transactions by the specified `timestamp`', async () => {
- await expectSearch(
- {
- timestamp: {
- from: genesisTransaction.timestamp,
- to: genesisTransaction.timestamp,
- },
- },
- 153,
- )
- })
-
- it('should search transactions by the specified `amount`', async () => {
- await expectSearch(
- {
- amount: {
- from: genesisTransaction.amount,
- to: genesisTransaction.amount,
- },
- },
- 50,
- )
- })
-
- it('should search transactions by the specified `fee`', async () => {
- await expectSearch(
- {
- fee: {
- from: genesisTransaction.fee,
- to: genesisTransaction.fee,
- },
- },
- 153,
- )
- })
-
- it('should search transactions by the specified `vendorFieldHex`', async () => {
- await expectSearch(
- { vendorFieldHex: genesisTransaction.vendorFieldHex },
- 153,
- )
- })
-
- describe('when there are more than 1 condition', () => {
- it('should search transactions that includes all of them (AND)', async () => {
- await expectSearch(
- { recipientId: genesisTransaction.recipientId, type: 3 },
- 1,
- )
- })
- })
-
- describe('when no results', () => {
- it('should not return them', async () => {
- // await connection.saveBlock(genesisBlock)
-
- const transactions = await repository.search({ recipientId: 'dummy' })
- expect(transactions).toBeObject()
-
- expect(transactions).toHaveProperty('count', 0)
-
- expect(transactions.rows).toBeArray()
- expect(transactions.rows).toBeEmpty()
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/repositories/transactions.test.ts b/packages/core-api/__tests__/repositories/transactions.test.ts
new file mode 100644
index 0000000000..cd2685399a
--- /dev/null
+++ b/packages/core-api/__tests__/repositories/transactions.test.ts
@@ -0,0 +1,358 @@
+import "@arkecosystem/core-test-utils";
+import "jest-extended";
+
+import { crypto } from "@arkecosystem/crypto";
+import genesisBlock from "../../../core-test-utils/src/config/testnet/genesisBlock.json";
+// noinspection TypeScriptPreferShortImport
+import { TransactionsRepository } from "../../dist/repositories/transactions";
+import { setUp, tearDown } from "../__support__/setup";
+
+let repository;
+let genesisTransaction;
+
+beforeAll(async () => {
+ await setUp();
+
+ repository = new TransactionsRepository();
+
+ genesisTransaction = genesisBlock.transactions[0];
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("Transaction Repository", () => {
+ describe("search", () => {
+ const expectSearch = async (paramsOrTransactions, count = 1) => {
+ let transactions;
+ if (paramsOrTransactions.rows) {
+ transactions = paramsOrTransactions;
+ } else {
+ transactions = await repository.search(paramsOrTransactions);
+ }
+
+ expect(transactions).toBeObject();
+
+ expect(transactions.count).toBeNumber();
+ // expect(transactions.count).toBe(count);
+
+ expect(transactions.rows).toBeArray();
+ if (count > 0) {
+ expect(transactions.rows).not.toBeEmpty();
+ transactions.rows.forEach(transaction => {
+ expect(transaction).toContainKeys([
+ "id",
+ "version",
+ "sequence",
+ "timestamp",
+ "type",
+ "amount",
+ "fee",
+ "serialized",
+ "blockId",
+ "senderPublicKey",
+ "vendorFieldHex",
+ "block",
+ ]);
+ });
+ }
+ };
+
+ it("should search transactions by the specified `id`", async () => {
+ await expectSearch({ id: genesisTransaction.id });
+ });
+
+ it("should search transactions by the specified `blockId`", async () => {
+ await expectSearch({ blockId: genesisTransaction.blockId }, 153);
+ });
+
+ it("should search transactions by the specified `type`", async () => {
+ await expectSearch({ type: genesisTransaction.type }, 51);
+ });
+
+ it("should search transactions by the specified `version`", async () => {
+ await expectSearch({ version: genesisTransaction.version }, 153);
+ });
+
+ it("should search transactions by the specified `senderPublicKey`", async () => {
+ await expectSearch({ senderPublicKey: genesisTransaction.senderPublicKey }, 51);
+ });
+
+ describe("`senderId`", () => {
+ it("should search transactions by the specified `senderId`", async () => {
+ const senderPublicKey = genesisTransaction.senderPublicKey;
+ const senderId = crypto.getAddress(senderPublicKey, 23);
+
+ const transactions = await repository.search({ senderId });
+
+ await expectSearch(transactions, 51);
+
+ for (const row of transactions.rows) {
+ expect(row.senderPublicKey).toEqual(senderPublicKey);
+ }
+ });
+
+ describe("when the `senderId` is incorrect", () => {
+ it("should return no result", async () => {
+ const senderId = "unknown";
+ await expectSearch({ senderId }, 0);
+ });
+ });
+ });
+
+ it("should search transactions by the specified `recipientId`", async () => {
+ await expectSearch({ recipientId: genesisTransaction.recipientId }, 2);
+ });
+
+ describe("when searching by `senderPublicKey` and `recipientId`", () => {
+        it("should search transactions sent by `senderPublicKey` to `recipientId`", async () => {
+ const senderPublicKey = genesisTransaction.senderPublicKey;
+ const recipientId = genesisBlock.transactions[2].recipientId;
+
+ let transactions = await repository.search({
+ recipientId,
+ senderPublicKey,
+ });
+
+ await expectSearch(transactions, 1);
+
+ for (const row of transactions.rows) {
+ expect(row.senderPublicKey).toEqual(senderPublicKey);
+ expect(row.recipientId).toEqual(recipientId);
+ }
+
+ transactions = await repository.search({
+ recipientId: "unknown",
+ senderPublicKey,
+ });
+
+ await expectSearch(transactions, 0);
+ });
+ });
+
+ describe("when searching by `senderId` and `recipientId`", () => {
+        it("should search transactions sent by `senderId` to `recipientId`", async () => {
+ const senderId = crypto.getAddress(genesisTransaction.senderPublicKey, 23);
+ const recipientId = genesisBlock.transactions[2].recipientId;
+
+ let transactions = await repository.search({
+ recipientId,
+ senderId,
+ });
+
+ await expectSearch(transactions, 1);
+
+ for (const row of transactions.rows) {
+ expect(row.senderPublicKey).toEqual(genesisTransaction.senderPublicKey);
+ expect(row.recipientId).toEqual(recipientId);
+ }
+
+ transactions = await repository.search({
+ recipientId: "unknown",
+ senderId,
+ });
+
+ await expectSearch(transactions, 0);
+ });
+ });
+
+ describe("`addresses`", () => {
+ const addresses = [genesisBlock.transactions[1].recipientId, genesisBlock.transactions[4].recipientId];
+
+ it("should search transactions by the specified `addresses` (sender and recipient)", async () => {
+ await expectSearch({ addresses: [addresses[0]] }, 3);
+
+ await expectSearch({ addresses }, 6);
+ });
+
+ describe("when `addresses` is empty", () => {
+ it("should return all transactions", async () => {
+ await expectSearch({ address: [] }, 153);
+ });
+ });
+
+ describe("when searching by `addresses` and `senderId`", () => {
+ it("should search transactions by the `addresses`, but only include those received from `senderId`", async () => {
+ const senderId = crypto.getAddress(genesisTransaction.senderPublicKey, 23);
+
+ let transactions = await repository.search({
+ senderId,
+ addresses,
+ });
+
+ await expectSearch(transactions, 2);
+
+ for (const row of transactions.rows) {
+ expect(row.senderPublicKey).toEqual(genesisTransaction.senderPublicKey);
+ }
+
+ transactions = await repository.search({
+ senderId: "unknown",
+ addresses,
+ });
+
+ await expectSearch(transactions, 0);
+ });
+ });
+
+ describe("when searching by `addresses` and `senderPublicKey`", () => {
+ it("should search transactions by the `addresses`, but only include those received from `senderPublicKey`", async () => {
+ const { senderPublicKey } = genesisTransaction;
+
+ let transactions = await repository.search({
+ senderPublicKey,
+ addresses,
+ });
+
+ await expectSearch(transactions, 2);
+
+ for (const row of transactions.rows) {
+ expect(row.senderPublicKey).toEqual(senderPublicKey);
+ }
+
+ transactions = await repository.search({
+ senderPublicKey: "unknown",
+ addresses,
+ });
+
+ await expectSearch(transactions, 0);
+ });
+ });
+
+ describe("when searching by `addresses` and `recipientId`", () => {
+ it("should search transactions by the `addresses`, but only include those sent to `recipientId`", async () => {
+ const senderId = crypto.getAddress(genesisTransaction.senderPublicKey, 23);
+ const recipientId = genesisBlock.transactions[2].recipientId;
+
+ let transactions = await repository.search({
+ recipientId,
+ addresses: [senderId],
+ });
+
+ await expectSearch(transactions, 1);
+
+ for (const row of transactions.rows) {
+ expect(row.recipientId).toEqual(recipientId);
+ }
+
+ transactions = await repository.search({
+ recipientId: "unknown",
+ addresses,
+ });
+
+ await expectSearch(transactions, 0);
+ });
+ });
+
+ describe("when searching by `addresses`, `senderId` and `recipientId`", () => {
+ it("should search transactions by `senderId` and `recipientId` only", async () => {
+ const senderId = crypto.getAddress(genesisTransaction.senderPublicKey, 23);
+ const params = {
+ senderId,
+ recipientId: genesisTransaction.recipientId,
+ addresses,
+ };
+ const transactions = await repository.search(params);
+
+ await expectSearch(transactions, 1);
+
+ const { rows } = transactions;
+ expect(rows[0].senderPublicKey).toEqual(genesisTransaction.senderPublicKey);
+ expect(rows[0].recipientId).toEqual(genesisTransaction.recipientId);
+ });
+ });
+
+ describe("when searching by `addresses`, `senderPublicKey` and `recipientId`", () => {
+ it("should search transactions by `senderPublicKey` and `recipientId` only", async () => {
+ const params = {
+ senderPublicKey: genesisTransaction.senderPublicKey,
+ recipientId: genesisTransaction.recipientId,
+ addresses,
+ };
+ const transactions = await repository.search(params);
+
+ await expectSearch(transactions, 1);
+
+ const { rows } = transactions;
+ expect(rows[0].senderPublicKey).toEqual(genesisTransaction.senderPublicKey);
+ expect(rows[0].recipientId).toEqual(genesisTransaction.recipientId);
+ });
+ });
+
+        describe("when searching by `addresses` and another field", () => {
+ it("should search transactions by the specified `addresses` and that field", async () => {
+ const amount = 245098000000000;
+ const transactions = await repository.search({
+ amount: { from: amount },
+ addresses,
+ });
+
+ await expectSearch(transactions, 2);
+
+ for (const row of transactions.rows) {
+ expect(row.amount).toEqual(amount.toString());
+ }
+ });
+ });
+ });
+
+ it("should search transactions by the specified `timestamp`", async () => {
+ await expectSearch(
+ {
+ timestamp: {
+ from: genesisTransaction.timestamp,
+ to: genesisTransaction.timestamp,
+ },
+ },
+ 153,
+ );
+ });
+
+ it("should search transactions by the specified `amount`", async () => {
+ await expectSearch(
+ {
+ amount: {
+ from: genesisTransaction.amount,
+ to: genesisTransaction.amount,
+ },
+ },
+ 50,
+ );
+ });
+
+ it("should search transactions by the specified `fee`", async () => {
+ await expectSearch(
+ {
+ fee: {
+ from: genesisTransaction.fee,
+ to: genesisTransaction.fee,
+ },
+ },
+ 153,
+ );
+ });
+
+ it("should search transactions by the specified `vendorFieldHex`", async () => {
+ await expectSearch({ vendorFieldHex: genesisTransaction.vendorFieldHex }, 153);
+ });
+
+ describe("when there are more than 1 condition", () => {
+ it("should search transactions that includes all of them (AND)", async () => {
+ await expectSearch({ recipientId: genesisTransaction.recipientId, type: 3 });
+ });
+ });
+
+ describe("when no results", () => {
+ it("should not return them", async () => {
+ const transactions = await repository.search({ recipientId: "dummy" });
+ expect(transactions).toBeObject();
+
+ expect(transactions).toHaveProperty("count", 0);
+
+ expect(transactions.rows).toBeArray();
+ expect(transactions.rows).toBeEmpty();
+ });
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/repositories/utils/build-filter-query.test.ts b/packages/core-api/__tests__/repositories/utils/build-filter-query.test.ts
new file mode 100644
index 0000000000..68d10d9046
--- /dev/null
+++ b/packages/core-api/__tests__/repositories/utils/build-filter-query.test.ts
@@ -0,0 +1,32 @@
+import "jest-extended";
+
+import { buildFilterQuery } from "../../../dist/repositories/utils/build-filter-query";
+
+describe("Repository utils > buildFilterQuery", () => {
+ describe("`in` filter", () => {
+ describe("when the parameters are empty", () => {
+ it("should generate where conditions", () => {
+ const params = { a: ["a1", "a2", "a3"] };
+ const query = buildFilterQuery(params, { in: [] });
+ expect(query).toEqual([]);
+ });
+ });
+
+ describe("when the parameters are not filterable", () => {
+ it("should generate where conditions", () => {
+ const params = { a: ["a1", "a2", "a3"] };
+ const query = buildFilterQuery(params, { in: ["NOT"] });
+ expect(query).toEqual([]);
+ });
+ });
+
+ describe("when the parameters are filterable", () => {
+ it("should generate where conditions", () => {
+ const values = ["a1", "a2", "a3"];
+ const params = { a: values };
+ const query = buildFilterQuery(params, { in: ["a"] });
+ expect(query).toEqual([{ column: "a", method: "in", value: values }]);
+ });
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/accounts.test.js b/packages/core-api/__tests__/v1/handlers/accounts.test.js
deleted file mode 100644
index 50c2cbc89e..0000000000
--- a/packages/core-api/__tests__/v1/handlers/accounts.test.js
+++ /dev/null
@@ -1,95 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const address = 'AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo'
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Wallets', () => {
- describe('GET api/accounts/getAllAccounts', () => {
- it('should return all the wallets', async () => {
- const response = await utils.request('GET', 'accounts/getAllAccounts')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.accounts).toBeArray()
- })
- })
-
- describe('GET api/accounts/?address', () => {
- it('should return account information', async () => {
- const response = await utils.request('GET', 'accounts', { address })
- expect(response).toBeSuccessfulResponse()
-
- utils.expectWallet(response.data.account)
- })
- })
-
- describe('GET api/accounts/getBalance?address', () => {
- it('should return balance', async () => {
- const response = await utils.request('GET', 'accounts/getBalance', {
- address,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.balance).toBeString()
- expect(response.data.unconfirmedBalance).toBeString()
- })
- })
-
- describe('GET /accounts/getPublicKey?address', () => {
- it('should return public key for address', async () => {
- const response = await utils.request('GET', 'accounts/getPublicKey', {
- address,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.publicKey).toBeString()
- })
- })
-
- describe('GET api/accounts/delegates/fee', () => {
- it('should return delegate fee of an account', async () => {
- const response = await utils.request('GET', 'accounts/delegates/fee')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.fee).toBeNumber()
- })
- })
-
- describe('GET /accounts/delegates?address', () => {
- it('should return delegate info the address has voted for', async () => {
- const response = await utils.request('GET', 'accounts/delegates', {
- address,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.delegates).toBeArray()
- expect(response.data.delegates[0].producedblocks).toBeNumber()
- })
- })
-
- describe('GET api/accounts/top', () => {
- it('should return the top wallets', async () => {
- const response = await utils.request('GET', 'accounts/top')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.accounts).toBeArray()
- })
- })
-
- describe('GET api/accounts/count', () => {
- it('should return the total number of wallets', async () => {
- const response = await utils.request('GET', 'accounts/count')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.count).toBeNumber()
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/accounts.test.ts b/packages/core-api/__tests__/v1/handlers/accounts.test.ts
new file mode 100644
index 0000000000..2643fd1b30
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/accounts.test.ts
@@ -0,0 +1,109 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const address = "AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Wallets", () => {
+ describe("GET api/accounts/getAllAccounts", () => {
+ it("should return all the wallets", async () => {
+ const response = await utils.request("GET", "accounts/getAllAccounts");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.accounts).toBeArray();
+ });
+ });
+
+ describe("GET api/accounts/?address", () => {
+ it("should return account information", async () => {
+ const response = await utils.request("GET", "accounts", { address });
+ expect(response).toBeSuccessfulResponse();
+
+ utils.expectWallet(response.data.account);
+ });
+
+ it("should not return an empty wallet", async () => {
+ // create a cold wallet in memory with the given senderId
+ const createCold = await utils.request("GET", "transactions", {
+ senderId: "AbhUUMJBw1dZJiZMxKBhHsdXMMafcMaPNG",
+ });
+ expect(createCold).toBeSuccessfulResponse();
+ expect(createCold.data.transactions).toBeEmpty();
+
+ // attempt to retrieve the cold wallet
+ const response = await utils.request("GET", "accounts", { address: "AbhUUMJBw1dZJiZMxKBhHsdXMMafcMaPNG" });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.error).toBe("Account not found");
+ });
+ });
+
+ describe("GET api/accounts/getBalance?address", () => {
+ it("should return balance", async () => {
+ const response = await utils.request("GET", "accounts/getBalance", {
+ address,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.balance).toBeString();
+ expect(response.data.unconfirmedBalance).toBeString();
+ });
+ });
+
+ describe("GET /accounts/getPublicKey?address", () => {
+ it("should return public key for address", async () => {
+ const response = await utils.request("GET", "accounts/getPublicKey", {
+ address,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.publicKey).toBeString();
+ });
+ });
+
+ describe("GET api/accounts/delegates/fee", () => {
+ it("should return delegate fee of an account", async () => {
+ const response = await utils.request("GET", "accounts/delegates/fee");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.fee).toBeNumber();
+ });
+ });
+
+ describe("GET /accounts/delegates?address", () => {
+ it("should return delegate info the address has voted for", async () => {
+ const response = await utils.request("GET", "accounts/delegates", {
+ address,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.delegates).toBeArray();
+ expect(response.data.delegates[0].producedblocks).toBeNumber();
+ });
+ });
+
+ describe("GET api/accounts/top", () => {
+ it("should return the top wallets", async () => {
+ const response = await utils.request("GET", "accounts/top");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.accounts).toBeArray();
+ });
+ });
+
+ describe("GET api/accounts/count", () => {
+ it("should return the total number of wallets", async () => {
+ const response = await utils.request("GET", "accounts/count");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.count).toBeNumber();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/blocks.test.js b/packages/core-api/__tests__/v1/handlers/blocks.test.js
deleted file mode 100644
index 2ddb2c8c14..0000000000
--- a/packages/core-api/__tests__/v1/handlers/blocks.test.js
+++ /dev/null
@@ -1,131 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-let genesisBlock
-
-beforeAll(async () => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json')
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Blocks', () => {
- describe('GET /blocks/get?id', () => {
- it('should return blocks based on id', async () => {
- const response = await utils.request('GET', 'blocks/get', {
- id: genesisBlock.id,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.block).toBeObject()
- expect(response.data.block.id).toBeString()
- expect(response.data.block.height).toBeNumber()
- })
-
- it('should return block not found', async () => {
- const response = await utils.request('GET', 'blocks/get', {
- id: '18777we16674628308671',
- })
- utils.expectError(response)
-
- expect(response.data.error).toContain('not found')
- })
- })
-
- describe('GET /blocks?limit=XX', () => {
- it('should return 1 blocks', async () => {
- const response = await utils.request('GET', 'blocks', { limit: 1 })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.blocks).toHaveLength(1)
- })
-
- it('should return limit error info', async () => {
- const response = await utils.request('GET', 'blocks', { limit: 500 })
- utils.expectError(response)
-
- expect(response.data.success).toBeFalse()
- expect(response.data.error).toContain('should be <= 100')
- })
- })
-
- describe('GET /blocks/getfees', () => {
- it('should return matching fees with the config', async () => {
- const response = await utils.request('GET', 'blocks/getFees')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.fees).toBeObject()
-
- expect(response.data.fees).toContainKeys([
- 'delegate',
- 'secondsignature',
- 'delegate',
- 'vote',
- 'multisignature',
- ])
- })
- })
-
- describe('GET /blocks/getNethash', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'blocks/getNethash')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.nethash).toBeString()
-
- const container = require('@arkecosystem/core-container')
- const config = container.resolvePlugin('config')
-
- expect(response.data.nethash).toBe(config.network.nethash)
- })
- })
-
- describe('GET /blocks/getMilestone', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'blocks/getMilestone')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.milestone).toBeNumber()
- })
- })
-
- describe('GET /blocks/getReward', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'blocks/getReward')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.reward).toBeNumber()
- })
- })
-
- describe('GET /blocks/getSupply', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'blocks/getSupply')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.supply).toBeNumber()
- })
- })
-
- describe('GET /blocks/getStatus', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'blocks/getStatus')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.epoch).toBeString()
- expect(response.data.height).toBeNumber()
- expect(response.data.fee).toBeNumber()
- expect(response.data.milestone).toBeNumber()
- expect(response.data.nethash).toBeString()
- expect(response.data.reward).toBeNumber()
- expect(response.data.supply).toBeNumber()
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/blocks.test.ts b/packages/core-api/__tests__/v1/handlers/blocks.test.ts
new file mode 100644
index 0000000000..c4f39662a3
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/blocks.test.ts
@@ -0,0 +1,126 @@
+import { app } from "@arkecosystem/core-container";
+import "@arkecosystem/core-test-utils";
+import genesisBlock from "../../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Blocks", () => {
+ describe("GET /blocks/get?id", () => {
+ it("should return blocks based on id", async () => {
+ const response = await utils.request("GET", "blocks/get", {
+ id: genesisBlock.id,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.block).toBeObject();
+ expect(response.data.block.id).toBeString();
+ expect(response.data.block.height).toBeNumber();
+ });
+
+ it("should return block not found", async () => {
+ const response = await utils.request("GET", "blocks/get", {
+ id: "18777we16674628308671",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toContain("not found");
+ });
+ });
+
+ describe("GET /blocks?limit=XX", () => {
+ it("should return 1 blocks", async () => {
+ const response = await utils.request("GET", "blocks", { limit: 1 });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.blocks).toHaveLength(1);
+ });
+
+ it("should return limit error info", async () => {
+ const response = await utils.request("GET", "blocks", { limit: 500 });
+ utils.expectError(response);
+
+ expect(response.data.success).toBeFalse();
+ expect(response.data.error).toContain("should be <= 100");
+ });
+ });
+
+ describe("GET /blocks/getfees", () => {
+ it("should return matching fees with the config", async () => {
+ const response = await utils.request("GET", "blocks/getFees");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.fees).toBeObject();
+
+ expect(response.data.fees).toContainKeys([
+ "delegate",
+ "secondsignature",
+ "delegate",
+ "vote",
+ "multisignature",
+ ]);
+ });
+ });
+
+ describe("GET /blocks/getNethash", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "blocks/getNethash");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.nethash).toBeString();
+
+ const config = app.getConfig();
+
+ expect(response.data.nethash).toBe(config.get("network.nethash"));
+ });
+ });
+
+ describe("GET /blocks/getMilestone", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "blocks/getMilestone");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.milestone).toBeNumber();
+ });
+ });
+
+ describe("GET /blocks/getReward", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "blocks/getReward");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.reward).toBeNumber();
+ });
+ });
+
+ describe("GET /blocks/getSupply", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "blocks/getSupply");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.supply).toBeNumber();
+ });
+ });
+
+ describe("GET /blocks/getStatus", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "blocks/getStatus");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.epoch).toBeString();
+ expect(response.data.height).toBeNumber();
+ expect(response.data.fee).toBeNumber();
+ expect(response.data.milestone).toBeNumber();
+ expect(response.data.nethash).toBeString();
+ expect(response.data.reward).toBeNumber();
+ expect(response.data.supply).toBeNumber();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/delegates.test.js b/packages/core-api/__tests__/v1/handlers/delegates.test.js
deleted file mode 100644
index fbad0b5817..0000000000
--- a/packages/core-api/__tests__/v1/handlers/delegates.test.js
+++ /dev/null
@@ -1,97 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const delegate = {
- username: 'genesis_9',
- publicKey:
- '0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647',
-}
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Delegates', () => {
- describe('GET /delegates', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'delegates')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- utils.expectDelegate(response.data.delegates[0])
- })
- })
-
- describe('GET /delegates/get', () => {
- it('should be ok using a username', async () => {
- const response = await utils.request('GET', 'delegates/get', {
- username: delegate.username,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- utils.expectDelegate(response.data.delegate, delegate)
- })
-
- it('should be ok using a publicKey', async () => {
- const response = await utils.request('GET', 'delegates/get', {
- publicKey: delegate.publicKey,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- utils.expectDelegate(response.data.delegate, delegate)
- })
- })
-
- describe('GET /delegates/count', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'delegates/count')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- expect(response.data).toHaveProperty('count')
- expect(response.data.count).toBeNumber()
- })
- })
-
- describe('GET /delegates/search', () => {
- it('should be ok searching a username', async () => {
- const response = await utils.request('GET', 'delegates/search', {
- q: delegate.username,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- utils.expectDelegate(response.data.delegates[0], delegate)
- })
- })
-
- describe('GET /delegates/voters', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'delegates/voters', {
- publicKey: delegate.publicKey,
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- utils.expectWallet(response.data.accounts[0])
- })
- })
-
- describe('GET /delegates/fee', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'delegates/fee')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- expect(response.data).toHaveProperty('fee')
- expect(response.data.fee).toBeNumber()
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/delegates.test.ts b/packages/core-api/__tests__/v1/handlers/delegates.test.ts
new file mode 100644
index 0000000000..6e6a915a9f
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/delegates.test.ts
@@ -0,0 +1,96 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const delegate = {
+ username: "genesis_9",
+ publicKey: "0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647",
+};
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Delegates", () => {
+ describe("GET /delegates", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "delegates");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ utils.expectDelegate(response.data.delegates[0]);
+ });
+ });
+
+ describe("GET /delegates/get", () => {
+ it("should be ok using a username", async () => {
+ const response = await utils.request("GET", "delegates/get", {
+ username: delegate.username,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ utils.expectDelegate(response.data.delegate, delegate);
+ });
+
+ it("should be ok using a publicKey", async () => {
+ const response = await utils.request("GET", "delegates/get", {
+ publicKey: delegate.publicKey,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ utils.expectDelegate(response.data.delegate, delegate);
+ });
+ });
+
+ describe("GET /delegates/count", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "delegates/count");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ expect(response.data).toHaveProperty("count");
+ expect(response.data.count).toBeNumber();
+ });
+ });
+
+ describe("GET /delegates/search", () => {
+ it("should be ok searching a username", async () => {
+ const response = await utils.request("GET", "delegates/search", {
+ q: delegate.username,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ utils.expectDelegate(response.data.delegates[0], delegate);
+ });
+ });
+
+ describe("GET /delegates/voters", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "delegates/voters", {
+ publicKey: delegate.publicKey,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ utils.expectWallet(response.data.accounts[0]);
+ });
+ });
+
+ describe("GET /delegates/fee", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "delegates/fee");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ expect(response.data).toHaveProperty("fee");
+ expect(response.data.fee).toBeNumber();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/loader.test.js b/packages/core-api/__tests__/v1/handlers/loader.test.js
deleted file mode 100644
index f7f9f36bfc..0000000000
--- a/packages/core-api/__tests__/v1/handlers/loader.test.js
+++ /dev/null
@@ -1,53 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Loader', () => {
- describe('GET /loader/status', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'loader/status')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- expect(response.data).toHaveProperty('loaded')
- expect(response.data).toHaveProperty('now')
- expect(response.data).toHaveProperty('blocksCount')
- })
- })
-
- describe('GET /loader/status/sync', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'loader/status/sync')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- expect(response.data).toHaveProperty('syncing')
- expect(response.data).toHaveProperty('blocks')
- expect(response.data).toHaveProperty('height')
- expect(response.data).toHaveProperty('id')
- })
- })
-
- describe('GET /loader/autoconfigure', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'loader/autoconfigure')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data).toBeObject()
- expect(response.data.network).toBeObject()
- expect(response.data.network).toHaveProperty('nethash')
- expect(response.data.network).toHaveProperty('token')
- expect(response.data.network).toHaveProperty('symbol')
- expect(response.data.network).toHaveProperty('explorer')
- expect(response.data.network).toHaveProperty('version')
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/loader.test.ts b/packages/core-api/__tests__/v1/handlers/loader.test.ts
new file mode 100644
index 0000000000..f8f79f765b
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/loader.test.ts
@@ -0,0 +1,53 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Loader", () => {
+ describe("GET /loader/status", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "loader/status");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ expect(response.data).toHaveProperty("loaded");
+ expect(response.data).toHaveProperty("now");
+ expect(response.data).toHaveProperty("blocksCount");
+ });
+ });
+
+ describe("GET /loader/status/sync", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "loader/status/sync");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ expect(response.data).toHaveProperty("syncing");
+ expect(response.data).toHaveProperty("blocks");
+ expect(response.data).toHaveProperty("height");
+ expect(response.data).toHaveProperty("id");
+ });
+ });
+
+ describe("GET /loader/autoconfigure", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "loader/autoconfigure");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data).toBeObject();
+ expect(response.data.network).toBeObject();
+ expect(response.data.network).toHaveProperty("nethash");
+ expect(response.data.network).toHaveProperty("token");
+ expect(response.data.network).toHaveProperty("symbol");
+ expect(response.data.network).toHaveProperty("explorer");
+ expect(response.data.network).toHaveProperty("version");
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/peers.test.js b/packages/core-api/__tests__/v1/handlers/peers.test.js
deleted file mode 100644
index 45e8a8d4ef..0000000000
--- a/packages/core-api/__tests__/v1/handlers/peers.test.js
+++ /dev/null
@@ -1,92 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const peerIp = '167.114.29.55'
-const peerPort = '4002'
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Peers', () => {
- describe('GET /peers/version', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'peers/version')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.version).toBeString()
- })
- })
-
- describe('GET /peers', () => {
- // NOTE Seems that ark-node replies successfully
- // it('should fail using empty parameters', async () => {
- // const response = await utils.request('GET', 'peers', {
- // state: null,
- // os: null,
- // shared: null,
- // version: null,
- // limit: null,
- // offset: null,
- // orderBy: null
- // })
- // debugger
- // utils.expectError(response)
- //
- // expect(response.data.error).toContain('should be string')
- // })
-
- it('should fail using limit > 100', async () => {
- const response = await utils.request('GET', 'peers', { limit: 101 })
- utils.expectError(response)
- })
-
- it('should fail using invalid parameters', async () => {
- const response = await utils.request('GET', 'peers', {
- state: 'invalid',
- os: 'invalid',
- shared: 'invalid',
- version: 'invalid',
- limit: 'invalid',
- offset: 'invalid',
- orderBy: 'invalid',
- })
- utils.expectError(response)
-
- expect(response.data.error).not.toBeNull()
- })
- })
-
- describe('GET /peers/get', () => {
- it('should fail using known ip address with no port', async () => {
- const response = await utils.request('GET', 'peers/get', {
- ip: '127.0.0.1',
- })
- utils.expectError(response)
-
- expect(response.data.error).toBe("should have required property 'port'")
- })
-
- it('should fail using valid port with no ip address', async () => {
- const response = await utils.request('GET', 'peers/get', { port: 4002 })
- utils.expectError(response)
-
- expect(response.data.error).toBe("should have required property 'ip'")
- })
-
- it('should fail using unknown ip address and port', async () => {
- const response = await utils.request('GET', 'peers/get', {
- ip: '99.99.99.99',
- port: peerPort,
- })
- utils.expectError(response)
-
- expect(response.data.error).toBe(`Peer 99.99.99.99:${peerPort} not found`)
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/peers.test.ts b/packages/core-api/__tests__/v1/handlers/peers.test.ts
new file mode 100644
index 0000000000..4df8286e72
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/peers.test.ts
@@ -0,0 +1,104 @@
+import { app } from "@arkecosystem/core-container";
+import { Peer } from "@arkecosystem/core-p2p/src/peer";
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const mockAddress = "1.0.0.99";
+const mockPort = 4002;
+
+beforeAll(async () => {
+ await setUp();
+
+ const peerMock = new Peer(mockAddress, mockPort);
+ peerMock.setStatus("OK");
+
+ const monitor = app.resolvePlugin("p2p");
+ monitor.peers = {};
+ monitor.peers[peerMock.ip] = peerMock;
+});
+
+afterAll(async () => {
+ const monitor = app.resolvePlugin("p2p");
+ monitor.peers = {};
+
+ await tearDown();
+});
+
+describe("API 1.0 - Peers", () => {
+ describe("GET /peers", () => {
+ it("should pass using valid parameters", async () => {
+ const response = await utils.request("GET", "peers", { limit: 50 });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.error).toBeUndefined();
+ });
+
+ it("should fail using limit > 100", async () => {
+ const response = await utils.request("GET", "peers", { limit: 101 });
+ utils.expectError(response);
+ });
+
+ it("should fail using invalid parameters", async () => {
+ const response = await utils.request("GET", "peers", {
+ state: "invalid",
+ os: "invalid",
+ shared: "invalid",
+ version: "invalid",
+ limit: "invalid",
+ offset: "invalid",
+ orderBy: "invalid",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).not.toBeNull();
+ });
+ });
+
+ describe("GET /peers/get", () => {
+ it("should pass using valid data", async () => {
+ const response = await utils.request("GET", "peers/get", {
+ ip: mockAddress,
+ port: mockPort,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data).toBeObject();
+ expect(response.data.peer.ip).toBe(mockAddress);
+ expect(response.data.peer.port).toBe(mockPort);
+ });
+
+ it("should fail using known ip address with no port", async () => {
+ const response = await utils.request("GET", "peers/get", {
+ ip: "127.0.0.1",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toBe("should have required property 'port'");
+ });
+
+ it("should fail using valid port with no ip address", async () => {
+ const response = await utils.request("GET", "peers/get", { port: 4002 });
+ utils.expectError(response);
+
+ expect(response.data.error).toBe("should have required property 'ip'");
+ });
+
+ it("should fail using unknown ip address and port", async () => {
+ const response = await utils.request("GET", "peers/get", {
+ ip: "99.99.99.99",
+ port: mockPort,
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toBe(`Peer 99.99.99.99:${mockPort} not found`);
+ });
+ });
+
+ describe("GET /peers/version", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "peers/version");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.version).toBeString();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/signatures.test.js b/packages/core-api/__tests__/v1/handlers/signatures.test.js
deleted file mode 100644
index 30fe41c2b9..0000000000
--- a/packages/core-api/__tests__/v1/handlers/signatures.test.js
+++ /dev/null
@@ -1,22 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Signatures', () => {
- describe('GET /signatures/fee', () => {
- it('should return second signature value from config', async () => {
- const response = await utils.request('GET', 'signatures/fee')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.fee).toBeNumber()
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/signatures.test.ts b/packages/core-api/__tests__/v1/handlers/signatures.test.ts
new file mode 100644
index 0000000000..4e9b1fd137
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/signatures.test.ts
@@ -0,0 +1,22 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Signatures", () => {
+ describe("GET /signatures/fee", () => {
+ it("should return second signature value from config", async () => {
+ const response = await utils.request("GET", "signatures/fee");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.fee).toBeNumber();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/handlers/transactions.test.js b/packages/core-api/__tests__/v1/handlers/transactions.test.js
deleted file mode 100644
index c4486e6040..0000000000
--- a/packages/core-api/__tests__/v1/handlers/transactions.test.js
+++ /dev/null
@@ -1,293 +0,0 @@
-/* eslint max-len: "off" */
-
-require('@arkecosystem/core-test-utils/lib/matchers')
-
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const address1 = 'APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn'
-const address2 = 'AHXtmB84sTZ9Zd35h9Y1vfFvPE2Xzqj8ri'
-
-let genesisBlock
-let transactionList
-
-beforeAll(async () => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json')
- transactionList = genesisBlock.transactions
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 1.0 - Transactions', () => {
- describe('GET /transactions', () => {
- it('should be ok using valid parameters', async () => {
- const data = {
- blockId: '17184958558311101492',
- senderId: address1,
- recipientId: address2,
- limit: 10,
- offset: 0,
- orderBy: 'amount:asc',
- }
-
- const response = await utils.request('GET', 'transactions', data)
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- })
- })
-
- it('should reply with transactions that have any of the values (OR)', async () => {
- const data = {
- senderId: address1,
- recipientId: address2,
- }
-
- const response = await utils.request('GET', 'transactions', data)
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- if (transaction.senderId === data.senderId) {
- expect(transaction.senderId).toBe(data.senderId)
- } else {
- expect(transaction.recipientId).toBe(data.recipientId)
- }
- })
- })
-
- it('should be ok filtering by type', async () => {
- const type = 3
-
- const response = await utils.request('GET', 'transactions', { type })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- expect(transaction.type).toBe(type)
- })
- })
-
- it('should be ok using no params', async () => {
- const response = await utils.request('GET', 'transactions')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- })
- })
-
- // fixquery
- // http://localhost:4003/api/transactions?orderBy=timestamp:desc&offset=0&limit=50&recipientId=ANwZGjK55pe4xSWfnggt324S9XKY3TSwAr&senderId=ANwZGjK55pe4xSWfnggt324S9XKY3TSwAr
-
- it('should fail using limit > 100', async () => {
- const limit = 101
-
- const response = await utils.request('GET', 'transactions', { limit })
- utils.expectError(response)
-
- expect(response.data.error).toBeString()
- })
-
- it('should be ok ordered by ascending timestamp', async () => {
- const response = await utils.request('GET', 'transactions', {
- orderBy: 'timestamp:asc',
- })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- })
-
- let flag = 0
- for (let i = 0; i < response.data.transactions.length; i++) {
- if (response.data.transactions[i + 1]) {
- // await response.data.transactions[i].toHaveProperty('timestamp').which.is.at.most(response.data.transactions[i + 1].timestamp)
- expect(response.data.transactions[i]).toHaveProperty('timestamp')
-
- if (flag === 0) {
- // offsetTimestamp = response.data.transactions[i + 1].timestamp
- flag = 1
- }
- }
- }
- })
-
- it('should be ok using offset == 1', async () => {
- const response = await utils.request('GET', 'transactions', { offset: 1 })
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- expect(response.data.transactions).not.toBeEmpty()
-
- response.data.transactions.forEach(transaction => {
- expect(transaction).toBeApiTransaction()
- })
- })
-
- it('should fail using offset == "one"', async () => {
- const response = await utils.request('GET', 'transactions', {
- offset: 'one',
- })
- utils.expectError(response)
-
- expect(response.data.error).toBeString()
- })
-
- it('should fail using completely invalid fields', async () => {
- const response = await utils.request('GET', 'transactions', {
- blockId: 'invalid',
- senderId: 'invalid',
- recipientId: 'invalid',
- limit: 'invalid',
- offset: 'invalid',
- orderBy: 'invalid',
- })
- utils.expectError(response)
-
- expect(response.data.error).toBeString()
- })
-
- it('should fail using partially invalid fields', async () => {
- const response = await utils.request('GET', 'transactions', {
- blockId: 'invalid',
- senderId: 'invalid',
- recipientId: address1,
- limit: 'invalid',
- offset: 'invalid',
- orderBy: 'invalid',
- })
- utils.expectError(response)
-
- expect(response.data.error).toBeString()
- })
- })
-
- describe('GET /transactions/get?id=', () => {
- it('should be ok using valid id', async () => {
- const transactionInCheck = transactionList[0]
- const response = await utils.request('GET', 'transactions/get', {
- id: transactionInCheck.id,
- })
-
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transaction).toBeApiTransaction()
-
- expect(response.data.transaction).toHaveProperty(
- 'id',
- transactionInCheck.id,
- )
- expect(response.data.transaction).toHaveProperty(
- 'amount',
- transactionInCheck.amount,
- )
- expect(response.data.transaction).toHaveProperty(
- 'fee',
- transactionInCheck.fee,
- )
- expect(response.data.transaction).toHaveProperty(
- 'recipientId',
- transactionInCheck.recipientId,
- )
- expect(response.data.transaction).toHaveProperty(
- 'senderId',
- transactionInCheck.senderId,
- )
- expect(response.data.transaction).toHaveProperty(
- 'type',
- transactionInCheck.type,
- )
- })
-
- it('should fail using invalid id', async () => {
- const response = await utils.request('GET', 'transactions/get', {
- id: 'invalid',
- })
-
- utils.expectError(response)
-
- expect(response.data.error).toBeString()
- })
- })
-
- describe('GET /transactions/unconfirmed/get?id=', () => {
- it('should be ok using valid id', async () => {
- const transaction = await utils.createTransaction()
-
- const response = await utils.request(
- 'GET',
- 'transactions/unconfirmed/get',
- { id: transaction.id },
- )
- expect(response).toBeSuccessfulResponse()
-
- if (response.data.success && response.data.transaction !== null) {
- expect(response.data.transaction).toBeObject()
- expect(response.data.transaction).toHaveProperty('id', transaction.id)
- expect(response.data.transaction).toHaveProperty(
- 'type',
- transaction.type,
- )
- expect(response.data.transaction).toHaveProperty(
- 'amount',
- transaction.amount,
- )
- expect(response.data.transaction).toHaveProperty('fee', transaction.fee)
- expect(response.data.transaction).toHaveProperty(
- 'recipientId',
- transaction.recipientId,
- )
- expect(response.data.transaction).toHaveProperty(
- 'senderPublicKey',
- transaction.senderPublicKey,
- )
- expect(response.data.transaction).toHaveProperty(
- 'signature',
- transaction.signature,
- )
- expect(response.data.transaction).toHaveProperty(
- 'timestamp',
- transaction.timestamp,
- )
- expect(response.data.transaction).toHaveProperty(
- 'vendorField',
- transaction.vendorField,
- )
- } else {
- expect(response.data.error).toBeString()
- }
- })
- })
-
- describe('GET /transactions/unconfirmed', () => {
- it('should be ok', async () => {
- const response = await utils.request('GET', 'transactions/unconfirmed')
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.transactions).toBeArray()
- })
- })
-})
diff --git a/packages/core-api/__tests__/v1/handlers/transactions.test.ts b/packages/core-api/__tests__/v1/handlers/transactions.test.ts
new file mode 100644
index 0000000000..795f11a5be
--- /dev/null
+++ b/packages/core-api/__tests__/v1/handlers/transactions.test.ts
@@ -0,0 +1,246 @@
+import "@arkecosystem/core-test-utils";
+import genesisBlock from "../../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const address1 = "APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn";
+const address2 = "AHXtmB84sTZ9Zd35h9Y1vfFvPE2Xzqj8ri";
+
+let transactionList;
+
+beforeAll(async () => {
+ await setUp();
+
+ transactionList = genesisBlock.transactions;
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 1.0 - Transactions", () => {
+ describe("GET /transactions", () => {
+ it("should be ok using valid parameters", async () => {
+ const data = {
+ blockId: "17184958558311101492",
+ senderId: address1,
+ recipientId: address2,
+ limit: 10,
+ offset: 0,
+ orderBy: "amount:asc",
+ };
+
+ const response = await utils.request("GET", "transactions", data);
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ });
+ });
+
+ it("should reply with transactions that have any of the values (OR)", async () => {
+ const data = {
+ senderId: address1,
+ recipientId: address2,
+ };
+
+ const response = await utils.request("GET", "transactions", data);
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ if (transaction.senderId === data.senderId) {
+ expect(transaction.senderId).toBe(data.senderId);
+ } else {
+ expect(transaction.recipientId).toBe(data.recipientId);
+ }
+ });
+ });
+
+ it("should be ok filtering by type", async () => {
+ const type = 3;
+
+ const response = await utils.request("GET", "transactions", { type });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ expect(transaction.type).toBe(type);
+ });
+ });
+
+ it("should be ok using no params", async () => {
+ const response = await utils.request("GET", "transactions");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ });
+ });
+
+    // TODO: restore coverage for the combined descending-order + address-filter query, e.g.:
+    // http://localhost:4003/api/transactions?orderBy=timestamp:desc&offset=0&limit=50&recipientId=ANwZGjK55pe4xSWfnggt324S9XKY3TSwAr&senderId=ANwZGjK55pe4xSWfnggt324S9XKY3TSwAr
+
+ it("should fail using limit > 100", async () => {
+ const limit = 101;
+
+ const response = await utils.request("GET", "transactions", { limit });
+ utils.expectError(response);
+
+ expect(response.data.error).toBeString();
+ });
+
+ it("should be ok ordered by ascending timestamp", async () => {
+ const response = await utils.request("GET", "transactions", {
+ orderBy: "timestamp:asc",
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ });
+
+ let flag = 0;
+ for (let i = 0; i < response.data.transactions.length; i++) {
+ if (response.data.transactions[i + 1]) {
+          // TODO: assert ascending order (transactions[i].timestamp <= transactions[i + 1].timestamp); currently only the property's presence is checked
+ expect(response.data.transactions[i]).toHaveProperty("timestamp");
+
+ if (flag === 0) {
+            // NOTE(review): leftover offset bookkeeping — `flag` is set but never read afterwards
+ flag = 1;
+ }
+ }
+ }
+ });
+
+ it("should be ok using offset == 1", async () => {
+ const response = await utils.request("GET", "transactions", {
+ offset: 1,
+ });
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ expect(response.data.transactions).not.toBeEmpty();
+
+ response.data.transactions.forEach(transaction => {
+ expect(transaction).toBeApiTransaction();
+ });
+ });
+
+ it('should fail using offset == "one"', async () => {
+ const response = await utils.request("GET", "transactions", {
+ offset: "one",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toBeString();
+ });
+
+ it("should fail using completely invalid fields", async () => {
+ const response = await utils.request("GET", "transactions", {
+ blockId: "invalid",
+ senderId: "invalid",
+ recipientId: "invalid",
+ limit: "invalid",
+ offset: "invalid",
+ orderBy: "invalid",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toBeString();
+ });
+
+ it("should fail using partially invalid fields", async () => {
+ const response = await utils.request("GET", "transactions", {
+ blockId: "invalid",
+ senderId: "invalid",
+ recipientId: address1,
+ limit: "invalid",
+ offset: "invalid",
+ orderBy: "invalid",
+ });
+ utils.expectError(response);
+
+ expect(response.data.error).toBeString();
+ });
+ });
+
+ describe("GET /transactions/get?id=", () => {
+ it("should be ok using valid id", async () => {
+ const transactionInCheck = transactionList[0];
+ const response = await utils.request("GET", "transactions/get", {
+ id: transactionInCheck.id,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transaction).toBeApiTransaction();
+
+ expect(response.data.transaction).toHaveProperty("id", transactionInCheck.id);
+ expect(response.data.transaction).toHaveProperty("amount", transactionInCheck.amount);
+ expect(response.data.transaction).toHaveProperty("fee", transactionInCheck.fee);
+ expect(response.data.transaction).toHaveProperty("recipientId", transactionInCheck.recipientId);
+ expect(response.data.transaction).toHaveProperty("senderId", transactionInCheck.senderId);
+ expect(response.data.transaction).toHaveProperty("type", transactionInCheck.type);
+ });
+
+ it("should fail using invalid id", async () => {
+ const response = await utils.request("GET", "transactions/get", {
+ id: "invalid",
+ });
+
+ utils.expectError(response);
+
+ expect(response.data.error).toBeString();
+ });
+ });
+
+ describe("GET /transactions/unconfirmed/get?id=", () => {
+ it("should be ok using valid id", async () => {
+ const transaction = await utils.createTransaction();
+
+ const response = await utils.request("GET", "transactions/unconfirmed/get", { id: transaction.id });
+ expect(response).toBeSuccessfulResponse();
+
+ if (response.data.success && response.data.transaction !== null) {
+ expect(response.data.transaction).toBeObject();
+ expect(response.data.transaction).toHaveProperty("id", transaction.id);
+ expect(response.data.transaction).toHaveProperty("type", transaction.type);
+ expect(response.data.transaction).toHaveProperty("amount", transaction.amount);
+ expect(response.data.transaction).toHaveProperty("fee", transaction.fee);
+ expect(response.data.transaction).toHaveProperty("recipientId", transaction.recipientId);
+ expect(response.data.transaction).toHaveProperty("senderPublicKey", transaction.senderPublicKey);
+ expect(response.data.transaction).toHaveProperty("signature", transaction.signature);
+ expect(response.data.transaction).toHaveProperty("timestamp", transaction.timestamp);
+ expect(response.data.transaction).toHaveProperty("vendorField", transaction.vendorField);
+ } else {
+ expect(response.data.error).toBeString();
+ }
+ });
+ });
+
+ describe("GET /transactions/unconfirmed", () => {
+ it("should be ok", async () => {
+ const response = await utils.request("GET", "transactions/unconfirmed");
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.transactions).toBeArray();
+ });
+ });
+});
diff --git a/packages/core-api/__tests__/v1/utils.js b/packages/core-api/__tests__/v1/utils.js
deleted file mode 100644
index d27edb7d6f..0000000000
--- a/packages/core-api/__tests__/v1/utils.js
+++ /dev/null
@@ -1,107 +0,0 @@
-const axios = require('axios')
-const {
- client,
- transactionBuilder,
- NetworkManager,
-} = require('@arkecosystem/crypto')
-const apiHelpers = require('@arkecosystem/core-test-utils/lib/helpers/api')
-
-class Helpers {
- async request(method, path, params = {}) {
- const url = `http://localhost:4003/api/${path}`
- const headers = {
- 'API-Version': 1,
- 'Content-Type': 'application/json',
- }
-
- const server = require('@arkecosystem/core-container').resolvePlugin('api')
-
- return apiHelpers.request(server.http, method, url, headers, params)
- }
-
- expectJson(response) {
- expect(response.data).toBeObject()
- }
-
- expectStatus(response, code) {
- expect(response.status).toBe(code)
- }
-
- assertVersion(response, version) {
- expect(response.headers).toBeObject()
- expect(response.headers).toHaveProperty('api-version', version)
- }
-
- expectState(response, state) {
- expect(response.data).toHaveProperty('success', state)
- }
-
- expectSuccessful(response) {
- this.expectStatus(response, 200)
- this.expectJson(response)
- this.expectState(response, true)
- this.assertVersion(response, 1)
- }
-
- expectError(response) {
- this.expectStatus(response, 200)
- this.expectJson(response)
- this.expectState(response, false)
- this.assertVersion(response, 1)
- }
-
- expectDelegate(delegate, expected) {
- expect(delegate).toBeObject()
- expect(delegate.username).toBeString()
- expect(delegate.address).toBeString()
- expect(delegate.publicKey).toBeString()
- expect(delegate.vote).toBeString()
- expect(delegate.rate).toBeNumber()
- expect(delegate.missedblocks).toBeNumber()
- expect(delegate.producedblocks).toBeNumber()
- expect(delegate.approval).toBeNumber()
- expect(delegate.productivity).toBeNumber()
-
- Object.keys(expected || {}).forEach(attr => {
- expect(delegate[attr]).toBe(expected[attr])
- })
- }
-
- expectWallet(response) {
- expect(response).toHaveProperty('username')
- expect(response).toHaveProperty('address')
- expect(response).toHaveProperty('publicKey')
- expect(response).toHaveProperty('balance')
- }
-
- async createTransaction() {
- client.setConfig(NetworkManager.findByName('testnet'))
-
- const transaction = transactionBuilder
- .transfer()
- .amount(1 * 1e8)
- .recipientId('AZFEPTWnn2Sn8wDZgCRF8ohwKkrmk2AZi1')
- .vendorField('test')
- .sign(
- 'prison tobacco acquire stone dignity palace note decade they current lesson robot',
- )
- .getStruct()
-
- await axios.post(
- 'http://127.0.0.1:4003/api/v2/transactions',
- {
- transactions: [transaction],
- },
- {
- headers: { 'Content-Type': 'application/json' },
- },
- )
-
- return transaction
- }
-}
-
-/**
- * @type {Helpers}
- */
-module.exports = new Helpers()
diff --git a/packages/core-api/__tests__/v1/utils.ts b/packages/core-api/__tests__/v1/utils.ts
new file mode 100644
index 0000000000..aa7511b5fa
--- /dev/null
+++ b/packages/core-api/__tests__/v1/utils.ts
@@ -0,0 +1,100 @@
+import { app } from "@arkecosystem/core-container";
+import { ApiHelpers } from "@arkecosystem/core-test-utils/dist/helpers/api";
+import { client, NetworkManager, transactionBuilder } from "@arkecosystem/crypto";
+import axios from "axios";
+import "jest-extended";
+
+class Helpers {
+ public async request(method, path, params = {}) {
+ const url = `http://localhost:4003/api/${path}`;
+ const headers = {
+ "API-Version": 1,
+ "Content-Type": "application/json",
+ };
+
+ const server = app.resolvePlugin("api");
+
+ return ApiHelpers.request(server.http, method, url, headers, params);
+ }
+
+ public expectJson(response) {
+ expect(response.data).toBeObject();
+ }
+
+ public expectStatus(response, code) {
+ expect(response.status).toBe(code);
+ }
+
+ public assertVersion(response, version) {
+ expect(response.headers).toBeObject();
+ expect(response.headers).toHaveProperty("api-version", version);
+ }
+
+ public expectState(response, state) {
+ expect(response.data).toHaveProperty("success", state);
+ }
+
+ public expectSuccessful(response) {
+ this.expectStatus(response, 200);
+ this.expectJson(response);
+ this.expectState(response, true);
+ this.assertVersion(response, 1);
+ }
+
+ public expectError(response) {
+ this.expectStatus(response, 200);
+ this.expectJson(response);
+ this.expectState(response, false);
+ this.assertVersion(response, 1);
+ }
+
+ public expectDelegate(delegate, expected: any = {}) {
+ expect(delegate).toBeObject();
+ expect(delegate.username).toBeString();
+ expect(delegate.address).toBeString();
+ expect(delegate.publicKey).toBeString();
+ expect(delegate.vote).toBeString();
+ expect(delegate.rate).toBeNumber();
+ expect(delegate.missedblocks).toBeNumber();
+ expect(delegate.producedblocks).toBeNumber();
+ expect(delegate.approval).toBeNumber();
+ expect(delegate.productivity).toBeNumber();
+
+ Object.keys(expected || {}).forEach(attr => {
+ expect(delegate[attr]).toBe(expected[attr]);
+ });
+ }
+
+ public expectWallet(response) {
+ expect(response).toHaveProperty("username");
+ expect(response).toHaveProperty("address");
+ expect(response).toHaveProperty("publicKey");
+ expect(response).toHaveProperty("balance");
+ }
+
+ public async createTransaction() {
+ client.setConfig(NetworkManager.findByName("testnet"));
+
+ const transaction = transactionBuilder
+ .transfer()
+ .amount(1 * 1e8)
+ .recipientId("AZFEPTWnn2Sn8wDZgCRF8ohwKkrmk2AZi1")
+ .vendorField("test")
+ .sign("prison tobacco acquire stone dignity palace note decade they current lesson robot")
+ .getStruct();
+
+ await axios.post(
+ "http://127.0.0.1:4003/api/v2/transactions",
+ {
+ transactions: [transaction],
+ },
+ {
+ headers: { "Content-Type": "application/json" },
+ },
+ );
+
+ return transaction;
+ }
+}
+
+export const utils = new Helpers();
diff --git a/packages/core-api/__tests__/v2/handlers/blocks.test.js b/packages/core-api/__tests__/v2/handlers/blocks.test.js
deleted file mode 100644
index 569a708527..0000000000
--- a/packages/core-api/__tests__/v2/handlers/blocks.test.js
+++ /dev/null
@@ -1,579 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const blockchainHelper = require('@arkecosystem/core-test-utils/lib/helpers/blockchain')
-const { Block } = require('@arkecosystem/crypto').models
-const blocks2to100 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.2-100')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-let genesisBlock
-let container
-
-beforeAll(async () => {
- await app.setUp()
- await blockchainHelper.resetBlockchain()
- container = require('@arkecosystem/core-container')
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json')
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Blocks', () => {
- describe('GET /blocks', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the "%s" header', (header, request) => {
- it('should GET all the blocks', async () => {
- const response = await utils[request]('GET', 'blocks')
-
- expect(response).toBeSuccessfulResponse()
- expect(response).toBePaginated()
- expect(response.data.data).toBeArray()
-
- const block = response.data.data[0]
- utils.expectBlock(block, {
- id: genesisBlock.id,
- transactions: genesisBlock.numberOfTransactions,
- })
- })
- })
- })
-
- describe('GET /blocks?orderBy=height:', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the "%s" header', (header, request) => {
- it('should GET all the blocks in descending order', async () => {
- const response = await utils[request]('GET', 'blocks?orderBy=height:')
-
- expect(response).toBeSuccessfulResponse()
- expect(response).toBePaginated()
- expect(response.data.data).toBeArray()
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- })
- })
- })
-
- describe('GET /blocks/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a block by the given identifier', async () => {
- const response = await utils[request](
- 'GET',
- `blocks/${genesisBlock.id}`,
- )
-
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- const block = response.data.data
- utils.expectBlock(block, {
- id: genesisBlock.id,
- transactions: genesisBlock.numberOfTransactions,
- })
- })
- })
- })
-
- describe('GET /blocks/:id/transactions', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the "%s" header', (header, request) => {
- it('should GET all the transactions for the given block by id', async () => {
- const response = await utils[request](
- 'GET',
- `blocks/${genesisBlock.id}/transactions`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- const transaction = response.data.data[0]
- utils.expectTransaction(transaction)
- expect(transaction.blockId).toBe(genesisBlock.id)
- })
- })
- })
-
- describe('POST /blocks/search', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified blockId', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified version', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- version: genesisBlock.version,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.version).toBe(genesisBlock.version)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified previousBlock', async () => {
- // save a new block so that we can make the request with previousBlock
- const block2 = new Block(blocks2to100[0])
- const database = container.resolvePlugin('database')
- await database.saveBlock(block2)
-
- const response = await utils[request]('POST', 'blocks/search', {
- id: blocks2to100[0].id,
- previousBlock: blocks2to100[0].previousBlock,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(blocks2to100[0].id)
- expect(block.previous).toBe(blocks2to100[0].previousBlock)
-
- await database.deleteBlock(block2) // reset to genesis block
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified payloadHash', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- payloadHash: genesisBlock.payloadHash,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.payload.length).toBe(genesisBlock.payloadLength)
- expect(block.payload.hash).toBe(genesisBlock.payloadHash)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified generatorPublicKey', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- generatorPublicKey: genesisBlock.generatorPublicKey,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.generator.publicKey).toBe(genesisBlock.generatorPublicKey)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified blockSignature', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- blockSignature: genesisBlock.blockSignature,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.signature).toBe(genesisBlock.blockSignature)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified timestamp', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- timestamp: {
- from: genesisBlock.timestamp,
- to: genesisBlock.timestamp,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified height', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- height: {
- from: genesisBlock.height,
- to: genesisBlock.height,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.height).toBe(genesisBlock.height)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified height range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- height: {
- from: genesisBlock.height,
- to: genesisBlock.height,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.height).toBe(genesisBlock.height)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified numberOfTransactions', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- numberOfTransactions: {
- from: genesisBlock.numberOfTransactions,
- to: genesisBlock.numberOfTransactions,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.transactions).toBe(genesisBlock.numberOfTransactions)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified numberOfTransactions range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- numberOfTransactions: {
- from: genesisBlock.numberOfTransactions,
- to: genesisBlock.numberOfTransactions,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.transactions).toBe(genesisBlock.numberOfTransactions)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified totalAmount', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- totalAmount: { from: 1 },
- })
-
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified totalAmount range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- totalAmount: { from: 1 },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified totalFee', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- totalFee: { from: 0 },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(+block.forged.fee).toBe(genesisBlock.totalFee)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified totalFee range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- totalFee: { from: 0 },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(+block.forged.fee).toBe(genesisBlock.totalFee)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified reward', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- reward: {
- from: genesisBlock.reward,
- to: genesisBlock.reward,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(+block.forged.reward).toBe(genesisBlock.reward)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified reward range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- reward: {
- from: genesisBlock.reward,
- to: genesisBlock.reward,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(+block.forged.reward).toBe(genesisBlock.reward)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the exact specified payloadLength', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- payloadLength: {
- from: genesisBlock.payloadLength,
- to: genesisBlock.payloadLength,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.payload.length).toBe(genesisBlock.payloadLength)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specified payloadLength range', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- payloadLength: {
- from: genesisBlock.payloadLength,
- to: genesisBlock.payloadLength,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- expect(block.payload.length).toBe(genesisBlock.payloadLength)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the wrong specified version', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- id: genesisBlock.id,
- version: 2,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(0)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for blocks with the specific criteria', async () => {
- const response = await utils[request]('POST', 'blocks/search', {
- generatorPublicKey: genesisBlock.generatorPublicKey,
- version: genesisBlock.version,
- timestamp: {
- from: genesisBlock.timestamp,
- to: genesisBlock.timestamp,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const block = response.data.data[0]
- utils.expectBlock(block)
- expect(block.id).toBe(genesisBlock.id)
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/blocks.test.ts b/packages/core-api/__tests__/v2/handlers/blocks.test.ts
new file mode 100644
index 0000000000..9d92841bbd
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/blocks.test.ts
@@ -0,0 +1,573 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+import { models } from "@arkecosystem/crypto";
+import genesisBlock from "../../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { blocks2to100 } from "../../../../core-test-utils/src/fixtures";
+import { resetBlockchain } from "../../../../core-test-utils/src/helpers";
+
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+
+const container = app;
+const { Block } = models;
+
+beforeAll(async () => {
+ await setUp();
+ await resetBlockchain();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Blocks", () => {
+ describe("GET /blocks", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ 'using the "%s" header',
+ (header, request) => {
+ it("should GET all the blocks", async () => {
+ const response = await utils[request]("GET", "blocks");
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response).toBePaginated();
+ expect(response.data.data).toBeArray();
+
+ const block = response.data.data[0];
+ utils.expectBlock(block, {
+ id: genesisBlock.id,
+ transactions: genesisBlock.numberOfTransactions,
+ });
+ });
+ },
+ );
+ });
+
+ describe("GET /blocks?orderBy=height:", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ 'using the "%s" header',
+ (header, request) => {
+ it("should GET all the blocks in descending order", async () => {
+ const response = await utils[request]("GET", "blocks?orderBy=height:");
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response).toBePaginated();
+ expect(response.data.data).toBeArray();
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ });
+ },
+ );
+ });
+
+ describe("GET /blocks/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a block by the given identifier", async () => {
+ const response = await utils[request]("GET", `blocks/${genesisBlock.id}`);
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ const block = response.data.data;
+ utils.expectBlock(block, {
+ id: genesisBlock.id,
+ transactions: genesisBlock.numberOfTransactions,
+ });
+ });
+ },
+ );
+ });
+
+ describe("GET /blocks/:id/transactions", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ 'using the "%s" header',
+ (header, request) => {
+ it("should GET all the transactions for the given block by id", async () => {
+ const response = await utils[request]("GET", `blocks/${genesisBlock.id}/transactions`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ const transaction = response.data.data[0];
+ utils.expectTransaction(transaction);
+ expect(transaction.blockId).toBe(genesisBlock.id);
+ });
+ },
+ );
+ });
+
+ describe("POST /blocks/search", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified blockId", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified version", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ version: genesisBlock.version,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.version).toBe(genesisBlock.version);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified previousBlock", async () => {
+ // save a new block so that we can make the request with previousBlock
+ const block2 = new Block(blocks2to100[0]);
+ const databaseService = container.resolvePlugin("database");
+ await databaseService.saveBlock(block2);
+
+ const response = await utils[request]("POST", "blocks/search", {
+ id: blocks2to100[0].id,
+ previousBlock: blocks2to100[0].previousBlock,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(blocks2to100[0].id);
+ expect(block.previous).toBe(blocks2to100[0].previousBlock);
+
+ await databaseService.deleteBlock(block2); // reset to genesis block
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified payloadHash", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ payloadHash: genesisBlock.payloadHash,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.payload.length).toBe(genesisBlock.payloadLength);
+ expect(block.payload.hash).toBe(genesisBlock.payloadHash);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified generatorPublicKey", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ generatorPublicKey: genesisBlock.generatorPublicKey,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.generator.publicKey).toBe(genesisBlock.generatorPublicKey);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified blockSignature", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ blockSignature: genesisBlock.blockSignature,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.signature).toBe(genesisBlock.blockSignature);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified timestamp", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ timestamp: {
+ from: genesisBlock.timestamp,
+ to: genesisBlock.timestamp,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified height", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ height: {
+ from: genesisBlock.height,
+ to: genesisBlock.height,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.height).toBe(genesisBlock.height);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified height range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ height: {
+ from: genesisBlock.height,
+ to: genesisBlock.height,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.height).toBe(genesisBlock.height);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified numberOfTransactions", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ numberOfTransactions: {
+ from: genesisBlock.numberOfTransactions,
+ to: genesisBlock.numberOfTransactions,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.transactions).toBe(genesisBlock.numberOfTransactions);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified numberOfTransactions range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ numberOfTransactions: {
+ from: genesisBlock.numberOfTransactions,
+ to: genesisBlock.numberOfTransactions,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.transactions).toBe(genesisBlock.numberOfTransactions);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified totalAmount", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ totalAmount: { from: 1 },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified totalAmount range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ totalAmount: { from: 1 },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified totalFee", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ totalFee: { from: 0 },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(+block.forged.fee).toBe(genesisBlock.totalFee);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified totalFee range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ totalFee: { from: 0 },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(+block.forged.fee).toBe(genesisBlock.totalFee);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified reward", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ reward: {
+ from: genesisBlock.reward,
+ to: genesisBlock.reward,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(+block.forged.reward).toBe(genesisBlock.reward);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified reward range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ reward: {
+ from: genesisBlock.reward,
+ to: genesisBlock.reward,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(+block.forged.reward).toBe(genesisBlock.reward);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the exact specified payloadLength", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ payloadLength: {
+ from: genesisBlock.payloadLength,
+ to: genesisBlock.payloadLength,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.payload.length).toBe(genesisBlock.payloadLength);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specified payloadLength range", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ payloadLength: {
+ from: genesisBlock.payloadLength,
+ to: genesisBlock.payloadLength,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ expect(block.payload.length).toBe(genesisBlock.payloadLength);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the wrong specified version", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ id: genesisBlock.id,
+ version: 2,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(0);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for blocks with the specific criteria", async () => {
+ const response = await utils[request]("POST", "blocks/search", {
+ generatorPublicKey: genesisBlock.generatorPublicKey,
+ version: genesisBlock.version,
+ timestamp: {
+ from: genesisBlock.timestamp,
+ to: genesisBlock.timestamp,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const block = response.data.data[0];
+ utils.expectBlock(block);
+ expect(block.id).toBe(genesisBlock.id);
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/delegates.test.js b/packages/core-api/__tests__/v2/handlers/delegates.test.js
deleted file mode 100644
index 082581d75c..0000000000
--- a/packages/core-api/__tests__/v2/handlers/delegates.test.js
+++ /dev/null
@@ -1,151 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const { Block } = require('@arkecosystem/crypto').models
-const blocks2to100 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.2-100')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const delegate = {
- username: 'genesis_9',
- address: 'AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo',
- publicKey:
- '0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647',
-}
-
-let container
-
-beforeAll(async () => {
- await app.setUp()
- container = require('@arkecosystem/core-container')
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Delegates', () => {
- describe('GET /delegates', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the delegates', async () => {
- const response = await utils[request]('GET', 'delegates')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectDelegate(response.data.data[0])
- })
- })
- })
-
- describe('GET /delegates/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a delegate by the given username', async () => {
- const response = await utils[request](
- 'GET',
- `delegates/${delegate.username}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- utils.expectDelegate(response.data.data, delegate)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a delegate by the given address', async () => {
- const response = await utils[request](
- 'GET',
- `delegates/${delegate.address}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- utils.expectDelegate(response.data.data, delegate)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a delegate by the given public key', async () => {
- const response = await utils[request](
- 'GET',
- `delegates/${delegate.publicKey}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- utils.expectDelegate(response.data.data, delegate)
- })
- })
- })
-
- describe('POST /delegates/search', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for delegates with a username that matches the given string', async () => {
- const response = await utils[request]('POST', 'delegates/search', {
- username: delegate.username,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- utils.expectDelegate(response.data.data[0], delegate)
- })
- })
- })
-
- describe('GET /delegates/:id/blocks', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all blocks for a delegate by the given identifier', async () => {
- // save a new block so that we can make the request with generatorPublicKey
- const block2 = new Block(blocks2to100[0])
- const database = container.resolvePlugin('database')
- await database.saveBlock(block2)
-
- const response = await utils[request](
- 'GET',
- `delegates/${blocks2to100[0].generatorPublicKey}/blocks`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- utils.expectBlock(response.data.data[0])
-
- await database.deleteBlock(block2) // reset to genesis block
- })
- })
- })
-
- describe('GET /delegates/:id/voters', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all voters (wallets) for a delegate by the given identifier', async () => {
- const response = await utils[request](
- 'GET',
- `delegates/${delegate.publicKey}/voters`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectWallet(response.data.data[0])
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/delegates.test.ts b/packages/core-api/__tests__/v2/handlers/delegates.test.ts
new file mode 100644
index 0000000000..e4a9882365
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/delegates.test.ts
@@ -0,0 +1,189 @@
+import "@arkecosystem/core-test-utils";
+import { calculateRanks, setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+import { blocks2to100 } from "../../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+
+import { models } from "@arkecosystem/crypto";
+const { Block } = models;
+
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+
+const delegate = {
+ username: "genesis_9",
+ address: "AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo",
+ publicKey: "0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647",
+};
+
+beforeAll(async () => {
+ await setUp();
+ await calculateRanks();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Delegates", () => {
+ describe("GET /delegates", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the delegates", async () => {
+ const response = await utils[request]("GET", "delegates");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ response.data.data.forEach(utils.expectDelegate);
+ expect(response.data.data.sort((a, b) => a.rank < b.rank)).toEqual(response.data.data);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the delegates ordered by descending rank", async () => {
+ const response = await utils[request]("GET", "delegates", { orderBy: "rank:desc" });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ response.data.data.forEach(utils.expectDelegate);
+ expect(response.data.data.sort((a, b) => a.rank > b.rank)).toEqual(response.data.data);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the delegates ordered by descending productivity", async () => {
+ const response = await utils[request]("GET", "delegates", { orderBy: "productivity:desc" });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ response.data.data.forEach(utils.expectDelegate);
+ expect(
+ response.data.data.sort((a, b) => a.production.productivity > b.production.productivity),
+ ).toEqual(response.data.data);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the delegates ordered by descending approval", async () => {
+ const response = await utils[request]("GET", "delegates", { orderBy: "approval:desc" });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ response.data.data.forEach(utils.expectDelegate);
+ expect(response.data.data.sort((a, b) => a.production.approval > b.production.approval)).toEqual(
+ response.data.data,
+ );
+ });
+ },
+ );
+ });
+
+ describe("GET /delegates/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a delegate by the given username", async () => {
+ const response = await utils[request]("GET", `delegates/${delegate.username}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ utils.expectDelegate(response.data.data, delegate);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a delegate by the given address", async () => {
+ const response = await utils[request]("GET", `delegates/${delegate.address}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ utils.expectDelegate(response.data.data, delegate);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a delegate by the given public key", async () => {
+ const response = await utils[request]("GET", `delegates/${delegate.publicKey}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ utils.expectDelegate(response.data.data, delegate);
+ });
+ },
+ );
+ });
+
+ describe("POST /delegates/search", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for delegates with a username that matches the given string", async () => {
+ const response = await utils[request]("POST", "delegates/search", {
+ username: delegate.username,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ utils.expectDelegate(response.data.data[0], delegate);
+ });
+ },
+ );
+ });
+
+ describe("GET /delegates/:id/blocks", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all blocks for a delegate by the given identifier", async () => {
+ // save a new block so that we can make the request with generatorPublicKey
+ const block2 = new Block(blocks2to100[0]);
+ const databaseService = app.resolvePlugin("database");
+ await databaseService.saveBlock(block2);
+
+ const response = await utils[request](
+ "GET",
+ `delegates/${blocks2to100[0].generatorPublicKey}/blocks`,
+ );
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ utils.expectBlock(response.data.data[0]);
+
+ await databaseService.deleteBlock(block2); // reset to genesis block
+ });
+ },
+ );
+ });
+
+ describe("GET /delegates/:id/voters", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all voters (wallets) for a delegate by the given identifier", async () => {
+ const response = await utils[request]("GET", `delegates/${delegate.publicKey}/voters`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectWallet(response.data.data[0]);
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/node.test.js b/packages/core-api/__tests__/v2/handlers/node.test.js
deleted file mode 100644
index de4833564f..0000000000
--- a/packages/core-api/__tests__/v2/handlers/node.test.js
+++ /dev/null
@@ -1,67 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Loader', () => {
- describe('GET /node/status', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET the node status', async () => {
- const response = await utils[request]('GET', 'node/status')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data.synced).toBeBoolean()
- expect(response.data.data.now).toBeNumber()
- expect(response.data.data.blocksCount).toBeNumber()
- })
- })
- })
-
- describe('GET /node/syncing', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET the node syncing status', async () => {
- const response = await utils[request]('GET', 'node/syncing')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data.syncing).toBeBoolean()
- expect(response.data.data.blocks).toBeNumber()
- expect(response.data.data.height).toBeNumber()
- expect(response.data.data.id).toBeString()
- })
- })
- })
-
- describe('GET /node/configuration', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET the node configuration', async () => {
- const response = await utils[request]('GET', 'node/configuration')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data.nethash).toBeString()
- expect(response.data.data.token).toBeString()
- expect(response.data.data.symbol).toBeString()
- expect(response.data.data.explorer).toBeString()
- expect(response.data.data.version).toBeNumber()
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/node.test.ts b/packages/core-api/__tests__/v2/handlers/node.test.ts
new file mode 100644
index 0000000000..46a8179561
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/node.test.ts
@@ -0,0 +1,67 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Loader", () => {
+ describe("GET /node/status", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET the node status", async () => {
+ const response = await utils[request]("GET", "node/status");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ expect(response.data.data.synced).toBeBoolean();
+ expect(response.data.data.now).toBeNumber();
+ expect(response.data.data.blocksCount).toBeNumber();
+ });
+ },
+ );
+ });
+
+ describe("GET /node/syncing", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET the node syncing status", async () => {
+ const response = await utils[request]("GET", "node/syncing");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ expect(response.data.data.syncing).toBeBoolean();
+ expect(response.data.data.blocks).toBeNumber();
+ expect(response.data.data.height).toBeNumber();
+ expect(response.data.data.id).toBeString();
+ });
+ },
+ );
+ });
+
+ describe("GET /node/configuration", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET the node configuration", async () => {
+ const response = await utils[request]("GET", "node/configuration");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ expect(response.data.data.nethash).toBeString();
+ expect(response.data.data.token).toBeString();
+ expect(response.data.data.symbol).toBeString();
+ expect(response.data.data.explorer).toBeString();
+ expect(response.data.data.version).toBeNumber();
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/peers.test.js b/packages/core-api/__tests__/v2/handlers/peers.test.js
deleted file mode 100644
index 4bebbe5d6c..0000000000
--- a/packages/core-api/__tests__/v2/handlers/peers.test.js
+++ /dev/null
@@ -1,47 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const peers = require('@arkecosystem/core-test-utils/config/testnet/peers.json')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Peers', () => {
- describe('GET /peers', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the peers', async () => {
- const response = await utils[request]('GET', 'peers')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data[0]).toBeObject()
- })
- })
- })
-
- describe('GET /peers/:ip', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a peer by the given ip', async () => {
- const response = await utils[request](
- 'GET',
- `peers/${peers.list[0].ip}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data).toBeObject()
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/peers.test.ts b/packages/core-api/__tests__/v2/handlers/peers.test.ts
new file mode 100644
index 0000000000..30c3af41bd
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/peers.test.ts
@@ -0,0 +1,57 @@
+import { app } from "@arkecosystem/core-container";
+import { Peer } from "@arkecosystem/core-p2p/src/peer";
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const mockAddress = "1.0.0.99";
+const mockPort = 4002;
+
+beforeAll(async () => {
+ await setUp();
+
+ const peerMock = new Peer(mockAddress, mockPort);
+ peerMock.setStatus("OK");
+
+ const monitor = app.resolvePlugin("p2p");
+ monitor.peers = {};
+ monitor.peers[peerMock.ip] = peerMock;
+});
+
+afterAll(async () => {
+ const monitor = app.resolvePlugin("p2p");
+ monitor.peers = {};
+
+ await tearDown();
+});
+
+describe("API 2.0 - Peers", () => {
+ describe("GET /peers", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (_, request) => {
+ it("should GET all the peers", async () => {
+ const response = await utils[request]("GET", "peers");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data[0]).toBeObject();
+ });
+ },
+ );
+ });
+
+ describe("GET /peers/:ip", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (_, request) => {
+ it("should GET a peer by the given ip", async () => {
+ const response = await utils[request]("GET", `peers/${mockAddress}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+ expect(response.data.data.ip).toBe(mockAddress);
+ expect(response.data.data.port).toBe(mockPort);
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/transactions.test.js b/packages/core-api/__tests__/v2/handlers/transactions.test.js
deleted file mode 100644
index 6918c8e770..0000000000
--- a/packages/core-api/__tests__/v2/handlers/transactions.test.js
+++ /dev/null
@@ -1,653 +0,0 @@
-/* eslint max-len: "off" */
-
-require('@arkecosystem/core-test-utils/lib/matchers')
-const generateTransfers = require('@arkecosystem/core-test-utils/lib/generators/transactions/transfer')
-const generateWallets = require('@arkecosystem/core-test-utils/lib/generators/wallets')
-const delegates = require('@arkecosystem/core-test-utils/fixtures/testnet/delegates')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const transferFee = 10000000
-
-let genesisBlock
-let genesisTransactions
-
-let transactionId
-let blockId
-let type
-let wrongType
-let version
-let senderPublicKey
-let senderAddress
-let recipientAddress
-let timestamp
-let timestampFrom
-let timestampTo
-let amount
-let amountFrom
-let amountTo
-let fee
-let feeFrom
-let feeTo
-
-beforeAll(async () => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json')
- genesisTransactions = genesisBlock.transactions[0]
-
- transactionId = genesisTransactions.id
- blockId = genesisBlock.id
- type = genesisTransactions.type
- wrongType = 3
- version = 1
- senderPublicKey = genesisTransactions.senderPublicKey
- senderAddress = genesisTransactions.senderId
- recipientAddress = genesisTransactions.recipientId
- timestamp = genesisTransactions.timestamp
- timestampFrom = timestamp
- timestampTo = timestamp
- amount = genesisTransactions.amount
- amountFrom = amount
- amountTo = amount
- fee = genesisTransactions.fee
- feeFrom = fee
- feeTo = fee
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Transactions', () => {
- describe('GET /transactions', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the transactions', async () => {
- const response = await utils[request]('GET', 'transactions')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectTransaction(response.data.data[0])
- })
- })
- })
-
- describe('GET /transactions/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a transaction by the given identifier', async () => {
- const response = await utils[request](
- 'GET',
- `transactions/${transactionId}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- const transaction = response.data.data
- utils.expectTransaction(transaction)
- expect(transaction.id).toBe(transactionId)
- })
- })
- })
-
- describe('GET /transactions/unconfirmed', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the unconfirmed transactions', async () => {
- await utils.createTransaction()
-
- const response = await utils[request]('GET', 'transactions/unconfirmed')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toBeArray()
- expect(response.data.data).not.toBeEmpty()
- })
- })
- })
-
- describe('GET /transactions/unconfirmed/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET an unconfirmed transaction by the given identifier', async () => {
- const transaction = await utils.createTransaction()
-
- const response = await utils[request](
- 'GET',
- `transactions/unconfirmed/${transaction.id}`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data).toHaveProperty('id', transaction.id)
- })
- })
- })
-
- describe('POST /transactions/search', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified transactionId', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- id: transactionId,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.id).toBe(transactionId)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified blockId', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- blockId,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(100)
- expect(response.data.meta.totalCount).toBe(153)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.blockId).toBe(blockId)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified type', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- type,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- expect(response.data.data).toHaveLength(51)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.type).toBe(type)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified version', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- version,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- expect(response.data.data).toHaveLength(100)
- expect(response.data.meta.totalCount).toBe(153)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.version).toBe(version)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified senderPublicKey', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- senderPublicKey,
- })
-
- expect(response).toBeSuccessfulResponse()
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.sender).toBe(senderAddress)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified senderId', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- senderId: senderAddress,
- })
-
- expect(response).toBeSuccessfulResponse()
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.sender).toBe(senderAddress)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified recipientId (Address)', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- recipientId: recipientAddress,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- expect(response.data.data).toHaveLength(2)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.recipient).toBe(recipientAddress)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified timestamp', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- timestamp: {
- from: timestamp,
- to: timestamp,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data.length).toEqual(100)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.timestamp.epoch).toBe(timestamp)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the specified timestamp range', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- timestamp: {
- from: timestampFrom,
- to: timestampTo,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data).toHaveLength(100)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.timestamp.epoch).toBeGreaterThanOrEqual(
- timestampFrom,
- )
- expect(transaction.timestamp.epoch).toBeLessThanOrEqual(timestampTo)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified amount', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- amount: {
- from: amount,
- to: amount,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data).toHaveLength(50)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.amount).toBe(amount)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the specified amount range', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- amount: {
- from: amountFrom,
- to: amountTo,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data).toHaveLength(50)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.amount).toBeGreaterThanOrEqual(amountFrom)
- expect(transaction.amount).toBeLessThanOrEqual(amountTo)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the exact specified fee', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- fee: {
- from: fee,
- to: fee,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data).toHaveLength(100)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.fee).toBe(fee)
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the specified fee range', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- fee: {
- from: feeFrom,
- to: feeTo,
- },
- })
-
- expect(response).toBeSuccessfulResponse()
-
- const data = response.data.data
- expect(data).toBeArray()
- expect(data).toHaveLength(100)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.fee).toBeGreaterThanOrEqual(feeFrom)
- expect(transaction.fee).toBeLessThanOrEqual(feeTo)
- }
- })
- })
-
- // TODO remove the search by id, to be sure that is OK
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it.skip('should POST a search for transactions with the exact specified vendorFieldHex', async () => {
- const id =
- '0000faa27b422f7648b1a2f634f15c7e5c8e96b84929624fda44abf716bdf784'
- const vendorFieldHex =
- '64656c65676174653a20766f746572732073686172652e205468616e6b20796f7521207c74782062792061726b2d676f'
-
- const response = await utils[request]('POST', 'transactions/search', {
- id,
- vendorFieldHex,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- expect(response.data.data).toHaveLength(1)
-
- for (const transaction of response.data.data) {
- utils.expectTransaction(transaction)
- expect(transaction.vendorField).toBe(vendorFieldHex.toString('utf8'))
- }
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the wrong specified type', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- id: transactionId,
- type: wrongType,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- expect(response.data.data).toHaveLength(0)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for transactions with the specific criteria', async () => {
- const response = await utils[request]('POST', 'transactions/search', {
- senderPublicKey,
- type,
- timestamp: {
- from: timestampFrom,
- to: timestampTo,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- utils.expectTransaction(response.data.data[0])
- })
- })
- })
-
- describe('POST /transactions', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- const transactions = generateTransfers(
- 'testnet',
- delegates[0].secret,
- delegates[1].address,
- 1,
- 40,
- true,
- )
-
- it('should POST all the transactions', async () => {
- const response = await utils[request]('POST', 'transactions', {
- transactions,
- })
- expect(response).toBeSuccessfulResponse()
- })
-
- it('should not POST all the transactions', async () => {
- const response = await utils[request]('POST', 'transactions', {
- transactions: transactions.concat(transactions),
- })
-
- expect(response.data.statusCode).toBe(413)
- expect(response.data.message).toBe(
- 'Received 80 transactions. Only 40 are allowed per request.',
- )
- })
- })
-
- it('should POST 2 transactions double spending and get only 1 accepted and broadcasted', async () => {
- const transactions = generateTransfers(
- 'testnet',
- delegates[0].secret,
- delegates[1].address,
- 245098000000000 - 5098000000000, // a bit less than the delegates' balance
- 2,
- true,
- )
- const response = await utils.requestWithAcceptHeader(
- 'POST',
- 'transactions',
- {
- transactions,
- },
- )
-
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data.accept.length).toBe(1)
- expect(response.data.data.accept[0]).toBe(transactions[0].id)
-
- expect(response.data.data.broadcast.length).toBe(1)
- expect(response.data.data.broadcast[0]).toBe(transactions[0].id)
-
- expect(response.data.data.invalid.length).toBe(1)
- expect(response.data.data.invalid[0]).toBe(transactions[1].id)
- })
-
- it.each([3, 5, 8])(
- 'should accept and broadcast %i transactions emptying a wallet',
- async txNumber => {
- const sender = delegates[txNumber] // use txNumber so that we use a different delegate for each test case
- const receivers = generateWallets('testnet', 2)
- const amountPlusFee = Math.floor(sender.balance / txNumber)
- const lastAmountPlusFee =
- sender.balance - (txNumber - 1) * amountPlusFee
-
- const transactions = generateTransfers(
- 'testnet',
- sender.secret,
- receivers[0].address,
- amountPlusFee - transferFee,
- txNumber - 1,
- true,
- )
- const lastTransaction = generateTransfers(
- 'testnet',
- sender.secret,
- receivers[1].address,
- lastAmountPlusFee - transferFee,
- 1,
- true,
- )
- // we change the receiver in lastTransaction to prevent having 2 exact same transactions with same id (if not, could be same as transactions[0])
-
- const allTransactions = transactions.concat(lastTransaction)
-
- const response = await utils.requestWithAcceptHeader(
- 'POST',
- 'transactions',
- {
- transactions: allTransactions,
- },
- )
-
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.data.accept.sort()).toEqual(
- allTransactions.map(transaction => transaction.id).sort(),
- )
- expect(response.data.data.broadcast.sort()).toEqual(
- allTransactions.map(transaction => transaction.id).sort(),
- )
- expect(response.data.data.invalid.length).toBe(0)
- },
- )
-
- it.each([3, 5, 8])(
- 'should not accept the last of %i transactions emptying a wallet when the last one is 1 arktoshi too much',
- async txNumber => {
- const sender = delegates[txNumber + 1] // use txNumber + 1 so that we don't use the same delegates as the above test
- const receivers = generateWallets('testnet', 2)
- const amountPlusFee = Math.floor(sender.balance / txNumber)
- const lastAmountPlusFee =
- sender.balance - (txNumber - 1) * amountPlusFee + 1
-
- const transactions = generateTransfers(
- 'testnet',
- sender.secret,
- receivers[0].address,
- amountPlusFee - transferFee,
- txNumber - 1,
- true,
- )
- const lastTransaction = generateTransfers(
- 'testnet',
- sender.secret,
- receivers[1].address,
- lastAmountPlusFee - transferFee,
- 1,
- true,
- )
- // we change the receiver in lastTransaction to prevent having 2 exact same transactions with same id (if not, could be same as transactions[0])
-
- const allTransactions = transactions.concat(lastTransaction)
-
- const response = await utils.requestWithAcceptHeader(
- 'POST',
- 'transactions',
- {
- transactions: allTransactions,
- },
- )
-
- expect(response).toBeSuccessfulResponse()
-
- expect(response.data.data.accept.sort()).toEqual(
- transactions.map(transaction => transaction.id).sort(),
- )
- expect(response.data.data.broadcast.sort()).toEqual(
- transactions.map(transaction => transaction.id).sort(),
- )
- expect(response.data.data.invalid).toEqual(
- lastTransaction.map(transaction => transaction.id),
- )
- },
- )
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/transactions.test.ts b/packages/core-api/__tests__/v2/handlers/transactions.test.ts
new file mode 100644
index 0000000000..1c02ef1e4f
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/transactions.test.ts
@@ -0,0 +1,647 @@
+import "@arkecosystem/core-test-utils";
+import { constants } from "@arkecosystem/crypto";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+import genesisBlock from "../../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { delegates } from "../../../../core-test-utils/src/fixtures/testnet/delegates";
+import { generateTransfers } from "../../../../core-test-utils/src/generators/transactions/transfer";
+import { generateWallets } from "../../../../core-test-utils/src/generators/wallets";
+
+const transferFee = 10000000;
+
+let genesisTransaction;
+let genesisTransactions;
+
+let transactionId;
+let blockId;
+let type;
+let wrongType;
+let version;
+let senderPublicKey;
+let senderAddress;
+let recipientAddress;
+let timestamp;
+let timestampFrom;
+let timestampTo;
+let amount;
+let amountFrom;
+let amountTo;
+let fee;
+let feeFrom;
+let feeTo;
+
+beforeAll(async () => {
+ await setUp();
+
+ genesisTransactions = genesisBlock.transactions;
+ genesisTransaction = genesisTransactions[0];
+
+ transactionId = genesisTransaction.id;
+ blockId = genesisBlock.id;
+ type = genesisTransaction.type;
+ wrongType = 3;
+ version = 1;
+ senderPublicKey = genesisTransaction.senderPublicKey;
+ senderAddress = genesisTransaction.senderId;
+ recipientAddress = genesisTransaction.recipientId;
+ timestamp = genesisTransaction.timestamp;
+ timestampFrom = timestamp;
+ timestampTo = timestamp;
+ amount = genesisTransaction.amount;
+ amountFrom = amount;
+ amountTo = amount;
+ fee = genesisTransaction.fee;
+ feeFrom = fee;
+ feeTo = fee;
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Transactions", () => {
+ describe("GET /transactions", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the transactions", async () => {
+ const response = await utils[request]("GET", "transactions");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectTransaction(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("GET /transactions/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a transaction by the given identifier", async () => {
+ const response = await utils[request]("GET", `transactions/${transactionId}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ const transaction = response.data.data;
+ utils.expectTransaction(transaction);
+ expect(transaction.id).toBe(transactionId);
+ });
+ },
+ );
+ });
+
+ describe("GET /transactions/unconfirmed", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the unconfirmed transactions", async () => {
+ await utils.createTransaction();
+
+ const response = await utils[request]("GET", "transactions/unconfirmed");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).not.toBeEmpty();
+ });
+ },
+ );
+ });
+
+ describe("GET /transactions/unconfirmed/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET an unconfirmed transaction by the given identifier", async () => {
+ const transaction = await utils.createTransaction();
+
+ const response = await utils[request]("GET", `transactions/unconfirmed/${transaction.id}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+ expect(response.data.data).toHaveProperty("id", transaction.id);
+ });
+ },
+ );
+ });
+
+ describe("GET /transactions/types", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET transaction types", async () => {
+ const response = await utils[request]("GET", "transactions/types");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+ expect(response.data.data).toEqual({
+ Transfer: 0,
+ SecondSignature: 1,
+ DelegateRegistration: 2,
+ Vote: 3,
+ MultiSignature: 4,
+ Ipfs: 5,
+ TimelockTransfer: 6,
+ MultiPayment: 7,
+ DelegateResignation: 8,
+ });
+ });
+ },
+ );
+ });
+
+ describe("POST /transactions/search", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified transactionId", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ id: transactionId,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(1);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.id).toBe(transactionId);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified blockId", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ blockId,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.blockId).toBe(blockId);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified type", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ type,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(51);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.type).toBe(type);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified version", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ version,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.version).toBe(version);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified senderPublicKey", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ senderPublicKey,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.sender).toBe(senderAddress);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified senderId", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ senderId: senderAddress,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.sender).toBe(senderAddress);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified recipientId (Address)", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ recipientId: recipientAddress,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(2);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.recipient).toBe(recipientAddress);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+            it("should POST a search for transactions with any of the specified addresses", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ addresses: [genesisTransactions[3].recipientId, genesisTransactions[8].recipientId],
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(6);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified timestamp", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ timestamp: {
+ from: timestamp,
+ to: timestamp,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.timestamp.epoch).toBe(timestamp);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the specified timestamp range", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ timestamp: {
+ from: timestampFrom,
+ to: timestampTo,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.timestamp.epoch).toBeGreaterThanOrEqual(timestampFrom);
+ expect(transaction.timestamp.epoch).toBeLessThanOrEqual(timestampTo);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified amount", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ amount: {
+ from: amount,
+ to: amount,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(50);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.amount).toBe(amount);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the specified amount range", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ amount: {
+ from: amountFrom,
+ to: amountTo,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(50);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.amount).toBeGreaterThanOrEqual(amountFrom);
+ expect(transaction.amount).toBeLessThanOrEqual(amountTo);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified fee", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ fee: {
+ from: fee,
+ to: fee,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.fee).toBe(fee);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the specified fee range", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ fee: {
+ from: feeFrom,
+ to: feeTo,
+ },
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(100);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(transaction.fee).toBeGreaterThanOrEqual(feeFrom);
+ expect(transaction.fee).toBeLessThanOrEqual(feeTo);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the exact specified vendorFieldHex", async () => {
+ const dummyTransaction = await utils.createTransaction();
+ const hexify = (value: string) => Buffer.from(value, "utf8").toString("hex");
+
+ const vendorFieldHex = hexify(dummyTransaction.vendorField);
+ const response = await utils[request]("POST", "transactions/search", {
+ vendorFieldHex,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ // TODO: the response is sometimes empty. Racy test?
+ // expect(response.data.data).toHaveLength(1);
+
+ for (const transaction of response.data.data) {
+ utils.expectTransaction(transaction);
+ expect(hexify(transaction.vendorField)).toBe(vendorFieldHex);
+ }
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the wrong specified type", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ id: transactionId,
+ type: wrongType,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(0);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for transactions with the specific criteria", async () => {
+ const response = await utils[request]("POST", "transactions/search", {
+ senderPublicKey,
+ type,
+ timestamp: {
+ from: timestampFrom,
+ to: timestampTo,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ utils.expectTransaction(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("POST /transactions", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ const transactions = generateTransfers(
+ "testnet",
+ delegates[0].secret,
+ delegates[1].address,
+ 1,
+ 40,
+ true,
+ );
+
+ it("should POST all the transactions", async () => {
+ const response = await utils[request]("POST", "transactions", {
+ transactions,
+ });
+ expect(response).toBeSuccessfulResponse();
+ });
+
+ it("should not POST all the transactions", async () => {
+ const response = await utils[request]("POST", "transactions", {
+ transactions: transactions.concat(transactions),
+ });
+
+ expect(response.data.statusCode).toBe(400);
+ expect(response.data.message).toBe(
+ 'child "transactions" fails because ["transactions" must contain less than or equal to 40 items]',
+ );
+ });
+ },
+ );
+
+ it("should POST 2 transactions double spending and get only 1 accepted and broadcasted", async () => {
+ const transactions = generateTransfers(
+ "testnet",
+ delegates[0].secret,
+ delegates[1].address,
+ 245098000000000 - 5098000000000, // a bit less than the delegates' balance
+ 2,
+ true,
+ );
+ const response = await utils.requestWithAcceptHeader("POST", "transactions", {
+ transactions,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ expect(response.data.data.accept).toHaveLength(1);
+ expect(response.data.data.accept[0]).toBe(transactions[0].id);
+
+ expect(response.data.data.broadcast).toHaveLength(1);
+ expect(response.data.data.broadcast[0]).toBe(transactions[0].id);
+
+ expect(response.data.data.invalid).toHaveLength(1);
+ expect(response.data.data.invalid[0]).toBe(transactions[1].id);
+ });
+
+ it.each([3, 5, 8])("should accept and broadcast %i transactions emptying a wallet", async txNumber => {
+ const sender = delegates[txNumber]; // use txNumber so that we use a different delegate for each test case
+ const receivers = generateWallets("testnet", 2);
+ const amountPlusFee = Math.floor(sender.balance / txNumber);
+ const lastAmountPlusFee = sender.balance - (txNumber - 1) * amountPlusFee;
+
+ const transactions = generateTransfers(
+ "testnet",
+ sender.secret,
+ receivers[0].address,
+ amountPlusFee - transferFee,
+ txNumber - 1,
+ true,
+ );
+ const lastTransaction = generateTransfers(
+ "testnet",
+ sender.secret,
+ receivers[1].address,
+ lastAmountPlusFee - transferFee,
+ 1,
+ true,
+ );
+ // we change the receiver in lastTransaction to prevent having 2 exact same transactions with same id (if not, could be same as transactions[0])
+
+ const allTransactions = transactions.concat(lastTransaction);
+
+ const response = await utils.requestWithAcceptHeader("POST", "transactions", {
+ transactions: allTransactions,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.data.accept.sort()).toEqual(allTransactions.map(transaction => transaction.id).sort());
+ expect(response.data.data.broadcast.sort()).toEqual(
+ allTransactions.map(transaction => transaction.id).sort(),
+ );
+ expect(response.data.data.invalid).toHaveLength(0);
+ });
+
+ it.each([3, 5, 8])(
+ "should not accept the last of %i transactions emptying a wallet when the last one is 1 arktoshi too much",
+ async txNumber => {
+ const sender = delegates[txNumber + 1]; // use txNumber + 1 so that we don't use the same delegates as the above test
+ const receivers = generateWallets("testnet", 2);
+ const amountPlusFee = Math.floor(sender.balance / txNumber);
+ const lastAmountPlusFee = sender.balance - (txNumber - 1) * amountPlusFee + 1;
+
+ const transactions = generateTransfers(
+ "testnet",
+ sender.secret,
+ receivers[0].address,
+ amountPlusFee - transferFee,
+ txNumber - 1,
+ true,
+ );
+ const lastTransaction = generateTransfers(
+ "testnet",
+ sender.secret,
+ receivers[1].address,
+ lastAmountPlusFee - transferFee,
+ 1,
+ true,
+ );
+ // we change the receiver in lastTransaction to prevent having 2 exact same transactions with same id (if not, could be same as transactions[0])
+
+ const allTransactions = transactions.concat(lastTransaction);
+
+ const response = await utils.requestWithAcceptHeader("POST", "transactions", {
+ transactions: allTransactions,
+ });
+
+ expect(response).toBeSuccessfulResponse();
+
+ expect(response.data.data.accept.sort()).toEqual(
+ transactions.map(transaction => transaction.id).sort(),
+ );
+ expect(response.data.data.broadcast.sort()).toEqual(
+ transactions.map(transaction => transaction.id).sort(),
+ );
+ expect(response.data.data.invalid).toEqual(lastTransaction.map(transaction => transaction.id));
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/votes.test.js b/packages/core-api/__tests__/v2/handlers/votes.test.js
deleted file mode 100644
index 70de25f462..0000000000
--- a/packages/core-api/__tests__/v2/handlers/votes.test.js
+++ /dev/null
@@ -1,48 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const voteId = 'ea294b610e51efb3ceb4229f27bf773e87f41d21b6bb1f3bf68629ffd652c2d3'
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Votes', () => {
- describe('GET /votes', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the votes', async () => {
- const response = await utils[request]('GET', 'votes')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
- utils.expectPaginator(response)
-
- expect(response.data.data[0]).toBeObject()
- expect(response.data.meta.count).toBeNumber()
- })
- })
- })
-
- describe('GET /votes/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a vote by the given identifier', async () => {
- const response = await utils[request]('GET', `votes/${voteId}`)
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- expect(response.data.data).toBeObject()
- expect(response.data.data.id).toBe(voteId)
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/votes.test.ts b/packages/core-api/__tests__/v2/handlers/votes.test.ts
new file mode 100644
index 0000000000..bb1d378593
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/votes.test.ts
@@ -0,0 +1,48 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const voteId = "ea294b610e51efb3ceb4229f27bf773e87f41d21b6bb1f3bf68629ffd652c2d3";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Votes", () => {
+ describe("GET /votes", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the votes", async () => {
+ const response = await utils[request]("GET", "votes");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ utils.expectPaginator(response);
+
+ expect(response.data.data[0]).toBeObject();
+ expect(response.data.meta.count).toBeNumber();
+ });
+ },
+ );
+ });
+
+ describe("GET /votes/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a vote by the given identifier", async () => {
+ const response = await utils[request]("GET", `votes/${voteId}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ expect(response.data.data).toBeObject();
+ expect(response.data.data.id).toBe(voteId);
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/handlers/wallets.test.js b/packages/core-api/__tests__/v2/handlers/wallets.test.js
deleted file mode 100644
index 9af90bc5f1..0000000000
--- a/packages/core-api/__tests__/v2/handlers/wallets.test.js
+++ /dev/null
@@ -1,357 +0,0 @@
-/* eslint max-len: "off" */
-
-require('@arkecosystem/core-test-utils/lib/matchers')
-const app = require('../../__support__/setup')
-const utils = require('../utils')
-
-const username = 'genesis_9'
-const address = 'AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo'
-const publicKey =
- '0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647'
-const balance = 245098000000000
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-describe('API 2.0 - Wallets', () => {
- describe('GET /wallets', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the wallets', async () => {
- const response = await utils[request]('GET', 'wallets')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectWallet(response.data.data[0])
- })
- })
- })
-
- describe('GET /wallets/top', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the top wallets', async () => {
- const response = await utils[request]('GET', 'wallets/top')
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectWallet(response.data.data[0])
- })
- })
- })
-
- describe('GET /wallets/:id', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET a wallet by the given identifier', async () => {
- const response = await utils[request]('GET', `wallets/${address}`)
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeObject()
-
- const wallet = response.data.data
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- })
- })
-
- describe('when requesting an unknown address', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should return ResourceNotFound error', async () => {
- try {
- await utils[request]('GET', 'wallets/dummy')
- } catch (error) {
- expect(error.response.status).toEqual(404)
- }
- })
- })
- })
- })
-
- describe('GET /wallets/:id/transactions', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the transactions for the given wallet by id', async () => {
- const response = await utils[request](
- 'GET',
- `wallets/${address}/transactions`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectTransaction(response.data.data[0])
- })
- })
- })
-
- describe('GET /wallets/:id/transactions/sent', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the send transactions for the given wallet by id', async () => {
- const response = await utils[request](
- 'GET',
- `wallets/${address}/transactions/sent`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- const transaction = response.data.data[0]
- utils.expectTransaction(transaction)
- expect(transaction.sender).toBe(address)
- })
- })
- })
-
- describe('GET /wallets/:id/transactions/received', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the received transactions for the given wallet by id', async () => {
- const response = await utils[request](
- 'GET',
- `wallets/${address}/transactions/received`,
- )
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- utils.expectTransaction(response.data.data[0])
- })
- })
- })
-
- describe('GET /wallets/:id/votes', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should GET all the votes for the given wallet by id', async () => {
- const response = await utils[request]('GET', `wallets/${address}/votes`)
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data[0]).toBeObject()
- })
- })
- })
-
- describe('POST /wallets/search', () => {
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the exact specified address', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the exact specified publicKey', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- publicKey,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- expect(wallet.publicKey).toBe(publicKey)
- })
- })
-
- // describe.each([
- // ['API-Version', 'request'],
- // ['Accept', 'requestWithAcceptHeader']
- // ])('using the %s header', (header, request) => {
- // it('should POST a search for wallets with the exact specified secondPublicKey', async () => {
- // const response = await utils[request]('POST', 'wallets/search', { address: addressSecondPassphrase, secondPublicKey })
- // expect(response).toBeSuccessfulResponse()
- // expect(response.data.data).toBeArray()
-
- // expect(response.data.data).toHaveLength(1)
-
- // const wallet = response.data.data[0]
- // utils.expectWallet(wallet)
- // expect(wallet.address).toBe(addressSecondPassphrase)
- // })
- // })
-
- // describe.each([
- // ['API-Version', 'request'],
- // ['Accept', 'requestWithAcceptHeader']
- // ])('using the %s header', (header, request) => {
- // it('should POST a search for wallets with the exact specified vote', async () => {
- // const response = await utils[request]('POST', 'wallets/search', { address: address, vote })
- // expect(response).toBeSuccessfulResponse()
- // expect(response.data.data).toBeArray()
-
- // expect(response.data.data).toHaveLength(1)
-
- // const wallet = response.data.data[0]
- // utils.expectWallet(wallet)
- // expect(wallet.address).toBe(address)
- // })
- // })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the exact specified username', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- username,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the exact specified balance', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- balance: {
- from: balance,
- to: balance,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- expect(wallet.balance).toBe(balance)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the specified balance range', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- balance: {
- from: balance - 1000,
- to: balance + 1000,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- expect(wallet.balance).toBe(balance)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the exact specified voteBalance', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- voteBalance: {
- from: balance,
- to: balance,
- },
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the wrong specified username', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- address,
- username: 'dummy',
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(0)
- })
- })
-
- describe.each([
- ['API-Version', 'request'],
- ['Accept', 'requestWithAcceptHeader'],
- ])('using the %s header', (header, request) => {
- it('should POST a search for wallets with the specific criteria', async () => {
- const response = await utils[request]('POST', 'wallets/search', {
- publicKey,
- username,
- })
- expect(response).toBeSuccessfulResponse()
- expect(response.data.data).toBeArray()
-
- expect(response.data.data).toHaveLength(1)
-
- const wallet = response.data.data[0]
- utils.expectWallet(wallet)
- expect(wallet.address).toBe(address)
- })
- })
- })
-})
diff --git a/packages/core-api/__tests__/v2/handlers/wallets.test.ts b/packages/core-api/__tests__/v2/handlers/wallets.test.ts
new file mode 100644
index 0000000000..0a20e72838
--- /dev/null
+++ b/packages/core-api/__tests__/v2/handlers/wallets.test.ts
@@ -0,0 +1,362 @@
+import "@arkecosystem/core-test-utils";
+import { setUp, tearDown } from "../../__support__/setup";
+import { utils } from "../utils";
+
+const username = "genesis_9";
+const address = "AG8kwwk4TsYfA2HdwaWBVAJQBj6VhdcpMo";
+const publicKey = "0377f81a18d25d77b100cb17e829a72259f08334d064f6c887298917a04df8f647";
+const balance = 245098000000000;
+const address2 = "AJjv7WztjJNYHrLAeveG5NgHWp6699ZJwD";
+
+beforeAll(async () => {
+ await setUp();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("API 2.0 - Wallets", () => {
+ describe("GET /wallets", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the wallets", async () => {
+ const response = await utils[request]("GET", "wallets");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectWallet(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("GET /wallets/top", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the top wallets", async () => {
+ const response = await utils[request]("GET", "wallets/top");
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectWallet(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("GET /wallets/:id", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET a wallet by the given identifier", async () => {
+ const response = await utils[request]("GET", `wallets/${address}`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeObject();
+
+ const wallet = response.data.data;
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ });
+ },
+ );
+
+ describe("when requesting an unknown address", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should return ResourceNotFound error", async () => {
+ try {
+ await utils[request]("GET", "wallets/dummy");
+ } catch (error) {
+ expect(error.response.status).toEqual(404);
+ }
+ });
+ },
+ );
+ });
+ });
+
+ describe("GET /wallets/:id/transactions", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the transactions for the given wallet by id", async () => {
+ const response = await utils[request]("GET", `wallets/${address}/transactions`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectTransaction(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("GET /wallets/:id/transactions/sent", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the send transactions for the given wallet by id", async () => {
+ const response = await utils[request]("GET", `wallets/${address}/transactions/sent`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ const transaction = response.data.data[0];
+ utils.expectTransaction(transaction);
+ expect(transaction.sender).toBe(address);
+ });
+ },
+ );
+ });
+
+ describe("GET /wallets/:id/transactions/received", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the received transactions for the given wallet by id", async () => {
+ const response = await utils[request]("GET", `wallets/${address}/transactions/received`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ utils.expectTransaction(response.data.data[0]);
+ });
+ },
+ );
+ });
+
+ describe("GET /wallets/:id/votes", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should GET all the votes for the given wallet by id", async () => {
+ const response = await utils[request]("GET", `wallets/${address}/votes`);
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data[0]).toBeObject();
+ });
+ },
+ );
+ });
+
+ describe("POST /wallets/search", () => {
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the exact specified address", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ });
+
+ it("should POST a search for wallets with the any of the specified addresses", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ addresses: [address, address2],
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+ expect(response.data.data).toHaveLength(2);
+
+ for (const wallet of response.data.data) {
+ utils.expectWallet(wallet);
+ }
+
+ const addresses = response.data.data.map(wallet => wallet.address).sort();
+ expect(addresses).toEqual([address, address2]);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the exact specified publicKey", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ publicKey,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ expect(wallet.publicKey).toBe(publicKey);
+ });
+ },
+ );
+
+ // describe.each([
+ // ['API-Version', 'request'],
+ // ['Accept', 'requestWithAcceptHeader']
+ // ])('using the %s header', (header, request) => {
+ // it('should POST a search for wallets with the exact specified secondPublicKey', async () => {
+ // const response = await utils[request]('POST', 'wallets/search', { address: addressSecondPassphrase, secondPublicKey })
+ // expect(response).toBeSuccessfulResponse()
+ // expect(response.data.data).toBeArray()
+
+ // expect(response.data.data).toHaveLength(1)
+
+ // const wallet = response.data.data[0]
+ // utils.expectWallet(wallet)
+ // expect(wallet.address).toBe(addressSecondPassphrase)
+ // })
+ // })
+
+ // describe.each([
+ // ['API-Version', 'request'],
+ // ['Accept', 'requestWithAcceptHeader']
+ // ])('using the %s header', (header, request) => {
+ // it('should POST a search for wallets with the exact specified vote', async () => {
+ // const response = await utils[request]('POST', 'wallets/search', { address: address, vote })
+ // expect(response).toBeSuccessfulResponse()
+ // expect(response.data.data).toBeArray()
+
+ // expect(response.data.data).toHaveLength(1)
+
+ // const wallet = response.data.data[0]
+ // utils.expectWallet(wallet)
+ // expect(wallet.address).toBe(address)
+ // })
+ // })
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the exact specified username", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ username,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the exact specified balance", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ balance: {
+ from: balance,
+ to: balance,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ expect(wallet.balance).toBe(balance);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the specified balance range", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ balance: {
+ from: balance - 1000,
+ to: balance + 1000,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ expect(wallet.balance).toBe(balance);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the exact specified voteBalance", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ voteBalance: {
+ from: balance,
+ to: balance,
+ },
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the wrong specified username", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ address,
+ username: "dummy",
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(0);
+ });
+ },
+ );
+
+ describe.each([["API-Version", "request"], ["Accept", "requestWithAcceptHeader"]])(
+ "using the %s header",
+ (header, request) => {
+ it("should POST a search for wallets with the specific criteria", async () => {
+ const response = await utils[request]("POST", "wallets/search", {
+ publicKey,
+ username,
+ });
+ expect(response).toBeSuccessfulResponse();
+ expect(response.data.data).toBeArray();
+
+ expect(response.data.data).toHaveLength(1);
+
+ const wallet = response.data.data[0];
+ utils.expectWallet(wallet);
+ expect(wallet.address).toBe(address);
+ });
+ },
+ );
+ });
+});
diff --git a/packages/core-api/__tests__/v2/utils.js b/packages/core-api/__tests__/v2/utils.js
deleted file mode 100644
index 71076a0823..0000000000
--- a/packages/core-api/__tests__/v2/utils.js
+++ /dev/null
@@ -1,182 +0,0 @@
-const axios = require('axios')
-const {
- client,
- transactionBuilder,
- NetworkManager,
-} = require('@arkecosystem/crypto')
-const apiHelpers = require('@arkecosystem/core-test-utils/lib/helpers/api')
-
-class Helpers {
- async request(method, path, params = {}) {
- const url = `http://localhost:4003/api/${path}`
- const headers = {
- 'API-Version': 2,
- 'Content-Type': 'application/json',
- }
-
- const server = require('@arkecosystem/core-container').resolvePlugin('api')
-
- return apiHelpers.request(server.http, method, url, headers, params)
- }
-
- async requestWithAcceptHeader(method, path, params = {}) {
- const url = `http://localhost:4003/api/${path}`
- const headers = {
- Accept: 'application/vnd.ark.core-api.v2+json',
- 'Content-Type': 'application/json',
- }
-
- const server = require('@arkecosystem/core-container').resolvePlugin('api')
-
- return apiHelpers.request(server.http, method, url, headers, params)
- }
-
- expectJson(response) {
- expect(response.data).toBeObject()
- }
-
- expectStatus(response, code) {
- expect(response.status).toBe(code)
- }
-
- assertVersion(response, version) {
- expect(response.headers).toBeObject()
- expect(response.headers).toHaveProperty('api-version', version)
- }
-
- expectResource(response) {
- expect(response.data.data).toBeObject()
- }
-
- expectCollection(response) {
- expect(Array.isArray(response.data.data)).toBe(true)
- }
-
- expectPaginator(response, firstPage = true) {
- expect(response.data.meta).toBeObject()
- expect(response.data.meta).toHaveProperty('count')
- expect(response.data.meta).toHaveProperty('pageCount')
- expect(response.data.meta).toHaveProperty('totalCount')
- expect(response.data.meta).toHaveProperty('next')
- expect(response.data.meta).toHaveProperty('previous')
- expect(response.data.meta).toHaveProperty('self')
- expect(response.data.meta).toHaveProperty('first')
- expect(response.data.meta).toHaveProperty('last')
- }
-
- expectSuccessful(response, statusCode = 200) {
- this.expectStatus(response, statusCode)
- this.expectJson(response)
- this.assertVersion(response, 2)
- }
-
- expectError(response, statusCode = 404) {
- this.expectStatus(response, statusCode)
- this.expectJson(response)
- expect(response.data.statusCode).toBeNumber()
- expect(response.data.error).toBeString()
- expect(response.data.message).toBeString()
- }
-
- expectTransaction(transaction) {
- expect(transaction).toBeObject()
- expect(transaction).toHaveProperty('id')
- expect(transaction).toHaveProperty('blockId')
- expect(transaction).toHaveProperty('type')
- expect(transaction).toHaveProperty('amount')
- expect(transaction).toHaveProperty('fee')
- expect(transaction).toHaveProperty('sender')
-
- if ([1, 2].indexOf(transaction.type) === -1) {
- expect(transaction.recipient).toBeString()
- }
-
- expect(transaction.signature).toBeString()
- expect(transaction.confirmations).toBeNumber()
- }
-
- expectBlock(block, expected) {
- expect(block).toBeObject()
- expect(block.id).toBeString()
- expect(block.version).toBeNumber()
- expect(block.height).toBeNumber()
- expect(block).toHaveProperty('previous') // `null` or String
- expect(block).toHaveProperty('forged')
- expect(block.forged.reward).toBeNumber()
- expect(block.forged.fee).toBeNumber()
- expect(block.forged.total).toBeNumber()
- expect(block.forged.amount).toBeNumber()
- expect(block).toHaveProperty('payload')
- expect(block.payload.length).toBeNumber()
- expect(block.payload.hash).toBeString()
- expect(block).toHaveProperty('generator')
- expect(block.generator.publicKey).toBeString()
- expect(block.signature).toBeString()
- expect(block.transactions).toBeNumber()
-
- Object.keys(expected || {}).forEach(attr => {
- expect(block[attr]).toEqual(expected[attr])
- })
- }
-
- expectDelegate(delegate, expected) {
- expect(delegate).toBeObject()
- expect(delegate.username).toBeString()
- expect(delegate.address).toBeString()
- expect(delegate.publicKey).toBeString()
- expect(delegate.votes).toBeNumber()
- expect(delegate.rank).toBeNumber()
- expect(delegate.blocks).toBeObject()
- expect(delegate.blocks.missed).toBeNumber()
- expect(delegate.blocks.produced).toBeNumber()
- expect(delegate.production).toBeObject()
- expect(delegate.production.approval).toBeNumber()
- expect(delegate.production.productivity).toBeNumber()
- expect(delegate.forged.fees).toBeNumber()
- expect(delegate.forged.rewards).toBeNumber()
- expect(delegate.forged.total).toBeNumber()
-
- Object.keys(expected || {}).forEach(attr => {
- expect(delegate[attr]).toBe(expected[attr])
- })
- }
-
- expectWallet(wallet) {
- expect(wallet).toBeObject()
- expect(wallet).toHaveProperty('address')
- expect(wallet).toHaveProperty('publicKey')
- expect(wallet).toHaveProperty('balance')
- expect(wallet).toHaveProperty('isDelegate')
- }
-
- async createTransaction() {
- client.setConfig(NetworkManager.findByName('testnet'))
-
- const transaction = transactionBuilder
- .transfer()
- .amount(1 * 1e8)
- .recipientId('AZFEPTWnn2Sn8wDZgCRF8ohwKkrmk2AZi1')
- .vendorField('test')
- .sign(
- 'prison tobacco acquire stone dignity palace note decade they current lesson robot',
- )
- .getStruct()
-
- await axios.post(
- 'http://127.0.0.1:4003/api/v2/transactions',
- {
- transactions: [transaction],
- },
- {
- headers: { 'Content-Type': 'application/json' },
- },
- )
-
- return transaction
- }
-}
-
-/**
- * @type {Helpers}
- */
-module.exports = new Helpers()
diff --git a/packages/core-api/__tests__/v2/utils.ts b/packages/core-api/__tests__/v2/utils.ts
new file mode 100644
index 0000000000..afb7d99c91
--- /dev/null
+++ b/packages/core-api/__tests__/v2/utils.ts
@@ -0,0 +1,176 @@
+import { app } from "@arkecosystem/core-container";
+import { client, NetworkManager, transactionBuilder } from "@arkecosystem/crypto";
+import axios from "axios";
+import "jest-extended";
+import { ApiHelpers } from "../../../core-test-utils/src/helpers/api";
+
+class Helpers {
+ public async request(method, path, params = {}) {
+ const url = `http://localhost:4003/api/${path}`;
+ const headers = {
+ "API-Version": 2,
+ "Content-Type": "application/json",
+ };
+
+ const server = app.resolvePlugin("api");
+
+ return ApiHelpers.request(server.http, method, url, headers, params);
+ }
+
+ public async requestWithAcceptHeader(method, path, params = {}) {
+ const url = `http://localhost:4003/api/${path}`;
+ const headers = {
+ Accept: "application/vnd.core-api.v2+json",
+ "Content-Type": "application/json",
+ };
+
+ const server = app.resolvePlugin("api");
+
+ return ApiHelpers.request(server.http, method, url, headers, params);
+ }
+
+ public expectJson(response) {
+ expect(response.data).toBeObject();
+ }
+
+ public expectStatus(response, code) {
+ expect(response.status).toBe(code);
+ }
+
+ public assertVersion(response, version) {
+ expect(response.headers).toBeObject();
+ expect(response.headers).toHaveProperty("api-version", version);
+ }
+
+ public expectResource(response) {
+ expect(response.data.data).toBeObject();
+ }
+
+ public expectCollection(response) {
+ expect(Array.isArray(response.data.data)).toBe(true);
+ }
+
+ public expectPaginator(response, firstPage = true) {
+ expect(response.data.meta).toBeObject();
+ expect(response.data.meta).toHaveProperty("count");
+ expect(response.data.meta).toHaveProperty("pageCount");
+ expect(response.data.meta).toHaveProperty("totalCount");
+ expect(response.data.meta).toHaveProperty("next");
+ expect(response.data.meta).toHaveProperty("previous");
+ expect(response.data.meta).toHaveProperty("self");
+ expect(response.data.meta).toHaveProperty("first");
+ expect(response.data.meta).toHaveProperty("last");
+ }
+
+ public expectSuccessful(response, statusCode = 200) {
+ this.expectStatus(response, statusCode);
+ this.expectJson(response);
+ this.assertVersion(response, 2);
+ }
+
+ public expectError(response, statusCode = 404) {
+ this.expectStatus(response, statusCode);
+ this.expectJson(response);
+ expect(response.data.statusCode).toBeNumber();
+ expect(response.data.error).toBeString();
+ expect(response.data.message).toBeString();
+ }
+
+ public expectTransaction(transaction) {
+ expect(transaction).toBeObject();
+ expect(transaction).toHaveProperty("id");
+ expect(transaction).toHaveProperty("blockId");
+ expect(transaction).toHaveProperty("type");
+ expect(transaction).toHaveProperty("amount");
+ expect(transaction).toHaveProperty("fee");
+ expect(transaction).toHaveProperty("sender");
+
+ if ([1, 2].indexOf(transaction.type) === -1) {
+ expect(transaction.recipient).toBeString();
+ }
+
+ expect(transaction.signature).toBeString();
+ expect(transaction.confirmations).toBeNumber();
+ }
+
+ public expectBlock(block, expected: any = {}) {
+ expect(block).toBeObject();
+ expect(block.id).toBeString();
+ expect(block.version).toBeNumber();
+ expect(block.height).toBeNumber();
+ expect(block).toHaveProperty("previous"); // `null` or String
+ expect(block).toHaveProperty("forged");
+ expect(block.forged.reward).toBeNumber();
+ expect(block.forged.fee).toBeNumber();
+ expect(block.forged.total).toBeNumber();
+ expect(block.forged.amount).toBeNumber();
+ expect(block).toHaveProperty("payload");
+ expect(block.payload.length).toBeNumber();
+ expect(block.payload.hash).toBeString();
+ expect(block).toHaveProperty("generator");
+ expect(block.generator.publicKey).toBeString();
+ expect(block.signature).toBeString();
+ expect(block.transactions).toBeNumber();
+
+ Object.keys(expected || {}).forEach(attr => {
+ expect(block[attr]).toEqual(expected[attr]);
+ });
+ }
+
+ public expectDelegate(delegate, expected: any = {}) {
+ expect(delegate).toBeObject();
+ expect(delegate.username).toBeString();
+ expect(delegate.address).toBeString();
+ expect(delegate.publicKey).toBeString();
+ expect(delegate.votes).toBeNumber();
+ expect(delegate.rank).toBeNumber();
+ expect(delegate.blocks).toBeObject();
+ expect(delegate.blocks.missed).toBeNumber();
+ expect(delegate.blocks.produced).toBeNumber();
+ expect(delegate.production).toBeObject();
+ expect(delegate.production.approval).toBeNumber();
+ expect(delegate.production.productivity).toBeNumber();
+ expect(delegate.forged.fees).toBeNumber();
+ expect(delegate.forged.rewards).toBeNumber();
+ expect(delegate.forged.total).toBeNumber();
+
+ Object.keys(expected || {}).forEach(attr => {
+ expect(delegate[attr]).toBe(expected[attr]);
+ });
+ }
+
+ public expectWallet(wallet) {
+ expect(wallet).toBeObject();
+ expect(wallet).toHaveProperty("address");
+ expect(wallet).toHaveProperty("publicKey");
+ expect(wallet).toHaveProperty("balance");
+ expect(wallet).toHaveProperty("isDelegate");
+ expect(wallet).toHaveProperty("vote");
+ }
+
+ public async createTransaction() {
+ client.setConfig(NetworkManager.findByName("testnet"));
+
+ const transaction = transactionBuilder
+ .transfer()
+ .amount(1 * 1e8)
+ .recipientId("AZFEPTWnn2Sn8wDZgCRF8ohwKkrmk2AZi1")
+ .vendorField("test")
+ .sign("prison tobacco acquire stone dignity palace note decade they current lesson robot")
+ .getStruct();
+
+ await axios.post(
+ "http://127.0.0.1:4003/api/v2/transactions",
+ {
+ transactions: [transaction],
+ },
+ {
+ headers: { "Content-Type": "application/json" },
+ },
+ );
+
+ return transaction;
+ }
+}
+
+export const utils = new Helpers();
diff --git a/packages/core-api/jest.config.js b/packages/core-api/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-api/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-api/lib/defaults.js b/packages/core-api/lib/defaults.js
deleted file mode 100644
index 0e201f6d40..0000000000
--- a/packages/core-api/lib/defaults.js
+++ /dev/null
@@ -1,83 +0,0 @@
-const path = require('path')
-
-module.exports = {
- enabled: false,
- host: process.env.ARK_API_HOST || '0.0.0.0',
- port: process.env.ARK_API_PORT || 4003,
- cache: {
- /**
- * How many seconds the server will try to complete the request and cache the result.
- *
- * Defaults to 8 seconds, set it to false if you do not care about the timeout.
- *
- * Setting it to false can result in requests never being completed, which is usually
- * caused by low-spec servers that are unable to handle the heavy load that results
- * out of SQL queries on the blocks and transactions tables.
- *
- * If you experience issues with the cache timeout, which is indicated by a 503 status codes,
- * you should consider upgrading your hardware or tweak your PostgreSQL settings.
- */
- generateTimeout: process.env.ARK_API_CACHE_TIMEOUT || 8000,
- },
- // @see https://hapijs.com/api#-serveroptionstls
- ssl: {
- enabled: process.env.ARK_API_SSL,
- host: process.env.ARK_API_SSL_HOST || '0.0.0.0',
- port: process.env.ARK_API_SSL_PORT || 8443,
- key: process.env.ARK_API_SSL_KEY,
- cert: process.env.ARK_API_SSL_CERT,
- },
- // @see https://github.com/p-meier/hapi-api-version
- versions: {
- validVersions: [1, 2],
- defaultVersion: 1,
- basePath: '/api/',
- vendorName: 'ark.core-api',
- },
- // @see https://github.com/wraithgar/hapi-rate-limit
- rateLimit: {
- enabled: !process.env.ARK_API_RATE_LIMIT,
- pathLimit: false,
- userLimit: process.env.ARK_API_RATE_LIMIT_USER_LIMIT || 300,
- userCache: {
- expiresIn: process.env.ARK_API_RATE_LIMIT_USER_EXPIRES || 60000,
- },
- ipWhitelist: ['127.0.0.1', '::ffff:127.0.0.1'],
- },
- // @see https://github.com/fknop/hapi-pagination
- pagination: {
- limit: 100,
- include: [
- '/api/v2/blocks',
- '/api/v2/blocks/{id}/transactions',
- '/api/v2/blocks/search',
- '/api/v2/delegates',
- '/api/v2/delegates/{id}/blocks',
- '/api/v2/delegates/{id}/voters',
- '/api/v2/delegates/search',
- '/api/v2/peers',
- '/api/v2/transactions',
- '/api/v2/transactions/search',
- '/api/v2/transactions/unconfirmed',
- '/api/v2/votes',
- '/api/v2/wallets',
- '/api/v2/wallets/top',
- '/api/v2/wallets/{id}/transactions',
- '/api/v2/wallets/{id}/transactions/received',
- '/api/v2/wallets/{id}/transactions/sent',
- '/api/v2/wallets/{id}/votes',
- '/api/v2/wallets/search',
- ],
- },
- whitelist: ['127.0.0.1', '::ffff:127.0.0.1'],
- plugins: [
- {
- plugin: path.resolve(__dirname, './versions/1'),
- routes: { prefix: '/api/v1' },
- },
- {
- plugin: path.resolve(__dirname, './versions/2'),
- routes: { prefix: '/api/v2' },
- },
- ],
-}
diff --git a/packages/core-api/lib/index.js b/packages/core-api/lib/index.js
deleted file mode 100644
index 811bf3ad83..0000000000
--- a/packages/core-api/lib/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'api',
- async register(container, options) {
- if (!options.enabled) {
- container
- .resolvePlugin('logger')
- .info('Public API is disabled :grey_exclamation:')
-
- return
- }
-
- return require('./server')(options)
- },
- async deregister(container, options) {
- if (options.enabled) {
- const servers = Object.entries(container.resolvePlugin('api'))
-
- for (const [type, server] of servers) {
- container.resolvePlugin('logger').info(`Stopping Public ${type} API`)
-
- return server.stop()
- }
- }
- },
-}
diff --git a/packages/core-api/lib/plugins/caster.js b/packages/core-api/lib/plugins/caster.js
deleted file mode 100644
index 6d5db8529e..0000000000
--- a/packages/core-api/lib/plugins/caster.js
+++ /dev/null
@@ -1,78 +0,0 @@
-/* eslint-disable */
-
-const { bignumify } = require('@arkecosystem/core-utils')
-
-/**
- * Check if the given value is a boolean.
- * @param {*} value
- * @return {Boolean}
- */
-function isBoolean(value) {
- try {
- return value.toLowerCase() === 'true' || value.toLowerCase() === 'false'
- } catch (e) {
- return false
- }
-}
-
-/**
- * Check if the given value is a number.
- * @param {*} value
- * @return {Boolean}
- */
-function isNumber(value) {
- return !isNaN(value)
-}
-
-/**
- * @TODO - Review this module later on in the development.
- *
- * The register method used by hapi.js.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- server.ext({
- type: 'onPreHandler',
- method: (request, h) => {
- const query = request.query
-
- Object.keys(query).map((key, index) => {
- // Special fields that should always be a "string"
- if (key === 'id' || key === 'blockId' || key === 'previousBlock') {
- query[key] = query[key]
- }
- // Booleans
- else if (isBoolean(query[key])) {
- query[key] = query[key].toLowerCase() === 'true'
- }
- // Integers - making sure "BigNumbers" are kept as strings
- else if (isNumber(query[key])) {
- query[key] =
- query[key] == Number(query[key])
- ? Number(query[key])
- : bignumify(query[key]).toString()
- }
- // Strings
- else {
- query[key] = query[key]
- }
- })
-
- request.query = query
-
- return h.continue
- },
- })
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: 'core-caster',
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-api/lib/plugins/endpoint-version.js b/packages/core-api/lib/plugins/endpoint-version.js
deleted file mode 100644
index 83f5415841..0000000000
--- a/packages/core-api/lib/plugins/endpoint-version.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const Boom = require('boom')
-
-const versionRegex = /^\/api\/v([0-9])\//
-
-/**
- * The register method used by hapi.js.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- server.ext({
- type: 'onRequest',
- async method(request, h) {
- const match = versionRegex.exec(request.path)
- if (match && match.length === 2) {
- const apiVersion = parseInt(match[1])
- if (options.validVersions.includes(apiVersion)) {
- request.pre.apiVersion = apiVersion
- } else {
- return Boom.badRequest(
- `Invalid api-version! Valid values: ${options.validVersions.join()}`,
- )
- }
- }
- return h.continue
- },
- })
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-module.exports = {
- name: 'endpoint-version',
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-api/lib/plugins/set-headers.js b/packages/core-api/lib/plugins/set-headers.js
deleted file mode 100644
index a0415af0be..0000000000
--- a/packages/core-api/lib/plugins/set-headers.js
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * The register method used by hapi.js.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- server.ext({
- type: 'onPreResponse',
- async method(request, h) {
- const response = request.response
- if (response.isBoom && response.data) {
- // Deleting the property beforehand makes it appear last in the
- // response body.
- delete response.output.payload.error
- response.output.payload.error = response.data
- }
-
- return h.continue
- },
- })
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: 'set-headers',
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/address.js b/packages/core-api/lib/plugins/validation/formats/address.js
deleted file mode 100644
index f2423b6560..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/address.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const bs58check = require('bs58check')
-const config = require('@arkecosystem/core-container').resolvePlugin('config')
-
-/**
- * Register the "address" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('address', {
- type: 'string',
- validate: value => {
- try {
- return bs58check.decode(value)[0] === config.network.pubKeyHash
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/csv.js b/packages/core-api/lib/plugins/validation/formats/csv.js
deleted file mode 100644
index 4c5fcb66cf..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/csv.js
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Register the "csv" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('csv', {
- type: 'string',
- validate: value => {
- try {
- const a = value.split(',')
-
- return a.length > 0 && a.length <= 1000
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/hex.js b/packages/core-api/lib/plugins/validation/formats/hex.js
deleted file mode 100644
index 2fc0f03575..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/hex.js
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Register the "hex" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('hex', {
- type: 'string',
- validate: value => {
- try {
- Buffer.from(value, 'hex')
-
- return true
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/ip.js b/packages/core-api/lib/plugins/validation/formats/ip.js
deleted file mode 100644
index a9833fea0c..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/ip.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const ip = require('ip')
-
-/**
- * Register the "ip" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('ip', {
- type: 'string',
- validate: value => ip.isV4Format(value) || ip.isV6Format(value),
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/parsedInt.js b/packages/core-api/lib/plugins/validation/formats/parsedInt.js
deleted file mode 100644
index 8c0db25163..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/parsedInt.js
+++ /dev/null
@@ -1,25 +0,0 @@
-/* eslint no-restricted-globals: "off" */
-
-/**
- * Register the "parsedInt" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('parsedInt', {
- type: 'string',
- validate: value => {
- if (
- isNaN(value) ||
- parseInt(value) !== value ||
- isNaN(parseInt(value, 10))
- ) {
- return false
- }
-
- value = parseInt(value)
-
- return true
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/publicKey.js b/packages/core-api/lib/plugins/validation/formats/publicKey.js
deleted file mode 100644
index 5a732a71cc..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/publicKey.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/**
- * Register the "publicKey" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('publicKey', {
- type: 'string',
- validate: value => {
- try {
- return Buffer.from(value, 'hex').length === 33
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/signature.js b/packages/core-api/lib/plugins/validation/formats/signature.js
deleted file mode 100644
index 80080ef1b3..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/signature.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/**
- * Register the "signature" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('signature', {
- type: 'string',
- validate: value => {
- try {
- return Buffer.from(value, 'hex').length < 73
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/formats/vendorField.js b/packages/core-api/lib/plugins/validation/formats/vendorField.js
deleted file mode 100644
index d78f5c2279..0000000000
--- a/packages/core-api/lib/plugins/validation/formats/vendorField.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/**
- * Register the "vendorField" validation rule.
- * @param {AJV} ajv
- * @return {void}
- */
-module.exports = ajv => {
- ajv.addFormat('vendorField', {
- type: 'string',
- validate: value => {
- try {
- return Buffer.from(value).length < 65
- } catch (e) {
- return false
- }
- },
- })
-}
diff --git a/packages/core-api/lib/plugins/validation/index.js b/packages/core-api/lib/plugins/validation/index.js
deleted file mode 100644
index f9a302ce7b..0000000000
--- a/packages/core-api/lib/plugins/validation/index.js
+++ /dev/null
@@ -1,99 +0,0 @@
-const PLUGIN_NAME = 'hapi-ajv'
-
-const fs = require('fs')
-const path = require('path')
-const Boom = require('boom')
-const AJV = require('ajv')
-
-const ajv = new AJV()
-
-/**
- * Validate the given data using AJV.
- * @param {Object} schema
- * @param {Object} data
- * @return {(Boolean|Object)}
- */
-function validate(schema, data) {
- return ajv.validate(schema, data) ? null : ajv.errors
-}
-
-/**
- * Create an error response for hapi.js.
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @param {Array} errors
- * @return {Hapi.Response}
- */
-function createErrorResponse(request, h, errors) {
- if (request.pre.apiVersion === 1) {
- return h
- .response({
- path: errors[0].dataPath,
- error: errors[0].message,
- success: false,
- })
- .takeover()
- }
- return Boom.badData(errors)
-}
-
-/**
- * Register all custom validation formats
- * @return {void}
- */
-function registerCustomFormats() {
- const directory = path.resolve(__dirname, 'formats')
-
- fs.readdirSync(directory).forEach(file => {
- if (file.indexOf('.js') !== -1) {
- require(`${directory}/${file}`)(ajv)
- }
- })
-}
-
-/**
- * The register method uses by hapi.js.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- registerCustomFormats()
-
- server.ext({
- type: 'onPreHandler',
- method: (request, h) => {
- const config = request.route.settings.plugins[PLUGIN_NAME] || {}
-
- let errors
-
- if (config.payloadSchema) {
- errors = validate(config.payloadSchema, request.payload)
-
- if (errors) {
- return createErrorResponse(request, h, errors)
- }
- }
-
- if (config.querySchema) {
- errors = validate(config.querySchema, request.query)
-
- if (errors) {
- return createErrorResponse(request, h, errors)
- }
- }
-
- return h.continue
- },
- })
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: PLUGIN_NAME,
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-api/lib/repositories/blocks.js b/packages/core-api/lib/repositories/blocks.js
deleted file mode 100644
index 82052aa393..0000000000
--- a/packages/core-api/lib/repositories/blocks.js
+++ /dev/null
@@ -1,159 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const database = app.resolvePlugin('database')
-
-const buildFilterQuery = require('./utils/filter-query')
-const Repository = require('./repository')
-
-class BlocksRepository extends Repository {
- /**
- * Get all blocks for the given parameters.
- * @param {Object} parameters
- * @return {Object}
- */
- async findAll(parameters = {}) {
- const selectQuery = this.query.select().from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- const applyConditions = queries => {
- const conditions = Object.entries(this._formatConditions(parameters))
-
- if (conditions.length) {
- const first = conditions.shift()
-
- for (const item of queries) {
- item.where(this.query[first[0]].equals(first[1]))
-
- for (const condition of conditions) {
- item.and(this.query[condition[0]].equals(condition[1]))
- }
- }
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- return this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit,
- offset: parameters.offset,
- orderBy: this.__orderBy(parameters),
- })
- }
-
- /**
- * Get all blocks for the given generator.
- * @param {String} generatorPublicKey
- * @param {Object} paginator
- * @return {Object}
- */
- async findAllByGenerator(generatorPublicKey, paginator) {
- return this.findAll({ ...{ generatorPublicKey }, ...paginator })
- }
-
- /**
- * Get a block.
- * @param {Number} value
- * @return {Object}
- */
- async findById(value) {
- const query = this.query
- .select()
- .from(this.query)
- .where(this.query.id.equals(value))
-
- // ensure that the value is not greater than 2147483647 (psql max int size)
- const height = +value
- if (height <= 2147483647) {
- query.or(this.query.height.equals(height))
- }
-
- return this._find(query)
- }
-
- /**
- * Get the last block for the given generator.
- * TODO is this right?
- * @param {String} generatorPublicKey
- * @return {Object}
- */
- async findLastByPublicKey(generatorPublicKey) {
- const query = this.query
- .select(this.query.id, this.query.timestamp)
- .from(this.query)
- .where(this.query.generator_public_key.equals(generatorPublicKey))
- .order(this.query.height.desc)
-
- return this._find(query)
- }
-
- /**
- * Search all blocks.
- * @param {Object} parameters
- * @return {Object}
- */
- async search(parameters) {
- const selectQuery = this.query.select().from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- const applyConditions = queries => {
- const conditions = buildFilterQuery(this._formatConditions(parameters), {
- exact: [
- 'id',
- 'version',
- 'previous_block',
- 'payload_hash',
- 'generator_public_key',
- 'block_signature',
- ],
- between: [
- 'timestamp',
- 'height',
- 'number_of_transactions',
- 'total_amount',
- 'total_fee',
- 'reward',
- 'payload_length',
- ],
- })
-
- if (conditions.length) {
- const first = conditions.shift()
-
- for (const item of queries) {
- item.where(this.query[first.column][first.method](first.value))
-
- for (const condition of conditions) {
- item.and(
- this.query[condition.column][condition.method](condition.value),
- )
- }
- }
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- return this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit,
- offset: parameters.offset,
- orderBy: this.__orderBy(parameters),
- })
- }
-
- getModel() {
- return database.models.block
- }
-
- __orderBy(parameters) {
- if (!parameters.orderBy) return ['height', 'desc']
-
- const orderBy = parameters.orderBy.split(':').map(p => p.toLowerCase())
- if (orderBy.length !== 2 || ['desc', 'asc'].includes(orderBy[1]) !== true) {
- return ['height', 'desc']
- }
-
- return orderBy
- }
-}
-
-module.exports = new BlocksRepository()
diff --git a/packages/core-api/lib/repositories/index.js b/packages/core-api/lib/repositories/index.js
deleted file mode 100644
index 5724a9e9b4..0000000000
--- a/packages/core-api/lib/repositories/index.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = {
- blocks: require('./blocks'),
- transactions: require('./transactions'),
-}
diff --git a/packages/core-api/lib/repositories/repository.js b/packages/core-api/lib/repositories/repository.js
deleted file mode 100644
index 59af86586c..0000000000
--- a/packages/core-api/lib/repositories/repository.js
+++ /dev/null
@@ -1,82 +0,0 @@
-const snakeCase = require('lodash/snakeCase')
-const app = require('@arkecosystem/core-container')
-
-const database = app.resolvePlugin('database')
-
-module.exports = class Repository {
- constructor() {
- this.cache = database.getCache()
- this.model = this.getModel()
- this.query = this.model.query()
-
- this.__mapColumns()
- }
-
- async _find(query) {
- return database.query.oneOrNone(query.toQuery())
- }
-
- async _findMany(query) {
- return database.query.manyOrNone(query.toQuery())
- }
-
- async _findManyWithCount(
- selectQuery,
- countQuery,
- { limit, offset, orderBy },
- ) {
- const { count } = await this._find(countQuery)
-
- if (this.columns.includes(orderBy[0])) {
- selectQuery.order(this.query[snakeCase(orderBy[0])][orderBy[1]])
- }
-
- selectQuery.offset(offset).limit(limit)
-
- return {
- rows: await this._findMany(selectQuery),
- count: +count,
- }
- }
-
- _makeCountQuery() {
- return this.query.select('count(*) AS count').from(this.query)
- }
-
- _makeEstimateQuery() {
- return this.query
- .select('count(*) AS count')
- .from(`${this.model.getTable()} TABLESAMPLE SYSTEM (100)`)
- }
-
- _formatConditions(parameters) {
- const columns = this.model.getColumnSet().columns.map(column => ({
- name: column.name,
- prop: column.prop || column.name,
- }))
-
- return Object.keys(parameters)
- .filter(arg => this.columns.includes(arg))
- .reduce((items, item) => {
- const column = columns.find(
- value => value.name === item || value.prop === item,
- )
-
- column ? (items[column.name] = parameters[item]) : delete items[item]
-
- return items
- }, {})
- }
-
- __mapColumns() {
- this.columns = []
-
- for (const column of this.model.getColumnSet().columns) {
- this.columns.push(column.name)
-
- if (column.prop) {
- this.columns.push(column.prop)
- }
- }
- }
-}
diff --git a/packages/core-api/lib/repositories/transactions.js b/packages/core-api/lib/repositories/transactions.js
deleted file mode 100644
index 74137f7f0b..0000000000
--- a/packages/core-api/lib/repositories/transactions.js
+++ /dev/null
@@ -1,457 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const database = app.resolvePlugin('database')
-
-const dayjs = require('dayjs-ext')
-const { slots } = require('@arkecosystem/crypto')
-const { TRANSACTION_TYPES } = require('@arkecosystem/crypto').constants
-const buildFilterQuery = require('./utils/filter-query')
-const Repository = require('./repository')
-
-class TransactionsRepository extends Repository {
- /**
- * Get all transactions.
- * @param {Object} params
- * @return {Object}
- */
- async findAll(parameters = {}) {
- const selectQuery = this.query.select().from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- if (parameters.senderId) {
- const senderPublicKey = this.__publicKeyFromSenderId(parameters.senderId)
-
- if (!senderPublicKey) {
- return { rows: [], count: 0 }
- }
-
- parameters.senderPublicKey = senderPublicKey
- }
-
- const applyConditions = queries => {
- const conditions = Object.entries(this._formatConditions(parameters))
-
- if (conditions.length) {
- const first = conditions.shift()
-
- for (const item of queries) {
- item.where(this.query[first[0]].equals(first[1]))
-
- for (const condition of conditions) {
- item.and(this.query[condition[0]].equals(condition[1]))
- }
- }
- }
-
- for (const item of queries) {
- if (parameters.ownerId) {
- const owner = database.walletManager.findByAddress(parameters.ownerId)
-
- item.and(this.query.sender_public_key.equals(owner.publicKey))
- item.or(this.query.recipient_id.equals(owner.address))
- }
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- const results = await this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit,
- offset: parameters.offset,
- orderBy: this.__orderBy(parameters),
- })
-
- results.rows = await this.__mapBlocksToTransactions(results.rows)
-
- return results
- }
-
- /**
- * Get all transactions (LEGACY, for V1 only).
- * @param {Object} params
- * @return {Object}
- */
- async findAllLegacy(parameters = {}) {
- const selectQuery = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- if (parameters.senderId) {
- parameters.senderPublicKey = this.__publicKeyFromSenderId(
- parameters.senderId,
- )
- }
-
- const applyConditions = queries => {
- const conditions = Object.entries(this._formatConditions(parameters))
-
- if (conditions.length) {
- const first = conditions.shift()
-
- for (const item of queries) {
- item.where(this.query[first[0]].equals(first[1]))
-
- for (const [key, value] of conditions) {
- item.or(this.query[key].equals(value))
- }
- }
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- const results = await this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit,
- offset: parameters.offset,
- orderBy: this.__orderBy(parameters),
- })
-
- results.rows = await this.__mapBlocksToTransactions(results.rows)
-
- return results
- }
-
- /**
- * Get all transactions for the given Wallet object.
- * @param {Wallet} wallet
- * @param {Object} parameters
- * @return {Object}
- */
- async findAllByWallet(wallet, parameters = {}) {
- const selectQuery = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- const applyConditions = queries => {
- for (const item of queries) {
- item
- .where(this.query.sender_public_key.equals(wallet.publicKey))
- .or(this.query.recipient_id.equals(wallet.address))
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- const results = await this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit,
- offset: parameters.offset,
- orderBy: this.__orderBy(parameters),
- })
-
- results.rows = await this.__mapBlocksToTransactions(results.rows)
-
- return results
- }
-
- /**
- * Get all transactions for the given sender public key.
- * @param {String} senderPublicKey
- * @param {Object} parameters
- * @return {Object}
- */
- async findAllBySender(senderPublicKey, parameters = {}) {
- return this.findAll({ ...{ senderPublicKey }, ...parameters })
- }
-
- /**
- * Get all transactions for the given recipient address.
- * @param {String} recipientId
- * @param {Object} parameters
- * @return {Object}
- */
- async findAllByRecipient(recipientId, parameters = {}) {
- return this.findAll({ ...{ recipientId }, ...parameters })
- }
-
- /**
- * Get all vote transactions for the given sender public key.
- * TODO rename to findAllVotesBySender or not?
- * @param {String} senderPublicKey
- * @param {Object} parameters
- * @return {Object}
- */
- async allVotesBySender(senderPublicKey, parameters = {}) {
- return this.findAll({
- ...{ senderPublicKey, type: TRANSACTION_TYPES.VOTE },
- ...parameters,
- })
- }
-
- /**
- * Get all transactions for the given block.
- * @param {Number} blockId
- * @param {Object} parameters
- * @return {Object}
- */
- async findAllByBlock(blockId, parameters = {}) {
- return this.findAll({ ...{ blockId }, ...parameters })
- }
-
- /**
- * Get all transactions for the given type.
- * @param {Number} type
- * @param {Object} parameters
- * @return {Object}
- */
- async findAllByType(type, parameters = {}) {
- return this.findAll({ ...{ type }, ...parameters })
- }
-
- /**
- * Get a transaction.
- * @param {Number} id
- * @return {Object}
- */
- async findById(id) {
- const query = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- .where(this.query.id.equals(id))
-
- const transaction = await this._find(query)
-
- return this.__mapBlocksToTransactions(transaction)
- }
-
- /**
- * Get a transactions for the given type and id.
- * @param {Number} type
- * @param {Number} id
- * @return {Object}
- */
- async findByTypeAndId(type, id) {
- const query = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- .where(this.query.id.equals(id).and(this.query.type.equals(type)))
-
- const transaction = await this._find(query)
-
- return this.__mapBlocksToTransactions(transaction)
- }
-
- /**
- * Get transactions for the given ids.
- * @param {Array} ids
- * @return {Object}
- */
- async findByIds(ids) {
- const query = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- .where(this.query.id.in(ids))
-
- return this._findMany(query)
- }
-
- /**
- * Get all transactions that have a vendor field.
- * @return {Object}
- */
- async findWithVendorField() {
- const query = this.query
- .select(this.query.block_id, this.query.serialized, this.query.timestamp)
- .from(this.query)
- .where(this.query.vendor_field_hex.isNotNull())
-
- const transactions = await this._findMany(query)
-
- return this.__mapBlocksToTransactions(transactions)
- }
-
- /**
- * Calculates min, max and average fee statistics based on transactions table
- * @return {Object}
- */
- async getFeeStatistics() {
- const query = this.query
- .select(
- this.query.type,
- this.query.fee.min('minFee'),
- this.query.fee.max('maxFee'),
- this.query.fee.avg('avgFee'),
- this.query.timestamp.max('timestamp'),
- )
- .from(this.query)
- .where(
- this.query.timestamp.gte(slots.getTime(dayjs().subtract(30, 'days'))),
- )
- .group(this.query.type)
- .order('"timestamp" DESC')
-
- return this._findMany(query)
- }
-
- /**
- * Search all transactions.
- *
- * @param {Object} params
- * @return {Object}
- */
- async search(parameters) {
- const selectQuery = this.query.select().from(this.query)
- const countQuery = this._makeEstimateQuery()
-
- if (parameters.senderId) {
- const senderPublicKey = this.__publicKeyFromSenderId(parameters.senderId)
-
- if (senderPublicKey) {
- parameters.senderPublicKey = senderPublicKey
- }
- }
-
- const applyConditions = queries => {
- const conditions = buildFilterQuery(this._formatConditions(parameters), {
- exact: [
- 'id',
- 'block_id',
- 'type',
- 'version',
- 'sender_public_key',
- 'recipient_id',
- ],
- between: ['timestamp', 'amount', 'fee'],
- wildcard: ['vendor_field_hex'],
- })
-
- if (conditions.length) {
- const first = conditions.shift()
-
- for (const item of queries) {
- item.where(this.query[first.column][first.method](first.value))
-
- for (const condition of conditions) {
- item.and(
- this.query[condition.column][condition.method](condition.value),
- )
- }
- }
- }
- }
-
- applyConditions([selectQuery, countQuery])
-
- const results = await this._findManyWithCount(selectQuery, countQuery, {
- limit: parameters.limit || 100,
- offset: parameters.offset || 0,
- orderBy: this.__orderBy(parameters),
- })
-
- results.rows = await this.__mapBlocksToTransactions(results.rows)
-
- return results
- }
-
- getModel() {
- return database.models.transaction
- }
-
- /**
- * [__mapBlocksToTransactions description]
- * @param {Array|Object} data
- * @return {Object}
- */
- async __mapBlocksToTransactions(data) {
- const blockQuery = database.models.block.query()
-
- // Array...
- if (Array.isArray(data)) {
- // 1. get heights from cache
- const missingFromCache = []
-
- for (let i = 0; i < data.length; i++) {
- const cachedBlock = this.__getBlockCache(data[i].blockId)
-
- if (cachedBlock) {
- data[i].block = cachedBlock
- } else {
- missingFromCache.push({
- index: i,
- blockId: data[i].blockId,
- })
- }
- }
-
- // 2. get missing heights from database
- if (missingFromCache.length) {
- const query = blockQuery
- .select(blockQuery.id, blockQuery.height)
- .from(blockQuery)
- .where(blockQuery.id.in(missingFromCache.map(d => d.blockId)))
- .group(blockQuery.id)
-
- const blocks = await this._findMany(query)
-
- for (const missing of missingFromCache) {
- const block = blocks.find(item => item.id === missing.blockId)
- if (block) {
- data[missing.index].block = block
- this.__setBlockCache(block)
- }
- }
- }
-
- return data
- }
-
- // Object...
- if (data) {
- const cachedBlock = this.__getBlockCache(data.blockId)
-
- if (cachedBlock) {
- data.block = cachedBlock
- } else {
- const query = blockQuery
- .select(blockQuery.id, blockQuery.height)
- .from(blockQuery)
- .where(blockQuery.id.equals(data.blockId))
-
- data.block = await this._find(query)
-
- this.__setBlockCache(data.block)
- }
- }
-
- return data
- }
-
- /**
- * Tries to retrieve the height of the block from the cache
- * @param {String} blockId
- * @return {Object|null}
- */
- __getBlockCache(blockId) {
- const height = this.cache.get(`heights:${blockId}`)
-
- return height ? { height, id: blockId } : null
- }
-
- /**
- * Stores the height of the block on the cache
- * @param {Object} block
- * @param {String} block.id
- * @param {Number} block.height
- */
- __setBlockCache({ id, height }) {
- this.cache.set(`heights:${id}`, height)
- }
-
- /**
- * Retrieves the publicKey of the address from the WalletManager in-memory data
- * @param {String} senderId
- * @return {String}
- */
- __publicKeyFromSenderId(senderId) {
- return database.walletManager.findByAddress(senderId).publicKey
- }
-
- __orderBy(parameters) {
- return parameters.orderBy
- ? parameters.orderBy.split(':').map(p => p.toLowerCase())
- : ['timestamp', 'desc']
- }
-}
-
-module.exports = new TransactionsRepository()
diff --git a/packages/core-api/lib/repositories/utils/filter-query.js b/packages/core-api/lib/repositories/utils/filter-query.js
deleted file mode 100644
index 0e2614354b..0000000000
--- a/packages/core-api/lib/repositories/utils/filter-query.js
+++ /dev/null
@@ -1,79 +0,0 @@
-/* eslint no-prototype-builtins: "off" */
-
-/**
- * Create a "where" object for a sql query.
- * @param {Object} parameters
- * @param {Object} filters
- * @return {Object}
- */
-module.exports = (parameters, filters) => {
- const where = []
-
- if (filters.hasOwnProperty('exact')) {
- for (const elem of filters.exact) {
- if (typeof parameters[elem] !== 'undefined') {
- where.push({
- column: elem,
- method: 'equals',
- value: parameters[elem],
- })
- }
- }
- }
-
- if (filters.hasOwnProperty('between')) {
- for (const elem of filters.between) {
- if (!parameters[elem]) {
- continue
- }
-
- if (
- !parameters[elem].hasOwnProperty('from') &&
- !parameters[elem].hasOwnProperty('to')
- ) {
- where.push({
- column: elem,
- method: 'equals',
- value: parameters[elem],
- })
- }
-
- if (
- parameters[elem].hasOwnProperty('from') ||
- parameters[elem].hasOwnProperty('to')
- ) {
- where[elem] = {}
-
- if (parameters[elem].hasOwnProperty('from')) {
- where.push({
- column: elem,
- method: 'gte',
- value: parameters[elem].from,
- })
- }
-
- if (parameters[elem].hasOwnProperty('to')) {
- where.push({
- column: elem,
- method: 'lte',
- value: parameters[elem].to,
- })
- }
- }
- }
- }
-
- if (filters.hasOwnProperty('wildcard')) {
- for (const elem of filters.wildcard) {
- if (parameters[elem]) {
- where.push({
- column: elem,
- method: 'like',
- value: `%${parameters[elem]}%`,
- })
- }
- }
- }
-
- return where
-}
diff --git a/packages/core-api/lib/server.js b/packages/core-api/lib/server.js
deleted file mode 100644
index 81f8088683..0000000000
--- a/packages/core-api/lib/server.js
+++ /dev/null
@@ -1,125 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const {
- createServer,
- createSecureServer,
- mountServer,
- plugins,
-} = require('@arkecosystem/core-http-utils')
-
-/**
- * Create a new hapi.js server.
- * @param {Object} config
- * @return {Hapi.Server}
- */
-module.exports = async config => {
- const options = {
- host: config.host,
- port: config.port,
- routes: {
- cors: {
- additionalHeaders: ['api-version'],
- },
- validate: {
- async failAction(request, h, err) {
- throw err
- },
- },
- },
- }
-
- const servers = { http: await createServer(options) }
-
- if (config.ssl.enabled) {
- servers.https = await createSecureServer(options, null, config.ssl)
- }
-
- for (const [type, server] of Object.entries(servers)) {
- // TODO: enable after mainnet migration
- // await server.register({ plugin: plugins.contentType })
-
- await server.register({
- plugin: plugins.corsHeaders,
- })
-
- await server.register({
- plugin: plugins.transactionPayload,
- options: {
- routes: [
- {
- method: 'POST',
- path: '/api/v2/transactions',
- },
- ],
- },
- })
-
- await server.register({
- plugin: plugins.whitelist,
- options: {
- whitelist: config.whitelist,
- name: 'Public API',
- },
- })
-
- await server.register({
- plugin: require('./plugins/set-headers'),
- })
-
- await server.register({
- plugin: require('hapi-api-version'),
- options: config.versions,
- })
-
- await server.register({
- plugin: require('./plugins/endpoint-version'),
- options: { validVersions: config.versions.validVersions },
- })
-
- await server.register({
- plugin: require('./plugins/caster'),
- })
-
- await server.register({
- plugin: require('./plugins/validation'),
- })
-
- await server.register({
- plugin: require('hapi-rate-limit'),
- options: config.rateLimit,
- })
-
- await server.register({
- plugin: require('hapi-pagination'),
- options: {
- meta: {
- baseUri: '',
- },
- query: {
- limit: {
- default: config.pagination.limit,
- },
- },
- results: {
- name: 'data',
- },
- routes: {
- include: config.pagination.include,
- exclude: ['*'],
- },
- },
- })
-
- for (const plugin of config.plugins) {
- if (typeof plugin.plugin === 'string') {
- plugin.plugin = require(plugin.plugin)
- }
-
- await server.register(plugin)
- }
-
- await mountServer(`Public ${type} API`, server)
- }
-
- return servers
-}
diff --git a/packages/core-api/lib/utils/generate-cache-key.js b/packages/core-api/lib/utils/generate-cache-key.js
deleted file mode 100644
index 9c72414078..0000000000
--- a/packages/core-api/lib/utils/generate-cache-key.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = value =>
- require('crypto')
- .createHash('sha256')
- .update(JSON.stringify(value))
- .digest('hex')
diff --git a/packages/core-api/lib/utils/transformer.js b/packages/core-api/lib/utils/transformer.js
deleted file mode 100644
index af05e063c4..0000000000
--- a/packages/core-api/lib/utils/transformer.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/* eslint max-len: "off" */
-
-const path = require('path')
-
-/**
- * Transform the given data to a resource.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {Object} transformer
- * @return {Object}
- */
-const transformResource = (request, data, transformer) =>
- require(path.resolve(
- __dirname,
- `../versions/${request.pre.apiVersion}/transformers/${transformer}`,
- ))(data)
-
-/**
- * Transform the given data to a collection.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {Object} transformer
- * @return {Object}
- */
-const transformCollection = (request, data, transformer) =>
- data.map(d => transformResource(request, d, transformer))
-
-/**
- * @type {Object}
- */
-module.exports = {
- transformResource,
- transformCollection,
-}
diff --git a/packages/core-api/lib/versions/1/handlers/accounts.js b/packages/core-api/lib/versions/1/handlers/accounts.js
deleted file mode 100644
index e3806f558d..0000000000
--- a/packages/core-api/lib/versions/1/handlers/accounts.js
+++ /dev/null
@@ -1,193 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const database = app.resolvePlugin('database')
-const blockchain = app.resolvePlugin('blockchain')
-
-const utils = require('../utils')
-const schema = require('../schemas/accounts')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.accounts.index(request)
-
- return utils.respondWithCache(data, h)
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.accounts.show(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getAccount,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.balance = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.accounts.balance(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getBalance,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.publicKey = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.accounts.publicKey(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getPublicKey,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.fee = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- fee: config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees.delegateRegistration,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.delegates = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const account = await database.wallets.findById(request.query.address)
-
- if (!account) {
- return utils.respondWith('Address not found.', true)
- }
-
- if (!account.vote) {
- return utils.respondWith(
- `Address ${request.query.address} hasn't voted yet.`,
- true,
- )
- }
-
- const delegate = await database.delegates.findById(account.vote)
-
- return utils.respondWith({
- delegates: [utils.toResource(request, delegate, 'delegate')],
- })
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getDelegates,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.top = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- let accounts = database.wallets.top(utils.paginate(request))
-
- accounts = accounts.rows.map(account => ({
- address: account.address,
- balance: `${account.balance}`,
- publicKey: account.publicKey,
- }))
-
- return utils.respondWith({ accounts })
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.top,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.count = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const { count } = await database.wallets.findAll()
-
- return utils.respondWith({ count })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/blocks.js b/packages/core-api/lib/versions/1/handlers/blocks.js
deleted file mode 100644
index e8f64c1ec1..0000000000
--- a/packages/core-api/lib/versions/1/handlers/blocks.js
+++ /dev/null
@@ -1,216 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const { supplyCalculator } = require('@arkecosystem/core-utils')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-
-const utils = require('../utils')
-const schema = require('../schemas/blocks')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.blocks.index(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getBlocks,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.blocks.show(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getBlock,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.epoch = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- epoch: config.getConstants(blockchain.getLastBlock().data.height).epoch,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.height = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const block = blockchain.getLastBlock()
-
- return utils.respondWith({ height: block.data.height, id: block.data.id })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.nethash = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({ nethash: config.network.nethash })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.fee = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- fee: config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees.transfer,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.fees = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const fees = config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees
-
- return utils.respondWith({
- fees: {
- send: fees.transfer,
- vote: fees.vote,
- secondsignature: fees.secondSignature,
- delegate: fees.delegateRegistration,
- multisignature: fees.multiSignature,
- },
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.milestone = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- milestone: Math.floor(blockchain.getLastBlock().data.height / 3000000),
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.reward = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- reward: config.getConstants(blockchain.getLastBlock().data.height).reward,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.supply = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
- return utils.respondWith({
- supply: supplyCalculator.calculate(lastBlock.data.height),
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.status = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
- const constants = config.getConstants(lastBlock.data.height)
-
- return utils.respondWith({
- epoch: constants.epoch,
- height: lastBlock.data.height,
- fee: constants.fees.staticFees.transfer,
- milestone: Math.floor(lastBlock.data.height / 3000000),
- nethash: config.network.nethash,
- reward: constants.reward,
- supply: supplyCalculator.calculate(lastBlock.data.height),
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/delegates.js b/packages/core-api/lib/versions/1/handlers/delegates.js
deleted file mode 100644
index 87cc25d3ee..0000000000
--- a/packages/core-api/lib/versions/1/handlers/delegates.js
+++ /dev/null
@@ -1,201 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const database = app.resolvePlugin('database')
-const blockchain = app.resolvePlugin('blockchain')
-const { slots } = require('@arkecosystem/crypto')
-
-const utils = require('../utils')
-const schema = require('../schemas/delegates')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.delegates.index(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getDelegates,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.delegates.show(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getDelegate,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.count = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.delegates.count(request)
-
- return utils.respondWithCache(data, h)
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.delegates.search(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.search,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.voters = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.delegates.voters(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getVoters,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.fee = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- fee: config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees.delegateRegistration,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.forged = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const wallet = database.walletManager.findByPublicKey(
- request.query.generatorPublicKey,
- )
-
- return utils.respondWith({
- fees: Number(wallet.forgedFees),
- rewards: Number(wallet.forgedRewards),
- forged: Number(wallet.forgedFees) + Number(wallet.forgedRewards),
- })
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getForgedByAccount,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.nextForgers = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
- const limit = request.query.limit || 10
-
- const delegatesCount = config.getConstants(lastBlock).activeDelegates
- const currentSlot = slots.getSlotNumber(lastBlock.data.timestamp)
-
- let activeDelegates = await database.getActiveDelegates(
- lastBlock.data.height,
- )
- activeDelegates = activeDelegates.map(delegate => delegate.publicKey)
-
- const nextForgers = []
- for (let i = 1; i <= delegatesCount && i <= limit; i++) {
- const delegate = activeDelegates[(currentSlot + i) % delegatesCount]
-
- if (delegate) {
- nextForgers.push(delegate)
- }
- }
-
- return utils.respondWith({
- currentBlock: lastBlock.data.height,
- currentSlot,
- delegates: nextForgers,
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/loader.js b/packages/core-api/lib/versions/1/handlers/loader.js
deleted file mode 100644
index 693991de95..0000000000
--- a/packages/core-api/lib/versions/1/handlers/loader.js
+++ /dev/null
@@ -1,80 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-const utils = require('../utils')
-const { transactions } = require('../../../repositories')
-
-/**
- * @type {Object}
- */
-exports.status = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
-
- return utils.respondWith({
- loaded: blockchain.isSynced(),
- now: lastBlock ? lastBlock.data.height : 0,
- blocksCount:
- blockchain.p2p.getNetworkHeight() - lastBlock
- ? lastBlock.data.height
- : 0,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.syncing = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
-
- return utils.respondWith({
- syncing: !blockchain.isSynced(),
- blocks: blockchain.p2p.getNetworkHeight() - lastBlock.data.height,
- height: lastBlock.data.height,
- id: lastBlock.data.id,
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.autoconfigure = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const feeStatisticsData = await transactions.getFeeStatistics()
-
- return utils.respondWith({
- network: {
- nethash: config.network.nethash,
- token: config.network.client.token,
- symbol: config.network.client.symbol,
- explorer: config.network.client.explorer,
- version: config.network.pubKeyHash,
- ports: utils.toResource(request, config, 'ports'),
- feeStatistics: utils.toCollection(
- request,
- feeStatisticsData,
- 'fee-statistics',
- ),
- },
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/peers.js b/packages/core-api/lib/versions/1/handlers/peers.js
deleted file mode 100644
index 27c95c6378..0000000000
--- a/packages/core-api/lib/versions/1/handlers/peers.js
+++ /dev/null
@@ -1,127 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const p2p = app.resolvePlugin('p2p')
-
-const utils = require('../utils')
-const schema = require('../schemas/peers')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const allPeers = await p2p.getPeers()
-
- if (!allPeers) {
- return utils.respondWith('No peers found', true)
- }
-
- let peers = allPeers
- .map(peer => {
- // just use 'OK' status for API instead of p2p http status codes
- peer.status = peer.status === 200 ? 'OK' : peer.status
- return peer
- })
- .sort((a, b) => a.delay - b.delay)
- peers = request.query.os
- ? allPeers.filter(peer => peer.os === request.query.os)
- : peers
- peers = request.query.status
- ? allPeers.filter(peer => peer.status === request.query.status)
- : peers
- peers = request.query.port
- ? allPeers.filter(peer => peer.port === request.query.port)
- : peers
- peers = request.query.version
- ? allPeers.filter(peer => peer.version === request.query.version)
- : peers
- peers = peers.slice(0, request.query.limit || 100)
-
- if (request.query.orderBy) {
- const order = request.query.orderBy.split(':')
- if (['port', 'status', 'os', 'version'].includes(order[0])) {
- peers =
- order[1].toUpperCase() === 'ASC'
- ? peers.sort((a, b) => a[order[0]] - b[order[0]])
- : peers.sort((a, b) => a[order[0]] + b[order[0]])
- }
- }
-
- return utils.respondWith({
- peers: utils.toCollection(
- request,
- peers.map(peer => peer.toBroadcastInfo()),
- 'peer',
- ),
- })
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getPeers,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const peers = await p2p.getPeers()
-
- if (!peers) {
- return utils.respondWith('No peers found', true)
- }
-
- const peer = peers.find(
- elem =>
- elem.ip === request.query.ip && +elem.port === +request.query.port,
- )
-
- if (!peer) {
- return utils.respondWith(
- `Peer ${request.query.ip}:${request.query.port} not found`,
- true,
- )
- }
-
- return utils.respondWith({
- peer: utils.toResource(request, peer.toBroadcastInfo(), 'peer'),
- })
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getPeer,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.version = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- version: app.getVersion(),
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/signatures.js b/packages/core-api/lib/versions/1/handlers/signatures.js
deleted file mode 100644
index 5a32cea6b0..0000000000
--- a/packages/core-api/lib/versions/1/handlers/signatures.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-
-const utils = require('../utils')
-
-/**
- * @type {Object}
- */
-exports.fee = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- return utils.respondWith({
- fee: config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees.secondSignature,
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/handlers/transactions.js b/packages/core-api/lib/versions/1/handlers/transactions.js
deleted file mode 100644
index 9ac43c1d87..0000000000
--- a/packages/core-api/lib/versions/1/handlers/transactions.js
+++ /dev/null
@@ -1,106 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const transactionPool = app.resolvePlugin('transactionPool')
-
-const utils = require('../utils')
-const schema = require('../schemas/transactions')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.transactions.index(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getTransactions,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v1.transactions.show(request)
-
- return utils.respondWithCache(data, h)
- },
- config: {
- plugins: {
- 'hapi-ajv': {
- querySchema: schema.getTransaction,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.unconfirmed = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const pagination = utils.paginate(request)
-
- let transactions = transactionPool.getTransactions(
- pagination.offset,
- pagination.limit,
- )
- transactions = transactions.map(transaction => ({
- serialized: transaction,
- }))
-
- return utils.respondWith({
- transactions: utils.toCollection(request, transactions, 'transaction'),
- })
- },
-}
-
-/**
- * @type {Object}
- */
-exports.showUnconfirmed = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const transaction = transactionPool.getTransaction(request.query.id)
-
- if (!transaction) {
- return utils.respondWith('Transaction not found', true)
- }
-
- return utils.respondWith({
- transaction: utils.toResource(
- request,
- {
- serialized: transaction.serialized,
- },
- 'transaction',
- ),
- })
- },
-}
diff --git a/packages/core-api/lib/versions/1/index.js b/packages/core-api/lib/versions/1/index.js
deleted file mode 100644
index 45ef7237a5..0000000000
--- a/packages/core-api/lib/versions/1/index.js
+++ /dev/null
@@ -1,100 +0,0 @@
-const blocks = require('./handlers/blocks')
-const delegates = require('./handlers/delegates')
-const loader = require('./handlers/loader')
-const peers = require('./handlers/peers')
-const signatures = require('./handlers/signatures')
-const transactions = require('./handlers/transactions')
-const accounts = require('./handlers/accounts')
-
-const registerAccountMethods = require('./methods/accounts')
-const registerBlockMethods = require('./methods/blocks')
-const registerDelegateMethods = require('./methods/delegates')
-const registerTransactionMethods = require('./methods/transactions')
-
-/**
- * Register the v1 routes.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- registerAccountMethods(server)
- registerBlockMethods(server)
- registerDelegateMethods(server)
- registerTransactionMethods(server)
-
- server.route([
- { method: 'GET', path: '/accounts/getAllAccounts', ...accounts.index },
- { method: 'GET', path: '/accounts', ...accounts.show },
- { method: 'GET', path: '/accounts/getBalance', ...accounts.balance },
- { method: 'GET', path: '/accounts/getPublicKey', ...accounts.publicKey },
- { method: 'GET', path: '/accounts/delegates/fee', ...accounts.fee },
- { method: 'GET', path: '/accounts/delegates', ...accounts.delegates },
- { method: 'GET', path: '/accounts/top', ...accounts.top },
- { method: 'GET', path: '/accounts/count', ...accounts.count },
-
- { method: 'GET', path: '/blocks', ...blocks.index },
- { method: 'GET', path: '/blocks/get', ...blocks.show },
- { method: 'GET', path: '/blocks/getEpoch', ...blocks.epoch },
- { method: 'GET', path: '/blocks/getHeight', ...blocks.height },
- { method: 'GET', path: '/blocks/getheight', ...blocks.height }, // desktop wallet inconsistency
- { method: 'GET', path: '/blocks/getNethash', ...blocks.nethash },
- { method: 'GET', path: '/blocks/getFee', ...blocks.fee },
- { method: 'GET', path: '/blocks/getFees', ...blocks.fees },
- { method: 'GET', path: '/blocks/getfees', ...blocks.fees }, // desktop wallet inconsistency
- { method: 'GET', path: '/blocks/getMilestone', ...blocks.milestone },
- { method: 'GET', path: '/blocks/getReward', ...blocks.reward },
- { method: 'GET', path: '/blocks/getSupply', ...blocks.supply },
- { method: 'GET', path: '/blocks/getStatus', ...blocks.status },
-
- { method: 'GET', path: '/delegates', ...delegates.index },
- { method: 'GET', path: '/delegates/get', ...delegates.show },
- { method: 'GET', path: '/delegates/count', ...delegates.count },
- { method: 'GET', path: '/delegates/search', ...delegates.search },
- { method: 'GET', path: '/delegates/voters', ...delegates.voters },
- { method: 'GET', path: '/delegates/fee', ...delegates.fee },
- {
- method: 'GET',
- path: '/delegates/forging/getForgedByAccount',
- ...delegates.forged,
- },
- {
- method: 'GET',
- path: '/delegates/getNextForgers',
- ...delegates.nextForgers,
- },
-
- { method: 'GET', path: '/loader/status', ...loader.status },
- { method: 'GET', path: '/loader/status/sync', ...loader.syncing },
- { method: 'GET', path: '/loader/autoconfigure', ...loader.autoconfigure },
-
- { method: 'GET', path: '/peers', ...peers.index },
- { method: 'GET', path: '/peers/get', ...peers.show },
- { method: 'GET', path: '/peers/version', ...peers.version },
-
- { method: 'GET', path: '/signatures/fee', ...signatures.fee },
-
- { method: 'GET', path: '/transactions', ...transactions.index },
- { method: 'GET', path: '/transactions/get', ...transactions.show },
- {
- method: 'GET',
- path: '/transactions/unconfirmed',
- ...transactions.unconfirmed,
- },
- {
- method: 'GET',
- path: '/transactions/unconfirmed/get',
- ...transactions.showUnconfirmed,
- },
- ])
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: 'Ark Public API - v1',
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-api/lib/versions/1/methods/accounts.js b/packages/core-api/lib/versions/1/methods/accounts.js
deleted file mode 100644
index a3533391d7..0000000000
--- a/packages/core-api/lib/versions/1/methods/accounts.js
+++ /dev/null
@@ -1,98 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const utils = require('../utils')
-
-const database = app.resolvePlugin('database')
-
-const index = async request => {
- const { rows } = await database.wallets.findAll({
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.respondWith({
- accounts: utils.toCollection(request, rows, 'account'),
- })
-}
-
-const show = async request => {
- const account = await database.wallets.findById(request.query.address)
-
- if (!account) {
- return utils.respondWith('Account not found', true)
- }
-
- return utils.respondWith({
- account: utils.toResource(request, account, 'account'),
- })
-}
-
-const balance = async request => {
- const account = await database.wallets.findById(request.query.address)
-
- if (!account) {
- return utils.respondWith({ balance: '0', unconfirmedBalance: '0' })
- }
-
- return utils.respondWith({
- balance: account ? `${account.balance}` : '0',
- unconfirmedBalance: account ? `${account.balance}` : '0',
- })
-}
-
-const publicKey = async request => {
- const account = await database.wallets.findById(request.query.address)
-
- if (!account) {
- return utils.respondWith('Account not found', true)
- }
-
- return utils.respondWith({ publicKey: account.publicKey })
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v1.accounts.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v1.accounts.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({ address: request.query.address }),
- })
-
- server.method('v1.accounts.balance', balance, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({ address: request.query.address }),
- })
-
- server.method('v1.accounts.publicKey', publicKey, {
- cache: {
- expiresIn: 600 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({ address: request.query.address }),
- })
-}
diff --git a/packages/core-api/lib/versions/1/methods/blocks.js b/packages/core-api/lib/versions/1/methods/blocks.js
deleted file mode 100644
index 3a3a12a477..0000000000
--- a/packages/core-api/lib/versions/1/methods/blocks.js
+++ /dev/null
@@ -1,60 +0,0 @@
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const { blocks: blocksRepository } = require('../../../repositories')
-const utils = require('../utils')
-
-const index = async request => {
- const { count, rows } = await blocksRepository.findAll({
- ...request.query,
- ...utils.paginate(request),
- })
-
- if (!rows) {
- return utils.respondWith('No blocks found', true)
- }
-
- return utils.respondWith({
- blocks: utils.toCollection(request, rows, 'block'),
- count,
- })
-}
-
-const show = async request => {
- const block = await blocksRepository.findById(request.query.id)
-
- if (!block) {
- return utils.respondWith(
- `Block with id ${request.query.id} not found`,
- true,
- )
- }
-
- return utils.respondWith({
- block: utils.toResource(request, block, 'block'),
- })
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v1.blocks.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v1.blocks.show', show, {
- cache: {
- expiresIn: 600 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.query.id }),
- })
-}
diff --git a/packages/core-api/lib/versions/1/methods/delegates.js b/packages/core-api/lib/versions/1/methods/delegates.js
deleted file mode 100644
index 89ff3b62b7..0000000000
--- a/packages/core-api/lib/versions/1/methods/delegates.js
+++ /dev/null
@@ -1,134 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const utils = require('../utils')
-
-const database = app.resolvePlugin('database')
-
-const index = async request => {
- const { count, rows } = await database.delegates.paginate({
- ...request.query,
- ...{
- offset: request.query.offset || 0,
- limit: request.query.limit || 51,
- },
- })
-
- return utils.respondWith({
- delegates: utils.toCollection(request, rows, 'delegate'),
- totalCount: count,
- })
-}
-
-const show = async request => {
- if (!request.query.publicKey && !request.query.username) {
- return utils.respondWith('Delegate not found', true)
- }
-
- const delegate = await database.delegates.findById(
- request.query.publicKey || request.query.username,
- )
-
- if (!delegate) {
- return utils.respondWith('Delegate not found', true)
- }
-
- return utils.respondWith({
- delegate: utils.toResource(request, delegate, 'delegate'),
- })
-}
-
-const count = async request => {
- const delegate = await database.delegates.findAll()
-
- return utils.respondWith({ count: delegate.count })
-}
-
-const search = async request => {
- const { rows } = await database.delegates.search({
- ...{ username: request.query.q },
- ...utils.paginate(request),
- })
-
- return utils.respondWith({
- delegates: utils.toCollection(request, rows, 'delegate'),
- })
-}
-
-const voters = async request => {
- const delegate = await database.delegates.findById(request.query.publicKey)
-
- if (!delegate) {
- return utils.respondWith({
- accounts: [],
- })
- }
-
- const accounts = await database.wallets.findAllByVote(delegate.publicKey)
-
- return utils.respondWith({
- accounts: utils.toCollection(request, accounts.rows, 'voter'),
- })
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v1.delegates.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...{
- offset: request.query.offset || 0,
- limit: request.query.limit || 51,
- },
- }),
- })
-
- server.method('v1.delegates.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- id: request.query.publicKey || request.query.username,
- }),
- })
-
- server.method('v1.delegates.count', count, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ time: +new Date() }),
- })
-
- server.method('v1.delegates.search', search, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...{ username: request.query.q },
- ...utils.paginate(request),
- }),
- })
-
- server.method('v1.delegates.voters', voters, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.query.publicKey }),
- })
-}
diff --git a/packages/core-api/lib/versions/1/methods/transactions.js b/packages/core-api/lib/versions/1/methods/transactions.js
deleted file mode 100644
index 6a160100f5..0000000000
--- a/packages/core-api/lib/versions/1/methods/transactions.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const {
- transactions: transactionsRepository,
-} = require('../../../repositories')
-const utils = require('../utils')
-
-const index = async request => {
- const { count, rows } = await transactionsRepository.findAllLegacy({
- ...request.query,
- ...utils.paginate(request),
- })
-
- if (!rows) {
- return utils.respondWith('No transactions found', true)
- }
-
- return utils.respondWith({
- transactions: utils.toCollection(request, rows, 'transaction'),
- count,
- })
-}
-
-const show = async request => {
- const result = await transactionsRepository.findById(request.query.id)
-
- if (!result) {
- return utils.respondWith('No transactions found', true)
- }
-
- return utils.respondWith({
- transaction: utils.toResource(request, result, 'transaction'),
- })
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v1.transactions.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v1.transactions.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.query.id }),
- })
-}
diff --git a/packages/core-api/lib/versions/1/schemas/accounts.js b/packages/core-api/lib/versions/1/schemas/accounts.js
deleted file mode 100755
index 3e64a77fc0..0000000000
--- a/packages/core-api/lib/versions/1/schemas/accounts.js
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * The AJV schema for the account endpoints.
- * @type {Object}
- */
-module.exports = {
- getBalance: {
- type: 'object',
- properties: {
- address: {
- type: 'string',
- minLength: 1,
- format: 'address',
- },
- },
- required: ['address'],
- },
- getPublicKey: {
- type: 'object',
- properties: {
- address: {
- type: 'string',
- minLength: 1,
- format: 'address',
- },
- },
- required: ['address'],
- },
- generatePublicKey: {
- type: 'object',
- properties: {
- secret: {
- type: 'string',
- minLength: 1,
- },
- },
- required: ['secret'],
- },
- getDelegates: {
- type: 'object',
- properties: {
- address: {
- type: 'string',
- minLength: 1,
- format: 'address',
- },
- },
- required: ['address'],
- },
- getAccount: {
- type: 'object',
- properties: {
- address: {
- type: 'string',
- minLength: 1,
- format: 'address',
- },
- },
- required: ['address'],
- },
- top: {
- type: 'object',
- properties: {
- limit: {
- type: 'integer',
- minimum: 0,
- maximum: 100,
- },
- offset: {
- type: 'integer',
- minimum: 0,
- },
- },
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/blocks.js b/packages/core-api/lib/versions/1/schemas/blocks.js
deleted file mode 100755
index 699cc691d4..0000000000
--- a/packages/core-api/lib/versions/1/schemas/blocks.js
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * The AJV schema for the block endpoints.
- * @type {Object}
- */
-module.exports = {
- getBlock: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- minLength: 1,
- },
- },
- required: ['id'],
- },
- getBlocks: {
- type: 'object',
- properties: {
- limit: {
- type: 'integer',
- minimum: 0,
- maximum: 100,
- },
- orderBy: {
- type: 'string',
- },
- offset: {
- type: 'integer',
- minimum: 0,
- },
- generatorPublicKey: {
- type: 'string',
- format: 'publicKey',
- },
- totalAmount: {
- type: 'integer',
- minimum: 0,
- },
- totalFee: {
- type: 'integer',
- minimum: 0,
- },
- reward: {
- type: 'integer',
- minimum: 0,
- },
- previousBlock: {
- type: 'string',
- },
- height: {
- type: 'integer',
- },
- },
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/delegates.js b/packages/core-api/lib/versions/1/schemas/delegates.js
deleted file mode 100755
index 7ce2953c4d..0000000000
--- a/packages/core-api/lib/versions/1/schemas/delegates.js
+++ /dev/null
@@ -1,88 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const lastBlock = app.resolvePlugin('blockchain').getLastBlock()
-
-/**
- * The AJV schema for the delegate endpoints.
- * @type {Object}
- */
-module.exports = {
- forgingStatus: {
- type: 'object',
- properties: {
- publicKey: {
- type: 'string',
- format: 'publicKey',
- },
- },
- required: ['publicKey'],
- },
- getDelegate: {
- type: 'object',
- properties: {
- publicKey: {
- type: 'string',
- },
- username: {
- type: 'string',
- },
- },
- },
- search: {
- type: 'object',
- properties: {
- q: {
- type: 'string',
- minLength: 1,
- maxLength: 20,
- },
- limit: {
- type: 'integer',
- minimum: 1,
- maximum: 100,
- },
- },
- required: ['q'],
- },
- getVoters: {
- type: 'object',
- properties: {
- publicKey: {
- type: 'string',
- format: 'publicKey',
- },
- },
- required: ['publicKey'],
- },
- getDelegates: {
- type: 'object',
- properties: {
- orderBy: {
- type: 'string',
- },
- limit: {
- type: 'integer',
- minimum: 1,
- maximum: lastBlock
- ? app
- .resolvePlugin('config')
- .getConstants(lastBlock.data.height).activeDelegates
- : 51,
- },
- offset: {
- type: 'integer',
- minimum: 0,
- },
- },
- },
- getForgedByAccount: {
- type: 'object',
- properties: {
- generatorPublicKey: {
- type: 'string',
- format: 'publicKey',
- },
- },
- required: ['generatorPublicKey'],
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/loader.js b/packages/core-api/lib/versions/1/schemas/loader.js
deleted file mode 100755
index 72e4a5ab05..0000000000
--- a/packages/core-api/lib/versions/1/schemas/loader.js
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * The AJV schema for the loader endpoints.
- * @type {Object}
- */
-module.exports = {
- loadSignatures: {
- type: 'object',
- properties: {
- signatures: {
- type: 'array',
- uniqueItems: true,
- },
- },
- required: ['signatures'],
- },
- loadUnconfirmedTransactions: {
- type: 'object',
- properties: {
- transactions: {
- type: 'array',
- uniqueItems: true,
- },
- },
- required: ['transactions'],
- },
- getNetwork: {
- peers: {
- type: 'object',
- properties: {
- peers: {
- type: 'array',
- uniqueItems: true,
- },
- },
- required: ['peers'],
- },
- peer: {
- type: 'object',
- properties: {
- ip: {
- type: 'string',
- format: 'ip',
- },
- port: {
- type: 'integer',
- minimum: 1,
- maximum: 65535,
- },
- state: {
- type: 'integer',
- minimum: 0,
- maximum: 3,
- },
- os: {
- type: 'string',
- },
- version: {
- type: 'string',
- },
- },
- required: ['ip', 'port'],
- },
- height: {
- type: 'object',
- properties: {
- height: {
- type: 'integer',
- minimum: 0,
- },
- id: {
- type: 'string',
- minLength: 1,
- },
- },
- required: ['height'],
- },
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/peers.js b/packages/core-api/lib/versions/1/schemas/peers.js
deleted file mode 100755
index d0625846bd..0000000000
--- a/packages/core-api/lib/versions/1/schemas/peers.js
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * The AJV schema for the peer endpoints.
- * @type {Object}
- */
-module.exports = {
- headers: {
- type: 'object',
- properties: {
- port: {
- type: 'integer',
- minimum: 1,
- maximum: 65535,
- },
- os: {
- type: 'string',
- maxLength: 64,
- },
- nethash: {
- type: 'string',
- maxLength: 64,
- },
- height: {
- type: 'integer',
- minimum: 0,
- },
- version: {
- type: 'string',
- maxLength: 11,
- },
- blockheader: {
- type: 'object',
- },
- },
- required: ['port', 'nethash', 'version'],
- },
- updatePeersList: {
- peers: {
- type: 'object',
- properties: {
- peers: {
- type: 'array',
- uniqueItems: true,
- },
- },
- required: ['peers'],
- },
- peer: {
- type: 'object',
- properties: {
- ip: {
- type: 'string',
- format: 'ip',
- },
- port: {
- type: 'integer',
- minimum: 1,
- maximum: 65535,
- },
- state: {
- type: 'integer',
- minimum: 0,
- maximum: 3,
- },
- os: {
- type: 'string',
- maxLength: 64,
- },
- version: {
- type: 'string',
- maxLength: 11,
- },
- },
- required: ['ip', 'port'],
- },
- },
- getPeers: {
- type: 'object',
- properties: {
- port: {
- type: 'integer',
- minimum: 1,
- maximum: 65535,
- },
- status: {
- type: 'string',
- maxLength: 20,
- },
- os: {
- type: 'string',
- maxLength: 64,
- },
- version: {
- type: 'string',
- maxLength: 11,
- },
- orderBy: {
- type: 'string',
- },
- limit: {
- type: 'integer',
- minimum: 0,
- maximum: 100,
- },
- offset: {
- type: 'integer',
- minimum: 0,
- },
- },
- },
- getPeer: {
- type: 'object',
- properties: {
- ip: {
- type: 'string',
- format: 'ip',
- },
- port: {
- type: 'integer',
- minimum: 0,
- maximum: 65535,
- },
- },
- required: ['ip', 'port'],
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/signatures.js b/packages/core-api/lib/versions/1/schemas/signatures.js
deleted file mode 100755
index bfe01d1373..0000000000
--- a/packages/core-api/lib/versions/1/schemas/signatures.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/**
- * The AJV schema for the signature endpoints.
- * @type {Object}
- */
-module.exports = {
- getFee: {
- type: 'object',
- properties: {
- address: {
- type: 'string',
- minLength: 1,
- format: 'address',
- },
- },
- required: ['address'],
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/transactions.js b/packages/core-api/lib/versions/1/schemas/transactions.js
deleted file mode 100755
index f100b71157..0000000000
--- a/packages/core-api/lib/versions/1/schemas/transactions.js
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * The AJV schema for the transaction endpoints.
- * @type {Object}
- */
-module.exports = {
- getTransactions: {
- type: 'object',
- properties: {
- blockId: {
- type: 'string',
- },
- limit: {
- type: 'integer',
- minimum: 0,
- maximum: 100,
- },
- type: {
- type: 'integer',
- minimum: 0,
- maximum: 10,
- },
- orderBy: {
- type: 'string',
- },
- offset: {
- type: 'integer',
- minimum: 0,
- },
- senderPublicKey: {
- type: 'string',
- format: 'publicKey',
- },
- vendorField: {
- type: 'string',
- format: 'vendorField',
- },
- ownerPublicKey: {
- type: 'string',
- format: 'publicKey',
- },
- ownerAddress: {
- type: 'string',
- },
- senderId: {
- type: 'string',
- format: 'address',
- },
- recipientId: {
- type: 'string',
- format: 'address',
- },
- amount: {
- type: 'integer',
- minimum: 0,
- maximum: 10 ** 8,
- },
- fee: {
- type: 'integer',
- minimum: 0,
- maximum: 10 ** 8,
- },
- },
- },
- getTransaction: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- minLength: 1,
- },
- },
- required: ['id'],
- },
- getUnconfirmedTransaction: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- minLength: 1,
- },
- },
- required: ['id'],
- },
- getUnconfirmedTransactions: {
- type: 'object',
- properties: {
- senderPublicKey: {
- type: 'string',
- format: 'publicKey',
- },
- address: {
- type: 'string',
- },
- },
- },
-}
diff --git a/packages/core-api/lib/versions/1/schemas/transport.js b/packages/core-api/lib/versions/1/schemas/transport.js
deleted file mode 100644
index 125c5f03c3..0000000000
--- a/packages/core-api/lib/versions/1/schemas/transport.js
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * The AJV schema for the transport endpoints.
- * @type {Object}
- */
-module.exports = {
- headers: {
- type: 'object',
- properties: {
- ip: {
- type: 'string',
- format: 'ip',
- },
- port: {
- type: 'integer',
- minimum: 1,
- maximum: 65535,
- },
- os: {
- type: 'string',
- maxLength: 64,
- },
- nethash: {
- type: 'string',
- maxLength: 64,
- },
- version: {
- type: 'string',
- maxLength: 11,
- },
- },
- required: ['ip', 'port', 'nethash', 'version'],
- },
- commonBlocks: {
- type: 'object',
- properties: {
- ids: {
- type: 'string',
- format: 'csv',
- },
- },
- required: ['ids'],
- },
- transactionsFromIds: {
- type: 'object',
- properties: {
- ids: {
- type: 'string',
- format: 'csv',
- },
- },
- required: ['ids'],
- },
- blocks: {
- type: 'object',
- properties: {
- lastBlockHeight: {
- type: 'integer',
- },
- },
- },
- block: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- },
- },
- },
- signatures: {
- type: 'object',
- properties: {
- signature: {
- type: 'object',
- properties: {
- transaction: {
- type: 'string',
- },
- signature: {
- type: 'string',
- format: 'signature',
- },
- },
- required: ['transaction', 'signature'],
- },
- },
- required: ['signature'],
- },
- transactions: {
- id: 'nodeManager.transactions',
- type: 'array',
- uniqueItems: true,
- required: ['transactions'],
- },
-}
diff --git a/packages/core-api/lib/versions/1/transformers/account.js b/packages/core-api/lib/versions/1/transformers/account.js
deleted file mode 100644
index 42552aa92e..0000000000
--- a/packages/core-api/lib/versions/1/transformers/account.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/* eslint camelcase: "off" */
-
-/**
- * Turns a "wallet" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const hasSecondSignature = !!model.secondPublicKey
-
- return {
- address: model.address,
- publicKey: model.publicKey,
- secondPublicKey: model.secondPublicKey,
- votes: model.votes,
- username: model.username,
- balance: `${model.balance}`,
- unconfirmedBalance: `${model.balance}`,
- multisignatures: [],
- u_multisignatures: [],
- unconfirmedSignature: hasSecondSignature ? 1 : 0,
- secondSignature: hasSecondSignature ? 1 : 0,
- }
-}
diff --git a/packages/core-api/lib/versions/1/transformers/block.js b/packages/core-api/lib/versions/1/transformers/block.js
deleted file mode 100644
index f400f8dcd7..0000000000
--- a/packages/core-api/lib/versions/1/transformers/block.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-const blockchain = require('@arkecosystem/core-container').resolvePlugin(
- 'blockchain',
-)
-
-/**
- * Turns a "block" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const lastBlock = blockchain.getLastBlock()
-
- return {
- id: model.id,
- version: model.version,
- timestamp: model.timestamp,
- previousBlock: model.previousBlock,
- height: model.height,
- numberOfTransactions: model.numberOfTransactions,
- totalAmount: +bignumify(model.totalAmount).toFixed(),
- totalForged: +bignumify(model.reward)
- .plus(model.totalFee)
- .toString(),
- totalFee: +bignumify(model.totalFee).toFixed(),
- reward: +bignumify(model.reward).toFixed(),
- payloadLength: model.payloadLength,
- payloadHash: model.payloadHash,
- generatorPublicKey: model.generatorPublicKey,
- blockSignature: model.blockSignature,
- confirmations: lastBlock ? lastBlock.data.height - model.height : 0,
- }
-}
diff --git a/packages/core-api/lib/versions/1/transformers/delegate.js b/packages/core-api/lib/versions/1/transformers/delegate.js
deleted file mode 100644
index b453280f1c..0000000000
--- a/packages/core-api/lib/versions/1/transformers/delegate.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const { delegateCalculator } = require('@arkecosystem/core-utils')
-
-/**
- * Turns a "delegate" object into a generic object.
- * @param {Object} delegate
- * @return {Object}
- */
-module.exports = delegate => ({
- username: delegate.username,
- address: delegate.address,
- publicKey: delegate.publicKey,
- vote: `${delegate.voteBalance}`,
- producedblocks: delegate.producedBlocks,
- missedblocks: delegate.missedBlocks,
- forged: delegate.forged,
- rate: delegate.rate,
- approval: delegateCalculator.calculateApproval(delegate),
- productivity: delegateCalculator.calculateProductivity(delegate),
-})
diff --git a/packages/core-api/lib/versions/1/transformers/fee-statistics.js b/packages/core-api/lib/versions/1/transformers/fee-statistics.js
deleted file mode 100644
index 2a5e5fd250..0000000000
--- a/packages/core-api/lib/versions/1/transformers/fee-statistics.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/**
- * Turns a "fee-statistics" object into readable object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => ({
- type: model.type,
- fees: {
- minFee: parseInt(model.minFee),
- maxFee: parseInt(model.maxFee),
- avgFee: parseInt(model.avgFee),
- },
-})
diff --git a/packages/core-api/lib/versions/1/transformers/peer.js b/packages/core-api/lib/versions/1/transformers/peer.js
deleted file mode 100644
index cde2573111..0000000000
--- a/packages/core-api/lib/versions/1/transformers/peer.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-
-/**
- * Turns a "peer" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const peer = {
- ip: model.ip,
- port: model.port,
- version: model.version,
- height: model.height,
- status: model.status,
- os: model.os,
- delay: model.delay,
- }
-
- if (config.network.name !== 'mainnet') {
- peer.hashid = model.hashid
- }
-
- return peer
-}
diff --git a/packages/core-api/lib/versions/1/transformers/ports.js b/packages/core-api/lib/versions/1/transformers/ports.js
deleted file mode 100644
index 8c8615740f..0000000000
--- a/packages/core-api/lib/versions/1/transformers/ports.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Turns a "config" object into readable object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = config => {
- const result = {}
- const keys = [
- '@arkecosystem/core-p2p',
- '@arkecosystem/core-api',
- '@arkecosystem/core-graphql',
- '@arkecosystem/core-json-rpc',
- '@arkecosystem/core-webhooks',
- ]
-
- for (const [name, options] of Object.entries(config.plugins)) {
- if (keys.includes(name) && options.enabled) {
- if (options.server && options.server.enabled) {
- result[name] = options.server.port
-
- continue
- }
-
- result[name] = options.port
- }
- }
-
- return result
-}
diff --git a/packages/core-api/lib/versions/1/transformers/transaction.js b/packages/core-api/lib/versions/1/transformers/transaction.js
deleted file mode 100644
index fbe730c867..0000000000
--- a/packages/core-api/lib/versions/1/transformers/transaction.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { crypto } = require('@arkecosystem/crypto')
-const { bignumify } = require('@arkecosystem/core-utils')
-
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-
-const { Transaction } = require('@arkecosystem/crypto').models
-
-/**
- * Turns a "transaction" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const data = new Transaction(model.serialized.toString('hex'))
-
- return {
- id: data.id,
- blockid: model.blockId,
- type: data.type,
- timestamp: model.timestamp || data.timestamp,
- amount: +bignumify(data.amount).toFixed(),
- fee: +bignumify(data.fee).toFixed(),
- recipientId: data.recipientId,
- senderId: crypto.getAddress(
- data.senderPublicKey,
- config.network.pubKeyHash,
- ),
- senderPublicKey: data.senderPublicKey,
- vendorField: data.vendorField,
- signature: data.signature,
- signSignature: data.signSignature,
- signatures: data.signatures,
- asset: data.asset || {},
- confirmations: model.block
- ? blockchain.getLastBlock().data.height - model.block.height
- : 0,
- }
-}
diff --git a/packages/core-api/lib/versions/1/transformers/voter.js b/packages/core-api/lib/versions/1/transformers/voter.js
deleted file mode 100644
index e40f5b76c6..0000000000
--- a/packages/core-api/lib/versions/1/transformers/voter.js
+++ /dev/null
@@ -1,11 +0,0 @@
-/**
- * Turns a "voter" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => ({
- username: model.username,
- address: model.address,
- publicKey: model.publicKey,
- balance: `${model.balance}`,
-})
diff --git a/packages/core-api/lib/versions/1/utils.js b/packages/core-api/lib/versions/1/utils.js
deleted file mode 100644
index eeaf7dbbbb..0000000000
--- a/packages/core-api/lib/versions/1/utils.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/* eslint max-len: "off" */
-
-const {
- transformResource,
- transformCollection,
-} = require('../../utils/transformer')
-
-/**
- * Create a pagination object for the request.
- * @param {Hapi.Request} request
- * @return {Object}
- */
-const paginate = request => ({
- offset: request.query.offset || 0,
- limit: request.query.limit || 100,
-})
-
-/**
- * Create a hapi.js response.
- * @param {Object} data
- * @param {Boolean} error
- * @return {Object}
- */
-const respondWith = (data, error = false) =>
- error ? { error: data, success: false } : { ...data, success: true }
-
-/**
- * Respond with data from cache.
- * @param {Object} data
- * @param {Hapi.Toolkit} h
- * @return {Object}
- */
-const respondWithCache = (data, h) => {
- const { value, cached } = data
- const lastModified = cached ? new Date(cached.stored) : new Date()
-
- return value.isBoom
- ? h.response(value.output.payload).code(value.output.statusCode)
- : h.response(value).header('Last-modified', lastModified.toUTCString())
-}
-
-/**
- * Transform the given data into a resource.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const toResource = (request, data, transformer) =>
- transformResource(request, data, transformer)
-
-/**
- * Transform the given data into a collection.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const toCollection = transformCollection
-
-/**
- * @type {Object}
- */
-module.exports = {
- paginate,
- respondWith,
- respondWithCache,
- toResource,
- toCollection,
-}
diff --git a/packages/core-api/lib/versions/2/handlers/blockchain.js b/packages/core-api/lib/versions/2/handlers/blockchain.js
deleted file mode 100644
index e41b419ed8..0000000000
--- a/packages/core-api/lib/versions/2/handlers/blockchain.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const { bignumify, supplyCalculator } = require('@arkecosystem/core-utils')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
-
- return {
- data: {
- block: {
- height: lastBlock.data.height,
- id: lastBlock.data.id,
- },
- supply: supplyCalculator.calculate(lastBlock.data.height),
- },
- }
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/blocks.js b/packages/core-api/lib/versions/2/handlers/blocks.js
deleted file mode 100644
index 8f5356be0e..0000000000
--- a/packages/core-api/lib/versions/2/handlers/blocks.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const { respondWithCache } = require('../utils')
-const schema = require('../schema/blocks')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.blocks.index(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.blocks.show(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.show,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactions = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.blocks.transactions(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.transactions,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.blocks.search(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.search,
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/delegates.js b/packages/core-api/lib/versions/2/handlers/delegates.js
deleted file mode 100644
index 71d9626cf7..0000000000
--- a/packages/core-api/lib/versions/2/handlers/delegates.js
+++ /dev/null
@@ -1,118 +0,0 @@
-const { respondWithCache } = require('../utils')
-const schema = require('../schema/delegates')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.index(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.show(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.show,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.search(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.search,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.blocks = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.blocks(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.blocks,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.voters = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.voters(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.voters,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.voterBalances = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.delegates.voterBalances(
- request,
- )
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.voterBalances,
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/node.js b/packages/core-api/lib/versions/2/handlers/node.js
deleted file mode 100644
index b2a164a521..0000000000
--- a/packages/core-api/lib/versions/2/handlers/node.js
+++ /dev/null
@@ -1,84 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const blockchain = app.resolvePlugin('blockchain')
-const config = app.resolvePlugin('config')
-const utils = require('../utils')
-const { transactions } = require('../../../repositories')
-
-/**
- * @type {Object}
- */
-exports.status = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
- const networkHeight = await blockchain.p2p.getNetworkHeight()
-
- return {
- data: {
- synced: blockchain.isSynced(),
- now: lastBlock ? lastBlock.data.height : 0,
- blocksCount: networkHeight - lastBlock.data.height || 0,
- },
- }
- },
-}
-
-/**
- * @type {Object}
- */
-exports.syncing = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const lastBlock = blockchain.getLastBlock()
- const networkHeight = await blockchain.p2p.getNetworkHeight()
-
- return {
- data: {
- syncing: !blockchain.isSynced(),
- blocks: networkHeight - lastBlock.data.height || 0,
- height: lastBlock.data.height,
- id: lastBlock.data.id,
- },
- }
- },
-}
-
-/**
- * @type {Object}
- */
-exports.configuration = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const feeStatisticsData = await transactions.getFeeStatistics()
-
- return {
- data: {
- nethash: config.network.nethash,
- token: config.network.client.token,
- symbol: config.network.client.symbol,
- explorer: config.network.client.explorer,
- version: config.network.pubKeyHash,
- ports: utils.toResource(request, config, 'ports'),
- constants: config.getConstants(blockchain.getLastBlock().data.height),
- feeStatistics: utils.toCollection(
- request,
- feeStatisticsData,
- 'fee-statistics',
- ),
- },
- }
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/peers.js b/packages/core-api/lib/versions/2/handlers/peers.js
deleted file mode 100644
index c1039f8ffc..0000000000
--- a/packages/core-api/lib/versions/2/handlers/peers.js
+++ /dev/null
@@ -1,98 +0,0 @@
-const Boom = require('boom')
-const app = require('@arkecosystem/core-container')
-
-const blockchain = app.resolvePlugin('blockchain')
-const utils = require('../utils')
-const schema = require('../schema/peers')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const allPeers = await blockchain.p2p.getPeers()
-
- let result = allPeers.sort((a, b) => a.delay - b.delay)
- result = request.query.os
- ? result.filter(peer => peer.os === request.query.os)
- : result
- result = request.query.status
- ? result.filter(peer => peer.status === request.query.status)
- : result
- result = request.query.port
- ? result.filter(peer => peer.port === request.query.port)
- : result
- result = request.query.version
- ? result.filter(peer => peer.version === request.query.version)
- : result
- result = result.slice(0, request.query.limit || 100)
-
- if (request.query.orderBy) {
- const order = request.query.orderBy.split(':')
-
- if (['port', 'status', 'os', 'version'].includes(order[0])) {
- result = order[1].toUpperCase() === 'ASC'
- ? result.sort((a, b) => a[order[0]] - b[order[0]])
- : result.sort((a, b) => a[order[0]] + b[order[0]])
- }
- }
-
- return utils.toPagination(
- request,
- { rows: result, count: allPeers.length },
- 'peer',
- )
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const peers = await blockchain.p2p.getPeers()
- const peer = peers.find(p => p.ip === request.params.ip)
-
- if (!peer) {
- return Boom.notFound('Peer not found')
- }
-
- return utils.respondWithResource(request, peer, 'peer')
- },
- options: {
- validate: schema.show,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.suspended = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const peers = app.resolvePlugin('p2p').getSuspendedPeers()
-
- return utils.respondWithCollection(
- request,
- Object.values(peers).map(peer => peer.peer),
- 'peer',
- )
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/transactions.js b/packages/core-api/lib/versions/2/handlers/transactions.js
deleted file mode 100644
index cda76c5e98..0000000000
--- a/packages/core-api/lib/versions/2/handlers/transactions.js
+++ /dev/null
@@ -1,214 +0,0 @@
-const Boom = require('boom')
-
-const { TRANSACTION_TYPES } = require('@arkecosystem/crypto').constants
-const { TransactionGuard } = require('@arkecosystem/core-transaction-pool')
-
-const app = require('@arkecosystem/core-container')
-
-const blockchain = app.resolvePlugin('blockchain')
-const config = app.resolvePlugin('config')
-const transactionPool = app.resolvePlugin('transactionPool')
-
-const utils = require('../utils')
-const schema = require('../schema/transactions')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.transactions.index(request)
-
- return utils.respondWithCache(data, h)
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.store = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- if (!transactionPool.options.enabled) {
- return Boom.serverUnavailable('Transaction pool is disabled.')
- }
-
- const guard = new TransactionGuard(transactionPool)
-
- const result = await guard.validate(request.payload.transactions)
-
- if (result.broadcast.length > 0) {
- app
- .resolvePlugin('p2p')
- .broadcastTransactions(guard.getBroadcastTransactions())
- }
-
- return {
- data: {
- accept: result.accept,
- broadcast: result.broadcast,
- excess: result.excess,
- invalid: result.invalid,
- },
- errors: result.errors,
- }
- },
- options: {
- validate: schema.store,
- plugins: {
- pagination: {
- enabled: false,
- },
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.transactions.show(request)
-
- return utils.respondWithCache(data, h)
- },
- options: {
- validate: schema.show,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.unconfirmed = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- if (!transactionPool.options.enabled) {
- return Boom.serverUnavailable('Transaction pool is disabled.')
- }
-
- const pagination = utils.paginate(request)
-
- let transactions = transactionPool.getTransactions(
- pagination.offset,
- pagination.limit,
- )
- transactions = transactions.map(transaction => ({
- serialized: transaction,
- }))
-
- return utils.toPagination(
- request,
- {
- count: transactionPool.getPoolSize(),
- rows: transactions,
- },
- 'transaction',
- )
- },
- options: {
- validate: schema.unconfirmed,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.showUnconfirmed = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- handler(request, h) {
- if (!transactionPool.options.enabled) {
- return Boom.serverUnavailable('Transaction pool is disabled.')
- }
-
- let transaction = transactionPool.getTransaction(request.params.id)
-
- if (!transaction) {
- return Boom.notFound('Transaction not found')
- }
-
- transaction = { serialized: transaction.serialized }
-
- return utils.respondWithResource(request, transaction, 'transaction')
- },
- options: {
- validate: schema.showUnconfirmed,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.transactions.search(request)
-
- return utils.respondWithCache(data, h)
- },
- options: {
- validate: schema.search,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.types = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- return {
- data: TRANSACTION_TYPES,
- }
- },
-}
-
-/**
- * @type {Object}
- */
-exports.fees = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- return {
- data: config.getConstants(blockchain.getLastBlock().data.height).fees
- .staticFees,
- }
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/votes.js b/packages/core-api/lib/versions/2/handlers/votes.js
deleted file mode 100644
index 18804a03be..0000000000
--- a/packages/core-api/lib/versions/2/handlers/votes.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const { respondWithCache } = require('../utils')
-const schema = require('../schema/votes')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.votes.index(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.votes.show(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.show,
- },
-}
diff --git a/packages/core-api/lib/versions/2/handlers/wallets.js b/packages/core-api/lib/versions/2/handlers/wallets.js
deleted file mode 100644
index 44e31f8da8..0000000000
--- a/packages/core-api/lib/versions/2/handlers/wallets.js
+++ /dev/null
@@ -1,156 +0,0 @@
-const { respondWithCache } = require('../utils')
-const schema = require('../schema/wallets')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.index(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.index,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.top = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.top(request)
-
- return respondWithCache(data, h)
- },
- // TODO: create top schema
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.show(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.show,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactions = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.transactions(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.transactions,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactionsSent = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.transactionsSent(
- request,
- )
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.transactionsSent,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactionsReceived = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.transactionsReceived(
- request,
- )
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.transactionsReceived,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.votes = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.votes(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.votes,
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const data = await request.server.methods.v2.wallets.search(request)
-
- return respondWithCache(data, h)
- },
- options: {
- validate: schema.search,
- },
-}
diff --git a/packages/core-api/lib/versions/2/index.js b/packages/core-api/lib/versions/2/index.js
deleted file mode 100644
index c16c9cd1be..0000000000
--- a/packages/core-api/lib/versions/2/index.js
+++ /dev/null
@@ -1,111 +0,0 @@
-const blockchain = require('./handlers/blockchain')
-const blocks = require('./handlers/blocks')
-const delegates = require('./handlers/delegates')
-const node = require('./handlers/node')
-const peers = require('./handlers/peers')
-const transactions = require('./handlers/transactions')
-const votes = require('./handlers/votes')
-const wallets = require('./handlers/wallets')
-
-const registerBlockMethods = require('./methods/blocks')
-const registerDelegateMethods = require('./methods/delegates')
-const registerTransactionMethods = require('./methods/transactions')
-const registerWalletMethods = require('./methods/wallets')
-const registerVoteMethods = require('./methods/votes')
-
-/**
- * Register the v2 routes.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- registerBlockMethods(server)
- registerDelegateMethods(server)
- registerTransactionMethods(server)
- registerWalletMethods(server)
- registerVoteMethods(server)
-
- server.route([
- { method: 'GET', path: '/blockchain', ...blockchain.index },
-
- { method: 'GET', path: '/blocks', ...blocks.index },
- { method: 'GET', path: '/blocks/{id}', ...blocks.show },
- {
- method: 'GET',
- path: '/blocks/{id}/transactions',
- ...blocks.transactions,
- },
- { method: 'POST', path: '/blocks/search', ...blocks.search },
-
- { method: 'GET', path: '/delegates', ...delegates.index },
- { method: 'GET', path: '/delegates/{id}', ...delegates.show },
- { method: 'GET', path: '/delegates/{id}/blocks', ...delegates.blocks },
- { method: 'GET', path: '/delegates/{id}/voters', ...delegates.voters },
- {
- method: 'GET',
- path: '/delegates/{id}/voters/balances',
- ...delegates.voterBalances,
- },
- { method: 'POST', path: '/delegates/search', ...delegates.search },
-
- { method: 'GET', path: '/node/status', ...node.status },
- { method: 'GET', path: '/node/syncing', ...node.syncing },
- { method: 'GET', path: '/node/configuration', ...node.configuration },
-
- { method: 'GET', path: '/peers', ...peers.index },
- { method: 'GET', path: '/peers/suspended', ...peers.suspended },
- { method: 'GET', path: '/peers/{ip}', ...peers.show },
-
- { method: 'GET', path: '/transactions', ...transactions.index },
- { method: 'POST', path: '/transactions', ...transactions.store },
- { method: 'GET', path: '/transactions/{id}', ...transactions.show },
- {
- method: 'GET',
- path: '/transactions/unconfirmed',
- ...transactions.unconfirmed,
- },
- {
- method: 'GET',
- path: '/transactions/unconfirmed/{id}',
- ...transactions.showUnconfirmed,
- },
- { method: 'POST', path: '/transactions/search', ...transactions.search },
- { method: 'GET', path: '/transactions/types', ...transactions.types },
- { method: 'GET', path: '/transactions/fees', ...transactions.fees },
-
- { method: 'GET', path: '/votes', ...votes.index },
- { method: 'GET', path: '/votes/{id}', ...votes.show },
-
- { method: 'GET', path: '/wallets', ...wallets.index },
- { method: 'GET', path: '/wallets/top', ...wallets.top },
- { method: 'GET', path: '/wallets/{id}', ...wallets.show },
- {
- method: 'GET',
- path: '/wallets/{id}/transactions',
- ...wallets.transactions,
- },
- {
- method: 'GET',
- path: '/wallets/{id}/transactions/sent',
- ...wallets.transactionsSent,
- },
- {
- method: 'GET',
- path: '/wallets/{id}/transactions/received',
- ...wallets.transactionsReceived,
- },
- { method: 'GET', path: '/wallets/{id}/votes', ...wallets.votes },
- { method: 'POST', path: '/wallets/search', ...wallets.search },
- ])
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: 'Ark Public API - v2',
- version: '2.0.0',
- register,
-}
diff --git a/packages/core-api/lib/versions/2/methods/blocks.js b/packages/core-api/lib/versions/2/methods/blocks.js
deleted file mode 100644
index 821cf32cfa..0000000000
--- a/packages/core-api/lib/versions/2/methods/blocks.js
+++ /dev/null
@@ -1,105 +0,0 @@
-const Boom = require('boom')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const {
- blocks: blocksRepository,
- transactions: transactionsRepository,
-} = require('../../../repositories')
-const utils = require('../utils')
-
-const index = async request => {
- const blocks = await blocksRepository.findAll({
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, blocks, 'block')
-}
-
-const show = async request => {
- const block = await blocksRepository.findById(request.params.id)
-
- if (!block) {
- return Boom.notFound('Block not found')
- }
-
- return utils.respondWithResource(request, block, 'block')
-}
-
-const transactions = async request => {
- const block = await blocksRepository.findById(request.params.id)
-
- if (!block) {
- return Boom.notFound('Block not found')
- }
-
- const rows = await transactionsRepository.findAllByBlock(block.id, {
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, rows, 'transaction')
-}
-
-const search = async request => {
- const blocks = await blocksRepository.search({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, blocks, 'block')
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v2.blocks.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.blocks.show', show, {
- cache: {
- expiresIn: 600 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-
- server.method('v2.blocks.transactions', transactions, {
- cache: {
- expiresIn: 600 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...{ id: request.params.id },
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.blocks.search', search, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-}
diff --git a/packages/core-api/lib/versions/2/methods/delegates.js b/packages/core-api/lib/versions/2/methods/delegates.js
deleted file mode 100644
index 7390c2f3f6..0000000000
--- a/packages/core-api/lib/versions/2/methods/delegates.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const Boom = require('boom')
-const orderBy = require('lodash/orderBy')
-const app = require('@arkecosystem/core-container')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const { blocks: blocksRepository } = require('../../../repositories')
-const utils = require('../utils')
-
-const database = app.resolvePlugin('database')
-
-const index = async request => {
- const delegates = await database.delegates.paginate({
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, delegates, 'delegate')
-}
-
-const show = async request => {
- const delegate = await database.delegates.findById(request.params.id)
-
- if (!delegate) {
- return Boom.notFound('Delegate not found')
- }
-
- return utils.respondWithResource(request, delegate, 'delegate')
-}
-
-const search = async request => {
- const delegates = await database.delegates.search({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, delegates, 'delegate')
-}
-
-const blocks = async request => {
- const delegate = await database.delegates.findById(request.params.id)
-
- if (!delegate) {
- return Boom.notFound('Delegate not found')
- }
-
- const rows = await blocksRepository.findAllByGenerator(
- delegate.publicKey,
- utils.paginate(request),
- )
-
- return utils.toPagination(request, rows, 'block')
-}
-
-const voters = async request => {
- const delegate = await database.delegates.findById(request.params.id)
-
- if (!delegate) {
- return Boom.notFound('Delegate not found')
- }
-
- const wallets = await database.wallets.findAllByVote(
- delegate.publicKey,
- utils.paginate(request),
- )
-
- return utils.toPagination(request, wallets, 'wallet')
-}
-
-const voterBalances = async request => {
- const delegate = await database.delegates.findById(request.params.id)
-
- if (!delegate) {
- return Boom.notFound('Delegate not found')
- }
-
- const wallets = await database.wallets
- .all()
- .filter(wallet => wallet.vote === delegate.publicKey)
-
- const data = {}
- orderBy(wallets, ['balance'], ['desc']).forEach(wallet => {
- data[wallet.address] = +wallet.balance.toFixed()
- })
-
- return { data }
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v2.delegates.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.delegates.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-
- server.method('v2.delegates.search', search, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.delegates.blocks', blocks, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...{ id: request.params.id },
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.delegates.voters', voters, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...{ id: request.params.id },
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.delegates.voterBalances', voterBalances, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-}
diff --git a/packages/core-api/lib/versions/2/methods/transactions.js b/packages/core-api/lib/versions/2/methods/transactions.js
deleted file mode 100644
index 2eaa0897d3..0000000000
--- a/packages/core-api/lib/versions/2/methods/transactions.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const Boom = require('boom')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const {
- transactions: transactionsRepository,
-} = require('../../../repositories')
-const utils = require('../utils')
-
-const index = async request => {
- const transactions = await transactionsRepository.findAll({
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, transactions, 'transaction')
-}
-
-const show = async request => {
- const transaction = await transactionsRepository.findById(request.params.id)
-
- if (!transaction) {
- return Boom.notFound('Transaction not found')
- }
-
- return utils.respondWithResource(request, transaction, 'transaction')
-}
-
-const search = async request => {
- const transactions = await transactionsRepository.search({
- ...request.query,
- ...request.payload,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, transactions, 'transaction')
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v2.transactions.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.transactions.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-
- server.method('v2.transactions.search', search, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-}
diff --git a/packages/core-api/lib/versions/2/methods/votes.js b/packages/core-api/lib/versions/2/methods/votes.js
deleted file mode 100644
index 7d4df6060b..0000000000
--- a/packages/core-api/lib/versions/2/methods/votes.js
+++ /dev/null
@@ -1,58 +0,0 @@
-const Boom = require('boom')
-const { TRANSACTION_TYPES } = require('@arkecosystem/crypto').constants
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const {
- transactions: transactionsRepository,
-} = require('../../../repositories')
-const utils = require('../utils')
-
-const index = async request => {
- const transactions = await transactionsRepository.findAllByType(
- TRANSACTION_TYPES.VOTE,
- {
- ...request.query,
- ...utils.paginate(request),
- },
- )
-
- return utils.toPagination(request, transactions, 'transaction')
-}
-
-const show = async request => {
- const transaction = await transactionsRepository.findByTypeAndId(
- TRANSACTION_TYPES.VOTE,
- request.params.id,
- )
-
- if (!transaction) {
- return Boom.notFound('Vote not found')
- }
-
- return utils.respondWithResource(request, transaction, 'transaction')
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v2.votes.index', index, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.votes.show', show, {
- cache: {
- expiresIn: 8 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-}
diff --git a/packages/core-api/lib/versions/2/methods/wallets.js b/packages/core-api/lib/versions/2/methods/wallets.js
deleted file mode 100644
index 4902fffe32..0000000000
--- a/packages/core-api/lib/versions/2/methods/wallets.js
+++ /dev/null
@@ -1,221 +0,0 @@
-const Boom = require('boom')
-const app = require('@arkecosystem/core-container')
-const generateCacheKey = require('../../../utils/generate-cache-key')
-const utils = require('../utils')
-const {
- transactions: transactionsRepository,
-} = require('../../../repositories')
-
-const database = app.resolvePlugin('database')
-
-const index = async request => {
- const wallets = await database.wallets.findAll({
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, wallets, 'wallet')
-}
-
-const top = async request => {
- const wallets = await database.wallets.top(utils.paginate(request))
-
- return utils.toPagination(request, wallets, 'wallet')
-}
-
-const show = async request => {
- const wallet = await database.wallets.findById(request.params.id)
-
- if (!wallet) {
- return Boom.notFound('Wallet not found')
- }
-
- return utils.respondWithResource(request, wallet, 'wallet')
-}
-
-const transactions = async request => {
- const wallet = await database.wallets.findById(request.params.id)
-
- if (!wallet) {
- return Boom.notFound('Wallet not found')
- }
-
- const rows = await transactionsRepository.findAllByWallet(wallet, {
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, rows, 'transaction')
-}
-
-const transactionsSent = async request => {
- const wallet = await database.wallets.findById(request.params.id)
-
- if (!wallet) {
- return Boom.notFound('Wallet not found')
- }
-
- // NOTE: We unset this value because it otherwise will produce a faulty SQL query
- delete request.params.id
-
- const rows = await transactionsRepository.findAllBySender(wallet.publicKey, {
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, rows, 'transaction')
-}
-
-const transactionsReceived = async request => {
- const wallet = await database.wallets.findById(request.params.id)
-
- if (!wallet) {
- return Boom.notFound('Wallet not found')
- }
-
- // NOTE: We unset this value because it otherwise will produce a faulty SQL query
- delete request.params.id
-
- const rows = await transactionsRepository.findAllByRecipient(wallet.address, {
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, rows, 'transaction')
-}
-
-const votes = async request => {
- const wallet = await database.wallets.findById(request.params.id)
-
- if (!wallet) {
- return Boom.notFound('Wallet not found')
- }
-
- // NOTE: We unset this value because it otherwise will produce a faulty SQL query
- delete request.params.id
-
- const rows = await transactionsRepository.allVotesBySender(wallet.publicKey, {
- ...request.params,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, rows, 'transaction')
-}
-
-const search = async request => {
- const wallets = await database.wallets.search({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- })
-
- return utils.toPagination(request, wallets, 'wallet')
-}
-
-module.exports = server => {
- const generateTimeout = require('../../utils').getCacheTimeout()
-
- server.method('v2.wallets.index', index, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.wallets.top', top, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey(utils.paginate(request)),
- })
-
- server.method('v2.wallets.show', show, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request => generateCacheKey({ id: request.params.id }),
- })
-
- server.method('v2.wallets.transactions', transactions, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.wallets.transactionsSent', transactionsSent, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.wallets.transactionsReceived', transactionsReceived, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.query,
- ...request.params,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.wallets.votes', votes, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.params,
- ...utils.paginate(request),
- }),
- })
-
- server.method('v2.wallets.search', search, {
- cache: {
- expiresIn: 30 * 1000,
- generateTimeout,
- getDecoratedValue: true,
- },
- generateKey: request =>
- generateCacheKey({
- ...request.payload,
- ...request.query,
- ...utils.paginate(request),
- }),
- })
-}
diff --git a/packages/core-api/lib/versions/2/schema/blocks.js b/packages/core-api/lib/versions/2/schema/blocks.js
deleted file mode 100644
index 8960fcab17..0000000000
--- a/packages/core-api/lib/versions/2/schema/blocks.js
+++ /dev/null
@@ -1,173 +0,0 @@
-const Joi = require('joi')
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- id: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- version: Joi.number()
- .integer()
- .min(0),
- timestamp: Joi.number()
- .integer()
- .min(0),
- previousBlock: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- height: Joi.number()
- .integer()
- .positive(),
- numberOfTransactions: Joi.number()
- .integer()
- .min(0),
- totalAmount: Joi.number()
- .integer()
- .min(0),
- totalFee: Joi.number()
- .integer()
- .min(0),
- reward: Joi.number()
- .integer()
- .min(0),
- payloadLength: Joi.number()
- .integer()
- .positive(),
- payloadHash: Joi.string().hex(),
- generatorPublicKey: Joi.string()
- .hex()
- .length(66),
- blockSignature: Joi.string().hex(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- id: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactions = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- id: Joi.string()
- .hex()
- .length(66),
- blockId: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- type: Joi.number()
- .integer()
- .min(0),
- version: Joi.number()
- .integer()
- .min(0),
- senderPublicKey: Joi.string()
- .hex()
- .length(66),
- senderId: Joi.string()
- .alphanum()
- .length(34),
- recipientId: Joi.string()
- .alphanum()
- .length(34),
- timestamp: Joi.number()
- .integer()
- .min(0),
- amount: Joi.number()
- .integer()
- .min(0),
- fee: Joi.number()
- .integer()
- .min(0),
- vendorFieldHex: Joi.string().hex(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- query: pagination,
- payload: {
- id: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- version: Joi.number()
- .integer()
- .min(0),
- previousBlock: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- payloadHash: Joi.string().hex(),
- generatorPublicKey: Joi.string()
- .hex()
- .length(66),
- blockSignature: Joi.string().hex(),
- timestamp: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- height: Joi.object().keys({
- from: Joi.number()
- .integer()
- .positive(),
- to: Joi.number()
- .integer()
- .positive(),
- }),
- numberOfTransactions: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- totalAmount: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- totalFee: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- reward: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- payloadLength: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- },
-}
diff --git a/packages/core-api/lib/versions/2/schema/delegates.js b/packages/core-api/lib/versions/2/schema/delegates.js
deleted file mode 100644
index 200911d270..0000000000
--- a/packages/core-api/lib/versions/2/schema/delegates.js
+++ /dev/null
@@ -1,153 +0,0 @@
-const Joi = require('joi')
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- address: Joi.string()
- .alphanum()
- .length(34),
- publicKey: Joi.string()
- .hex()
- .length(66),
- secondPublicKey: Joi.string()
- .hex()
- .length(66),
- vote: Joi.string()
- .hex()
- .length(66),
- username: Joi.string(),
- balance: Joi.number()
- .integer()
- .min(0),
- voteBalance: Joi.number()
- .integer()
- .min(0),
- producedBlocks: Joi.number()
- .integer()
- .min(0),
- missedBlocks: Joi.number()
- .integer()
- .min(0),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- id: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- query: pagination,
- payload: {
- username: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.blocks = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- id: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- version: Joi.number()
- .integer()
- .min(0),
- timestamp: Joi.number()
- .integer()
- .min(0),
- previousBlock: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- height: Joi.number()
- .integer()
- .positive(),
- numberOfTransactions: Joi.number()
- .integer()
- .min(0),
- totalAmount: Joi.number()
- .integer()
- .min(0),
- totalFee: Joi.number()
- .integer()
- .min(0),
- reward: Joi.number()
- .integer()
- .min(0),
- payloadLength: Joi.number()
- .integer()
- .min(0),
- payloadHash: Joi.string().hex(),
- generatorPublicKey: Joi.string()
- .hex()
- .length(66),
- blockSignature: Joi.string().hex(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.voters = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- address: Joi.string()
- .alphanum()
- .length(34),
- publicKey: Joi.string()
- .hex()
- .length(66),
- secondPublicKey: Joi.string()
- .hex()
- .length(66),
- vote: Joi.string()
- .hex()
- .length(66),
- username: Joi.string(),
- balance: Joi.number()
- .integer()
- .min(0),
- voteBalance: Joi.number()
- .integer()
- .min(0),
- producedBlocks: Joi.number()
- .integer()
- .min(0),
- missedBlocks: Joi.number()
- .integer()
- .min(0),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.voterBalances = {
- params: {
- id: Joi.string(),
- },
-}
diff --git a/packages/core-api/lib/versions/2/schema/pagination.js b/packages/core-api/lib/versions/2/schema/pagination.js
deleted file mode 100644
index 62543ef181..0000000000
--- a/packages/core-api/lib/versions/2/schema/pagination.js
+++ /dev/null
@@ -1,14 +0,0 @@
-const Joi = require('joi')
-
-module.exports = {
- page: Joi.number()
- .integer()
- .positive(),
- offset: Joi.number()
- .integer()
- .min(0),
- limit: Joi.number()
- .integer()
- .min(1)
- .max(100),
-}
diff --git a/packages/core-api/lib/versions/2/schema/peers.js b/packages/core-api/lib/versions/2/schema/peers.js
deleted file mode 100644
index 958ce07a61..0000000000
--- a/packages/core-api/lib/versions/2/schema/peers.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const Joi = require('joi')
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- ip: Joi.string().ip(),
- os: Joi.string(),
- status: Joi.string(),
- port: Joi.number().port(),
- version: Joi.string(),
- orderBy: Joi.string(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- ip: Joi.string().ip(),
- },
-}
diff --git a/packages/core-api/lib/versions/2/schema/transactions.js b/packages/core-api/lib/versions/2/schema/transactions.js
deleted file mode 100644
index d21de61550..0000000000
--- a/packages/core-api/lib/versions/2/schema/transactions.js
+++ /dev/null
@@ -1,147 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const Joi = require('@arkecosystem/crypto').validator.engine.joi
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- id: Joi.string()
- .hex()
- .length(64),
- blockId: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- type: Joi.number()
- .integer()
- .min(0),
- version: Joi.number()
- .integer()
- .positive(),
- senderPublicKey: Joi.string()
- .hex()
- .length(66),
- senderId: Joi.string()
- .alphanum()
- .length(34),
- recipientId: Joi.string()
- .alphanum()
- .length(34),
- ownerId: Joi.string()
- .alphanum()
- .length(34),
- timestamp: Joi.number()
- .integer()
- .min(0),
- amount: Joi.number()
- .integer()
- .min(0),
- fee: Joi.number()
- .integer()
- .min(0),
- vendorFieldHex: Joi.string().hex(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.store = {
- payload: {
- transactions: Joi.arkTransactions()
- .min(1)
- .max(
- app.resolveOptions('transactionPool').maxTransactionsPerRequest,
- )
- .options({ stripUnknown: true }),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- id: Joi.string()
- .hex()
- .length(64),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.unconfirmed = {
- query: pagination,
-}
-
-/**
- * @type {Object}
- */
-exports.showUnconfirmed = {
- params: {
- id: Joi.string()
- .hex()
- .length(64),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- query: pagination,
- payload: {
- orderBy: Joi.string(),
- id: Joi.string()
- .hex()
- .length(64),
- blockId: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- type: Joi.number()
- .integer()
- .min(0),
- version: Joi.number()
- .integer()
- .positive(),
- senderPublicKey: Joi.string()
- .hex()
- .length(66),
- senderId: Joi.string()
- .alphanum()
- .length(34),
- recipientId: Joi.string()
- .alphanum()
- .length(34),
- ownerId: Joi.string()
- .alphanum()
- .length(34),
- vendorFieldHex: Joi.string().hex(),
- timestamp: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- amount: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- fee: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- },
-}
diff --git a/packages/core-api/lib/versions/2/schema/votes.js b/packages/core-api/lib/versions/2/schema/votes.js
deleted file mode 100644
index c98075f7b7..0000000000
--- a/packages/core-api/lib/versions/2/schema/votes.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const Joi = require('joi')
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- id: Joi.string()
- .hex()
- .length(64),
- blockId: Joi.string().regex(/^[0-9]+$/, 'numbers'),
- version: Joi.number()
- .integer()
- .positive(),
- senderPublicKey: Joi.string()
- .hex()
- .length(66),
- senderId: Joi.string()
- .alphanum()
- .length(34),
- recipientId: Joi.string()
- .alphanum()
- .length(34),
- timestamp: Joi.number()
- .integer()
- .min(0),
- amount: Joi.number()
- .integer()
- .min(0),
- fee: Joi.number()
- .integer()
- .min(0),
- vendorFieldHex: Joi.string().hex(),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- id: Joi.string()
- .hex()
- .length(64),
- },
-}
diff --git a/packages/core-api/lib/versions/2/schema/wallets.js b/packages/core-api/lib/versions/2/schema/wallets.js
deleted file mode 100644
index 1c086c9b9a..0000000000
--- a/packages/core-api/lib/versions/2/schema/wallets.js
+++ /dev/null
@@ -1,136 +0,0 @@
-const Joi = require('joi')
-const pagination = require('./pagination')
-
-/**
- * @type {Object}
- */
-exports.index = {
- query: {
- ...pagination,
- ...{
- orderBy: Joi.string(),
- address: Joi.string()
- .alphanum()
- .length(34),
- publicKey: Joi.string()
- .hex()
- .length(66),
- secondPublicKey: Joi.string()
- .hex()
- .length(66),
- vote: Joi.string()
- .hex()
- .length(66),
- username: Joi.string(),
- balance: Joi.number().integer(),
- voteBalance: Joi.number()
- .integer()
- .min(0),
- producedBlocks: Joi.number()
- .integer()
- .min(0),
- missedBlocks: Joi.number()
- .integer()
- .min(0),
- },
- },
-}
-
-/**
- * @type {Object}
- */
-exports.show = {
- params: {
- id: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactions = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- orderBy: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactionsSent = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- orderBy: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.transactionsReceived = {
- params: {
- id: Joi.string(),
- },
- query: {
- ...pagination,
- orderBy: Joi.string(),
- },
-}
-
-/**
- * @type {Object}
- */
-exports.votes = {
- params: {
- id: Joi.string(),
- },
- query: pagination,
-}
-
-/**
- * @type {Object}
- */
-exports.search = {
- query: pagination,
- payload: {
- orderBy: Joi.string(),
- address: Joi.string()
- .alphanum()
- .length(34),
- publicKey: Joi.string()
- .hex()
- .length(66),
- secondPublicKey: Joi.string()
- .hex()
- .length(66),
- vote: Joi.string()
- .hex()
- .length(66),
- username: Joi.string(),
- producedBlocks: Joi.number()
- .integer()
- .min(0),
- missedBlocks: Joi.number()
- .integer()
- .min(0),
- balance: Joi.object().keys({
- from: Joi.number().integer(),
- to: Joi.number().integer(),
- }),
- voteBalance: Joi.object().keys({
- from: Joi.number()
- .integer()
- .min(0),
- to: Joi.number()
- .integer()
- .min(0),
- }),
- },
-}
diff --git a/packages/core-api/lib/versions/2/transformers/block.js b/packages/core-api/lib/versions/2/transformers/block.js
deleted file mode 100644
index 45baf0495c..0000000000
--- a/packages/core-api/lib/versions/2/transformers/block.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const database = require('@arkecosystem/core-container').resolvePlugin(
- 'database',
-)
-const { formatTimestamp, bignumify } = require('@arkecosystem/core-utils')
-
-/**
- * Turns a "block" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const generator = database.walletManager.findByPublicKey(
- model.generatorPublicKey,
- )
-
- model.reward = bignumify(model.reward)
- model.totalFee = bignumify(model.totalFee)
-
- return {
- id: model.id,
- version: +model.version,
- height: +model.height,
- previous: model.previousBlock,
- forged: {
- reward: +model.reward.toFixed(),
- fee: +model.totalFee.toFixed(),
- total: +model.reward.plus(model.totalFee).toFixed(),
- amount: +bignumify(model.totalAmount).toFixed(),
- },
- payload: {
- hash: model.payloadHash,
- length: model.payloadLength,
- },
- generator: {
- username: generator.username,
- address: generator.address,
- publicKey: generator.publicKey,
- },
- signature: model.blockSignature,
- confirmations: model.confirmations,
- transactions: model.numberOfTransactions,
- timestamp: formatTimestamp(model.timestamp),
- }
-}
diff --git a/packages/core-api/lib/versions/2/transformers/delegate.js b/packages/core-api/lib/versions/2/transformers/delegate.js
deleted file mode 100644
index ef81fb2246..0000000000
--- a/packages/core-api/lib/versions/2/transformers/delegate.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const {
- bignumify,
- formatTimestamp,
- delegateCalculator,
-} = require('@arkecosystem/core-utils')
-
-/**
- * Turns a "delegate" object into a generic object.
- * @param {Object} delegate
- * @return {Object}
- */
-module.exports = delegate => {
- const data = {
- username: delegate.username,
- address: delegate.address,
- publicKey: delegate.publicKey,
- votes: +bignumify(delegate.voteBalance).toFixed(),
- rank: delegate.rate,
- blocks: {
- produced: delegate.producedBlocks,
- missed: delegate.missedBlocks,
- },
- production: {
- approval: delegateCalculator.calculateApproval(delegate),
- productivity: delegateCalculator.calculateProductivity(delegate),
- },
- forged: {
- fees: +delegate.forgedFees.toFixed(),
- rewards: +delegate.forgedRewards.toFixed(),
- total: +delegate.forgedFees.plus(delegate.forgedRewards).toFixed(),
- },
- }
-
- const lastBlock = delegate.lastBlock
-
- if (lastBlock) {
- data.blocks.last = {
- id: lastBlock.id,
- timestamp: formatTimestamp(lastBlock.timestamp),
- }
- }
-
- return data
-}
diff --git a/packages/core-api/lib/versions/2/transformers/fee-statistics.js b/packages/core-api/lib/versions/2/transformers/fee-statistics.js
deleted file mode 100644
index 2a5e5fd250..0000000000
--- a/packages/core-api/lib/versions/2/transformers/fee-statistics.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/**
- * Turns a "fee-statistics" object into readable object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => ({
- type: model.type,
- fees: {
- minFee: parseInt(model.minFee),
- maxFee: parseInt(model.maxFee),
- avgFee: parseInt(model.avgFee),
- },
-})
diff --git a/packages/core-api/lib/versions/2/transformers/peer.js b/packages/core-api/lib/versions/2/transformers/peer.js
deleted file mode 100644
index 379050e9d2..0000000000
--- a/packages/core-api/lib/versions/2/transformers/peer.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-
-/**
- * Turns a "peer" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const peer = {
- ip: model.ip,
- port: +model.port,
- version: model.version,
- height: model.state ? model.state.height : model.height,
- status: model.status,
- os: model.os,
- latency: model.delay,
- }
-
- if (config.network.name !== 'mainnet') {
- peer.hashid = model.hashid || 'unknown'
- }
-
- return peer
-}
diff --git a/packages/core-api/lib/versions/2/transformers/ports.js b/packages/core-api/lib/versions/2/transformers/ports.js
deleted file mode 100644
index 02f3f4ee21..0000000000
--- a/packages/core-api/lib/versions/2/transformers/ports.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Turns a "config" object into readable object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = config => {
- const result = {}
- const keys = [
- '@arkecosystem/core-p2p',
- '@arkecosystem/core-api',
- '@arkecosystem/core-graphql',
- '@arkecosystem/core-json-rpc',
- '@arkecosystem/core-webhooks',
- ]
-
- result[keys[0]] = config.plugins[keys[0]].port
-
- for (const [name, options] of Object.entries(config.plugins)) {
- if (keys.includes(name) && options.enabled) {
- if (options.server && options.server.enabled) {
- result[name] = options.server.port
-
- continue
- }
-
- result[name] = options.port
- }
- }
-
- return result
-}
diff --git a/packages/core-api/lib/versions/2/transformers/transaction.js b/packages/core-api/lib/versions/2/transformers/transaction.js
deleted file mode 100644
index 5f6268f77c..0000000000
--- a/packages/core-api/lib/versions/2/transformers/transaction.js
+++ /dev/null
@@ -1,37 +0,0 @@
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const blockchain = app.resolvePlugin('blockchain')
-
-const { crypto } = require('@arkecosystem/crypto')
-const { Transaction } = require('@arkecosystem/crypto').models
-
-const { bignumify, formatTimestamp } = require('@arkecosystem/core-utils')
-
-/**
- * Turns a "transaction" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => {
- const data = new Transaction(model.serialized.toString('hex'))
- const lastBlock = blockchain.getLastBlock()
-
- return {
- id: data.id,
- blockId: model.blockId,
- version: data.version,
- type: data.type,
- amount: +bignumify(data.amount).toFixed(),
- fee: +bignumify(data.fee).toFixed(),
- sender: crypto.getAddress(data.senderPublicKey, config.network.pubKeyHash),
- recipient: data.recipientId,
- signature: data.signature,
- signSignature: data.signSignature,
- signatures: data.signatures,
- vendorField: data.vendorField,
- asset: data.asset,
- confirmations: model.block ? lastBlock.data.height - model.block.height : 0,
- timestamp: formatTimestamp(model.timestamp || data.timestamp),
- }
-}
diff --git a/packages/core-api/lib/versions/2/transformers/wallet.js b/packages/core-api/lib/versions/2/transformers/wallet.js
deleted file mode 100644
index 584e5b6ae9..0000000000
--- a/packages/core-api/lib/versions/2/transformers/wallet.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-
-/**
- * Turns a "wallet" object into a generic object.
- * @param {Object} model
- * @return {Object}
- */
-module.exports = model => ({
- address: model.address,
- publicKey: model.publicKey,
- username: model.username,
- secondPublicKey: model.secondPublicKey,
- balance: +bignumify(model.balance).toFixed(),
- isDelegate: !!model.username,
-})
diff --git a/packages/core-api/lib/versions/2/utils.js b/packages/core-api/lib/versions/2/utils.js
deleted file mode 100644
index 262aa477a1..0000000000
--- a/packages/core-api/lib/versions/2/utils.js
+++ /dev/null
@@ -1,106 +0,0 @@
-const Boom = require('boom')
-const {
- transformResource,
- transformCollection,
-} = require('../../utils/transformer')
-
-/**
- * Create a pagination object for the request.
- * @param {Hapi.Request} request
- * @return {Object}
- */
-const paginate = request => {
- const pagination = {
- offset: (request.query.page - 1) * request.query.limit || 0,
- limit: request.query.limit || 100,
- }
-
- if (request.query.offset) {
- pagination.offset = request.query.offset
- }
-
- return pagination
-}
-
-/**
- * Respond with a resource.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const respondWithResource = (request, data, transformer) =>
- data
- ? { data: transformResource(request, data, transformer) }
- : Boom.notFound()
-
-/**
- * Respond with a collection.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const respondWithCollection = (request, data, transformer) => ({
- data: transformCollection(request, data, transformer),
-})
-
-/**
- * Respond with data from cache.
- * @param {Object} data
- * @param {Hapi.Toolkit} h
- * @return {Object}
- */
-const respondWithCache = (data, h) => {
- const { value, cached } = data
- const lastModified = cached ? new Date(cached.stored) : new Date()
-
- return value.isBoom
- ? h.response(value.output.payload).code(value.output.statusCode)
- : h.response(value).header('Last-modified', lastModified.toUTCString())
-}
-
-/**
- * Transform the given data into a resource.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const toResource = (request, data, transformer) =>
- transformResource(request, data, transformer)
-
-/**
- * Transform the given data into a collection.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const toCollection = (request, data, transformer) =>
- transformCollection(request, data, transformer)
-
-/**
- * Transform the given data into a pagination.
- * @param {Hapi.Request} request
- * @param {Object} data
- * @param {String} transformer
- * @return {Object}
- */
-const toPagination = (request, data, transformer) => ({
- results: transformCollection(request, data.rows, transformer),
- totalCount: data.count,
-})
-
-/**
- * @type {Object}
- */
-module.exports = {
- paginate,
- respondWithResource,
- respondWithCollection,
- respondWithCache,
- toResource,
- toCollection,
- toPagination,
-}
diff --git a/packages/core-api/lib/versions/utils.js b/packages/core-api/lib/versions/utils.js
deleted file mode 100644
index 5b1b32f49d..0000000000
--- a/packages/core-api/lib/versions/utils.js
+++ /dev/null
@@ -1,7 +0,0 @@
-exports.getCacheTimeout = () => {
- const {
- generateTimeout,
- } = require('@arkecosystem/core-container').resolveOptions('api').cache
-
- return JSON.parse(generateTimeout)
-}
diff --git a/packages/core-api/package.json b/packages/core-api/package.json
index 69cce4989e..420bd0fbc1 100644
--- a/packages/core-api/package.json
+++ b/packages/core-api/package.json
@@ -1,48 +1,71 @@
{
- "name": "@arkecosystem/core-api",
- "description": "Public API for Ark Core",
- "version": "0.2.14",
- "contributors": [
- "Kristjan Košič ",
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/core-container": "~0.2",
- "@arkecosystem/core-http-utils": "~0.2",
- "@arkecosystem/core-transaction-pool": "~0.2",
- "@arkecosystem/core-utils": "~0.2",
- "@arkecosystem/crypto": "~0.2",
- "ajv": "^6.5.5",
- "boom": "^7.3.0",
- "bs58check": "^2.1.2",
- "dayjs-ext": "^2.2.0",
- "hapi-api-version": "^2.1.0",
- "hapi-pagination": "https://github.com/faustbrian/hapi-pagination",
- "hapi-rate-limit": "^3.0.0",
- "ip": "^1.1.5",
- "joi": "^14.3.0",
- "lodash": "^4.17.11",
- "lodash.orderby": "^4.6.0",
- "lodash.snakecase": "^4.1.1"
- },
- "devDependencies": {
- "@arkecosystem/core-test-utils": "~0.2",
- "axios": "^0.18.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-api",
+ "description": "Public API for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "Kristjan Košič ",
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index",
+ "types": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "bash ../../scripts/pre-test.sh",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/core-container": "^2.1.0",
+ "@arkecosystem/core-http-utils": "^2.1.0",
+ "@arkecosystem/core-transaction-pool": "^2.1.0",
+ "@arkecosystem/core-utils": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/lodash.orderby": "^4.6.4",
+ "@types/lodash.partition": "^4.6.4",
+ "@types/lodash.snakecase": "^4.1.4",
+ "ajv": "^6.6.2",
+ "boom": "^7.3.0",
+ "bs58check": "^2.1.2",
+ "dayjs-ext": "^2.2.0",
+ "delay": "^4.1.0",
+ "hapi-api-version": "^2.1.0",
+ "hapi-pagination": "https://github.com/faustbrian/hapi-pagination",
+ "hapi-rate-limit": "^3.0.0",
+ "ip": "^1.1.5",
+ "joi": "^14.3.0",
+ "lodash.orderby": "^4.6.0",
+ "lodash.partition": "^4.6.0",
+ "lodash.snakecase": "^4.1.1"
+ },
+ "devDependencies": {
+ "@arkecosystem/core-test-utils": "^2.1.0",
+ "@types/boom": "^7.2.1",
+ "@types/ip": "^1.1.0",
+ "@types/joi": "^14.0.1",
+ "axios": "^0.18.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-api/src/defaults.ts b/packages/core-api/src/defaults.ts
new file mode 100644
index 0000000000..647268235b
--- /dev/null
+++ b/packages/core-api/src/defaults.ts
@@ -0,0 +1,84 @@
+import { resolve } from "path";
+
+export const defaults = {
+ enabled: false,
+ host: process.env.CORE_API_HOST || "0.0.0.0",
+ port: process.env.CORE_API_PORT || 4003,
+ cache: {
+ enabled: true,
+ /**
+ * How many seconds the server will try to complete the request and cache the result.
+ *
+ * Defaults to 8 seconds, set it to false if you do not care about the timeout.
+ *
+ * Setting it to false can result in requests never being completed, which is usually
+ * caused by low-spec servers that are unable to handle the heavy load that results
+ * out of SQL queries on the blocks and transactions tables.
+ *
+ * If you experience issues with the cache timeout, which is indicated by a 503 status codes,
+ * you should consider upgrading your hardware or tweak your PostgreSQL settings.
+ */
+ generateTimeout: process.env.CORE_API_CACHE_TIMEOUT || 8000,
+ },
+ // @see https://hapijs.com/api#-serveroptionstls
+ ssl: {
+ enabled: process.env.CORE_API_SSL,
+ host: process.env.CORE_API_SSL_HOST || "0.0.0.0",
+ port: process.env.CORE_API_SSL_PORT || 8443,
+ key: process.env.CORE_API_SSL_KEY,
+ cert: process.env.CORE_API_SSL_CERT,
+ },
+ // @see https://github.com/p-meier/hapi-api-version
+ versions: {
+ validVersions: [1, 2],
+ defaultVersion: 1,
+ basePath: "/api/",
+ vendorName: "core-api",
+ },
+ // @see https://github.com/wraithgar/hapi-rate-limit
+ rateLimit: {
+ enabled: !process.env.CORE_API_RATE_LIMIT,
+ pathLimit: false,
+ userLimit: process.env.CORE_API_RATE_LIMIT_USER_LIMIT || 300,
+ userCache: {
+ expiresIn: process.env.CORE_API_RATE_LIMIT_USER_EXPIRES || 60000,
+ },
+ ipWhitelist: ["127.0.0.1", "::ffff:127.0.0.1"],
+ },
+ // @see https://github.com/fknop/hapi-pagination
+ pagination: {
+ limit: 100,
+ include: [
+ "/api/v2/blocks",
+ "/api/v2/blocks/{id}/transactions",
+ "/api/v2/blocks/search",
+ "/api/v2/delegates",
+ "/api/v2/delegates/{id}/blocks",
+ "/api/v2/delegates/{id}/voters",
+ "/api/v2/delegates/search",
+ "/api/v2/peers",
+ "/api/v2/transactions",
+ "/api/v2/transactions/search",
+ "/api/v2/transactions/unconfirmed",
+ "/api/v2/votes",
+ "/api/v2/wallets",
+ "/api/v2/wallets/top",
+ "/api/v2/wallets/{id}/transactions",
+ "/api/v2/wallets/{id}/transactions/received",
+ "/api/v2/wallets/{id}/transactions/sent",
+ "/api/v2/wallets/{id}/votes",
+ "/api/v2/wallets/search",
+ ],
+ },
+ whitelist: ["127.0.0.1", "::ffff:127.0.0.1"],
+ plugins: [
+ {
+ plugin: resolve(__dirname, "./versions/1"),
+ routes: { prefix: "/api/v1" },
+ },
+ {
+ plugin: resolve(__dirname, "./versions/2"),
+ routes: { prefix: "/api/v2" },
+ },
+ ],
+};
diff --git a/packages/core-api/src/index.ts b/packages/core-api/src/index.ts
new file mode 100644
index 0000000000..cb7249fe8e
--- /dev/null
+++ b/packages/core-api/src/index.ts
@@ -0,0 +1,5 @@
+export * from "./defaults";
+export * from "./server";
+export * from "./interfaces";
+export * from "./repositories";
+export * from "./plugin";
diff --git a/packages/core-api/src/interfaces/index.ts b/packages/core-api/src/interfaces/index.ts
new file mode 100644
index 0000000000..5ed29ebdf6
--- /dev/null
+++ b/packages/core-api/src/interfaces/index.ts
@@ -0,0 +1 @@
+export * from "./repository";
diff --git a/packages/core-api/src/interfaces/repository.ts b/packages/core-api/src/interfaces/repository.ts
new file mode 100644
index 0000000000..f91ed6026c
--- /dev/null
+++ b/packages/core-api/src/interfaces/repository.ts
@@ -0,0 +1,9 @@
+export interface IRepository {
+ databaseService: any;
+ cache: any;
+ model: any;
+ query: any;
+ columns: string[];
+
+ getModel(): object;
+}
diff --git a/packages/core-api/src/plugin.ts b/packages/core-api/src/plugin.ts
new file mode 100644
index 0000000000..be469c15b2
--- /dev/null
+++ b/packages/core-api/src/plugin.ts
@@ -0,0 +1,28 @@
+import { Container, Logger } from "@arkecosystem/core-interfaces";
+import { defaults } from "./defaults";
+import { Server } from "./server";
+
+export const plugin: Container.PluginDescriptor = {
+ pkg: require("../package.json"),
+ defaults,
+ alias: "api",
+ async register(container: Container.IContainer, options) {
+ if (!options.enabled) {
+ container.resolvePlugin("logger").info("Public API is disabled :grey_exclamation:");
+
+ return false;
+ }
+
+ const server = new Server(options);
+ await server.start();
+
+ return server;
+ },
+ async deregister(container: Container.IContainer, options) {
+ if (options.enabled) {
+ container.resolvePlugin("logger").info(`Stopping Public API`);
+
+ await container.resolvePlugin("api").stop();
+ }
+ },
+};
diff --git a/packages/core-api/src/plugins/caster.ts b/packages/core-api/src/plugins/caster.ts
new file mode 100644
index 0000000000..f1af31b848
--- /dev/null
+++ b/packages/core-api/src/plugins/caster.ts
@@ -0,0 +1,50 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import Hapi from "hapi";
+
+function isBoolean(value) {
+ try {
+ return value.toLowerCase() === "true" || value.toLowerCase() === "false";
+ } catch (e) {
+ return false;
+ }
+}
+
+function isNumber(value) {
+ return !isNaN(value);
+}
+
+const register = async (server: Hapi.Server, options: object): Promise => {
+ server.ext({
+ type: "onPreHandler",
+ method: (request, h) => {
+ const query = request.query;
+
+ Object.keys(query).map((key, index) => {
+ // Special fields that should always be a "string"
+ if (key === "id" || key === "blockId" || key === "previousBlock") {
+ query[key] = query[key];
+ } else if (isBoolean(query[key])) {
+ query[key] = query[key].toLowerCase() === "true";
+ } else if (isNumber(query[key])) {
+ query[key] =
+ // @ts-ignore
+ // tslint:disable-next-line triple-equals
+ query[key] == Number(query[key]) ? Number(query[key]) : bignumify(query[key]).toString();
+ } else {
+ query[key] = query[key];
+ }
+ });
+
+ // @ts-ignore
+ request.query = query;
+
+ return h.continue;
+ },
+ });
+};
+
+export = {
+ register,
+ name: "core-caster",
+ version: "1.0.0",
+};
diff --git a/packages/core-api/src/plugins/endpoint-version.ts b/packages/core-api/src/plugins/endpoint-version.ts
new file mode 100644
index 0000000000..bbe8f4f191
--- /dev/null
+++ b/packages/core-api/src/plugins/endpoint-version.ts
@@ -0,0 +1,31 @@
+import Boom from "boom";
+import Hapi from "hapi";
+
+const versionRegex = /^\/api\/v([0-9])\//;
+
+const register = async (server: Hapi.Server, options: any): Promise => {
+ server.ext({
+ type: "onRequest",
+ async method(request, h) {
+ const match = versionRegex.exec(request.path);
+
+ if (match && match.length === 2) {
+ const apiVersion = parseInt(match[1], 10);
+
+ if (!options.validVersions.includes(apiVersion)) {
+ return Boom.badRequest(`Invalid api-version! Valid values: ${options.validVersions.join()}`);
+ }
+
+ request.pre.apiVersion = apiVersion;
+ }
+
+ return h.continue;
+ },
+ });
+};
+
+export = {
+ register,
+ name: "endpoint-version",
+ version: "1.0.0",
+};
diff --git a/packages/core-api/src/plugins/set-headers.ts b/packages/core-api/src/plugins/set-headers.ts
new file mode 100644
index 0000000000..a31e66f880
--- /dev/null
+++ b/packages/core-api/src/plugins/set-headers.ts
@@ -0,0 +1,28 @@
+import Boom from "boom";
+import Hapi from "hapi";
+
+const register = async (server: Hapi.Server, options: object): Promise => {
+ server.ext({
+ type: "onPreResponse",
+ async method(request, h) {
+ const response = request.response;
+
+ // @ts-ignore
+ if (response.isBoom && response.data) {
+ // Deleting the property beforehand makes it appear last in the response body.
+ // @ts-ignore
+ delete response.output.payload.error;
+ // @ts-ignore
+ response.output = { payload: { error: response.data } };
+ }
+
+ return h.continue;
+ },
+ });
+};
+
+export = {
+ register,
+ name: "set-headers",
+ version: "1.0.0",
+};
diff --git a/packages/core-api/src/plugins/validation/formats/address.ts b/packages/core-api/src/plugins/validation/formats/address.ts
new file mode 100644
index 0000000000..7e4ecc56d7
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/address.ts
@@ -0,0 +1,17 @@
+import { app } from "@arkecosystem/core-container";
+import * as bs58check from "bs58check";
+
+export function registerAddressFormat(ajv) {
+ const config = app.getConfig();
+
+ ajv.addFormat("address", {
+ type: "string",
+ validate: value => {
+ try {
+ return bs58check.decode(value)[0] === config.get("network.pubKeyHash");
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/csv.ts b/packages/core-api/src/plugins/validation/formats/csv.ts
new file mode 100644
index 0000000000..f3a2af591f
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/csv.ts
@@ -0,0 +1,14 @@
+export function registerCsvFormat(ajv) {
+ ajv.addFormat("csv", {
+ type: "string",
+ validate: value => {
+ try {
+ const a = value.split(",");
+
+ return a.length > 0 && a.length <= 1000;
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/hex.ts b/packages/core-api/src/plugins/validation/formats/hex.ts
new file mode 100644
index 0000000000..77ca40ebb1
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/hex.ts
@@ -0,0 +1,14 @@
+export function registerHexFormat(ajv) {
+ ajv.addFormat("hex", {
+ type: "string",
+ validate: value => {
+ try {
+ Buffer.from(value, "hex");
+
+ return true;
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/ip.ts b/packages/core-api/src/plugins/validation/formats/ip.ts
new file mode 100644
index 0000000000..2d0639c83e
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/ip.ts
@@ -0,0 +1,8 @@
+import * as ip from "ip";
+
+export function registerIpFormat(ajv) {
+ ajv.addFormat("ip", {
+ type: "string",
+ validate: value => ip.isV4Format(value) || ip.isV6Format(value),
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/parseInt.ts b/packages/core-api/src/plugins/validation/formats/parseInt.ts
new file mode 100644
index 0000000000..8e8da2ff7f
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/parseInt.ts
@@ -0,0 +1,14 @@
+export function registerParseIntFormat(ajv) {
+ ajv.addFormat("parsedInt", {
+ type: "string",
+ validate: value => {
+ if (isNaN(value) || parseInt(value, 10) !== value || isNaN(parseInt(value, 10))) {
+ return false;
+ }
+
+ value = parseInt(value, 10);
+
+ return true;
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/publicKey.ts b/packages/core-api/src/plugins/validation/formats/publicKey.ts
new file mode 100644
index 0000000000..df19146296
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/publicKey.ts
@@ -0,0 +1,12 @@
+export function registerPublicKeyFormat(ajv) {
+ ajv.addFormat("publicKey", {
+ type: "string",
+ validate: value => {
+ try {
+ return Buffer.from(value, "hex").length === 33;
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/signature.ts b/packages/core-api/src/plugins/validation/formats/signature.ts
new file mode 100644
index 0000000000..1f96dac2d4
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/signature.ts
@@ -0,0 +1,12 @@
+export function registerSignatureFormat(ajv) {
+ ajv.addFormat("signature", {
+ type: "string",
+ validate: value => {
+ try {
+ return Buffer.from(value, "hex").length < 73;
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/formats/vendorField.ts b/packages/core-api/src/plugins/validation/formats/vendorField.ts
new file mode 100644
index 0000000000..4b634941c0
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/formats/vendorField.ts
@@ -0,0 +1,12 @@
+export function registerVendorFieldFormat(ajv) {
+ ajv.addFormat("vendorField", {
+ type: "string",
+ validate: value => {
+ try {
+ return Buffer.from(value).length < 65;
+ } catch (e) {
+ return false;
+ }
+ },
+ });
+}
diff --git a/packages/core-api/src/plugins/validation/index.ts b/packages/core-api/src/plugins/validation/index.ts
new file mode 100644
index 0000000000..66d75776ae
--- /dev/null
+++ b/packages/core-api/src/plugins/validation/index.ts
@@ -0,0 +1,81 @@
+import AJV from "ajv";
+import Boom from "boom";
+import * as fs from "fs";
+import Hapi from "hapi";
+import * as path from "path";
+
+// SOF: IMPORT CUSTOM AJV FORMATS
+import { registerAddressFormat } from "./formats/address";
+import { registerCsvFormat } from "./formats/csv";
+import { registerHexFormat } from "./formats/hex";
+import { registerIpFormat } from "./formats/ip";
+import { registerParseIntFormat } from "./formats/parseInt";
+import { registerPublicKeyFormat } from "./formats/publicKey";
+import { registerSignatureFormat } from "./formats/signature";
+import { registerVendorFieldFormat } from "./formats/vendorField";
+// EOF: IMPORT CUSTOM AJV FORMATS
+
+const PLUGIN_NAME = "hapi-ajv";
+
+const register = async (server: Hapi.Server, options: object): Promise => {
+ const ajv = new AJV();
+ registerCsvFormat(ajv);
+ registerAddressFormat(ajv);
+ registerHexFormat(ajv);
+ registerIpFormat(ajv);
+ registerParseIntFormat(ajv);
+ registerPublicKeyFormat(ajv);
+ registerSignatureFormat(ajv);
+ registerVendorFieldFormat(ajv);
+
+ const validate = (schema, data) => {
+ return ajv.validate(schema, data) ? null : ajv.errors;
+ };
+
+ const createErrorResponse = (request, h, errors) => {
+ if (request.pre.apiVersion === 1) {
+ return h
+ .response({
+ path: errors[0].dataPath,
+ error: errors[0].message,
+ success: false,
+ })
+ .takeover();
+ }
+
+ return Boom.badData(errors);
+ };
+
+ server.ext({
+ type: "onPreHandler",
+ method: (request, h) => {
+ const config = request.route.settings.plugins[PLUGIN_NAME] || {};
+
+ let errors;
+
+ if (config.payloadSchema) {
+ errors = validate(config.payloadSchema, request.payload);
+
+ if (errors) {
+ return createErrorResponse(request, h, errors);
+ }
+ }
+
+ if (config.querySchema) {
+ errors = validate(config.querySchema, request.query);
+
+ if (errors) {
+ return createErrorResponse(request, h, errors);
+ }
+ }
+
+ return h.continue;
+ },
+ });
+};
+
+export = {
+ register,
+ name: PLUGIN_NAME,
+ version: "1.0.0",
+};
diff --git a/packages/core-api/src/repositories/blocks.ts b/packages/core-api/src/repositories/blocks.ts
new file mode 100644
index 0000000000..9d312fbf50
--- /dev/null
+++ b/packages/core-api/src/repositories/blocks.ts
@@ -0,0 +1,137 @@
+import { IRepository } from "../interfaces";
+import { Repository } from "./repository";
+import { buildFilterQuery } from "./utils/build-filter-query";
+
+export class BlockRepository extends Repository implements IRepository {
+ constructor() {
+ super();
+ }
+
+ /**
+ * Get all blocks for the given parameters.
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAll(parameters: any = {}): Promise {
+ const selectQuery = this.query.select().from(this.query);
+
+ const conditions = Object.entries(this._formatConditions(parameters));
+
+ if (conditions.length) {
+ const first = conditions.shift();
+
+ selectQuery.where(this.query[first[0]].equals(first[1]));
+
+ for (const condition of conditions) {
+ selectQuery.and(this.query[condition[0]].equals(condition[1]));
+ }
+ }
+
+ return this._findManyWithCount(selectQuery, {
+ limit: parameters.limit,
+ offset: parameters.offset,
+ orderBy: this.__orderBy(parameters),
+ });
+ }
+
+ /**
+ * Get all blocks for the given generator.
+ * @param {String} generatorPublicKey
+ * @param {Object} paginator
+ * @return {Object}
+ */
+ public async findAllByGenerator(generatorPublicKey, paginator): Promise {
+ return this.findAll({ ...{ generatorPublicKey }, ...paginator });
+ }
+
+ /**
+ * Get a block.
+ * @param {Number} id
+ * @return {Object}
+ */
+ public async findById(value): Promise {
+ const query = this.query
+ .select()
+ .from(this.query)
+ .where(this.query.id.equals(value));
+
+ // ensure that the value is not greater than 2147483647 (psql max int size)
+ const height = +value;
+ if (height <= 2147483647) {
+ query.or(this.query.height.equals(height));
+ }
+
+ return this._find(query);
+ }
+
+ /**
+ * Get the last block for the given generator.
+ * TODO is this right?
+ * @param {String} generatorPublicKey
+ * @return {Object}
+ */
+ public async findLastByPublicKey(generatorPublicKey): Promise {
+ const query = this.query
+ .select(this.query.id, this.query.timestamp)
+ .from(this.query)
+ .where(this.query.generator_public_key.equals(generatorPublicKey))
+ .order(this.query.height.desc);
+
+ return this._find(query);
+ }
+
+ /**
+ * Search all blocks.
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async search(parameters): Promise {
+ const selectQuery = this.query.select().from(this.query);
+
+ const conditions = buildFilterQuery(this._formatConditions(parameters), {
+ exact: ["id", "version", "previous_block", "payload_hash", "generator_public_key", "block_signature"],
+ between: [
+ "timestamp",
+ "height",
+ "number_of_transactions",
+ "total_amount",
+ "total_fee",
+ "reward",
+ "payload_length",
+ ],
+ });
+
+ if (conditions.length) {
+ const first = conditions.shift();
+
+ selectQuery.where(this.query[first.column][first.method](first.value));
+
+ for (const condition of conditions) {
+ selectQuery.and(this.query[condition.column][condition.method](condition.value));
+ }
+ }
+
+ return this._findManyWithCount(selectQuery, {
+ limit: parameters.limit,
+ offset: parameters.offset,
+ orderBy: this.__orderBy(parameters),
+ });
+ }
+
+ public getModel(): any {
+ return (this.databaseService.connection as any).models.block;
+ }
+
+ public __orderBy(parameters): string[] {
+ if (!parameters.orderBy) {
+ return ["height", "desc"];
+ }
+
+ const orderBy = parameters.orderBy.split(":").map(p => p.toLowerCase());
+ if (orderBy.length !== 2 || ["desc", "asc"].includes(orderBy[1]) !== true) {
+ return ["height", "desc"];
+ }
+
+ return orderBy;
+ }
+}
diff --git a/packages/core-api/src/repositories/index.ts b/packages/core-api/src/repositories/index.ts
new file mode 100644
index 0000000000..585fe72b50
--- /dev/null
+++ b/packages/core-api/src/repositories/index.ts
@@ -0,0 +1,7 @@
+import { BlockRepository } from "./blocks";
+import { TransactionsRepository } from "./transactions";
+
+const blocksRepository = new BlockRepository();
+const transactionsRepository = new TransactionsRepository();
+
+export { blocksRepository, transactionsRepository, BlockRepository, TransactionsRepository };
diff --git a/packages/core-api/src/repositories/repository.ts b/packages/core-api/src/repositories/repository.ts
new file mode 100644
index 0000000000..9dae5dcf47
--- /dev/null
+++ b/packages/core-api/src/repositories/repository.ts
@@ -0,0 +1,110 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, TransactionPool } from "@arkecosystem/core-interfaces";
+import snakeCase from "lodash/snakeCase";
+import { IRepository } from "../interfaces";
+
+export abstract class Repository implements IRepository {
+ public databaseService = app.resolvePlugin("database");
+ public cache = this.databaseService.cache;
+ public transactionPool = app.resolvePlugin("transactionPool");
+ public model = this.getModel();
+ public query = this.model.query();
+ public columns: string[] = [];
+
+ protected constructor() {
+ this.__mapColumns();
+ }
+
+ // todo: Introduce a generic param to return type-safe models
+ public abstract getModel(): any;
+
+ public async _find(query): Promise {
+ return (this.databaseService.connection as any).query.oneOrNone(query.toQuery());
+ }
+
+ public async _findMany(query): Promise {
+ return (this.databaseService.connection as any).query.manyOrNone(query.toQuery());
+ }
+
+ public async _findManyWithCount(selectQuery, { limit, offset, orderBy }): Promise {
+ if (Array.isArray(orderBy) && this.columns.includes(orderBy[0])) {
+ selectQuery.order(this.query[snakeCase(orderBy[0])][orderBy[1]]);
+ }
+
+ const offsetIsSet = Number.isInteger(offset) && offset > 0;
+ const limitIsSet = Number.isInteger(limit);
+
+ if (!offsetIsSet && !limitIsSet) {
+ // tslint:disable-next-line:no-shadowed-variable
+ const rows = await this._findMany(selectQuery);
+
+ return { rows, count: rows.length };
+ }
+
+ selectQuery.offset(offset).limit(limit);
+
+ const rows = await this._findMany(selectQuery);
+
+ if (rows.length < limit) {
+ return { rows, count: offset + rows.length };
+ }
+
+ // Get the last rows=... from something that looks like (1 column, few rows):
+ //
+ // QUERY PLAN
+ // ------------------------------------------------------------------
+ // Limit (cost=15.34..15.59 rows=100 width=622)
+ // -> Sort (cost=15.34..15.64 rows=120 width=622)
+ // Sort Key: "timestamp" DESC
+ // -> Seq Scan on transactions (cost=0.00..11.20 rows=120 width=622)
+
+ let count = 0;
+ const explainSql = `EXPLAIN ${selectQuery.toString()}`;
+ for (const row of await (this.databaseService.connection as any).query.manyOrNone(explainSql)) {
+ const line: any = Object.values(row)[0];
+ const match = line.match(/rows=([0-9]+)/);
+ if (match !== null) {
+ count = Number(match[1]);
+ }
+ }
+
+ return { rows, count: Math.max(count, rows.length) };
+ }
+
+ public _makeCountQuery(): Promise {
+ return this.query.select("count(*) AS count").from(this.query);
+ }
+
+ public _makeEstimateQuery(): Promise {
+ return this.query.select("count(*) AS count").from(`${this.model.getTable()} TABLESAMPLE SYSTEM (100)`);
+ }
+
+ public _formatConditions(parameters): any {
+ const columns = this.model.getColumnSet().columns.map(column => ({
+ name: column.name,
+ prop: column.prop || column.name,
+ }));
+
+ return Object.keys(parameters)
+ .filter(arg => this.columns.includes(arg))
+ .reduce((items, item) => {
+ const column = columns.find(value => value.name === item || value.prop === item);
+
+ column ? (items[column.name] = parameters[item]) : delete items[item];
+
+ return items;
+ }, {});
+ }
+
+ public __mapColumns(): void {
+ this.columns = [];
+
+ for (const column of this.model.getColumnSet().columns) {
+ this.columns.push(column.name);
+
+ if (column.prop) {
+ this.columns.push(column.prop);
+ }
+ }
+ }
+}
diff --git a/packages/core-api/src/repositories/transactions.ts b/packages/core-api/src/repositories/transactions.ts
new file mode 100644
index 0000000000..59a0a6dbc4
--- /dev/null
+++ b/packages/core-api/src/repositories/transactions.ts
@@ -0,0 +1,521 @@
+import { constants, slots } from "@arkecosystem/crypto";
+import dayjs from "dayjs-ext";
+import partition from "lodash/partition";
+import snakeCase from "lodash/snakeCase";
+import { IRepository } from "../interfaces";
+import { Repository } from "./repository";
+import { buildFilterQuery } from "./utils/build-filter-query";
+
+export class TransactionsRepository extends Repository implements IRepository {
+ constructor() {
+ super();
+ }
+
+ /**
+ * Get all transactions.
+ * @param {Object} params
+ * @return {Object}
+ */
+ public async findAll(parameters: any = {}, sequenceOrder: "asc" | "desc" = "desc"): Promise {
+ const selectQuery = this.query.select().from(this.query);
+
+ if (parameters.senderId) {
+ const senderPublicKey = this.__publicKeyFromAddress(parameters.senderId);
+
+ if (!senderPublicKey) {
+ return { rows: [], count: 0 };
+ }
+
+ parameters.senderPublicKey = senderPublicKey;
+ }
+
+ const conditions = Object.entries(this._formatConditions(parameters));
+
+ if (conditions.length) {
+ const first = conditions.shift();
+
+ selectQuery.where(this.query[first[0]].equals(first[1]));
+
+ for (const condition of conditions) {
+ selectQuery.and(this.query[condition[0]].equals(condition[1]));
+ }
+ }
+
+ if (parameters.ownerId) {
+ const owner = this.databaseService.walletManager.findByAddress(parameters.ownerId);
+
+ selectQuery.and(this.query.sender_public_key.equals(owner.publicKey));
+ selectQuery.or(this.query.recipient_id.equals(owner.address));
+ }
+
+ this.__orderBy(selectQuery, parameters, sequenceOrder);
+
+ const results = await this._findManyWithCount(selectQuery, {
+ limit: parameters.limit,
+ offset: parameters.offset,
+ orderBy: null,
+ });
+
+ results.rows = await this.__mapBlocksToTransactions(results.rows);
+
+ return results;
+ }
+
+ /**
+ * Get all transactions (LEGACY, for V1 only).
+ * @param {Object} params
+ * @return {Object}
+ */
+ public async findAllLegacy(parameters: any = {}): Promise {
+ const selectQuery = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query);
+
+ if (parameters.senderId) {
+ parameters.senderPublicKey = this.__publicKeyFromAddress(parameters.senderId);
+ }
+
+ const applyConditions = queries => {
+ const conditions = Object.entries(this._formatConditions(parameters));
+
+ if (conditions.length) {
+ const first = conditions.shift();
+
+ for (const item of queries) {
+ item.where(this.query[first[0]].equals(first[1]));
+
+ for (const [key, value] of conditions) {
+ item.or(this.query[key].equals(value));
+ }
+ }
+ }
+ };
+
+ applyConditions([selectQuery]);
+
+ this.__orderBy(selectQuery, parameters);
+
+ const results = await this._findManyWithCount(selectQuery, {
+ limit: parameters.limit,
+ offset: parameters.offset,
+ orderBy: null,
+ });
+
+ results.rows = await this.__mapBlocksToTransactions(results.rows);
+
+ return results;
+ }
+
+ /**
+ * Get all transactions for the given Wallet object.
+ * @param {Wallet} wallet
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAllByWallet(wallet, parameters: any = {}): Promise {
+ const selectQuery = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query);
+
+ const applyConditions = queries => {
+ for (const item of queries) {
+ item.where(this.query.sender_public_key.equals(wallet.publicKey)).or(
+ this.query.recipient_id.equals(wallet.address),
+ );
+ }
+ };
+
+ applyConditions([selectQuery]);
+
+ this.__orderBy(selectQuery, parameters);
+
+ const results = await this._findManyWithCount(selectQuery, {
+ limit: parameters.limit,
+ offset: parameters.offset,
+ orderBy: null,
+ });
+
+ results.rows = await this.__mapBlocksToTransactions(results.rows);
+
+ return results;
+ }
+
+ /**
+ * Get all transactions for the given sender public key.
+ * @param {String} senderPublicKey
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAllBySender(senderPublicKey, parameters: any = {}): Promise {
+ return this.findAll({ ...{ senderPublicKey }, ...parameters });
+ }
+
+ /**
+ * Get all transactions for the given recipient address.
+ * @param {String} recipientId
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAllByRecipient(recipientId, parameters: any = {}): Promise {
+ return this.findAll({ ...{ recipientId }, ...parameters });
+ }
+
+ /**
+ * Get all vote transactions for the given sender public key.
+ * TODO rename to findAllVotesBySender or not?
+ * @param {String} senderPublicKey
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async allVotesBySender(senderPublicKey, parameters: any = {}): Promise {
+ return this.findAll({
+ ...{ senderPublicKey, type: constants.TransactionTypes.Vote },
+ ...parameters,
+ });
+ }
+
+ /**
+ * Get all transactions for the given block.
+ * @param {Number} blockId
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAllByBlock(blockId, parameters: any = {}): Promise {
+ return this.findAll({ ...{ blockId }, ...parameters }, "asc");
+ }
+
+ /**
+ * Get all transactions for the given type.
+ * @param {Number} type
+ * @param {Object} parameters
+ * @return {Object}
+ */
+ public async findAllByType(type, parameters: any = {}): Promise {
+ return this.findAll({ ...{ type }, ...parameters });
+ }
+
+ /**
+ * Get a transaction.
+ * @param {Number} id
+ * @return {Object}
+ */
+ public async findById(id): Promise {
+ const query = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query)
+ .where(this.query.id.equals(id));
+
+ const transaction = await this._find(query);
+
+ return this.__mapBlocksToTransactions(transaction);
+ }
+
+ /**
+ * Get a transactions for the given type and id.
+ * @param {Number} type
+ * @param {Number} id
+ * @return {Object}
+ */
+ public async findByTypeAndId(type, id): Promise {
+ const query = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query)
+ .where(this.query.id.equals(id).and(this.query.type.equals(type)));
+
+ const transaction = await this._find(query);
+
+ return this.__mapBlocksToTransactions(transaction);
+ }
+
+ /**
+ * Get transactions for the given ids.
+ * @param {Array} ids
+ * @return {Object}
+ */
+ public async findByIds(ids): Promise {
+ const query = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query)
+ .where(this.query.id.in(ids));
+
+ return this._findMany(query);
+ }
+
+ /**
+ * Get all transactions that have a vendor field.
+ * @return {Object}
+ */
+ public async findWithVendorField(): Promise {
+ const query = this.query
+ .select(this.query.block_id, this.query.serialized, this.query.timestamp)
+ .from(this.query)
+ .where(this.query.vendor_field_hex.isNotNull());
+
+ const transactions = await this._findMany(query);
+
+ return this.__mapBlocksToTransactions(transactions);
+ }
+
+ /**
+ * Calculates min, max and average fee statistics based on transactions table
+ * @return {Object}
+ */
+ public async getFeeStatistics(): Promise {
+ const query = this.query
+ .select(
+ this.query.type,
+ this.query.fee.min("minFee"),
+ this.query.fee.max("maxFee"),
+ this.query.fee.avg("avgFee"),
+ this.query.timestamp.max("timestamp"),
+ )
+ .from(this.query)
+ .where(
+ this.query.timestamp.gte(
+ slots.getTime(
+ dayjs()
+ .subtract(30, "day")
+ .valueOf(),
+ ),
+ ),
+ )
+ .and(this.query.fee.gte(this.transactionPool.options.dynamicFees.minFeeBroadcast))
+ .group(this.query.type)
+ .order('"timestamp" DESC');
+
+ return this._findMany(query);
+ }
+
+ /**
+ * Search all transactions.
+ *
+ * @param {Object} parameters
+ * @param {Number} [parameters.limit] - Limit the number of results
+ * @param {Number} [parameters.offset] - Skip some results
+ * @param {Array} [parameters.orderBy] - Order of the results
+ * @param {String} [parameters.id] - Search by transaction id
+ * @param {String} [parameters.blockId] - Search by block id
+ * @param {String} [parameters.recipientId] - Search by recipient address
+ * @param {String} [parameters.senderPublicKey] - Search by sender public key
+ * @param {String} [parameters.senderId] - Search by sender address
+ * @param {Array} [parameters.addresses] - Search by senders or recipients addresses
+ * @param {Number} [parameters.type] - Search by transaction type
+ * @param {Number} [parameters.version] - Search by transaction version
+ * @param {Object} [parameters.timestamp] - Search by transaction date
+ * @param {Number} [parameters.timestamp.from] - Since date
+ * @param {Number} [parameters.timestamp.to] - Until date
+ * @param {Object} [parameters.amount] - Search by transaction amount
+ * @param {Number} [parameters.amount.from] - From amount
+ * @param {Number} [parameters.amount.to] - To date
+ * @param {Object} [parameters.fee] - Search by transaction fee
+ * @param {Number} [parameters.fee.from] - From fee
+ * @param {Number} [parameters.fee.to] - To fee
+ * @return {Object}
+ */
+ public async search(parameters): Promise {
+ const selectQuery = this.query.select().from(this.query);
+
+ const filters = {
+ exact: ["id", "block_id", "type", "version"],
+ between: ["timestamp", "amount", "fee"],
+ wildcard: ["vendor_field_hex"],
+ in: [],
+ };
+
+ if (parameters.senderId) {
+ const senderPublicKey = this.__publicKeyFromAddress(parameters.senderId);
+
+ if (senderPublicKey) {
+ parameters.senderPublicKey = senderPublicKey;
+ } else {
+ return { count: 0, rows: [] };
+ }
+ }
+
+ if (parameters.recipientId) {
+ filters.exact.push("recipient_id");
+ }
+ if (parameters.senderPublicKey) {
+ filters.exact.push("sender_public_key");
+ }
+
+ // When both participants, sender and recipient, are provided, searching by addresses is not useful
+ if (parameters.addresses) {
+ if (!parameters.recipientId) {
+ filters.in.push("recipient_id");
+ parameters.recipientId = parameters.addresses;
+ }
+ if (!parameters.senderPublicKey) {
+ filters.in.push("sender_public_key");
+ parameters.senderPublicKey = parameters.addresses.map(address => {
+ return this.__publicKeyFromAddress(address);
+ });
+ }
+ }
+
+ const conditions = buildFilterQuery(this._formatConditions(parameters), filters);
+
+ /*
+ * Searching by `addresses` could create queries:
+ * - 1 `senderPublicKey` AND n `recipientId`
+ * - n `senderPublicKey` AND 1 `recipientId`.
+ * - n `senderPublicKey` OR n `recipientId`.
+ */
+ if (conditions.length) {
+ const [participants, rest] = partition(conditions, condition => {
+ return ["sender_public_key", "recipient_id"].indexOf(condition.column) > -1;
+ });
+
+ if (participants.length > 0) {
+ const [first, last] = participants;
+ selectQuery.where(this.query[first.column][first.method](first.value));
+
+ if (last) {
+ const usesInOperator = participants.every(condition => condition.method === "in");
+ if (usesInOperator) {
+ selectQuery.or(this.query[last.column][last.method](last.value));
+ } else {
+ // This search is 1 `senderPublicKey` and 1 `recipientId`
+ selectQuery.and(this.query[last.column][last.method](last.value));
+ }
+ }
+ } else if (rest.length) {
+ const first = rest.shift();
+ selectQuery.where(this.query[first.column][first.method](first.value));
+ }
+
+ for (const condition of rest) {
+ selectQuery.and(this.query[condition.column][condition.method](condition.value));
+ }
+ }
+
+ this.__orderBy(selectQuery, parameters);
+
+ const results = await this._findManyWithCount(selectQuery, {
+ limit: parameters.limit || 100,
+ offset: parameters.offset || 0,
+ orderBy: null,
+ });
+
+ results.rows = await this.__mapBlocksToTransactions(results.rows);
+
+ return results;
+ }
+
+ public getModel(): object {
+ return (this.databaseService.connection as any).models.transaction;
+ }
+
+ /**
+ * [__mapBlocksToTransactions description]
+ * @param {Array|Object} data
+ * @return {Object}
+ */
+ public async __mapBlocksToTransactions(data): Promise {
+ const blockQuery = (this.databaseService.connection as any).models.block.query();
+
+ // Array...
+ if (Array.isArray(data)) {
+ // 1. get heights from cache
+ const missingFromCache = [];
+
+ for (let i = 0; i < data.length; i++) {
+ const cachedBlock = this.__getBlockCache(data[i].blockId);
+
+ if (cachedBlock) {
+ data[i].block = cachedBlock;
+ } else {
+ missingFromCache.push({
+ index: i,
+ blockId: data[i].blockId,
+ });
+ }
+ }
+
+ // 2. get missing heights from database
+ if (missingFromCache.length) {
+ const query = blockQuery
+ .select(blockQuery.id, blockQuery.height)
+ .from(blockQuery)
+ .where(blockQuery.id.in(missingFromCache.map(d => d.blockId)))
+ .group(blockQuery.id);
+
+ const blocks = await this._findMany(query);
+
+ for (const missing of missingFromCache) {
+ const block = blocks.find(item => item.id === missing.blockId);
+ if (block) {
+ data[missing.index].block = block;
+ this.__setBlockCache(block);
+ }
+ }
+ }
+
+ return data;
+ }
+
+ // Object...
+ if (data) {
+ const cachedBlock = this.__getBlockCache(data.blockId);
+
+ if (cachedBlock) {
+ data.block = cachedBlock;
+ } else {
+ const query = blockQuery
+ .select(blockQuery.id, blockQuery.height)
+ .from(blockQuery)
+ .where(blockQuery.id.equals(data.blockId));
+
+ data.block = await this._find(query);
+
+ this.__setBlockCache(data.block);
+ }
+ }
+
+ return data;
+ }
+
+ /**
+ * Tries to retrieve the height of the block from the cache
+ * @param {String} blockId
+ * @return {Object|null}
+ */
+ public __getBlockCache(blockId): any {
+ const height = this.cache.get(`heights:${blockId}`);
+
+ return height ? { height, id: blockId } : null;
+ }
+
+ /**
+ * Stores the height of the block on the cache
+ * @param {Object} block
+ * @param {String} block.id
+ * @param {Number} block.height
+ */
+ public __setBlockCache({ id, height }): void {
+ this.cache.set(`heights:${id}`, height);
+ }
+
+ /**
+ * Retrieves the publicKey of the address from the WalletManager in-memory data
+ * @param {String} senderId
+ * @return {String}
+ */
+ public __publicKeyFromAddress(senderId): string {
+ if (this.databaseService.walletManager.exists(senderId)) {
+ return this.databaseService.walletManager.findByAddress(senderId).publicKey;
+ }
+
+ return null;
+ }
+
+    public __orderBy(selectQuery, parameters, sequenceOrder: "asc" | "desc" = "desc"): void {
+        // Validate "column:direction" input; fall back to the default ordering otherwise
+        let orderBy = parameters.orderBy ? parameters.orderBy.split(":").map(p => p.toLowerCase()) : [];
+        if (orderBy.length !== 2 || !["desc", "asc"].includes(orderBy[1]) || !this.columns.includes(snakeCase(orderBy[0]))) {
+            orderBy = ["timestamp", "desc"];
+        }
+        selectQuery.order(this.query[snakeCase(orderBy[0])][orderBy[1]]);
+        selectQuery.order(this.query.sequence[sequenceOrder]);
+    }
+}
diff --git a/packages/core-api/src/repositories/utils/build-filter-query.ts b/packages/core-api/src/repositories/utils/build-filter-query.ts
new file mode 100644
index 0000000000..dd2e26f7fa
--- /dev/null
+++ b/packages/core-api/src/repositories/utils/build-filter-query.ts
@@ -0,0 +1,77 @@
+export function buildFilterQuery(parameters, filters) {
+    const where = [];
+
+    if (filters.hasOwnProperty("exact")) {
+        for (const elem of filters.exact) {
+            if (typeof parameters[elem] !== "undefined") {
+                where.push({
+                    column: elem,
+                    method: "equals",
+                    value: parameters[elem],
+                });
+            }
+        }
+    }
+
+    if (filters.hasOwnProperty("between")) {
+        for (const elem of filters.between) {
+            if (!parameters[elem]) {
+                continue;
+            }
+
+            if (!parameters[elem].hasOwnProperty("from") && !parameters[elem].hasOwnProperty("to")) {
+                where.push({
+                    column: elem,
+                    method: "equals",
+                    value: parameters[elem],
+                });
+            }
+
+            if (parameters[elem].hasOwnProperty("from") || parameters[elem].hasOwnProperty("to")) {
+                // Range filter: emit gte/lte conditions for the provided bounds.
+
+                if (parameters[elem].hasOwnProperty("from")) {
+                    where.push({
+                        column: elem,
+                        method: "gte",
+                        value: parameters[elem].from,
+                    });
+                }
+
+                if (parameters[elem].hasOwnProperty("to")) {
+                    where.push({
+                        column: elem,
+                        method: "lte",
+                        value: parameters[elem].to,
+                    });
+                }
+            }
+        }
+    }
+
+    if (filters.hasOwnProperty("in")) {
+        for (const elem of filters.in) {
+            if (parameters[elem]) {
+                where.push({
+                    column: elem,
+                    method: "in",
+                    value: parameters[elem],
+                });
+            }
+        }
+    }
+
+    if (filters.hasOwnProperty("wildcard")) {
+        for (const elem of filters.wildcard) {
+            if (parameters[elem]) {
+                where.push({
+                    column: elem,
+                    method: "like",
+                    value: `%${parameters[elem]}%`,
+                });
+            }
+        }
+    }
+
+    return where;
+}
diff --git a/packages/core-api/src/server.ts b/packages/core-api/src/server.ts
new file mode 100644
index 0000000000..a329e40e62
--- /dev/null
+++ b/packages/core-api/src/server.ts
@@ -0,0 +1,147 @@
+import { app } from "@arkecosystem/core-container";
+import { createSecureServer, createServer, mountServer, plugins } from "@arkecosystem/core-http-utils";
+import { Logger } from "@arkecosystem/core-interfaces";
+import Hapi from "hapi";
+
+export class Server {
+ private logger = app.resolvePlugin("logger");
+
+ private http: any;
+ private https: any;
+
+ public constructor(private config: any) {}
+
+ public async start(): Promise {
+ const options = {
+ host: this.config.host,
+ port: this.config.port,
+ routes: {
+ cors: {
+ additionalHeaders: ["api-version"],
+ },
+ validate: {
+ async failAction(request, h, err) {
+ throw err;
+ },
+ },
+ },
+ };
+
+ if (this.config.enabled) {
+ this.http = await createServer(options);
+ this.http.app.config = this.config;
+
+ this.registerPlugins("HTTP", this.http);
+ }
+
+ if (this.config.ssl.enabled) {
+ this.https = await createSecureServer(options, null, this.config.ssl);
+ this.https.app.config = this.config;
+
+ this.registerPlugins("HTTPS", this.https);
+ }
+ }
+
+ public async stop(): Promise {
+ if (this.http) {
+ this.logger.info(`Stopping Public HTTP API`);
+ await this.http.stop();
+ }
+
+ if (this.https) {
+ this.logger.info(`Stopping Public HTTPS API`);
+ await this.https.stop();
+ }
+ }
+
+ public async restart(): Promise {
+ if (this.http) {
+ await this.http.stop();
+ await this.http.start();
+ }
+
+ if (this.https) {
+ await this.https.stop();
+ await this.https.start();
+ }
+ }
+
+ public instance(type: string): Hapi.Server {
+ return this[type];
+ }
+
+ private async registerPlugins(name: string, server: Hapi.Server): Promise {
+ // TODO: enable after mainnet migration
+ // await server.register({ plugin: plugins.contentType })
+
+ await server.register({
+ plugin: plugins.corsHeaders,
+ });
+
+ await server.register({
+ plugin: plugins.whitelist,
+ options: {
+ whitelist: this.config.whitelist,
+ name: "Public API",
+ },
+ });
+
+ await server.register({
+ plugin: require("./plugins/set-headers"),
+ });
+
+ await server.register({
+ plugin: require("hapi-api-version"),
+ options: this.config.versions,
+ });
+
+ await server.register({
+ plugin: require("./plugins/endpoint-version"),
+ options: { validVersions: this.config.versions.validVersions },
+ });
+
+ await server.register({
+ plugin: require("./plugins/caster"),
+ });
+
+ await server.register({
+ plugin: require("./plugins/validation"),
+ });
+
+ await server.register({
+ plugin: require("hapi-rate-limit"),
+ options: this.config.rateLimit,
+ });
+
+ await server.register({
+ plugin: require("hapi-pagination"),
+ options: {
+ meta: {
+ baseUri: "",
+ },
+ query: {
+ limit: {
+ default: this.config.pagination.limit,
+ },
+ },
+ results: {
+ name: "data",
+ },
+ routes: {
+ include: this.config.pagination.include,
+ exclude: ["*"],
+ },
+ },
+ });
+
+ for (const plugin of this.config.plugins) {
+ if (typeof plugin.plugin === "string") {
+ plugin.plugin = require(plugin.plugin);
+ }
+
+ await server.register(plugin);
+ }
+
+ await mountServer(`Public ${name.toUpperCase()} API`, server);
+ }
+}
diff --git a/packages/core-api/src/services/cache.ts b/packages/core-api/src/services/cache.ts
new file mode 100644
index 0000000000..f0f354bee2
--- /dev/null
+++ b/packages/core-api/src/services/cache.ts
@@ -0,0 +1,43 @@
+import { app } from "@arkecosystem/core-container";
+import { createHash } from "crypto";
+import Hapi from "hapi";
+
+export class ServerCache {
+ public static make(server: Hapi.Server): ServerCache {
+ return new ServerCache(server);
+ }
+
+ private constructor(readonly server: Hapi.Server) {}
+
+ public method(name: string, method: any, expiresIn: number, argsCallback?: any): this {
+ let options = {};
+
+ // @ts-ignore
+ if (this.server.app.config.cache.enabled) {
+ options = {
+ cache: {
+ expiresIn: expiresIn * 1000,
+ generateTimeout: this.getCacheTimeout(),
+ getDecoratedValue: true,
+ },
+ generateKey: request => this.generateCacheKey(argsCallback(request)),
+ };
+ }
+
+ this.server.method(name, method, options);
+
+ return this;
+ }
+
+ private generateCacheKey(value: object): string {
+ return createHash("sha256")
+ .update(JSON.stringify(value))
+ .digest("hex");
+ }
+
+ private getCacheTimeout(): number | boolean {
+ const { generateTimeout } = app.resolveOptions("api").cache;
+
+ return JSON.parse(generateTimeout);
+ }
+}
diff --git a/packages/core-api/src/services/index.ts b/packages/core-api/src/services/index.ts
new file mode 100644
index 0000000000..e60aa06216
--- /dev/null
+++ b/packages/core-api/src/services/index.ts
@@ -0,0 +1,4 @@
+import { ServerCache } from "./cache";
+import { transformerService } from "./transformer";
+
+export { ServerCache, transformerService };
diff --git a/packages/core-api/src/services/transformer.ts b/packages/core-api/src/services/transformer.ts
new file mode 100644
index 0000000000..832778a47f
--- /dev/null
+++ b/packages/core-api/src/services/transformer.ts
@@ -0,0 +1,53 @@
+import { transformAccountLegacy } from "../versions/1/accounts/transformer";
+import { transformBlockLegacy } from "../versions/1/blocks/transformer";
+import { transformDelegateLegacy } from "../versions/1/delegates/transformer";
+import { transformPeerLegacy } from "../versions/1/peers/transformer";
+import { transformFeeStatisticsLegacy } from "../versions/1/shared/transformers/fee-statistics";
+import { transformPortsLegacy } from "../versions/1/shared/transformers/ports";
+import { transformVoterLegacy } from "../versions/1/shared/transformers/voter";
+import { transformTransactionLegacy } from "../versions/1/transactions/transformer";
+
+import { transformBlock } from "../versions/2/blocks/transformer";
+import { transformDelegate } from "../versions/2/delegates/transformer";
+import { transformPeer } from "../versions/2/peers/transformer";
+import { transformFeeStatistics } from "../versions/2/shared/transformers/fee-statistics";
+import { transformPorts } from "../versions/2/shared/transformers/ports";
+import { transformTransaction } from "../versions/2/transactions/transformer";
+import { transformWallet } from "../versions/2/wallets/transformer";
+
+class Transformer {
+ private transformers: Map = new Map();
+
+ public constructor() {
+ this.transformers.set(1, {
+ account: transformAccountLegacy,
+ block: transformBlockLegacy,
+ delegate: transformDelegateLegacy,
+ "fee-statistics": transformFeeStatisticsLegacy,
+ peer: transformPeerLegacy,
+ ports: transformPortsLegacy,
+ transaction: transformTransactionLegacy,
+ voter: transformVoterLegacy,
+ });
+
+ this.transformers.set(2, {
+ block: transformBlock,
+ delegate: transformDelegate,
+ "fee-statistics": transformFeeStatistics,
+ peer: transformPeer,
+ ports: transformPorts,
+ transaction: transformTransaction,
+ wallet: transformWallet,
+ });
+ }
+
+ public toResource(request, data, transformer): object {
+ return this.transformers.get(request.pre.apiVersion)[transformer](data);
+ }
+
+ public toCollection(request, data, transformer): object[] {
+ return data.map(d => this.toResource(request, d, transformer));
+ }
+}
+
+export const transformerService = new Transformer();
diff --git a/packages/core-api/src/versions/1/accounts/controller.ts b/packages/core-api/src/versions/1/accounts/controller.ts
new file mode 100644
index 0000000000..5c8a6410cc
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/controller.ts
@@ -0,0 +1,112 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class AccountsController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.accounts.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.accounts.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async balance(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.accounts.balance(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async publicKey(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.accounts.publicKey(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async fee(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ fee: this.config.getMilestone(this.blockchain.getLastHeight()).fees.staticFees.delegateRegistration,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async delegates(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const account = await this.databaseService.wallets.findById(request.query.address);
+
+ if (!account) {
+ return super.respondWith("Address not found.", true);
+ }
+
+ if (!account.vote) {
+ return super.respondWith(
+ // @ts-ignore
+ `Address ${request.query.address} hasn't voted yet.`,
+ true,
+ );
+ }
+
+ const delegate = await this.databaseService.delegates.findById(account.vote);
+
+ return super.respondWith({
+ delegates: [super.toResource(request, delegate, "delegate")],
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async top(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const wallets = this.databaseService.wallets.top(super.paginate(request));
+
+ const accounts = wallets.rows.map(account => ({
+ address: account.address,
+ balance: `${account.balance}`,
+ publicKey: account.publicKey,
+ }));
+
+ return super.respondWith({ accounts });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async count(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const { count } = await this.databaseService.wallets.findAll();
+
+ return super.respondWith({ count });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/accounts/index.ts b/packages/core-api/src/versions/1/accounts/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/accounts/methods.ts b/packages/core-api/src/versions/1/accounts/methods.ts
new file mode 100644
index 0000000000..ce6e32fa63
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/methods.ts
@@ -0,0 +1,63 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import { ServerCache } from "../../../services";
+import { paginate, respondWith, toCollection, toResource } from "../utils";
+
+const databaseService = app.resolvePlugin("database");
+
+const index = async request => {
+ const { rows } = await databaseService.wallets.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return respondWith({
+ accounts: toCollection(request, rows, "account"),
+ });
+};
+
+const show = async request => {
+ const account = await databaseService.wallets.findById(request.query.address);
+
+ if (!account) {
+ return respondWith("Account not found", true);
+ }
+
+ return respondWith({
+ account: toResource(request, account, "account"),
+ });
+};
+
+const balance = async request => {
+ const account = await databaseService.wallets.findById(request.query.address);
+
+ if (!account) {
+ return respondWith({ balance: "0", unconfirmedBalance: "0" });
+ }
+
+ return respondWith({
+ balance: account ? `${account.balance}` : "0",
+ unconfirmedBalance: account ? `${account.balance}` : "0",
+ });
+};
+
+const publicKey = async request => {
+ const account = await databaseService.wallets.findById(request.query.address);
+
+ if (!account) {
+ return respondWith("Account not found", true);
+ }
+
+ return respondWith({ publicKey: account.publicKey });
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v1.accounts.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v1.accounts.show", show, 8, request => ({ address: request.query.address }))
+ .method("v1.accounts.balance", balance, 8, request => ({ address: request.query.address }))
+ .method("v1.accounts.publicKey", publicKey, 600, request => ({ address: request.query.address }));
+}
diff --git a/packages/core-api/src/versions/1/accounts/routes.ts b/packages/core-api/src/versions/1/accounts/routes.ts
new file mode 100644
index 0000000000..b28f5133bf
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/routes.ts
@@ -0,0 +1,91 @@
+import Hapi from "hapi";
+import { AccountsController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new AccountsController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/accounts/getAllAccounts",
+ handler: controller.index,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts",
+ handler: controller.show,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getAccount,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/getBalance",
+ handler: controller.balance,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getBalance,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/getPublicKey",
+ handler: controller.publicKey,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getPublicKey,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/delegates/fee",
+ handler: controller.fee,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/delegates",
+ handler: controller.delegates,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getDelegates,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/top",
+ handler: controller.top,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.top,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/accounts/count",
+ handler: controller.count,
+ });
+}
diff --git a/packages/core-api/src/versions/1/accounts/schema.ts b/packages/core-api/src/versions/1/accounts/schema.ts
new file mode 100644
index 0000000000..f157165932
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/schema.ts
@@ -0,0 +1,73 @@
+export const getBalance: object = {
+ type: "object",
+ properties: {
+ address: {
+ type: "string",
+ minLength: 1,
+ format: "address",
+ },
+ },
+ required: ["address"],
+};
+
+export const getPublicKey: object = {
+ type: "object",
+ properties: {
+ address: {
+ type: "string",
+ minLength: 1,
+ format: "address",
+ },
+ },
+ required: ["address"],
+};
+
+export const generatePublicKey: object = {
+ type: "object",
+ properties: {
+ secret: {
+ type: "string",
+ minLength: 1,
+ },
+ },
+ required: ["secret"],
+};
+
+export const getDelegates: object = {
+ type: "object",
+ properties: {
+ address: {
+ type: "string",
+ minLength: 1,
+ format: "address",
+ },
+ },
+ required: ["address"],
+};
+
+export const getAccount: object = {
+ type: "object",
+ properties: {
+ address: {
+ type: "string",
+ minLength: 1,
+ format: "address",
+ },
+ },
+ required: ["address"],
+};
+
+export const top: object = {
+ type: "object",
+ properties: {
+ limit: {
+ type: "integer",
+ minimum: 0,
+ maximum: 100,
+ },
+ offset: {
+ type: "integer",
+ minimum: 0,
+ },
+ },
+};
diff --git a/packages/core-api/src/versions/1/accounts/transformer.ts b/packages/core-api/src/versions/1/accounts/transformer.ts
new file mode 100644
index 0000000000..efe62ec6b2
--- /dev/null
+++ b/packages/core-api/src/versions/1/accounts/transformer.ts
@@ -0,0 +1,17 @@
+export function transformAccountLegacy(model) {
+ const hasSecondSignature = !!model.secondPublicKey;
+
+ return {
+ address: model.address,
+ publicKey: model.publicKey,
+ secondPublicKey: model.secondPublicKey,
+ votes: model.votes,
+ username: model.username,
+ balance: `${model.balance}`,
+ unconfirmedBalance: `${model.balance}`,
+ multisignatures: [],
+ u_multisignatures: [],
+ unconfirmedSignature: hasSecondSignature ? 1 : 0,
+ secondSignature: hasSecondSignature ? 1 : 0,
+ };
+}
diff --git a/packages/core-api/src/versions/1/blocks/controller.ts b/packages/core-api/src/versions/1/blocks/controller.ts
new file mode 100644
index 0000000000..ef791fe30b
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/controller.ts
@@ -0,0 +1,143 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class BlocksController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.blocks.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.blocks.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async epoch(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ epoch: this.config.getMilestone(this.blockchain.getLastHeight()).epoch,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async height(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const block = this.blockchain.getLastBlock();
+
+ return super.respondWith({ height: block.data.height, id: block.data.id });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async nethash(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({ nethash: this.config.get("network.nethash") });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async fee(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ fee: this.config.getMilestone(this.blockchain.getLastHeight()).fees.staticFees.transfer,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async fees(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastHeight = this.blockchain.getLastHeight();
+ const fees = this.config.getMilestone(lastHeight).fees.staticFees;
+
+ return super.respondWith({
+ fees: {
+ send: fees.transfer,
+ vote: fees.vote,
+ secondsignature: fees.secondSignature,
+ delegate: fees.delegateRegistration,
+ multisignature: fees.multiSignature,
+ },
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async milestone(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ milestone: Math.floor(this.blockchain.getLastHeight() / 3000000),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async reward(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ reward: this.config.getMilestone(this.blockchain.getLastHeight()).reward,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async supply(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+ const constants = this.config.getMilestone(lastBlock.data.height);
+ const rewards = bignumify(constants.reward).times(lastBlock.data.height - constants.height);
+
+ return super.respondWith({
+ supply: +bignumify(this.config.get("genesisBlock.totalAmount"))
+ .plus(rewards)
+ .toFixed(),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async status(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+ const constants = this.config.getMilestone(lastBlock.data.height);
+ const rewards = bignumify(constants.reward).times(lastBlock.data.height - constants.height);
+
+ return super.respondWith({
+ epoch: constants.epoch,
+ height: lastBlock.data.height,
+ fee: constants.fees.staticFees.transfer,
+ milestone: Math.floor(lastBlock.data.height / 3000000),
+ nethash: this.config.get("network.nethash"),
+ reward: constants.reward,
+ supply: +bignumify(this.config.get("genesisBlock.totalAmount"))
+ .plus(rewards)
+ .toFixed(),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/blocks/index.ts b/packages/core-api/src/versions/1/blocks/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/blocks/methods.ts b/packages/core-api/src/versions/1/blocks/methods.ts
new file mode 100644
index 0000000000..3a61ae2fe3
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/methods.ts
@@ -0,0 +1,40 @@
+import { blocksRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWith, toCollection, toResource } from "../utils";
+
+const index = async request => {
+ const { count, rows } = await blocksRepository.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ if (!rows) {
+ return respondWith("No blocks found", true);
+ }
+
+ return respondWith({
+ blocks: toCollection(request, rows, "block"),
+ count,
+ });
+};
+
+const show = async request => {
+ const block = await blocksRepository.findById(request.query.id);
+
+ if (!block) {
+ return respondWith(`Block with id ${request.query.id} not found`, true);
+ }
+
+ return respondWith({
+ block: toResource(request, block, "block"),
+ });
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v1.blocks.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v1.blocks.show", show, 600, request => ({ id: request.query.id }));
+}
diff --git a/packages/core-api/src/versions/1/blocks/routes.ts b/packages/core-api/src/versions/1/blocks/routes.ts
new file mode 100644
index 0000000000..40cf97c72e
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/routes.ts
@@ -0,0 +1,100 @@
+import Hapi from "hapi";
+import { BlocksController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new BlocksController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/blocks",
+ handler: controller.index,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getBlocks,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/get",
+ handler: controller.show,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getBlock,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getEpoch",
+ handler: controller.epoch,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getHeight",
+ handler: controller.height,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getheight",
+ handler: controller.height,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getNethash",
+ handler: controller.nethash,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getFee",
+ handler: controller.fee,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getFees",
+ handler: controller.fees,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getfees",
+ handler: controller.fees,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getMilestone",
+ handler: controller.milestone,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getReward",
+ handler: controller.reward,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getSupply",
+ handler: controller.supply,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/getStatus",
+ handler: controller.status,
+ });
+}
diff --git a/packages/core-api/src/versions/1/blocks/schema.ts b/packages/core-api/src/versions/1/blocks/schema.ts
new file mode 100644
index 0000000000..552e3048bc
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/schema.ts
@@ -0,0 +1,50 @@
+export const getBlock: object = {
+ type: "object",
+ properties: {
+ id: {
+ type: "string",
+ minLength: 1,
+ },
+ },
+ required: ["id"],
+};
+
+export const getBlocks: object = {
+ type: "object",
+ properties: {
+ limit: {
+ type: "integer",
+ minimum: 0,
+ maximum: 100,
+ },
+ orderBy: {
+ type: "string",
+ },
+ offset: {
+ type: "integer",
+ minimum: 0,
+ },
+ generatorPublicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ totalAmount: {
+ type: "integer",
+ minimum: 0,
+ },
+ totalFee: {
+ type: "integer",
+ minimum: 0,
+ },
+ reward: {
+ type: "integer",
+ minimum: 0,
+ },
+ previousBlock: {
+ type: "string",
+ },
+ height: {
+ type: "integer",
+ },
+ },
+};
diff --git a/packages/core-api/src/versions/1/blocks/transformer.ts b/packages/core-api/src/versions/1/blocks/transformer.ts
new file mode 100644
index 0000000000..6facbc8ff7
--- /dev/null
+++ b/packages/core-api/src/versions/1/blocks/transformer.ts
@@ -0,0 +1,27 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain } from "@arkecosystem/core-interfaces";
+import { bignumify } from "@arkecosystem/core-utils";
+
+export function transformBlockLegacy(model) {
+ const lastBlock = app.resolvePlugin("blockchain").getLastBlock();
+
+ return {
+ id: model.id,
+ version: model.version,
+ timestamp: model.timestamp,
+ previousBlock: model.previousBlock,
+ height: model.height,
+ numberOfTransactions: model.numberOfTransactions,
+ totalAmount: +bignumify(model.totalAmount).toFixed(),
+ totalForged: +bignumify(model.reward)
+ .plus(model.totalFee)
+ .toString(),
+ totalFee: +bignumify(model.totalFee).toFixed(),
+ reward: +bignumify(model.reward).toFixed(),
+ payloadLength: model.payloadLength,
+ payloadHash: model.payloadHash,
+ generatorPublicKey: model.generatorPublicKey,
+ blockSignature: model.blockSignature,
+ confirmations: lastBlock ? lastBlock.data.height - model.height : 0,
+ };
+}
diff --git a/packages/core-api/src/versions/1/delegates/controller.ts b/packages/core-api/src/versions/1/delegates/controller.ts
new file mode 100644
index 0000000000..4ee2ce26f1
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/controller.ts
@@ -0,0 +1,119 @@
+import { slots } from "@arkecosystem/crypto";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class DelegatesController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.delegates.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.delegates.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async count(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.delegates.count(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async search(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.delegates.search(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async voters(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.delegates.voters(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async fee(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ fee: this.config.getMilestone(this.blockchain.getLastHeight()).fees.staticFees.delegateRegistration,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async forged(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const wallet = this.databaseService.walletManager.findByPublicKey(
+ // @ts-ignore
+ request.query.generatorPublicKey,
+ );
+
+ return super.respondWith({
+ fees: Number(wallet.forgedFees),
+ rewards: Number(wallet.forgedRewards),
+ forged: Number(wallet.forgedFees) + Number(wallet.forgedRewards),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async nextForgers(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+ // @ts-ignore
+ const limit = request.query.limit || 10;
+
+ const delegatesCount = this.config.getMilestone(lastBlock).activeDelegates;
+ const currentSlot = slots.getSlotNumber(lastBlock.data.timestamp);
+
+ let activeDelegates = await this.databaseService.getActiveDelegates(lastBlock.data.height);
+ activeDelegates = activeDelegates.map(delegate => delegate.publicKey);
+
+ const nextForgers = [];
+ for (let i = 1; i <= delegatesCount && i <= limit; i++) {
+ const delegate = activeDelegates[(currentSlot + i) % delegatesCount];
+
+ if (delegate) {
+ nextForgers.push(delegate);
+ }
+ }
+
+ return super.respondWith({
+ currentBlock: lastBlock.data.height,
+ currentSlot,
+ delegates: nextForgers,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/delegates/index.ts b/packages/core-api/src/versions/1/delegates/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/delegates/methods.ts b/packages/core-api/src/versions/1/delegates/methods.ts
new file mode 100644
index 0000000000..a0024fd189
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/methods.ts
@@ -0,0 +1,90 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import { ServerCache } from "../../../services";
+import { paginate, respondWith, toCollection, toResource } from "../utils";
+
+const databaseService = app.resolvePlugin("database");
+
+const index = async request => {
+ const { count, rows } = await databaseService.delegates.findAll({
+ ...request.query,
+ ...{
+ offset: request.query.offset || 0,
+ limit: request.query.limit || 51,
+ },
+ });
+
+ return respondWith({
+ delegates: toCollection(request, rows, "delegate"),
+ totalCount: count,
+ });
+};
+
+const show = async request => {
+ if (!request.query.publicKey && !request.query.username) {
+ return respondWith("Delegate not found", true);
+ }
+
+ const delegate = await databaseService.delegates.findById(request.query.publicKey || request.query.username);
+
+ if (!delegate) {
+ return respondWith("Delegate not found", true);
+ }
+
+ return respondWith({
+ delegate: toResource(request, delegate, "delegate"),
+ });
+};
+
+const countDelegates = async request => {
+ const delegate = await databaseService.delegates.findAll();
+
+ return respondWith({ count: delegate.count });
+};
+
+const search = async request => {
+ const { rows } = await databaseService.delegates.search({
+ ...{ username: request.query.q },
+ ...paginate(request),
+ });
+
+ return respondWith({
+ delegates: toCollection(request, rows, "delegate"),
+ });
+};
+
+const voters = async request => {
+ const delegate = await databaseService.delegates.findById(request.query.publicKey);
+
+ if (!delegate) {
+ return respondWith({
+ accounts: [],
+ });
+ }
+
+ const accounts = await databaseService.wallets.findAllByVote(delegate.publicKey);
+
+ return respondWith({
+ accounts: toCollection(request, accounts.rows, "voter"),
+ });
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v1.delegates.index", index, 8, request => ({
+ ...request.query,
+ ...{
+ offset: request.query.offset || 0,
+ limit: request.query.limit || 51,
+ },
+ }))
+ .method("v1.delegates.show", show, 8, request => ({
+ id: request.query.publicKey || request.query.username,
+ }))
+ .method("v1.delegates.count", countDelegates, 8, request => ({ time: +new Date() }))
+ .method("v1.delegates.search", search, 8, request => ({
+ ...{ username: request.query.q },
+ ...paginate(request),
+ }))
+ .method("v1.delegates.voters", voters, 8, request => ({ id: request.query.publicKey }));
+}
diff --git a/packages/core-api/src/versions/1/delegates/routes.ts b/packages/core-api/src/versions/1/delegates/routes.ts
new file mode 100644
index 0000000000..a1f1f4f572
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/routes.ts
@@ -0,0 +1,91 @@
+import Hapi from "hapi";
+import { DelegatesController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new DelegatesController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/delegates",
+ handler: controller.index,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getDelegates,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/get",
+ handler: controller.show,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getDelegate,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/count",
+ handler: controller.count,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/search",
+ handler: controller.search,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.search,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/voters",
+ handler: controller.voters,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getVoters,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/fee",
+ handler: controller.fee,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/forging/getForgedByAccount",
+ handler: controller.forged,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getForgedByAccount,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/getNextForgers",
+ handler: controller.nextForgers,
+ });
+}
diff --git a/packages/core-api/src/versions/1/delegates/schema.ts b/packages/core-api/src/versions/1/delegates/schema.ts
new file mode 100644
index 0000000000..2cdab0866d
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/schema.ts
@@ -0,0 +1,84 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain } from "@arkecosystem/core-interfaces";
+
+const lastBlock = app.resolvePlugin("blockchain").getLastBlock();
+
+export const forgingStatus: object = {
+ type: "object",
+ properties: {
+ publicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ },
+ required: ["publicKey"],
+};
+
+export const getDelegate: object = {
+ type: "object",
+ properties: {
+ publicKey: {
+ type: "string",
+ },
+ username: {
+ type: "string",
+ },
+ },
+};
+
+export const search: object = {
+ type: "object",
+ properties: {
+ q: {
+ type: "string",
+ minLength: 1,
+ maxLength: 20,
+ },
+ limit: {
+ type: "integer",
+ minimum: 1,
+ maximum: 100,
+ },
+ },
+ required: ["q"],
+};
+
+export const getVoters: object = {
+ type: "object",
+ properties: {
+ publicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ },
+ required: ["publicKey"],
+};
+
+export const getDelegates: object = {
+ type: "object",
+ properties: {
+ orderBy: {
+ type: "string",
+ },
+ limit: {
+ type: "integer",
+ minimum: 1,
+ maximum: lastBlock ? app.getConfig().getMilestone(lastBlock.data.height).activeDelegates : 51,
+ },
+ offset: {
+ type: "integer",
+ minimum: 0,
+ },
+ },
+};
+
+export const getForgedByAccount: object = {
+ type: "object",
+ properties: {
+ generatorPublicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ },
+ required: ["generatorPublicKey"],
+};
diff --git a/packages/core-api/src/versions/1/delegates/transformer.ts b/packages/core-api/src/versions/1/delegates/transformer.ts
new file mode 100644
index 0000000000..84cff93572
--- /dev/null
+++ b/packages/core-api/src/versions/1/delegates/transformer.ts
@@ -0,0 +1,16 @@
+import { delegateCalculator } from "@arkecosystem/core-utils";
+
+export function transformDelegateLegacy(model) {
+ return {
+ username: model.username,
+ address: model.address,
+ publicKey: model.publicKey,
+ vote: `${model.voteBalance}`,
+ producedblocks: model.producedBlocks,
+ missedblocks: model.missedBlocks,
+ forged: model.forged,
+ rate: model.rate,
+ approval: delegateCalculator.calculateApproval(model),
+ productivity: delegateCalculator.calculateProductivity(model),
+ };
+}
diff --git a/packages/core-api/src/versions/1/index.ts b/packages/core-api/src/versions/1/index.ts
new file mode 100644
index 0000000000..5218afe26f
--- /dev/null
+++ b/packages/core-api/src/versions/1/index.ts
@@ -0,0 +1,22 @@
+import Hapi from "hapi";
+import * as Accounts from "./accounts";
+import * as Blocks from "./blocks";
+import * as Delegates from "./delegates";
+import * as Loader from "./loader";
+import * as Peers from "./peers";
+import * as Signatures from "./signatures";
+import * as Transactions from "./transactions";
+
+const register = async (server: Hapi.Server): Promise => {
+ const modules = [Accounts, Blocks, Delegates, Loader, Peers, Signatures, Transactions];
+
+ for (const module of modules) {
+ module.register(server);
+ }
+};
+
+export = {
+ register,
+ name: "Public API - Legacy",
+ version: "1.0.0",
+};
diff --git a/packages/core-api/src/versions/1/loader/controller.ts b/packages/core-api/src/versions/1/loader/controller.ts
new file mode 100644
index 0000000000..3f159f532f
--- /dev/null
+++ b/packages/core-api/src/versions/1/loader/controller.ts
@@ -0,0 +1,66 @@
+import { app } from "@arkecosystem/core-container";
+import Boom from "boom";
+import Hapi from "hapi";
+import { transactionsRepository } from "../../../repositories";
+import { Controller } from "../shared/controller";
+
+export class LoaderController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return { data: true };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async status(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+
+ return super.respondWith({
+ loaded: this.blockchain.isSynced(),
+ now: lastBlock ? lastBlock.data.height : 0,
+ blocksCount: this.blockchain.p2p.getNetworkHeight() - (lastBlock ? lastBlock.data.height : 0),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async syncing(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+
+ return super.respondWith({
+ syncing: !this.blockchain.isSynced(),
+ blocks: this.blockchain.p2p.getNetworkHeight() - lastBlock.data.height,
+ height: lastBlock.data.height,
+ id: lastBlock.data.id,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async autoconfigure(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const feeStatisticsData = await transactionsRepository.getFeeStatistics();
+
+ const network = this.config.get("network");
+
+ return super.respondWith({
+ network: {
+ nethash: network.nethash,
+ token: network.client.token,
+ symbol: network.client.symbol,
+ explorer: network.client.explorer,
+ version: network.pubKeyHash,
+ ports: super.toResource(request, this.config, "ports"),
+ feeStatistics: super.toCollection(request, feeStatisticsData, "fee-statistics"),
+ },
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/loader/index.ts b/packages/core-api/src/versions/1/loader/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/1/loader/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/loader/routes.ts b/packages/core-api/src/versions/1/loader/routes.ts
new file mode 100644
index 0000000000..cc14deef54
--- /dev/null
+++ b/packages/core-api/src/versions/1/loader/routes.ts
@@ -0,0 +1,25 @@
+import Hapi from "hapi";
+import { LoaderController } from "./controller";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new LoaderController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/loader/status",
+ handler: controller.status,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/loader/status/sync",
+ handler: controller.syncing,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/loader/autoconfigure",
+ handler: controller.autoconfigure,
+ });
+}
diff --git a/packages/core-api/src/versions/1/peers/controller.ts b/packages/core-api/src/versions/1/peers/controller.ts
new file mode 100644
index 0000000000..ac40e3fc0d
--- /dev/null
+++ b/packages/core-api/src/versions/1/peers/controller.ts
@@ -0,0 +1,111 @@
+import { app } from "@arkecosystem/core-container";
+import { P2P } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class PeersController extends Controller {
+ protected p2p: P2P.IMonitor;
+
+ public constructor() {
+ super();
+
+ this.p2p = app.resolvePlugin("p2p");
+ }
+
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const allPeers: any[] = await this.p2p.getPeers();
+
+ if (!allPeers) {
+ return super.respondWith("No peers found", true);
+ }
+
+ let peers = allPeers
+ .map(peer => {
+ // just use 'OK' status for API instead of p2p http status codes
+ peer.status = peer.status === 200 ? "OK" : peer.status;
+ return peer;
+ })
+ .sort((a, b) => a.delay - b.delay);
+ // @ts-ignore
+ peers = request.query.os
+ ? // @ts-ignore
+ allPeers.filter(peer => peer.os === (request.query as any).os)
+ : peers;
+ // @ts-ignore
+ peers = request.query.status
+ ? // @ts-ignore
+ allPeers.filter(peer => peer.status === (request.query as any).status)
+ : peers;
+ // @ts-ignore
+ peers = request.query.port
+ ? // @ts-ignore
+ allPeers.filter(peer => peer.port === (request.query as any).port)
+ : peers;
+ // @ts-ignore
+ peers = request.query.version
+ ? // @ts-ignore
+ allPeers.filter(peer => peer.version === (request.query as any).version)
+ : peers;
+ // @ts-ignore
+ peers = peers.slice(0, request.query.limit || 100);
+
+ // @ts-ignore
+ if (request.query.orderBy) {
+ // @ts-ignore
+ const order = request.query.orderBy.split(":");
+ if (["port", "status", "os", "version"].includes(order[0])) {
+ peers =
+ order[1].toUpperCase() === "ASC"
+ ? peers.sort((a, b) => a[order[0]] - b[order[0]])
+ : peers.sort((a, b) => a[order[0]] + b[order[0]]);
+ }
+ }
+
+ return super.respondWith({
+ peers: super.toCollection(request, peers.map(peer => peer.toBroadcastInfo()), "peer"),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const peers = await this.p2p.getPeers();
+ if (!peers) {
+ return super.respondWith("No peers found", true);
+ }
+
+ const peer = peers.find(
+ // @ts-ignore
+ elem => elem.ip === (request.query as any).ip && +elem.port === +request.query.port,
+ );
+
+ if (!peer) {
+ return super.respondWith(
+ // @ts-ignore
+ `Peer ${request.query.ip}:${request.query.port} not found`,
+ true,
+ );
+ }
+
+ return super.respondWith({
+ peer: super.toResource(request, peer.toBroadcastInfo(), "peer"),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async version(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return super.respondWith({
+ version: app.getVersion(),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/peers/index.ts b/packages/core-api/src/versions/1/peers/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/1/peers/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/peers/routes.ts b/packages/core-api/src/versions/1/peers/routes.ts
new file mode 100644
index 0000000000..7291d152ad
--- /dev/null
+++ b/packages/core-api/src/versions/1/peers/routes.ts
@@ -0,0 +1,40 @@
+import Hapi from "hapi";
+import { PeersController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new PeersController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/peers",
+ handler: controller.index,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getPeers,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/peers/get",
+ handler: controller.show,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getPeer,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/peers/version",
+ handler: controller.version,
+ });
+}
diff --git a/packages/core-api/src/versions/1/peers/schema.ts b/packages/core-api/src/versions/1/peers/schema.ts
new file mode 100644
index 0000000000..087a724c01
--- /dev/null
+++ b/packages/core-api/src/versions/1/peers/schema.ts
@@ -0,0 +1,50 @@
+export const getPeers: object = {
+ type: "object",
+ properties: {
+ port: {
+ type: "integer",
+ minimum: 1,
+ maximum: 65535,
+ },
+ status: {
+ type: "string",
+ maxLength: 20,
+ },
+ os: {
+ type: "string",
+ maxLength: 64,
+ },
+ version: {
+ type: "string",
+ maxLength: 11,
+ },
+ orderBy: {
+ type: "string",
+ },
+ limit: {
+ type: "integer",
+ minimum: 0,
+ maximum: 100,
+ },
+ offset: {
+ type: "integer",
+ minimum: 0,
+ },
+ },
+};
+
+export const getPeer: object = {
+ type: "object",
+ properties: {
+ ip: {
+ type: "string",
+ format: "ip",
+ },
+ port: {
+ type: "integer",
+ minimum: 0,
+ maximum: 65535,
+ },
+ },
+ required: ["ip", "port"],
+};
diff --git a/packages/core-api/src/versions/1/peers/transformer.ts b/packages/core-api/src/versions/1/peers/transformer.ts
new file mode 100644
index 0000000000..3571e2f861
--- /dev/null
+++ b/packages/core-api/src/versions/1/peers/transformer.ts
@@ -0,0 +1,21 @@
+import { app } from "@arkecosystem/core-container";
+
+export function transformPeerLegacy(model) {
+ const config = app.getConfig();
+
+ const peer: any = {
+ ip: model.ip,
+ port: model.port,
+ version: model.version,
+ height: model.height,
+ status: [200, "OK"].includes(model.status) ? "OK" : "ERROR",
+ os: model.os,
+ delay: model.delay,
+ };
+
+ if (config.get("network.name") !== "mainnet") {
+ peer.hashid = model.hashid;
+ }
+
+ return peer;
+}
diff --git a/packages/core-api/src/versions/1/shared/controller.ts b/packages/core-api/src/versions/1/shared/controller.ts
new file mode 100644
index 0000000000..7e43933440
--- /dev/null
+++ b/packages/core-api/src/versions/1/shared/controller.ts
@@ -0,0 +1,31 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain, Database, Logger } from "@arkecosystem/core-interfaces";
+import Hapi from "hapi";
+import { paginate, respondWith, respondWithCache, toCollection, toResource } from "../utils";
+
+export class Controller {
+ protected config = app.getConfig();
+ protected blockchain = app.resolvePlugin("blockchain");
+ protected databaseService = app.resolvePlugin("database");
+ protected logger = app.resolvePlugin("logger");
+
+ protected paginate(request: Hapi.Request): any {
+ return paginate(request);
+ }
+
+ protected respondWith(data, error = false): object {
+ return respondWith(data, error);
+ }
+
+ protected respondWithCache(data, h): any {
+ return respondWithCache(data, h);
+ }
+
+ protected toResource(request, data, transformer): object {
+ return toResource(request, data, transformer);
+ }
+
+ protected toCollection(request, data, transformer): object {
+ return toCollection(request, data, transformer);
+ }
+}
diff --git a/packages/core-api/src/versions/1/shared/transformers/fee-statistics.ts b/packages/core-api/src/versions/1/shared/transformers/fee-statistics.ts
new file mode 100644
index 0000000000..ba64483456
--- /dev/null
+++ b/packages/core-api/src/versions/1/shared/transformers/fee-statistics.ts
@@ -0,0 +1,10 @@
+export function transformFeeStatisticsLegacy(model: any) {
+ return {
+ type: model.type,
+ fees: {
+ minFee: parseInt(model.minFee, 10),
+ maxFee: parseInt(model.maxFee, 10),
+ avgFee: parseInt(model.avgFee, 10),
+ },
+ };
+}
diff --git a/packages/core-api/src/versions/1/shared/transformers/ports.ts b/packages/core-api/src/versions/1/shared/transformers/ports.ts
new file mode 100644
index 0000000000..579a94b156
--- /dev/null
+++ b/packages/core-api/src/versions/1/shared/transformers/ports.ts
@@ -0,0 +1,32 @@
+export function transformPortsLegacy(config: any) {
+ const result = {};
+ const keys = [
+ "@arkecosystem/core-p2p",
+ "@arkecosystem/core-api",
+ "@arkecosystem/core-graphql",
+ "@arkecosystem/core-json-rpc",
+ "@arkecosystem/core-webhooks",
+ ];
+
+ const plugins = config.get("plugins");
+
+ result[keys[0]] = +plugins[keys[0]].port;
+
+ for (const [name, options] of Object.entries(plugins)) {
+ // @ts-ignore
+ if (keys.includes(name) && options.enabled) {
+ // @ts-ignore
+ if (options.server && options.server.enabled) {
+ // @ts-ignore
+ result[name] = +options.server.port;
+
+ continue;
+ }
+
+ // @ts-ignore
+ result[name] = +options.port;
+ }
+ }
+
+ return result;
+}
diff --git a/packages/core-api/src/versions/1/shared/transformers/voter.ts b/packages/core-api/src/versions/1/shared/transformers/voter.ts
new file mode 100644
index 0000000000..c595446bae
--- /dev/null
+++ b/packages/core-api/src/versions/1/shared/transformers/voter.ts
@@ -0,0 +1,8 @@
+export function transformVoterLegacy(model: any) {
+ return {
+ username: model.username,
+ address: model.address,
+ publicKey: model.publicKey,
+ balance: `${model.balance}`,
+ };
+}
diff --git a/packages/core-api/src/versions/1/signatures/controller.ts b/packages/core-api/src/versions/1/signatures/controller.ts
new file mode 100644
index 0000000000..ebe6a1ffc6
--- /dev/null
+++ b/packages/core-api/src/versions/1/signatures/controller.ts
@@ -0,0 +1,17 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class SignaturesController extends Controller {
+ public async fee(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const height: number = this.blockchain.getLastHeight();
+
+ return super.respondWith({
+ fee: this.config.getMilestone(height).fees.staticFees.secondSignature,
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/signatures/index.ts b/packages/core-api/src/versions/1/signatures/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/1/signatures/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/signatures/routes.ts b/packages/core-api/src/versions/1/signatures/routes.ts
new file mode 100644
index 0000000000..758c6afd82
--- /dev/null
+++ b/packages/core-api/src/versions/1/signatures/routes.ts
@@ -0,0 +1,13 @@
+import Hapi from "hapi";
+import { SignaturesController } from "./controller";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new SignaturesController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/signatures/fee",
+ handler: controller.fee,
+ });
+}
diff --git a/packages/core-api/src/versions/1/transactions/controller.ts b/packages/core-api/src/versions/1/transactions/controller.ts
new file mode 100644
index 0000000000..15ce629a3f
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/controller.ts
@@ -0,0 +1,76 @@
+import { app } from "@arkecosystem/core-container";
+import { TransactionPool } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class TransactionsController extends Controller {
+ protected transactionPool = app.resolvePlugin("transactionPool");
+
+ public constructor() {
+ super();
+ }
+
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.transactions.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v1.transactions.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async unconfirmed(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const pagination = super.paginate(request);
+
+ const transactions = this.transactionPool
+ .getTransactions(pagination.offset, pagination.limit)
+ .map(transaction => ({
+ serialized: transaction,
+ }));
+
+ return super.respondWith({
+ transactions: super.toCollection(request, transactions, "transaction"),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async showUnconfirmed(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const transaction = this.transactionPool.getTransaction(request.query.id);
+
+ if (!transaction) {
+ return super.respondWith("Transaction not found", true);
+ }
+
+ return super.respondWith({
+ transaction: super.toResource(
+ request,
+ {
+ serialized: transaction.serialized,
+ },
+ "transaction",
+ ),
+ });
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/1/transactions/index.ts b/packages/core-api/src/versions/1/transactions/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/1/transactions/methods.ts b/packages/core-api/src/versions/1/transactions/methods.ts
new file mode 100644
index 0000000000..9e4dac3261
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/methods.ts
@@ -0,0 +1,40 @@
+import { transactionsRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWith, toCollection, toResource } from "../utils";
+
+const index = async request => {
+ const { count, rows } = await transactionsRepository.findAllLegacy({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ if (!rows) {
+ return respondWith("No transactions found", true);
+ }
+
+ return respondWith({
+ transactions: toCollection(request, rows, "transaction"),
+ count,
+ });
+};
+
+const show = async request => {
+ const result = await transactionsRepository.findById(request.query.id);
+
+ if (!result) {
+ return respondWith("No transactions found", true);
+ }
+
+ return respondWith({
+ transaction: toResource(request, result, "transaction"),
+ });
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v1.transactions.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v1.transactions.show", show, 8, request => ({ id: request.query.id }));
+}
diff --git a/packages/core-api/src/versions/1/transactions/routes.ts b/packages/core-api/src/versions/1/transactions/routes.ts
new file mode 100644
index 0000000000..b31a618d21
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/routes.ts
@@ -0,0 +1,46 @@
+import Hapi from "hapi";
+import { TransactionsController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new TransactionsController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/transactions",
+ handler: controller.index,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getTransactions,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/get",
+ handler: controller.show,
+ options: {
+ plugins: {
+ "hapi-ajv": {
+ querySchema: Schema.getTransaction,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/unconfirmed",
+ handler: controller.unconfirmed,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/unconfirmed/get",
+ handler: controller.showUnconfirmed,
+ });
+}
diff --git a/packages/core-api/src/versions/1/transactions/schema.ts b/packages/core-api/src/versions/1/transactions/schema.ts
new file mode 100644
index 0000000000..f9522579f5
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/schema.ts
@@ -0,0 +1,93 @@
+export const getTransactions: object = {
+ type: "object",
+ properties: {
+ blockId: {
+ type: "string",
+ },
+ limit: {
+ type: "integer",
+ minimum: 0,
+ maximum: 100,
+ },
+ type: {
+ type: "integer",
+ minimum: 0,
+ maximum: 10,
+ },
+ orderBy: {
+ type: "string",
+ },
+ offset: {
+ type: "integer",
+ minimum: 0,
+ },
+ senderPublicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ vendorField: {
+ type: "string",
+ format: "vendorField",
+ },
+ ownerPublicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ ownerAddress: {
+ type: "string",
+ },
+ senderId: {
+ type: "string",
+ format: "address",
+ },
+ recipientId: {
+ type: "string",
+ format: "address",
+ },
+ amount: {
+ type: "integer",
+ minimum: 0,
+ maximum: 10 ** 8,
+ },
+ fee: {
+ type: "integer",
+ minimum: 0,
+ maximum: 10 ** 8,
+ },
+ },
+};
+
+export const getTransaction: object = {
+ type: "object",
+ properties: {
+ id: {
+ type: "string",
+ minLength: 1,
+ },
+ },
+ required: ["id"],
+};
+
+export const getUnconfirmedTransaction: object = {
+ type: "object",
+ properties: {
+ id: {
+ type: "string",
+ minLength: 1,
+ },
+ },
+ required: ["id"],
+};
+
+export const getUnconfirmedTransactions: object = {
+ type: "object",
+ properties: {
+ senderPublicKey: {
+ type: "string",
+ format: "publicKey",
+ },
+ address: {
+ type: "string",
+ },
+ },
+};
diff --git a/packages/core-api/src/versions/1/transactions/transformer.ts b/packages/core-api/src/versions/1/transactions/transformer.ts
new file mode 100644
index 0000000000..6785d8b343
--- /dev/null
+++ b/packages/core-api/src/versions/1/transactions/transformer.ts
@@ -0,0 +1,29 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain } from "@arkecosystem/core-interfaces";
+import { bignumify } from "@arkecosystem/core-utils";
+import { crypto, models } from "@arkecosystem/crypto";
+
+export function transformTransactionLegacy(model) {
+ const config = app.getConfig();
+ const blockchain = app.resolvePlugin("blockchain");
+
+ const data: any = new models.Transaction(model.serialized.toString("hex"));
+
+ return {
+ id: data.id,
+ blockid: model.blockId,
+ type: data.type,
+ timestamp: model.timestamp || data.timestamp,
+ amount: +bignumify(data.amount).toFixed(),
+ fee: +bignumify(data.fee).toFixed(),
+ recipientId: data.recipientId,
+ senderId: crypto.getAddress(data.senderPublicKey, config.get("network.pubKeyHash")),
+ senderPublicKey: data.senderPublicKey,
+ vendorField: data.vendorField,
+ signature: data.signature,
+ signSignature: data.signSignature,
+ signatures: data.signatures,
+ asset: data.asset || {},
+ confirmations: model.block ? blockchain.getLastBlock().data.height - model.block.height : 0,
+ };
+}
diff --git a/packages/core-api/src/versions/1/utils.ts b/packages/core-api/src/versions/1/utils.ts
new file mode 100644
index 0000000000..8ccb75962c
--- /dev/null
+++ b/packages/core-api/src/versions/1/utils.ts
@@ -0,0 +1,35 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { transformerService } from "../../services/transformer";
+
+function paginate(request: Hapi.Request): any {
+ return {
+ // @ts-ignore
+ offset: request.query.offset || 0,
+ // @ts-ignore
+ limit: request.query.limit || 100,
+ };
+}
+
+function respondWith(data, error = false): object {
+ return error ? { error: data, success: false } : { ...data, success: true };
+}
+
+function respondWithCache(data, h): any {
+ const { value, cached } = data;
+ const lastModified = cached ? new Date(cached.stored) : new Date();
+
+ return value.isBoom
+ ? h.response(value.output.payload).code(value.output.statusCode)
+ : h.response(value).header("Last-modified", lastModified.toUTCString());
+}
+
+function toResource(request, data, transformer): object {
+ return transformerService.toResource(request, data, transformer);
+}
+
+function toCollection(request, data, transformer): object {
+ return transformerService.toCollection(request, data, transformer);
+}
+
+export { paginate, respondWith, respondWithCache, toResource, toCollection };
diff --git a/packages/core-api/src/versions/2/blockchain/controller.ts b/packages/core-api/src/versions/2/blockchain/controller.ts
new file mode 100644
index 0000000000..ed112186cc
--- /dev/null
+++ b/packages/core-api/src/versions/2/blockchain/controller.ts
@@ -0,0 +1,24 @@
+import { supplyCalculator } from "@arkecosystem/core-utils";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class BlockchainController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+
+ return {
+ data: {
+ block: {
+ height: lastBlock.data.height,
+ id: lastBlock.data.id,
+ },
+ supply: supplyCalculator.calculate(lastBlock.data.height),
+ },
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/blockchain/index.ts b/packages/core-api/src/versions/2/blockchain/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/2/blockchain/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/blockchain/routes.ts b/packages/core-api/src/versions/2/blockchain/routes.ts
new file mode 100644
index 0000000000..9d8f4bad20
--- /dev/null
+++ b/packages/core-api/src/versions/2/blockchain/routes.ts
@@ -0,0 +1,13 @@
+import Hapi from "hapi";
+import { BlockchainController } from "./controller";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new BlockchainController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/blockchain",
+ handler: controller.index,
+ });
+}
diff --git a/packages/core-api/src/versions/2/blocks/controller.ts b/packages/core-api/src/versions/2/blocks/controller.ts
new file mode 100644
index 0000000000..6882959c98
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/controller.ts
@@ -0,0 +1,49 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class BlocksController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.blocks.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.blocks.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async transactions(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.blocks.transactions(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async search(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.blocks.search(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/blocks/index.ts b/packages/core-api/src/versions/2/blocks/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/blocks/methods.ts b/packages/core-api/src/versions/2/blocks/methods.ts
new file mode 100644
index 0000000000..a12628c4d8
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/methods.ts
@@ -0,0 +1,67 @@
+import Boom from "boom";
+import { blocksRepository, transactionsRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWithResource, toPagination } from "../utils";
+
+const index = async request => {
+ const blocks = await blocksRepository.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, blocks, "block");
+};
+
+const show = async request => {
+ const block = await blocksRepository.findById(request.params.id);
+
+ if (!block) {
+ return Boom.notFound("Block not found");
+ }
+
+ return respondWithResource(request, block, "block");
+};
+
+const transactions = async request => {
+ const block = await blocksRepository.findById(request.params.id);
+
+ if (!block) {
+ return Boom.notFound("Block not found");
+ }
+
+ const rows = await transactionsRepository.findAllByBlock(block.id, {
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, rows, "transaction");
+};
+
+const search = async request => {
+ const blocks = await blocksRepository.search({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, blocks, "block");
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v2.blocks.index", index, 6, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.blocks.show", show, 600, request => ({ id: request.params.id }))
+ .method("v2.blocks.transactions", transactions, 600, request => ({
+ ...{ id: request.params.id },
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.blocks.search", search, 30, request => ({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ }));
+}
diff --git a/packages/core-api/src/versions/2/blocks/routes.ts b/packages/core-api/src/versions/2/blocks/routes.ts
new file mode 100644
index 0000000000..ebdb465167
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/routes.ts
@@ -0,0 +1,44 @@
+import Hapi from "hapi";
+import { BlocksController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new BlocksController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/blocks",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/{id}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/blocks/{id}/transactions",
+ handler: controller.transactions,
+ options: {
+ validate: Schema.transactions,
+ },
+ });
+
+ server.route({
+ method: "POST",
+ path: "/blocks/search",
+ handler: controller.search,
+ options: {
+ validate: Schema.search,
+ },
+ });
+}
diff --git a/packages/core-api/src/versions/2/blocks/schema.ts b/packages/core-api/src/versions/2/blocks/schema.ts
new file mode 100644
index 0000000000..d8f70e1098
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/schema.ts
@@ -0,0 +1,161 @@
+import * as Joi from "joi";
+import { pagination } from "../shared/schemas/pagination";
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ version: Joi.number()
+ .integer()
+ .min(0),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ previousBlock: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ height: Joi.number()
+ .integer()
+ .positive(),
+ numberOfTransactions: Joi.number()
+ .integer()
+ .min(0),
+ totalAmount: Joi.number()
+ .integer()
+ .min(0),
+ totalFee: Joi.number()
+ .integer()
+ .min(0),
+ reward: Joi.number()
+ .integer()
+ .min(0),
+ payloadLength: Joi.number()
+ .integer()
+ .positive(),
+ payloadHash: Joi.string().hex(),
+ generatorPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ blockSignature: Joi.string().hex(),
+ },
+ },
+};
+
+export const show: object = {
+ params: {
+ id: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ },
+};
+
+export const transactions: object = {
+ params: {
+ id: Joi.string(),
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(66),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .min(0),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: Joi.string()
+ .alphanum()
+ .length(34),
+ recipientId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const search: object = {
+ query: pagination,
+ payload: {
+ id: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ version: Joi.number()
+ .integer()
+ .min(0),
+ previousBlock: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ payloadHash: Joi.string().hex(),
+ generatorPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ blockSignature: Joi.string().hex(),
+ timestamp: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ height: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .positive(),
+ to: Joi.number()
+ .integer()
+ .positive(),
+ }),
+ numberOfTransactions: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ totalAmount: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ totalFee: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ reward: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ payloadLength: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ },
+};
diff --git a/packages/core-api/src/versions/2/blocks/transformer.ts b/packages/core-api/src/versions/2/blocks/transformer.ts
new file mode 100644
index 0000000000..cd22eb234e
--- /dev/null
+++ b/packages/core-api/src/versions/2/blocks/transformer.ts
@@ -0,0 +1,37 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import { bignumify, formatTimestamp } from "@arkecosystem/core-utils";
+
+export function transformBlock(model) {
+ const databaseService = app.resolvePlugin("database");
+ const generator = databaseService.walletManager.findByPublicKey(model.generatorPublicKey);
+
+ model.reward = bignumify(model.reward);
+ model.totalFee = bignumify(model.totalFee);
+
+ return {
+ id: model.id,
+ version: +model.version,
+ height: +model.height,
+ previous: model.previousBlock,
+ forged: {
+ reward: +model.reward.toFixed(),
+ fee: +model.totalFee.toFixed(),
+ total: +model.reward.plus(model.totalFee).toFixed(),
+ amount: +bignumify(model.totalAmount).toFixed(),
+ },
+ payload: {
+ hash: model.payloadHash,
+ length: model.payloadLength,
+ },
+ generator: {
+ username: generator.username,
+ address: generator.address,
+ publicKey: generator.publicKey,
+ },
+ signature: model.blockSignature,
+ confirmations: model.confirmations,
+ transactions: model.numberOfTransactions,
+ timestamp: formatTimestamp(model.timestamp),
+ };
+}
diff --git a/packages/core-api/src/versions/2/delegates/controller.ts b/packages/core-api/src/versions/2/delegates/controller.ts
new file mode 100644
index 0000000000..c36d413476
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/controller.ts
@@ -0,0 +1,71 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class DelegatesController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async search(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.search(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async blocks(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.blocks(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async voters(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.voters(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async voterBalances(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.delegates.voterBalances(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/delegates/index.ts b/packages/core-api/src/versions/2/delegates/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/delegates/methods.ts b/packages/core-api/src/versions/2/delegates/methods.ts
new file mode 100644
index 0000000000..be2342af89
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/methods.ts
@@ -0,0 +1,102 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import orderBy from "lodash/orderBy";
+import { blocksRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWithResource, toPagination } from "../utils";
+
+const databaseService = app.resolvePlugin("database");
+
+const index = async request => {
+ const delegates = await databaseService.delegates.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, delegates, "delegate");
+};
+
+const show = async request => {
+ const delegate = await databaseService.delegates.findById(request.params.id);
+
+ if (!delegate) {
+ return Boom.notFound("Delegate not found");
+ }
+
+ return respondWithResource(request, delegate, "delegate");
+};
+
+const search = async request => {
+ const delegates = await databaseService.delegates.search({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, delegates, "delegate");
+};
+
+const blocks = async request => {
+ const delegate = await databaseService.delegates.findById(request.params.id);
+
+ if (!delegate) {
+ return Boom.notFound("Delegate not found");
+ }
+
+ const rows = await blocksRepository.findAllByGenerator(delegate.publicKey, paginate(request));
+
+ return toPagination(request, rows, "block");
+};
+
+const voters = async request => {
+ const delegate = await databaseService.delegates.findById(request.params.id);
+
+ if (!delegate) {
+ return Boom.notFound("Delegate not found");
+ }
+
+ const wallets = await databaseService.wallets.findAllByVote(delegate.publicKey, paginate(request));
+
+ return toPagination(request, wallets, "wallet");
+};
+
+const voterBalances = async request => {
+ const delegate = await databaseService.delegates.findById(request.params.id);
+
+ if (!delegate) {
+ return Boom.notFound("Delegate not found");
+ }
+
+ const wallets = await databaseService.wallets.all().filter(wallet => wallet.vote === delegate.publicKey);
+
+ const data = {};
+ orderBy(wallets, ["balance"], ["desc"]).forEach(wallet => {
+ data[wallet.address] = +wallet.balance.toFixed();
+ });
+
+ return { data };
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v2.delegates.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.delegates.show", show, 8, request => ({ id: request.params.id }))
+ .method("v2.delegates.search", search, 30, request => ({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.delegates.blocks", blocks, 8, request => ({
+ ...{ id: request.params.id },
+ ...paginate(request),
+ }))
+ .method("v2.delegates.voters", voters, 8, request => ({
+ ...{ id: request.params.id },
+ ...paginate(request),
+ }))
+ .method("v2.delegates.voterBalances", voterBalances, 8, request => ({ id: request.params.id }));
+}
diff --git a/packages/core-api/src/versions/2/delegates/routes.ts b/packages/core-api/src/versions/2/delegates/routes.ts
new file mode 100644
index 0000000000..b1c7705e7c
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/routes.ts
@@ -0,0 +1,62 @@
+import Hapi from "hapi";
+import { DelegatesController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new DelegatesController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/delegates",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/{id}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/{id}/blocks",
+ handler: controller.blocks,
+ options: {
+ validate: Schema.blocks,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/{id}/voters",
+ handler: controller.voters,
+ options: {
+ validate: Schema.voters,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/delegates/{id}/voters/balances",
+ handler: controller.voterBalances,
+ options: {
+ validate: Schema.voterBalances,
+ },
+ });
+
+ server.route({
+ method: "POST",
+ path: "/delegates/search",
+ handler: controller.search,
+ options: {
+ validate: Schema.search,
+ },
+ });
+}
diff --git a/packages/core-api/src/versions/2/delegates/schema.ts b/packages/core-api/src/versions/2/delegates/schema.ts
new file mode 100644
index 0000000000..8ebb84cf26
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/schema.ts
@@ -0,0 +1,145 @@
+import * as Joi from "joi";
+import { pagination } from "../shared/schemas/pagination";
+
+const schemaIdentifier = Joi.string()
+ .regex(/^[a-zA-Z0-9!@$&_.]+$/)
+ .min(1)
+ .max(66);
+
+const schemaUsername = Joi.string()
+ .regex(/^[a-z0-9!@$&_.]+$/)
+ .min(1)
+ .max(20);
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ address: Joi.string()
+ .alphanum()
+ .length(34),
+ publicKey: Joi.string()
+ .hex()
+ .length(66),
+ secondPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ vote: Joi.string()
+ .hex()
+ .length(66),
+ username: schemaUsername,
+ balance: Joi.number()
+ .integer()
+ .min(0),
+ voteBalance: Joi.number()
+ .integer()
+ .min(0),
+ producedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ missedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ },
+ },
+};
+
+export const show: object = {
+ params: {
+ id: schemaIdentifier,
+ },
+};
+
+export const search: object = {
+ query: pagination,
+ payload: {
+ username: schemaUsername,
+ },
+};
+
+export const blocks: object = {
+ params: {
+ id: schemaIdentifier,
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ version: Joi.number()
+ .integer()
+ .min(0),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ previousBlock: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ height: Joi.number()
+ .integer()
+ .positive(),
+ numberOfTransactions: Joi.number()
+ .integer()
+ .min(0),
+ totalAmount: Joi.number()
+ .integer()
+ .min(0),
+ totalFee: Joi.number()
+ .integer()
+ .min(0),
+ reward: Joi.number()
+ .integer()
+ .min(0),
+ payloadLength: Joi.number()
+ .integer()
+ .min(0),
+ payloadHash: Joi.string().hex(),
+ generatorPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ blockSignature: Joi.string().hex(),
+ },
+ },
+};
+
+export const voters: object = {
+ params: {
+ id: schemaIdentifier,
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ address: Joi.string()
+ .alphanum()
+ .length(34),
+ publicKey: Joi.string()
+ .hex()
+ .length(66),
+ secondPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ vote: Joi.string()
+ .hex()
+ .length(66),
+ username: schemaUsername,
+ balance: Joi.number()
+ .integer()
+ .min(0),
+ voteBalance: Joi.number()
+ .integer()
+ .min(0),
+ producedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ missedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ },
+ },
+};
+
+export const voterBalances: object = {
+ params: {
+ id: schemaIdentifier,
+ },
+};
diff --git a/packages/core-api/src/versions/2/delegates/transformer.ts b/packages/core-api/src/versions/2/delegates/transformer.ts
new file mode 100644
index 0000000000..6fdb040209
--- /dev/null
+++ b/packages/core-api/src/versions/2/delegates/transformer.ts
@@ -0,0 +1,37 @@
+import { bignumify, delegateCalculator, formatTimestamp } from "@arkecosystem/core-utils";
+
+export function transformDelegate(delegate) {
+ const data = {
+ username: delegate.username,
+ address: delegate.address,
+ publicKey: delegate.publicKey,
+ votes: +bignumify(delegate.voteBalance).toFixed(),
+ rank: delegate.rate,
+ blocks: {
+ produced: delegate.producedBlocks,
+ missed: delegate.missedBlocks,
+ },
+ production: {
+ approval: delegateCalculator.calculateApproval(delegate),
+ productivity: delegateCalculator.calculateProductivity(delegate),
+ },
+ forged: {
+ fees: +delegate.forgedFees.toFixed(),
+ rewards: +delegate.forgedRewards.toFixed(),
+ total: +delegate.forgedFees.plus(delegate.forgedRewards).toFixed(),
+ },
+ };
+
+ const lastBlock = delegate.lastBlock;
+
+ if (lastBlock) {
+ // @ts-ignore
+ data.blocks.last = {
+ id: lastBlock.id,
+ height: lastBlock.height,
+ timestamp: formatTimestamp(lastBlock.timestamp),
+ };
+ }
+
+ return data;
+}
diff --git a/packages/core-api/src/versions/2/index.ts b/packages/core-api/src/versions/2/index.ts
new file mode 100644
index 0000000000..5600a74a8f
--- /dev/null
+++ b/packages/core-api/src/versions/2/index.ts
@@ -0,0 +1,23 @@
+import Hapi from "hapi";
+import * as Blockchain from "./blockchain";
+import * as Blocks from "./blocks";
+import * as Delegates from "./delegates";
+import * as Node from "./node";
+import * as Peers from "./peers";
+import * as Transactions from "./transactions";
+import * as Votes from "./votes";
+import * as Wallets from "./wallets";
+
+const register = async (server: Hapi.Server): Promise<void> => {
+ const modules = [Blockchain, Blocks, Delegates, Node, Peers, Transactions, Votes, Wallets];
+
+ for (const module of modules) {
+ module.register(server);
+ }
+};
+
+export = {
+ register,
+ name: "Public API",
+ version: "2.0.0",
+};
diff --git a/packages/core-api/src/versions/2/node/controller.ts b/packages/core-api/src/versions/2/node/controller.ts
new file mode 100644
index 0000000000..51c7fa7d84
--- /dev/null
+++ b/packages/core-api/src/versions/2/node/controller.ts
@@ -0,0 +1,68 @@
+import { app } from "@arkecosystem/core-container";
+import Boom from "boom";
+import Hapi from "hapi";
+import { transactionsRepository } from "../../../repositories";
+import { Controller } from "../shared/controller";
+
+export class NodeController extends Controller {
+ public async status(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+ const networkHeight = await this.blockchain.p2p.getNetworkHeight();
+
+ return {
+ data: {
+ synced: this.blockchain.isSynced(),
+ now: lastBlock ? lastBlock.data.height : 0,
+ blocksCount: lastBlock ? networkHeight - lastBlock.data.height || 0 : 0,
+ },
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async syncing(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const lastBlock = this.blockchain.getLastBlock();
+ const networkHeight = await this.blockchain.p2p.getNetworkHeight();
+
+ return {
+ data: {
+ syncing: !this.blockchain.isSynced(),
+ blocks: networkHeight - lastBlock.data.height || 0,
+ height: lastBlock.data.height,
+ id: lastBlock.data.id,
+ },
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async configuration(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const feeStatisticsData = await transactionsRepository.getFeeStatistics();
+
+ const network = this.config.get("network");
+
+ return {
+ data: {
+ nethash: network.nethash,
+ token: network.client.token,
+ symbol: network.client.symbol,
+ explorer: network.client.explorer,
+ version: network.pubKeyHash,
+ ports: super.toResource(request, this.config, "ports"),
+ constants: this.config.getMilestone(this.blockchain.getLastHeight()),
+ feeStatistics: super.toCollection(request, feeStatisticsData, "fee-statistics"),
+ transactionPool: {
+ maxTransactionAge: app.resolveOptions("transactionPool").maxTransactionAge,
+ },
+ },
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/node/index.ts b/packages/core-api/src/versions/2/node/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/2/node/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/node/routes.ts b/packages/core-api/src/versions/2/node/routes.ts
new file mode 100644
index 0000000000..7e7a799ddb
--- /dev/null
+++ b/packages/core-api/src/versions/2/node/routes.ts
@@ -0,0 +1,25 @@
+import Hapi from "hapi";
+import { NodeController } from "./controller";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new NodeController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/node/status",
+ handler: controller.status,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/node/syncing",
+ handler: controller.syncing,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/node/configuration",
+ handler: controller.configuration,
+ });
+}
diff --git a/packages/core-api/src/versions/2/peers/controller.ts b/packages/core-api/src/versions/2/peers/controller.ts
new file mode 100644
index 0000000000..ddf74a28c5
--- /dev/null
+++ b/packages/core-api/src/versions/2/peers/controller.ts
@@ -0,0 +1,84 @@
+import { app } from "@arkecosystem/core-container";
+import { P2P } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class PeersController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const allPeers = await this.blockchain.p2p.getPeers();
+
+ let result = allPeers.slice().sort((a, b) => a.delay - b.delay);
+ // @ts-ignore
+ result = request.query.os
+ ? // @ts-ignore
+ result.filter(peer => peer.os === (request.query as any).os)
+ : result;
+ // @ts-ignore
+ result = request.query.status
+ ? // @ts-ignore
+ result.filter(peer => peer.status === (request.query as any).status)
+ : result;
+ // @ts-ignore
+ result = request.query.port
+ ? // @ts-ignore
+ result.filter(peer => peer.port === (request.query as any).port)
+ : result;
+ // @ts-ignore
+ result = request.query.version
+ ? // @ts-ignore
+ result.filter(peer => peer.version === (request.query as any).version)
+ : result;
+ // @ts-ignore
+ result = result.slice(0, request.query.limit || 100);
+
+ // @ts-ignore
+ if (request.query.orderBy) {
+ // @ts-ignore
+ const order = request.query.orderBy.split(":");
+
+ if (["port", "status", "os", "version"].includes(order[0])) {
+ result =
+ order[1].toUpperCase() === "ASC"
+ ? result.sort((a, b) => a[order[0]] - b[order[0]])
+ : result.sort((a, b) => b[order[0]] - a[order[0]]);
+ }
+ }
+
+ return super.toPagination(request, { rows: result, count: allPeers.length }, "peer");
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const peers = await this.blockchain.p2p.getPeers();
+ const peer = peers.find(p => p.ip === request.params.ip);
+
+ if (!peer) {
+ return Boom.notFound("Peer not found");
+ }
+
+ return super.respondWithResource(request, peer, "peer");
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async suspended(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ const peers = app.resolvePlugin("p2p").getSuspendedPeers();
+
+ return super.respondWithCollection(
+ request,
+ // @ts-ignore
+ Object.values(peers).map(peer => peer.peer),
+ "peer",
+ );
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/peers/index.ts b/packages/core-api/src/versions/2/peers/index.ts
new file mode 100644
index 0000000000..f13324009a
--- /dev/null
+++ b/packages/core-api/src/versions/2/peers/index.ts
@@ -0,0 +1,6 @@
+import Hapi from "hapi";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/peers/routes.ts b/packages/core-api/src/versions/2/peers/routes.ts
new file mode 100644
index 0000000000..962002f5ea
--- /dev/null
+++ b/packages/core-api/src/versions/2/peers/routes.ts
@@ -0,0 +1,32 @@
+import Hapi from "hapi";
+import { PeersController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new PeersController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/peers",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/peers/suspended",
+ handler: controller.suspended,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/peers/{ip}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+}
diff --git a/packages/core-api/src/versions/2/peers/schema.ts b/packages/core-api/src/versions/2/peers/schema.ts
new file mode 100644
index 0000000000..762fc6e09e
--- /dev/null
+++ b/packages/core-api/src/versions/2/peers/schema.ts
@@ -0,0 +1,22 @@
+import * as Joi from "joi";
+import { pagination } from "../shared/schemas/pagination";
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ ip: Joi.string().ip(),
+ os: Joi.string(),
+ status: Joi.string(),
+ port: Joi.number().port(),
+ version: Joi.string(),
+ orderBy: Joi.string(),
+ },
+ },
+};
+
+export const show: object = {
+ params: {
+ ip: Joi.string().ip(),
+ },
+};
diff --git a/packages/core-api/src/versions/2/peers/transformer.ts b/packages/core-api/src/versions/2/peers/transformer.ts
new file mode 100644
index 0000000000..b9837f021e
--- /dev/null
+++ b/packages/core-api/src/versions/2/peers/transformer.ts
@@ -0,0 +1,21 @@
+import { app } from "@arkecosystem/core-container";
+
+export function transformPeer(model) {
+ const config = app.getConfig();
+
+ const peer: any = {
+ ip: model.ip,
+ port: +model.port,
+ version: model.version,
+ height: model.state ? model.state.height : model.height,
+ status: [200, "OK"].includes(model.status) ? 200 : 400,
+ os: model.os,
+ latency: model.delay,
+ };
+
+ if (config.get("network.name") !== "mainnet") {
+ peer.hashid = model.hashid || "unknown";
+ }
+
+ return peer;
+}
diff --git a/packages/core-api/src/versions/2/shared/controller.ts b/packages/core-api/src/versions/2/shared/controller.ts
new file mode 100644
index 0000000000..659a8474d2
--- /dev/null
+++ b/packages/core-api/src/versions/2/shared/controller.ts
@@ -0,0 +1,46 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain, Database } from "@arkecosystem/core-interfaces";
+import Hapi from "hapi";
+import {
+ paginate,
+ respondWithCache,
+ respondWithCollection,
+ respondWithResource,
+ toCollection,
+ toPagination,
+ toResource,
+} from "../utils";
+
+export class Controller {
+ protected config = app.getConfig();
+ protected blockchain = app.resolvePlugin("blockchain");
+ protected databaseService = app.resolvePlugin("database");
+
+ protected paginate(request: Hapi.Request): any {
+ return paginate(request);
+ }
+
+ protected respondWithResource(request, data, transformer): any {
+ return respondWithResource(request, data, transformer);
+ }
+
+ protected respondWithCollection(request, data, transformer): object {
+ return respondWithCollection(request, data, transformer);
+ }
+
+ protected respondWithCache(data, h) {
+ return respondWithCache(data, h);
+ }
+
+ protected toResource(request, data, transformer): object {
+ return toResource(request, data, transformer);
+ }
+
+ protected toCollection(request, data, transformer): object {
+ return toCollection(request, data, transformer);
+ }
+
+ protected toPagination(request, data, transformer): object {
+ return toPagination(request, data, transformer);
+ }
+}
diff --git a/packages/core-api/src/versions/2/shared/schemas/pagination.ts b/packages/core-api/src/versions/2/shared/schemas/pagination.ts
new file mode 100644
index 0000000000..01edca9da5
--- /dev/null
+++ b/packages/core-api/src/versions/2/shared/schemas/pagination.ts
@@ -0,0 +1,14 @@
+import * as Joi from "joi";
+
+export const pagination = {
+ page: Joi.number()
+ .integer()
+ .positive(),
+ offset: Joi.number()
+ .integer()
+ .min(0),
+ limit: Joi.number()
+ .integer()
+ .min(1)
+ .max(100),
+};
diff --git a/packages/core-api/src/versions/2/shared/transformers/fee-statistics.ts b/packages/core-api/src/versions/2/shared/transformers/fee-statistics.ts
new file mode 100644
index 0000000000..9c8309c0ff
--- /dev/null
+++ b/packages/core-api/src/versions/2/shared/transformers/fee-statistics.ts
@@ -0,0 +1,10 @@
+export function transformFeeStatistics(model: any) {
+ return {
+ type: model.type,
+ fees: {
+ minFee: parseInt(model.minFee, 10),
+ maxFee: parseInt(model.maxFee, 10),
+ avgFee: parseInt(model.avgFee, 10),
+ },
+ };
+}
diff --git a/packages/core-api/src/versions/2/shared/transformers/ports.ts b/packages/core-api/src/versions/2/shared/transformers/ports.ts
new file mode 100644
index 0000000000..3193f00719
--- /dev/null
+++ b/packages/core-api/src/versions/2/shared/transformers/ports.ts
@@ -0,0 +1,32 @@
+export function transformPorts(config: any) {
+ const result = {};
+ const keys = [
+ "@arkecosystem/core-p2p",
+ "@arkecosystem/core-api",
+ "@arkecosystem/core-graphql",
+ "@arkecosystem/core-json-rpc",
+ "@arkecosystem/core-webhooks",
+ ];
+
+ const plugins = config.get("plugins");
+
+ result[keys[0]] = +plugins[keys[0]].port;
+
+ for (const [name, options] of Object.entries(plugins)) {
+ // @ts-ignore
+ if (keys.includes(name) && options.enabled) {
+ // @ts-ignore
+ if (options.server && options.server.enabled) {
+ // @ts-ignore
+ result[name] = +options.server.port;
+
+ continue;
+ }
+
+ // @ts-ignore
+ result[name] = +options.port;
+ }
+ }
+
+ return result;
+}
diff --git a/packages/core-api/src/versions/2/transactions/controller.ts b/packages/core-api/src/versions/2/transactions/controller.ts
new file mode 100644
index 0000000000..2cbdc28ecd
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/controller.ts
@@ -0,0 +1,149 @@
+import { app } from "@arkecosystem/core-container";
+import { P2P } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+import { TransactionGuard, TransactionPool } from "@arkecosystem/core-transaction-pool";
+import { constants } from "@arkecosystem/crypto";
+
+export class TransactionsController extends Controller {
+ private transactionPool = app.resolvePlugin("transactionPool");
+
+ public constructor() {
+ super();
+ }
+
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.transactions.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async store(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ if (!this.transactionPool.options.enabled) {
+ return Boom.serverUnavailable("Transaction pool is disabled.");
+ }
+
+ const guard = new TransactionGuard(this.transactionPool);
+
+ const result = await guard.validate((request.payload as any).transactions);
+
+ if (result.broadcast.length > 0) {
+ app.resolvePlugin("p2p").broadcastTransactions(guard.getBroadcastTransactions());
+ }
+
+ return {
+ data: {
+ accept: result.accept,
+ broadcast: result.broadcast,
+ excess: result.excess,
+ invalid: result.invalid,
+ },
+ errors: result.errors,
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.transactions.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async unconfirmed(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ if (!this.transactionPool.options.enabled) {
+ return Boom.serverUnavailable("Transaction pool is disabled.");
+ }
+
+ const pagination = super.paginate(request);
+
+ let transactions = this.transactionPool.getTransactions(pagination.offset, pagination.limit);
+ transactions = transactions.map(transaction => ({
+ serialized: transaction,
+ }));
+
+ return super.toPagination(
+ request,
+ {
+ count: this.transactionPool.getPoolSize(),
+ rows: transactions,
+ },
+ "transaction",
+ );
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async showUnconfirmed(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ if (!this.transactionPool.options.enabled) {
+ return Boom.serverUnavailable("Transaction pool is disabled.");
+ }
+
+ let transaction = this.transactionPool.getTransaction(request.params.id);
+
+ if (!transaction) {
+ return Boom.notFound("Transaction not found");
+ }
+
+ transaction = { serialized: transaction.serialized };
+
+ return super.respondWithResource(request, transaction, "transaction");
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async search(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.transactions.search(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async types(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // Remove reverse mapping from TransactionTypes enum.
+ const { TransactionTypes } = constants;
+ const data = Object.assign({}, TransactionTypes);
+ Object.values(TransactionTypes)
+ .filter(value => typeof value === "string")
+ .map((type: string) => data[type])
+ .forEach((key: string) => delete data[key]);
+
+ return { data };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async fees(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ return {
+ data: this.config.getMilestone(this.blockchain.getLastHeight()).fees.staticFees,
+ };
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/transactions/index.ts b/packages/core-api/src/versions/2/transactions/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/transactions/methods.ts b/packages/core-api/src/versions/2/transactions/methods.ts
new file mode 100644
index 0000000000..04707c22d6
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/methods.ts
@@ -0,0 +1,47 @@
+import Boom from "boom";
+import { transactionsRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWithResource, toPagination } from "../utils";
+
+const index = async request => {
+ const transactions = await transactionsRepository.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, transactions, "transaction");
+};
+
+const show = async request => {
+ const transaction = await transactionsRepository.findById(request.params.id);
+
+ if (!transaction) {
+ return Boom.notFound("Transaction not found");
+ }
+
+ return respondWithResource(request, transaction, "transaction");
+};
+
+const search = async request => {
+ const transactions = await transactionsRepository.search({
+ ...request.query,
+ ...request.payload,
+ ...paginate(request),
+ });
+
+ return toPagination(request, transactions, "transaction");
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v2.transactions.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.transactions.show", show, 8, request => ({ id: request.params.id }))
+ .method("v2.transactions.search", search, 30, request => ({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ }));
+}
diff --git a/packages/core-api/src/versions/2/transactions/routes.ts b/packages/core-api/src/versions/2/transactions/routes.ts
new file mode 100644
index 0000000000..b4a9e47663
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/routes.ts
@@ -0,0 +1,79 @@
+import Hapi from "hapi";
+import { TransactionsController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new TransactionsController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/transactions",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "POST",
+ path: "/transactions",
+ handler: controller.store,
+ options: {
+ validate: Schema.store,
+ plugins: {
+ pagination: {
+ enabled: false,
+ },
+ },
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/{id}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/unconfirmed",
+ handler: controller.unconfirmed,
+ options: {
+ validate: Schema.unconfirmed,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/unconfirmed/{id}",
+ handler: controller.showUnconfirmed,
+ options: {
+ validate: Schema.showUnconfirmed,
+ },
+ });
+
+ server.route({
+ method: "POST",
+ path: "/transactions/search",
+ handler: controller.search,
+ options: {
+ validate: Schema.search,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/types",
+ handler: controller.types,
+ });
+
+ server.route({
+ method: "GET",
+ path: "/transactions/fees",
+ handler: controller.fees,
+ });
+}
diff --git a/packages/core-api/src/versions/2/transactions/schema.ts b/packages/core-api/src/versions/2/transactions/schema.ts
new file mode 100644
index 0000000000..aeacd37f26
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/schema.ts
@@ -0,0 +1,130 @@
+import { app } from "@arkecosystem/core-container";
+import { Joi } from "@arkecosystem/crypto";
+import { pagination } from "../shared/schemas/pagination";
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: Joi.string()
+ .alphanum()
+ .length(34),
+ recipientId: Joi.string()
+ .alphanum()
+ .length(34),
+ ownerId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const store: object = {
+ payload: {
+ transactions: Joi.transactionArray()
+ .min(1)
+ .max(app.resolveOptions("transactionPool").maxTransactionsPerRequest)
+ .options({ stripUnknown: true }),
+ },
+};
+
+export const show: object = {
+ params: {
+ id: Joi.string()
+ .hex()
+ .length(64),
+ },
+};
+
+export const unconfirmed: object = {
+ query: pagination,
+};
+
+export const showUnconfirmed: object = {
+ params: {
+ id: Joi.string()
+ .hex()
+ .length(64),
+ },
+};
+
+const address: object = Joi.string()
+ .alphanum()
+ .length(34);
+
+export const search: object = {
+ query: pagination,
+ payload: {
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: address,
+ recipientId: address,
+ ownerId: address,
+ addresses: Joi.array()
+ .unique()
+ .min(1)
+ .max(50)
+ .items(address),
+ vendorFieldHex: Joi.string().hex(),
+ timestamp: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ amount: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ fee: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ },
+};
diff --git a/packages/core-api/src/versions/2/transactions/transformer.ts b/packages/core-api/src/versions/2/transactions/transformer.ts
new file mode 100644
index 0000000000..1b8d48c745
--- /dev/null
+++ b/packages/core-api/src/versions/2/transactions/transformer.ts
@@ -0,0 +1,30 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain } from "@arkecosystem/core-interfaces";
+import { bignumify, formatTimestamp } from "@arkecosystem/core-utils";
+import { crypto, models } from "@arkecosystem/crypto";
+
+export function transformTransaction(model) {
+ const config = app.getConfig();
+ const blockchain = app.resolvePlugin("blockchain");
+
+ const data: any = new models.Transaction(model.serialized.toString("hex"));
+ const lastBlock = blockchain.getLastBlock();
+
+ return {
+ id: data.id,
+ blockId: model.blockId,
+ version: data.version,
+ type: data.type,
+ amount: +bignumify(data.amount).toFixed(),
+ fee: +bignumify(data.fee).toFixed(),
+ sender: crypto.getAddress(data.senderPublicKey, config.get("network.pubKeyHash")),
+ recipient: data.recipientId,
+ signature: data.signature,
+ signSignature: data.signSignature,
+ signatures: data.signatures,
+ vendorField: data.vendorField,
+ asset: data.asset,
+ confirmations: model.block ? lastBlock.data.height - model.block.height : 0,
+ timestamp: formatTimestamp(model.timestamp || data.timestamp),
+ };
+}
diff --git a/packages/core-api/src/versions/2/utils.ts b/packages/core-api/src/versions/2/utils.ts
new file mode 100644
index 0000000000..442fd4aa34
--- /dev/null
+++ b/packages/core-api/src/versions/2/utils.ts
@@ -0,0 +1,64 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { transformerService } from "../../services/transformer";
+
+function paginate(request: Hapi.Request): any {
+ const pagination = {
+ // @ts-ignore
+ offset: (request.query.page - 1) * request.query.limit || 0,
+ // @ts-ignore
+ limit: request.query.limit || 100,
+ };
+
+ // @ts-ignore
+ if (request.query.offset) {
+ // @ts-ignore
+ pagination.offset = request.query.offset;
+ }
+
+ return pagination;
+}
+
+function respondWithResource(request, data, transformer): any {
+ return data ? { data: transformerService.toResource(request, data, transformer) } : Boom.notFound();
+}
+
+function respondWithCollection(request, data, transformer): object {
+ return {
+ data: transformerService.toCollection(request, data, transformer),
+ };
+}
+
+function respondWithCache(data, h): any {
+ const { value, cached } = data;
+ const lastModified = cached ? new Date(cached.stored) : new Date();
+
+ return value.isBoom
+ ? h.response(value.output.payload).code(value.output.statusCode)
+ : h.response(value).header("Last-modified", lastModified.toUTCString());
+}
+
+function toResource(request, data, transformer): object {
+ return transformerService.toResource(request, data, transformer);
+}
+
+function toCollection(request, data, transformer): object {
+ return transformerService.toCollection(request, data, transformer);
+}
+
+function toPagination(request, data, transformer): object {
+ return {
+ results: transformerService.toCollection(request, data.rows, transformer),
+ totalCount: data.count,
+ };
+}
+
+export {
+ paginate,
+ respondWithResource,
+ respondWithCollection,
+ respondWithCache,
+ toResource,
+ toCollection,
+ toPagination,
+};
diff --git a/packages/core-api/src/versions/2/votes/controller.ts b/packages/core-api/src/versions/2/votes/controller.ts
new file mode 100644
index 0000000000..0ef2901ee5
--- /dev/null
+++ b/packages/core-api/src/versions/2/votes/controller.ts
@@ -0,0 +1,27 @@
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class VotesController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.votes.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.votes.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/votes/index.ts b/packages/core-api/src/versions/2/votes/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/2/votes/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/votes/methods.ts b/packages/core-api/src/versions/2/votes/methods.ts
new file mode 100644
index 0000000000..59863752a5
--- /dev/null
+++ b/packages/core-api/src/versions/2/votes/methods.ts
@@ -0,0 +1,35 @@
+import { constants } from "@arkecosystem/crypto";
+import Boom from "boom";
+import { transactionsRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWithResource, toPagination } from "../utils";
+
+const { TransactionTypes } = constants;
+
+const index = async request => {
+ const transactions = await transactionsRepository.findAllByType(TransactionTypes.Vote, {
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, transactions, "transaction");
+};
+
+const show = async request => {
+ const transaction = await transactionsRepository.findByTypeAndId(TransactionTypes.Vote, request.params.id);
+
+ if (!transaction) {
+ return Boom.notFound("Vote not found");
+ }
+
+ return respondWithResource(request, transaction, "transaction");
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v2.votes.index", index, 8, request => ({
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.votes.show", show, 8, request => ({ id: request.params.id }));
+}
diff --git a/packages/core-api/src/versions/2/votes/routes.ts b/packages/core-api/src/versions/2/votes/routes.ts
new file mode 100644
index 0000000000..b01252f8f5
--- /dev/null
+++ b/packages/core-api/src/versions/2/votes/routes.ts
@@ -0,0 +1,26 @@
+import Hapi from "hapi";
+import { VotesController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new VotesController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/votes",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/votes/{id}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+}
diff --git a/packages/core-api/src/versions/2/votes/schema.ts b/packages/core-api/src/versions/2/votes/schema.ts
new file mode 100644
index 0000000000..1f9acde79d
--- /dev/null
+++ b/packages/core-api/src/versions/2/votes/schema.ts
@@ -0,0 +1,45 @@
+import * as Joi from "joi";
+import { pagination } from "../shared/schemas/pagination";
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: Joi.string()
+ .alphanum()
+ .length(34),
+ recipientId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const show: object = {
+ params: {
+ id: Joi.string()
+ .hex()
+ .length(64),
+ },
+};
diff --git a/packages/core-api/src/versions/2/wallets/controller.ts b/packages/core-api/src/versions/2/wallets/controller.ts
new file mode 100644
index 0000000000..07855fffb6
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/controller.ts
@@ -0,0 +1,94 @@
+import { app } from "@arkecosystem/core-container";
+import Boom from "boom";
+import Hapi from "hapi";
+import { Controller } from "../shared/controller";
+
+export class WalletsController extends Controller {
+ public async index(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.index(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async top(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.top(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async show(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.show(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async transactions(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.transactions(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async transactionsSent(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.transactionsSent(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async transactionsReceived(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.transactionsReceived(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async votes(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.votes(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+
+ public async search(request: Hapi.Request, h: Hapi.ResponseToolkit) {
+ try {
+ // @ts-ignore
+ const data = await request.server.methods.v2.wallets.search(request);
+
+ return super.respondWithCache(data, h);
+ } catch (error) {
+ return Boom.badImplementation(error);
+ }
+ }
+}
diff --git a/packages/core-api/src/versions/2/wallets/index.ts b/packages/core-api/src/versions/2/wallets/index.ts
new file mode 100644
index 0000000000..d2511817db
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/index.ts
@@ -0,0 +1,8 @@
+import Hapi from "hapi";
+import { registerMethods } from "./methods";
+import { registerRoutes } from "./routes";
+
+export function register(server: Hapi.Server): void {
+ registerMethods(server);
+ registerRoutes(server);
+}
diff --git a/packages/core-api/src/versions/2/wallets/methods.ts b/packages/core-api/src/versions/2/wallets/methods.ts
new file mode 100644
index 0000000000..c354ff87f5
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/methods.ts
@@ -0,0 +1,154 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import Boom from "boom";
+import { transactionsRepository } from "../../../repositories";
+import { ServerCache } from "../../../services";
+import { paginate, respondWithResource, toPagination } from "../utils";
+
+const databaseService = app.resolvePlugin("database");
+
+const index = async request => {
+ const wallets = await databaseService.wallets.findAll({
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, wallets, "wallet");
+};
+
+const top = async request => {
+ const wallets = await databaseService.wallets.top(paginate(request));
+
+ return toPagination(request, wallets, "wallet");
+};
+
+const show = async request => {
+ const wallet = await databaseService.wallets.findById(request.params.id);
+
+ if (!wallet) {
+ return Boom.notFound("Wallet not found");
+ }
+
+ return respondWithResource(request, wallet, "wallet");
+};
+
+const transactions = async request => {
+ const wallet = await databaseService.wallets.findById(request.params.id);
+
+ if (!wallet) {
+ return Boom.notFound("Wallet not found");
+ }
+
+ const rows = await transactionsRepository.findAllByWallet(wallet, {
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ });
+
+ return toPagination(request, rows, "transaction");
+};
+
+const transactionsSent = async request => {
+ const wallet = await databaseService.wallets.findById(request.params.id);
+
+ if (!wallet) {
+ return Boom.notFound("Wallet not found");
+ }
+
+ // NOTE: We unset this value because it otherwise will produce a faulty SQL query
+ delete request.params.id;
+
+ const rows = await transactionsRepository.findAllBySender(wallet.publicKey, {
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ });
+
+ return toPagination(request, rows, "transaction");
+};
+
+const transactionsReceived = async request => {
+ const wallet = await databaseService.wallets.findById(request.params.id);
+
+ if (!wallet) {
+ return Boom.notFound("Wallet not found");
+ }
+
+ // NOTE: We unset this value because it otherwise will produce a faulty SQL query
+ delete request.params.id;
+
+ const rows = await transactionsRepository.findAllByRecipient(wallet.address, {
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ });
+
+ return toPagination(request, rows, "transaction");
+};
+
+const votes = async request => {
+ const wallet = await databaseService.wallets.findById(request.params.id);
+
+ if (!wallet) {
+ return Boom.notFound("Wallet not found");
+ }
+
+ // NOTE: We unset this value because it otherwise will produce a faulty SQL query
+ delete request.params.id;
+
+ const rows = await transactionsRepository.allVotesBySender(wallet.publicKey, {
+ ...request.params,
+ ...paginate(request),
+ });
+
+ return toPagination(request, rows, "transaction");
+};
+
+const search = async request => {
+ const wallets = await databaseService.wallets.search({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ });
+
+ return toPagination(request, wallets, "wallet");
+};
+
+export function registerMethods(server) {
+ ServerCache.make(server)
+ .method("v2.wallets.index", index, 30, request => ({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ }))
+ .method("v2.wallets.top", top, 30, request => paginate(request))
+ .method("v2.wallets.show", show, 30, request => ({ id: request.params.id }))
+ .method("v2.wallets.transactions", transactions, 30, request => ({
+ ...{ id: request.params.id },
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ }))
+ .method("v2.wallets.transactionsSent", transactionsSent, 30, request => ({
+ ...{ id: request.params.id },
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ }))
+ .method("v2.wallets.transactionsReceived", transactionsReceived, 30, request => ({
+ ...{ id: request.params.id },
+ ...request.query,
+ ...request.params,
+ ...paginate(request),
+ }))
+ .method("v2.wallets.votes", votes, 30, request => ({
+ ...{ id: request.params.id },
+ ...request.params,
+ ...paginate(request),
+ }))
+ .method("v2.wallets.search", search, 30, request => ({
+ ...request.payload,
+ ...request.query,
+ ...paginate(request),
+ }));
+}
diff --git a/packages/core-api/src/versions/2/wallets/routes.ts b/packages/core-api/src/versions/2/wallets/routes.ts
new file mode 100644
index 0000000000..3f265ca897
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/routes.ts
@@ -0,0 +1,80 @@
+import Hapi from "hapi";
+import { WalletsController } from "./controller";
+import * as Schema from "./schema";
+
+export function registerRoutes(server: Hapi.Server): void {
+ const controller = new WalletsController();
+ server.bind(controller);
+
+ server.route({
+ method: "GET",
+ path: "/wallets",
+ handler: controller.index,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/top",
+ handler: controller.top,
+ options: {
+ validate: Schema.index,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/{id}",
+ handler: controller.show,
+ options: {
+ validate: Schema.show,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/{id}/transactions",
+ handler: controller.transactions,
+ options: {
+ validate: Schema.transactions,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/{id}/transactions/sent",
+ handler: controller.transactionsSent,
+ options: {
+ validate: Schema.transactionsSent,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/{id}/transactions/received",
+ handler: controller.transactionsReceived,
+ options: {
+ validate: Schema.transactionsReceived,
+ },
+ });
+
+ server.route({
+ method: "GET",
+ path: "/wallets/{id}/votes",
+ handler: controller.votes,
+ options: {
+ validate: Schema.votes,
+ },
+ });
+
+ server.route({
+ method: "POST",
+ path: "/wallets/search",
+ handler: controller.search,
+ options: {
+ validate: Schema.search,
+ },
+ });
+}
diff --git a/packages/core-api/src/versions/2/wallets/schema.ts b/packages/core-api/src/versions/2/wallets/schema.ts
new file mode 100644
index 0000000000..7e9bd48747
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/schema.ts
@@ -0,0 +1,215 @@
+import * as Joi from "joi";
+import { pagination } from "../shared/schemas/pagination";
+
+export const index: object = {
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ address: Joi.string()
+ .alphanum()
+ .length(34),
+ publicKey: Joi.string()
+ .hex()
+ .length(66),
+ secondPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ vote: Joi.string()
+ .hex()
+ .length(66),
+ username: Joi.string(),
+ balance: Joi.number().integer(),
+ voteBalance: Joi.number()
+ .integer()
+ .min(0),
+ producedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ missedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ },
+ },
+};
+
+export const show: object = {
+ params: {
+ id: Joi.string(),
+ },
+};
+
+export const transactions: object = {
+ params: {
+ id: Joi.string(),
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: Joi.string()
+ .alphanum()
+ .length(34),
+ recipientId: Joi.string()
+ .alphanum()
+ .length(34),
+ ownerId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const transactionsSent: object = {
+ params: {
+ id: Joi.string(),
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ recipientId: Joi.string()
+ .alphanum()
+ .length(34),
+ ownerId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const transactionsReceived: object = {
+ params: {
+ id: Joi.string(),
+ },
+ query: {
+ ...pagination,
+ ...{
+ orderBy: Joi.string(),
+ id: Joi.string()
+ .hex()
+ .length(64),
+ blockId: Joi.string().regex(/^[0-9]+$/, "numbers"),
+ type: Joi.number()
+ .integer()
+ .min(0),
+ version: Joi.number()
+ .integer()
+ .positive(),
+ senderPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ senderId: Joi.string()
+ .alphanum()
+ .length(34),
+ ownerId: Joi.string()
+ .alphanum()
+ .length(34),
+ timestamp: Joi.number()
+ .integer()
+ .min(0),
+ amount: Joi.number()
+ .integer()
+ .min(0),
+ fee: Joi.number()
+ .integer()
+ .min(0),
+ vendorFieldHex: Joi.string().hex(),
+ },
+ },
+};
+
+export const votes: object = {
+ params: {
+ id: Joi.string(),
+ },
+ query: pagination,
+};
+
+const address: object = Joi.string()
+ .alphanum()
+ .length(34);
+
+export const search: object = {
+ query: pagination,
+ payload: {
+ orderBy: Joi.string(),
+ address,
+ addresses: Joi.array()
+ .unique()
+ .min(1)
+ .max(50)
+ .items(address),
+ publicKey: Joi.string()
+ .hex()
+ .length(66),
+ secondPublicKey: Joi.string()
+ .hex()
+ .length(66),
+ vote: Joi.string()
+ .hex()
+ .length(66),
+ username: Joi.string(),
+ producedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ missedBlocks: Joi.number()
+ .integer()
+ .min(0),
+ balance: Joi.object().keys({
+ from: Joi.number().integer(),
+ to: Joi.number().integer(),
+ }),
+ voteBalance: Joi.object().keys({
+ from: Joi.number()
+ .integer()
+ .min(0),
+ to: Joi.number()
+ .integer()
+ .min(0),
+ }),
+ },
+};
diff --git a/packages/core-api/src/versions/2/wallets/transformer.ts b/packages/core-api/src/versions/2/wallets/transformer.ts
new file mode 100644
index 0000000000..8cafbf9ed6
--- /dev/null
+++ b/packages/core-api/src/versions/2/wallets/transformer.ts
@@ -0,0 +1,13 @@
+import { bignumify } from "@arkecosystem/core-utils";
+
+export function transformWallet(model) {
+ return {
+ address: model.address,
+ publicKey: model.publicKey,
+ username: model.username,
+ secondPublicKey: model.secondPublicKey,
+ balance: +bignumify(model.balance).toFixed(),
+ isDelegate: !!model.username,
+ vote: model.vote,
+ };
+}
diff --git a/packages/core-api/tsconfig.json b/packages/core-api/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-api/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-blockchain/CHANGELOG.md b/packages/core-blockchain/CHANGELOG.md
deleted file mode 100644
index b7adcfa807..0000000000
--- a/packages/core-blockchain/CHANGELOG.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.1 - 2018-12-10
-
-### Fixed
-
-- Reset last downloaded block when block is discarded
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- More graceful handling of shutdown
-- State Storage to handle state machine data
-- Peer banning after forks
-
-### Changed
-
-- Improved the logic of how blocks are being processed
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-### Fixed
-
-- Properly stop blockchain if manually started
-- Various state issues with the last downloaded blocks
-- Various state issues with the wallet manager
-- Properly handle forks while idle
-
-## 0.1.1 - 2018-06-14
-
-### Added
-
-- initial release
diff --git a/packages/core-blockchain/LICENSE b/packages/core-blockchain/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-blockchain/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-blockchain/README.md b/packages/core-blockchain/README.md
index 3254f8dc58..7f9231f70f 100644
--- a/packages/core-blockchain/README.md
+++ b/packages/core-blockchain/README.md
@@ -14,10 +14,11 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [François-Xavier Thoorens](https://github.com/fix)
-- [Kristjan Košič](https://github.com/kristjank)
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [François-Xavier Thoorens](https://github.com/fix)
+- [Joshua Noack](https://github.com/supaiku0)
+- [Kristjan Košič](https://github.com/kristjank)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-blockchain/__tests__/__support__/setup.js b/packages/core-blockchain/__tests__/__support__/setup.js
deleted file mode 100644
index b2ba0ce46e..0000000000
--- a/packages/core-blockchain/__tests__/__support__/setup.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const appHelper = require('@arkecosystem/core-test-utils/lib/helpers/container')
-
-jest.setTimeout(60000)
-
-exports.setUp = async () => {
- await appHelper.setUp({
- exit: '@arkecosystem/core-p2p',
- exclude: ['@arkecosystem/core-blockchain'],
- })
-
- return app
-}
-
-exports.tearDown = async () => {
- await app.tearDown()
-}
diff --git a/packages/core-blockchain/__tests__/__support__/setup.ts b/packages/core-blockchain/__tests__/__support__/setup.ts
new file mode 100644
index 0000000000..110ad1343a
--- /dev/null
+++ b/packages/core-blockchain/__tests__/__support__/setup.ts
@@ -0,0 +1,33 @@
+import { app } from "@arkecosystem/core-container";
+import { registerWithContainer, setUpContainer } from "../../../core-test-utils/src/helpers/container";
+
+jest.setTimeout(60000);
+
+export const setUpFull = async () => {
+ await setUpContainer({
+ exit: "@arkecosystem/core-p2p",
+ exclude: ["@arkecosystem/core-blockchain"],
+ });
+
+ const { plugin } = require("../../src/plugin");
+ await registerWithContainer(plugin, {});
+
+ return app;
+};
+
+export const tearDownFull = async () => {
+ await app.tearDown();
+
+ const { plugin } = require("../../src/plugin");
+ await plugin.deregister(app, {});
+};
+
+export const setUp = async () =>
+ setUpContainer({
+ exit: "@arkecosystem/core-p2p",
+ exclude: ["@arkecosystem/core-blockchain"],
+ });
+
+export const tearDown = async () => {
+ await app.tearDown();
+};
diff --git a/packages/core-blockchain/__tests__/blockchain-networkStart.test.ts b/packages/core-blockchain/__tests__/blockchain-networkStart.test.ts
new file mode 100644
index 0000000000..8036b9e425
--- /dev/null
+++ b/packages/core-blockchain/__tests__/blockchain-networkStart.test.ts
@@ -0,0 +1,77 @@
+/* tslint:disable:max-line-length */
+import "@arkecosystem/core-test-utils";
+import { asValue } from "awilix";
+import { Blockchain } from "../src/blockchain";
+import { defaults } from "../src/defaults";
+import { setUp, tearDown } from "./__support__/setup";
+
+let container;
+let blockchain: Blockchain;
+
+describe("constructor - networkStart", () => {
+ let logger;
+ beforeAll(async () => {
+ container = await setUp();
+
+ logger = container.resolvePlugin("logger");
+ });
+ afterAll(async () => {
+ await tearDown();
+
+ jest.restoreAllMocks();
+ });
+
+ it("should output log messages if launched in networkStart mode", async () => {
+ const loggerWarn = jest.spyOn(logger, "warn");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ await __start(true);
+
+ expect(loggerWarn).toHaveBeenCalledWith(
+ "Ark Core is launched in Genesis Start mode. This is usually for starting the first node on the blockchain. Unless you know what you are doing, this is likely wrong. :warning:",
+ );
+ expect(loggerInfo).toHaveBeenCalledWith("Starting Ark Core for a new world, welcome aboard :rocket:");
+ });
+
+ describe("dispatch", () => {
+ it("should be ok", () => {
+ const nextState = blockchain.dispatch("START");
+
+ expect(blockchain.state.blockchain).toEqual(nextState);
+ });
+
+ it("should log an error if no action is found", () => {
+ const stateMachine = require("../src/state-machine").stateMachine;
+ const loggerError = jest.spyOn(logger, "error");
+
+ jest.spyOn(stateMachine, "transition").mockReturnValueOnce({
+ actions: ["yooo"],
+ });
+
+ blockchain.dispatch("STOP");
+ expect(loggerError).toHaveBeenCalledWith("No action 'yooo' found :interrobang:");
+ });
+ });
+});
+
+async function __start(networkStart) {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+ process.env.CORE_ENV = "false";
+
+ const plugin = require("../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart,
+ ...defaults,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+}
diff --git a/packages/core-blockchain/__tests__/blockchain.test.js b/packages/core-blockchain/__tests__/blockchain.test.js
deleted file mode 100644
index 44140f1ed7..0000000000
--- a/packages/core-blockchain/__tests__/blockchain.test.js
+++ /dev/null
@@ -1,587 +0,0 @@
-/* eslint no-use-before-define: "warn" */
-/* eslint max-len: "off" */
-/* eslint no-await-in-loop: "off" */
-
-const axios = require('axios')
-const MockAdapter = require('axios-mock-adapter')
-
-const axiosMock = new MockAdapter(axios)
-const delay = require('delay')
-
-const { asValue } = require('awilix')
-const { crypto, slots } = require('@arkecosystem/crypto')
-const { Block, Wallet } = require('@arkecosystem/crypto').models
-
-let genesisBlock
-let configManager
-let container
-let blockchain
-let logger
-let loggerDebugBackup
-let peerMock
-
-const blocks1to100 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.2-100')
-const blocks101to155 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.101-155')
-const app = require('./__support__/setup')
-
-beforeAll(async () => {
- container = await app.setUp()
-
- // Backup logger.debug function as we are going to mock it in the test suite
- logger = container.resolvePlugin('logger')
- loggerDebugBackup = logger.debug
-
- // Mock peer responses so that we can have blocks
- __mockPeer()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = new Block(
- require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json'),
- )
-
- configManager = container.resolvePlugin('config')
-
- // Workaround: Add genesis transactions to the exceptions list, because they have a fee of 0
- // and otherwise don't pass validation.
- configManager.network.exceptions.transactions = genesisBlock.transactions.map(
- tx => tx.id,
- )
-
- // Manually register the blockchain and start it
- await __start()
-})
-
-afterAll(async () => {
- axiosMock.reset()
-
- delete configManager.network.exceptions.transactions
-
- await __resetToHeight1()
-
- // Manually stop the blockchain
- await blockchain.stop()
-
- await app.tearDown()
-})
-
-afterEach(async () => {
- // Restore original logger.debug function
- logger.debug = loggerDebugBackup
-
- await __resetBlocksInCurrentRound()
-})
-
-describe('Blockchain', () => {
- it('should be an object', () => {
- expect(blockchain).toBeObject()
- })
-
- describe('dispatch', () => {
- it('should be a function', () => {
- expect(blockchain.dispatch).toBeFunction()
- })
-
- it('should be ok', () => {
- const nextState = blockchain.dispatch('START')
-
- expect(blockchain.state.blockchain).toEqual(nextState)
- })
- })
-
- describe('start', () => {
- it('should be a function', () => {
- expect(blockchain.start).toBeFunction()
- })
-
- it('should be ok', async () => {
- process.env.ARK_SKIP_BLOCKCHAIN = false
-
- const started = await blockchain.start(true)
-
- expect(started).toBeTrue()
- })
- })
-
- describe('checkNetwork', () => {
- it('should be a function', () => {
- expect(blockchain.checkNetwork).toBeFunction()
- })
-
- it('should throw an exception', () => {
- expect(() => blockchain.checkNetwork()).toThrow(
- 'Method [checkNetwork] not implemented!',
- )
- })
- })
-
- describe.skip('updateNetworkStatus', () => {
- it('should be a function', () => {
- expect(blockchain.updateNetworkStatus).toBeFunction()
- })
- })
-
- describe('rebuild', () => {
- it('should be a function', () => {
- expect(blockchain.rebuild).toBeFunction()
- })
-
- it('should throw an exception', () => {
- expect(() => blockchain.rebuild()).toThrow(
- 'Method [rebuild] not implemented!',
- )
- })
- })
-
- describe('resetState', () => {
- it('should be a function', () => {
- expect(blockchain.resetState).toBeFunction()
- })
- })
-
- describe('postTransactions', () => {
- it('should be a function', () => {
- expect(blockchain.postTransactions).toBeFunction()
- })
-
- it('should be ok', async () => {
- const transactionsWithoutType2 = genesisBlock.transactions.filter(
- tx => tx.type !== 2,
- )
-
- blockchain.transactionPool.flush()
- await blockchain.postTransactions(transactionsWithoutType2, false)
- const transactions = blockchain.transactionPool.getTransactions(0, 200)
-
- expect(transactions.length).toBe(transactionsWithoutType2.length)
-
- expect(transactions).toEqual(
- transactionsWithoutType2.map(transaction => transaction.serialized),
- )
-
- blockchain.transactionPool.flush()
- })
- })
-
- describe('queueBlock', () => {
- it('should be a function', () => {
- expect(blockchain.queueBlock).toBeFunction()
- })
-
- it('should be ok', async () => {
- const block = new Block(blocks101to155[54])
-
- await blockchain.queueBlock(blocks101to155[54])
-
- expect(blockchain.state.lastDownloadedBlock).toEqual(block)
- })
- })
-
- describe('rollbackCurrentRound', () => {
- it('should be a function', () => {
- expect(blockchain.rollbackCurrentRound).toBeFunction()
- })
-
- it('should rollback', async () => {
- await blockchain.rollbackCurrentRound()
- expect(blockchain.getLastBlock().data.height).toBe(153)
- })
- })
-
- describe('removeBlocks', () => {
- it('should be a function', () => {
- expect(blockchain.removeBlocks).toBeFunction()
- })
-
- it('should remove blocks', async () => {
- const lastBlockHeight = blockchain.getLastBlock().data.height
-
- await blockchain.removeBlocks(2)
- expect(blockchain.getLastBlock().data.height).toBe(lastBlockHeight - 2)
- })
- })
-
- describe('rebuildBlock', () => {
- it('should be a function', () => {
- expect(blockchain.rebuildBlock).toBeFunction()
- })
-
- it('should rebuild with a known block', async () => {
- const mockCallback = jest.fn(() => true)
- const lastBlock = blockchain.getLastBlock()
-
- await blockchain.rebuildBlock(lastBlock, mockCallback)
- await delay(2000) // wait a bit to give enough time for the callback to be called
-
- expect(mockCallback.mock.calls.length).toBe(1)
- })
-
- it('should rebuild with a new chained block', async () => {
- const mockCallback = jest.fn(() => true)
- const lastBlock = blockchain.getLastBlock()
-
- await blockchain.removeBlocks(1) // remove 1 block so that we can add it then as a chained block
-
- expect(blockchain.getLastBlock()).not.toEqual(lastBlock)
-
- await blockchain.rebuildBlock(lastBlock, mockCallback)
- await delay(2000) // wait a bit to give enough time for the callback to be called
-
- expect(mockCallback.mock.calls.length).toBe(1)
- expect(blockchain.getLastBlock()).toEqual(lastBlock)
- })
- })
-
- describe('processBlock', () => {
- it('should be a function', () => {
- expect(blockchain.processBlock).toBeFunction()
- })
-
- it('should process a new chained block', async () => {
- const mockCallback = jest.fn(() => true)
- const lastBlock = blockchain.getLastBlock()
-
- await blockchain.removeBlocks(1) // remove 1 block so that we can add it then as a chained block
-
- expect(blockchain.getLastBlock()).not.toEqual(lastBlock)
-
- await blockchain.processBlock(lastBlock, mockCallback)
- await delay(2000) // wait a bit to give enough time for the callback to be called
-
- expect(mockCallback.mock.calls.length).toBe(1)
- expect(blockchain.getLastBlock()).toEqual(lastBlock)
- })
-
- it('should process a valid block already known', async () => {
- const mockCallback = jest.fn(() => true)
- const lastBlock = blockchain.getLastBlock()
-
- await blockchain.processBlock(lastBlock, mockCallback)
- await delay(2000) // wait a bit to give enough time for the callback to be called
-
- expect(mockCallback.mock.calls.length).toBe(1)
- expect(blockchain.getLastBlock()).toEqual(lastBlock)
- })
- })
-
- describe('acceptChainedBlock', () => {
- it('should be a function', () => {
- expect(blockchain.acceptChainedBlock).toBeFunction()
- })
-
- it('should process a new chained block', async () => {
- const lastBlock = blockchain.getLastBlock()
-
- await blockchain.removeBlocks(1) // remove 1 block so that we can add it then as a chained block
-
- expect(await blockchain.database.getLastBlock()).not.toEqual(lastBlock)
-
- await blockchain.acceptChainedBlock(lastBlock)
-
- expect(await blockchain.database.getLastBlock()).toEqual(lastBlock)
-
- // manually set lastBlock because acceptChainedBlock doesn't do it
- blockchain.state.setLastBlock(lastBlock)
- })
- })
-
- describe('manageUnchainedBlock', () => {
- it('should be a function', () => {
- expect(blockchain.manageUnchainedBlock).toBeFunction()
- })
-
- it('should process a new unchained block', async () => {
- const mockLoggerDebug = jest.fn(message => true)
- logger.debug = mockLoggerDebug
-
- const lastBlock = blockchain.getLastBlock()
- await blockchain.removeBlocks(2) // remove 2 blocks so that we can have _lastBlock_ as an unchained block
- await blockchain.manageUnchainedBlock(lastBlock)
-
- expect(mockLoggerDebug).toHaveBeenCalled()
-
- const debugMessage = `Blockchain not ready to accept new block at height ${lastBlock.data.height.toLocaleString()}. Last block: ${(
- lastBlock.data.height - 2
- ).toLocaleString()} :warning:`
- expect(mockLoggerDebug).toHaveBeenLastCalledWith(debugMessage)
-
- expect(blockchain.getLastBlock().data.height).toBe(
- lastBlock.data.height - 2,
- )
- })
- })
-
- describe('getUnconfirmedTransactions', () => {
- it('should be a function', () => {
- expect(blockchain.getUnconfirmedTransactions).toBeFunction()
- })
-
- it('should get unconfirmed transactions', async () => {
- const transactionsWithoutType2 = genesisBlock.transactions.filter(
- tx => tx.type !== 2,
- )
-
- blockchain.transactionPool.flush()
- await blockchain.postTransactions(transactionsWithoutType2, false)
- const unconfirmedTransactions = blockchain.getUnconfirmedTransactions(200)
-
- expect(unconfirmedTransactions.transactions.length).toBe(
- transactionsWithoutType2.length,
- )
-
- expect(unconfirmedTransactions.transactions).toEqual(
- transactionsWithoutType2.map(transaction => transaction.serialized),
- )
-
- blockchain.transactionPool.flush()
- })
- })
-
- describe('getLastBlock', () => {
- it('should be a function', () => {
- expect(blockchain.getLastBlock).toBeFunction()
- })
-
- it('should be ok', () => {
- blockchain.state.setLastBlock(genesisBlock)
-
- expect(blockchain.getLastBlock()).toEqual(genesisBlock)
- })
- })
-
- describe('isSynced', () => {
- it('should be a function', () => {
- expect(blockchain.isSynced).toBeFunction()
- })
-
- describe('with a block param', () => {
- it('should be ok', () => {
- expect(
- blockchain.isSynced({
- data: {
- timestamp: slots.getTime(),
- height: genesisBlock.height,
- },
- }),
- ).toBeTrue()
- })
- })
-
- describe('without a block param', () => {
- it('should use the last block', () => {
- blockchain.getLastBlock = jest.fn().mockReturnValueOnce({
- data: {
- timestamp: slots.getTime(),
- height: genesisBlock.height,
- },
- })
- expect(blockchain.isSynced()).toBeTrue()
- expect(blockchain.getLastBlock).toHaveBeenCalled()
- })
- })
- })
-
- describe('isRebuildSynced', () => {
- it('should be a function', () => {
- expect(blockchain.isRebuildSynced).toBeFunction()
- })
-
- describe('with a block param', () => {
- it('should be ok', () => {
- expect(
- blockchain.isRebuildSynced({
- data: {
- timestamp: slots.getTime() - 3600 * 24 * 6,
- height: blocks101to155[52].height,
- },
- }),
- ).toBeTrue()
- })
- })
-
- describe('without a block param', () => {
- it('should use the last block', () => {
- blockchain.getLastBlock = jest.fn().mockReturnValueOnce({
- data: {
- timestamp: slots.getTime(),
- height: genesisBlock.height,
- },
- })
- expect(blockchain.isRebuildSynced()).toBeTrue()
- expect(blockchain.getLastBlock).toHaveBeenCalled()
- })
- })
- })
-
- describe('__isChained', () => {
- it('should be a function', () => {
- expect(blockchain.__isChained).toBeFunction()
- })
-
- it('should be ok', () => {
- const previousBlock = {
- data: {
- id: 1,
- timestamp: 1,
- height: 1,
- },
- }
-
- const nextBlock = {
- data: {
- id: 2,
- timestamp: 2,
- height: 2,
- previousBlock: 1,
- },
- }
-
- expect(blockchain.__isChained(previousBlock, nextBlock)).toBeTrue()
- })
-
- it('should not be ok', () => {
- const previousBlock = {
- data: {
- id: 2,
- timestamp: 2,
- height: 2,
- },
- }
-
- const nextBlock = {
- data: {
- id: 1,
- timestamp: 1,
- height: 1,
- previousBlock: 1,
- },
- }
-
- expect(blockchain.__isChained(previousBlock, nextBlock)).toBeFalse()
- })
- })
-
- describe('__registerQueue', () => {
- it('should be a function', () => {
- expect(blockchain.__registerQueue).toBeFunction()
- })
-
- it('should be ok', () => {
- blockchain.__registerQueue()
-
- expect(blockchain).toHaveProperty('queue')
- expect(blockchain).toHaveProperty('processQueue')
- expect(blockchain).toHaveProperty('rebuildQueue')
- })
- })
-})
-
-async function __start() {
- process.env.ARK_SKIP_BLOCKCHAIN = false
- process.env.ARK_ENV = false
-
- const plugin = require('../lib').plugin
-
- blockchain = await plugin.register(container, {
- networkStart: false,
- })
-
- await container.register(
- 'blockchain',
- asValue({
- name: 'blockchain',
- version: '0.1.0',
- plugin: blockchain,
- options: {},
- }),
- )
-
- const p2p = container.resolvePlugin('p2p')
- await p2p.acceptNewPeer(peerMock)
-
- await __resetToHeight1()
-
- await blockchain.start(true)
- while (
- !blockchain.getLastBlock() ||
- blockchain.getLastBlock().data.height < 155
- ) {
- await delay(1000)
- }
-}
-
-async function __resetBlocksInCurrentRound() {
- blockchain.database.blocksInCurrentRound = await blockchain.database.__getBlocksForRound()
-}
-
-async function __resetToHeight1() {
- const lastBlock = await blockchain.database.getLastBlock()
- if (lastBlock) {
- // Make sure the wallet manager has been fed or else revertRound
- // cannot determine the previous delegates. This is only necessary, because
- // the database is not dropped after the unit tests are done.
- await blockchain.database.buildWallets(lastBlock.data.height)
-
- // Index the genesis wallet or else revert block at height 1 fails
- const generator = crypto.getAddress(genesisBlock.data.generatorPublicKey)
- const genesis = new Wallet(generator)
- genesis.publicKey = genesisBlock.data.generatorPublicKey
- genesis.username = 'genesis'
- blockchain.database.walletManager.reindex(genesis)
-
- blockchain.state.clear()
-
- blockchain.state.setLastBlock(lastBlock)
- await __resetBlocksInCurrentRound(lastBlock)
- await blockchain.removeBlocks(lastBlock.data.height - 1)
- }
-}
-
-function __mockPeer() {
- // Mocking a peer which will send blocks until height 155
- const Peer = require('@arkecosystem/core-p2p/lib/peer')
- peerMock = new Peer('0.0.0.99', 4002)
- Object.assign(peerMock, peerMock.headers, { status: 200 })
-
- axiosMock
- .onGet(/.*\/peer\/blocks\/common.*/)
- .reply(() => [
- 200,
- { status: 200, success: true, common: true },
- peerMock.headers,
- ])
- axiosMock.onGet(/.*\/peer\/blocks/).reply(config => {
- let blocks = []
-
- if (config.params.lastBlockHeight === 1) {
- blocks = blocks1to100
- } else if (config.params.lastBlockHeight === 100) {
- blocks = blocks101to155
- }
-
- return [200, { status: 200, success: true, blocks }, peerMock.headers]
- })
- axiosMock
- .onGet(/.*\/peer\/status/)
- .reply(() => [
- 200,
- { status: 200, success: true, height: 155 },
- peerMock.headers,
- ])
- axiosMock.onGet(/.*\/peer\/list/).reply(() => [
- 200,
- {
- success: true,
- peers: [
- {
- status: 200,
- ip: peerMock.ip,
- port: 4002,
- height: 155,
- delay: 8,
- },
- ],
- },
- peerMock.headers,
- ])
-}
diff --git a/packages/core-blockchain/__tests__/blockchain.test.ts b/packages/core-blockchain/__tests__/blockchain.test.ts
new file mode 100644
index 0000000000..2e68b5481c
--- /dev/null
+++ b/packages/core-blockchain/__tests__/blockchain.test.ts
@@ -0,0 +1,685 @@
+/* tslint:disable:max-line-length */
+import "@arkecosystem/core-test-utils";
+import { blocks101to155 } from "@arkecosystem/core-test-utils/src/fixtures/testnet/blocks101to155";
+import { blocks2to100 } from "@arkecosystem/core-test-utils/src/fixtures/testnet/blocks2to100";
+import { crypto, models, slots } from "@arkecosystem/crypto";
+import { asValue } from "awilix";
+import delay from "delay";
+import { Blockchain } from "../src/blockchain";
+import { defaults } from "../src/defaults";
+import { setUp, tearDown } from "./__support__/setup";
+
+const { Block, Wallet } = models;
+
+let genesisBlock;
+let configManager;
+let container;
+let blockchain: Blockchain;
+let loggerDebugBackup;
+
+describe("Blockchain", () => {
+ let logger;
+ beforeAll(async () => {
+ container = await setUp();
+
+ // Backup logger.debug function as we are going to mock it in the test suite
+ logger = container.resolvePlugin("logger");
+ loggerDebugBackup = logger.debug;
+
+ // Create the genesis block after the setup has finished or else it uses a potentially
+ // wrong network config.
+ genesisBlock = new Block(require("@arkecosystem/core-test-utils/src/config/testnet/genesisBlock.json"));
+
+ configManager = container.getConfig();
+
+ // Workaround: Add genesis transactions to the exceptions list, because they have a fee of 0
+ // and otherwise don't pass validation.
+ configManager.set("exceptions.transactions", genesisBlock.transactions.map(tx => tx.id));
+
+ // Manually register the blockchain and start it
+ await __start(false);
+ });
+
+ afterAll(async () => {
+ configManager.set("exceptions.transactions", []);
+
+ await __resetToHeight1();
+
+ // Manually stop the blockchain
+ await blockchain.stop();
+
+ await tearDown();
+ });
+
+ afterEach(async () => {
+ // Restore original logger.debug function
+ logger.debug = loggerDebugBackup;
+
+ await __resetToHeight1();
+ await __addBlocks(5);
+ await __resetBlocksInCurrentRound();
+ });
+
+ describe("dispatch", () => {
+ it("should be ok", () => {
+ const nextState = blockchain.dispatch("START");
+
+ expect(blockchain.state.blockchain).toEqual(nextState);
+ });
+ });
+
+ describe("start", () => {
+ it("should be ok", async () => {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+
+ const started = await blockchain.start(true);
+
+ expect(started).toBeTrue();
+ });
+ });
+
+ describe("checkNetwork", () => {
+ it("should throw an exception", () => {
+ expect(() => blockchain.checkNetwork()).toThrow("Method [checkNetwork] not implemented!");
+ });
+ });
+
+ describe("updateNetworkStatus", () => {
+ it("should call p2p updateNetworkStatus", async () => {
+ const p2pUpdateNetworkStatus = jest.spyOn(blockchain.p2p, "updateNetworkStatus");
+
+ await blockchain.updateNetworkStatus();
+
+ expect(p2pUpdateNetworkStatus).toHaveBeenCalled();
+ });
+ });
+
+ describe("rebuild", () => {
+ it("should throw an exception", () => {
+ expect(() => blockchain.rebuild()).toThrow("Method [rebuild] not implemented!");
+ });
+ });
+
+ describe("postTransactions", () => {
+ it("should be ok", async () => {
+ const transactionsWithoutType2 = genesisBlock.transactions.filter(tx => tx.type !== 2);
+
+ blockchain.transactionPool.flush();
+ await blockchain.postTransactions(transactionsWithoutType2);
+ const transactions = blockchain.transactionPool.getTransactions(0, 200);
+
+ expect(transactions.length).toBe(transactionsWithoutType2.length);
+
+ expect(transactions).toEqual(transactionsWithoutType2.map(transaction => transaction.serialized));
+
+ blockchain.transactionPool.flush();
+ });
+ });
+
+ describe("enQueueBlocks", () => {
+ it("should just return if blocks provided are an empty array", async () => {
+ const processQueuePush = jest.spyOn(blockchain.processQueue, "push");
+
+ blockchain.enqueueBlocks([]);
+ expect(processQueuePush).not.toHaveBeenCalled();
+ });
+
+ it("should enqueue the blocks provided", async () => {
+ const processQueuePush = jest.spyOn(blockchain.processQueue, "push");
+
+ const blocksToEnqueue = [blocks101to155[54]];
+ blockchain.enqueueBlocks(blocksToEnqueue);
+ expect(processQueuePush).toHaveBeenCalledWith(blocksToEnqueue);
+ });
+ });
+
+ describe("rollbackCurrentRound", () => {
+ it("should rollback", async () => {
+ await __addBlocks(155);
+ await blockchain.rollbackCurrentRound();
+ expect(blockchain.getLastBlock().data.height).toBe(153);
+ });
+
+ it("shouldnt rollback more if previous round is round 2", async () => {
+ await __addBlocks(140);
+ await blockchain.rollbackCurrentRound();
+ expect(blockchain.getLastBlock().data.height).toBe(102);
+
+ await blockchain.rollbackCurrentRound();
+ expect(blockchain.getLastBlock().data.height).toBe(102);
+ });
+ });
+
+ describe("removeBlocks", () => {
+ it("should remove blocks", async () => {
+ const lastBlockHeight = blockchain.getLastBlock().data.height;
+
+ await blockchain.removeBlocks(2);
+ expect(blockchain.getLastBlock().data.height).toBe(lastBlockHeight - 2);
+ });
+
+ it("should remove (current height - 1) blocks if we provide a greater value", async () => {
+ await __resetToHeight1();
+
+ await blockchain.removeBlocks(9999);
+ expect(blockchain.getLastBlock().data.height).toBe(1);
+ });
+ });
+
+ describe("removeTopBlocks", () => {
+ it("should remove top blocks", async () => {
+ const dbLastBlockBefore = await blockchain.database.getLastBlock();
+ const lastBlockHeight = dbLastBlockBefore.data.height;
+
+ await blockchain.removeTopBlocks(2);
+ const dbLastBlockAfter = await blockchain.database.getLastBlock();
+
+ expect(dbLastBlockAfter.data.height).toBe(lastBlockHeight - 2);
+ });
+ });
+
+ describe("rebuildBlock", () => {
+ it("should rebuild with a known block", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+
+ await blockchain.rebuildBlock(lastBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ });
+
+ it("should rebuild with a new chained block", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+
+ await blockchain.removeBlocks(1); // remove 1 block so that we can add it then as a chained block
+
+ expect(blockchain.getLastBlock()).not.toEqual(lastBlock);
+
+ await blockchain.rebuildBlock(lastBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(blockchain.getLastBlock()).toEqual(lastBlock);
+ });
+
+ it("should disregard block with height == last height but different id", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+ const lastBlockCopy = new Block(lastBlock.data);
+ lastBlockCopy.data.id = "123456";
+
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ await blockchain.rebuildBlock(lastBlockCopy, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(loggerInfo).toHaveBeenCalledWith(
+ `Block ${lastBlockCopy.data.height.toLocaleString()} disregarded because on a fork :knife_fork_plate:`,
+ );
+ expect(blockchain.getLastBlock().data.id).toBe(lastBlock.data.id);
+ });
+
+ it("should disregard block with height > last height + 1", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+ const lastBlockCopy = new Block(lastBlock.data);
+ lastBlockCopy.data.height += 2;
+
+ await blockchain.rebuildBlock(lastBlockCopy, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(blockchain.getLastBlock().data.id).toBe(lastBlock.data.id);
+ expect(blockchain.state.lastDownloadedBlock).toBe(lastBlock);
+ });
+
+ it("should disregard block not verified", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+ const lastBlockCopy = new Block(lastBlock.data);
+ lastBlockCopy.verification.verified = false;
+
+ const loggerWarn = jest.spyOn(logger, "warn");
+
+ await blockchain.rebuildBlock(lastBlockCopy, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(loggerWarn).toHaveBeenCalledWith(
+ `Block ${lastBlockCopy.data.height.toLocaleString()} disregarded because verification failed :scroll:`,
+ );
+ expect(blockchain.getLastBlock().data.id).toBe(lastBlock.data.id);
+ });
+
+ it("should commitQueuedQueries if block height % 20 000 == 0", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+ const lastBlockHeight = lastBlock.data.height;
+ const nextBlock = new Block(blocks2to100[lastBlock.data.height - 1]);
+ lastBlock.data.height = 19999;
+ nextBlock.data.height = 20000;
+
+ const commitQueuedQueries = jest
+ .spyOn(blockchain.database, "commitQueuedQueries")
+ .mockReturnValueOnce(true);
+ jest.spyOn(blockchain.database, "enqueueSaveBlock").mockReturnValueOnce(true);
+
+ await blockchain.rebuildBlock(nextBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(commitQueuedQueries).toHaveBeenCalled();
+ expect(blockchain.getLastBlock().data.id).toBe(nextBlock.data.id);
+
+ // reset to "stable" state
+ lastBlock.data.height = lastBlockHeight;
+ blockchain.state.setLastBlock(lastBlock);
+ });
+ });
+
+ describe("processBlock", () => {
+ it("should process a new chained block", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.state.getLastBlock();
+
+ await blockchain.removeBlocks(1); // remove 1 block so that we can add it then as a chained block
+
+ expect(blockchain.getLastBlock()).not.toEqual(lastBlock);
+
+ await blockchain.processBlock(lastBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(blockchain.getLastBlock()).toEqual(lastBlock);
+ });
+
+ it("should process a valid block already known", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+
+ await blockchain.processBlock(lastBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(blockchain.getLastBlock()).toEqual(lastBlock);
+ });
+
+ it("should broadcast a block if (slots.getSlotNumber() * blocktime <= block.data.timestamp)", async () => {
+ const mockCallback = jest.fn(() => true);
+ const lastBlock = blockchain.getLastBlock();
+ lastBlock.data.timestamp =
+ slots.getSlotNumber() * configManager.getMilestone(lastBlock.data.height).blocktime;
+
+ const broadcastBlock = jest.spyOn(blockchain.p2p, "broadcastBlock");
+
+ await blockchain.processBlock(lastBlock, mockCallback);
+ await delay(200);
+
+ expect(mockCallback.mock.calls.length).toBe(1);
+ expect(broadcastBlock).toHaveBeenCalled();
+ });
+ });
+
+ describe("acceptChainedBlock", () => {
+ it.skip("should process a new chained block", async () => {
+ const lastBlock = blockchain.getLastBlock();
+
+ await blockchain.removeBlocks(1); // remove 1 block so that we can add it then as a chained block
+
+ expect(await blockchain.database.getLastBlock()).not.toEqual(lastBlock);
+
+ // await blockchain.acceptChainedBlock(lastBlock);
+
+ expect(await blockchain.database.getLastBlock()).toEqual(lastBlock);
+
+ // manually set lastBlock because acceptChainedBlock doesn't do it
+ blockchain.state.setLastBlock(lastBlock);
+ });
+ });
+
+ describe("manageUnchainedBlock", () => {
+ it.skip("should process a new unchained block", async () => {
+ const mockLoggerDebug = jest.fn(message => true);
+ logger.debug = mockLoggerDebug;
+
+ const lastBlock = blockchain.getLastBlock();
+ await blockchain.removeBlocks(2); // remove 2 blocks so that we can have _lastBlock_ as an unchained block
+ // await blockchain.manageUnchainedBlock(lastBlock);
+
+ expect(mockLoggerDebug).toHaveBeenCalled();
+
+ const debugMessage = `Blockchain not ready to accept new block at height ${lastBlock.data.height.toLocaleString()}. Last block: ${(
+ lastBlock.data.height - 2
+ ).toLocaleString()} :warning:`;
+ expect(mockLoggerDebug).toHaveBeenCalledWith(debugMessage);
+
+ expect(blockchain.getLastBlock().data.height).toBe(lastBlock.data.height - 2);
+ });
+ });
+
+ describe("getUnconfirmedTransactions", () => {
+ it("should get unconfirmed transactions", async () => {
+ const transactionsWithoutType2 = genesisBlock.transactions.filter(tx => tx.type !== 2);
+
+ blockchain.transactionPool.flush();
+ await blockchain.postTransactions(transactionsWithoutType2);
+ const unconfirmedTransactions = blockchain.getUnconfirmedTransactions(200);
+
+ expect(unconfirmedTransactions.transactions.length).toBe(transactionsWithoutType2.length);
+
+ expect(unconfirmedTransactions.transactions).toEqual(
+ transactionsWithoutType2.map(transaction => transaction.serialized),
+ );
+
+ blockchain.transactionPool.flush();
+ });
+
+ it("should return object with count == -1 if getTransactionsForForging returned a falsy value", async () => {
+ jest.spyOn(blockchain.transactionPool, "getTransactionsForForging").mockReturnValueOnce(null);
+
+ const unconfirmedTransactions = blockchain.getUnconfirmedTransactions(200);
+ expect(unconfirmedTransactions.count).toBe(-1);
+ });
+ });
+
+ describe("getLastBlock", () => {
+ it("should be ok", () => {
+ blockchain.state.setLastBlock(genesisBlock);
+
+ expect(blockchain.getLastBlock()).toEqual(genesisBlock);
+ });
+ });
+
+ describe("handleIncomingBlock", () => {
+ it("should be ok", () => {
+ const dispatch = blockchain.dispatch;
+ const enqueueBlocks = blockchain.enqueueBlocks;
+ blockchain.dispatch = jest.fn(() => true);
+ blockchain.enqueueBlocks = jest.fn(() => true);
+
+ const block = {
+ height: 100,
+ timestamp: slots.getEpochTime(),
+ };
+
+ blockchain.handleIncomingBlock(block);
+
+ expect(blockchain.dispatch).toHaveBeenCalled();
+ expect(blockchain.enqueueBlocks).toHaveBeenCalled();
+
+ blockchain.dispatch = dispatch;
+ blockchain.enqueueBlocks = enqueueBlocks;
+ });
+
+ it("should not handle block from future slot", () => {
+ const dispatch = blockchain.dispatch;
+ const enqueueBlocks = blockchain.enqueueBlocks;
+ blockchain.dispatch = jest.fn(() => true);
+ blockchain.enqueueBlocks = jest.fn(() => true);
+
+ const block = {
+ height: 100,
+ timestamp: slots.getSlotTime(slots.getNextSlot()),
+ };
+
+ blockchain.handleIncomingBlock(block);
+
+ expect(blockchain.dispatch).not.toHaveBeenCalled();
+ expect(blockchain.enqueueBlocks).not.toHaveBeenCalled();
+
+ blockchain.dispatch = dispatch;
+ blockchain.enqueueBlocks = enqueueBlocks;
+ });
+
+ it("should disregard block when blockchain is not ready", async () => {
+ blockchain.state.started = false;
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ const mockGetSlotNumber = jest
+ .spyOn(slots, "getSlotNumber")
+ .mockReturnValueOnce(1)
+ .mockReturnValueOnce(1);
+
+ await blockchain.handleIncomingBlock(blocks101to155[54]);
+
+ expect(loggerInfo).toHaveBeenCalledWith("Block disregarded because blockchain is not ready :exclamation:");
+ blockchain.state.started = true;
+
+ mockGetSlotNumber.mockRestore();
+ });
+ });
+
+ describe("forceWakeup", () => {
+ it("should dispatch WAKEUP", () => {
+ expect(() => blockchain.forceWakeup()).toDispatch(blockchain, "WAKEUP");
+ });
+ });
+
+ describe("forkBlock", () => {
+ it("should dispatch FORK and set state.forkedBlock", () => {
+ const forkedBlock = new Block(blocks2to100[11]);
+ expect(() => blockchain.forkBlock(forkedBlock)).toDispatch(blockchain, "FORK");
+ expect(blockchain.state.forkedBlock).toBe(forkedBlock);
+
+ blockchain.state.forkedBlock = null; // reset
+ });
+ });
+
+ describe("isSynced", () => {
+ describe("with a block param", () => {
+ it("should be ok", () => {
+ expect(
+ blockchain.isSynced({
+ data: {
+ timestamp: slots.getTime(),
+ height: genesisBlock.height,
+ },
+ } as models.IBlock),
+ ).toBeTrue();
+ });
+ });
+
+ describe("without a block param", () => {
+ it("should use the last block", () => {
+ jest.spyOn(blockchain.p2p, "hasPeers").mockReturnValueOnce(true);
+ const getLastBlock = jest.spyOn(blockchain, "getLastBlock").mockReturnValueOnce({
+ data: {
+ timestamp: slots.getTime(),
+ height: genesisBlock.height,
+ },
+ });
+ expect(blockchain.isSynced()).toBeTrue();
+ expect(getLastBlock).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe("isRebuildSynced", () => {
+ describe("with a block param", () => {
+ it("should be ok", () => {
+ jest.spyOn(blockchain.p2p, "hasPeers").mockReturnValueOnce(true);
+ expect(
+ blockchain.isRebuildSynced({
+ data: {
+ timestamp: slots.getTime() - 3600 * 24 * 6,
+ height: blocks101to155[52].height,
+ },
+ } as models.IBlock),
+ ).toBeTrue();
+ });
+ });
+
+ describe("without a block param", () => {
+ it("should use the last block", () => {
+ jest.spyOn(blockchain.p2p, "hasPeers").mockReturnValueOnce(true);
+ const getLastBlock = jest.spyOn(blockchain, "getLastBlock").mockReturnValueOnce({
+ data: {
+ timestamp: slots.getTime(),
+ height: genesisBlock.height,
+ },
+ });
+ expect(blockchain.isRebuildSynced()).toBeTrue();
+ expect(getLastBlock).toHaveBeenCalled();
+ });
+ });
+
+ it("should return true when there is no peer", () => {
+ jest.spyOn(blockchain.p2p, "hasPeers").mockReturnValueOnce(false);
+
+ expect(blockchain.isRebuildSynced()).toBeTrue();
+ });
+ });
+
+ describe("getBlockPing", () => {
+ it("should return state.blockPing", () => {
+ const blockPing = {
+ count: 1,
+ first: new Date().getTime(),
+ last: new Date().getTime(),
+ block: {},
+ };
+ blockchain.state.blockPing = blockPing;
+
+ expect(blockchain.getBlockPing()).toBe(blockPing);
+ });
+ });
+
+ describe("pingBlock", () => {
+ it("should call state.pingBlock", () => {
+ blockchain.state.blockPing = null;
+
+ // returns false if no state.blockPing
+ expect(blockchain.pingBlock(blocks2to100[3])).toBeFalse();
+ });
+ });
+
+ describe("pushPingBlock", () => {
+ it("should call state.pushPingBlock", () => {
+ blockchain.state.blockPing = null;
+
+ blockchain.pushPingBlock(blocks2to100[3]);
+ expect(blockchain.state.blockPing).toBeObject();
+ expect(blockchain.state.blockPing.block).toBe(blocks2to100[3]);
+ });
+ });
+
+ describe("getEvents", () => {
+ it("should return the events", () => {
+ expect(blockchain.getEvents()).toEqual([
+ "block.applied",
+ "block.forged",
+ "block.reverted",
+ "delegate.registered",
+ "delegate.resigned",
+ "forger.failed",
+ "forger.missing",
+ "forger.started",
+ "peer.added",
+ "peer.removed",
+ "round.created",
+ "state:started",
+ "transaction.applied",
+ "transaction.expired",
+ "transaction.forged",
+ "transaction.reverted",
+ "wallet.saved",
+ "wallet.created.cold",
+ ]);
+ });
+ });
+
+ describe("__registerQueue", () => {
+ it("should be ok", () => {
+ blockchain.__registerQueue();
+
+ expect(blockchain).toHaveProperty("queue");
+ expect(blockchain).toHaveProperty("processQueue");
+ expect(blockchain).toHaveProperty("rebuildQueue");
+ });
+ });
+
+ describe("stop on emit shutdown", () => {
+ it("should trigger the stop method when receiving 'shutdown' event", async () => {
+ const emitter = container.resolvePlugin("event-emitter");
+
+ const stop = jest.spyOn(blockchain, "stop").mockReturnValue(true);
+
+ emitter.emit("shutdown");
+
+ await delay(200);
+
+ expect(stop).toHaveBeenCalled();
+ });
+ });
+});
+
+async function __start(networkStart) {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+ process.env.CORE_ENV = "false";
+
+ const plugin = require("../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart,
+ ...defaults,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+
+ if (networkStart) {
+ return;
+ }
+
+ await __resetToHeight1();
+
+ await blockchain.start();
+ await __addBlocks(5);
+}
+
+async function __resetBlocksInCurrentRound() {
+ await blockchain.database.loadBlocksFromCurrentRound();
+}
+
+async function __resetToHeight1() {
+ const lastBlock = await blockchain.database.getLastBlock();
+ if (lastBlock) {
+ // Make sure the wallet manager has been fed or else revertRound
+ // cannot determine the previous delegates. This is only necessary, because
+ // the database is not dropped after the unit tests are done.
+ await blockchain.database.buildWallets(lastBlock.data.height);
+
+ // Index the genesis wallet or else revert block at height 1 fails
+ const generator = crypto.getAddress(genesisBlock.data.generatorPublicKey);
+ const genesis = new Wallet(generator);
+ genesis.publicKey = genesisBlock.data.generatorPublicKey;
+ genesis.username = "genesis";
+ blockchain.database.walletManager.reindex(genesis);
+
+ blockchain.state.clear();
+
+ blockchain.state.setLastBlock(lastBlock);
+ await __resetBlocksInCurrentRound();
+ await blockchain.removeBlocks(lastBlock.data.height - 1);
+ }
+}
+
+async function __addBlocks(untilHeight) {
+ const allBlocks = [...blocks2to100, ...blocks101to155];
+ const lastHeight = blockchain.getLastHeight();
+
+ for (let height = lastHeight + 1; height < untilHeight && height < 155; height++) {
+ const blockToProcess = new Block(allBlocks[height - 2]);
+ await blockchain.processBlock(blockToProcess, () => null);
+ }
+}
diff --git a/packages/core-blockchain/__tests__/machines/actions/fork.test.js b/packages/core-blockchain/__tests__/machines/actions/fork.test.js
deleted file mode 100644
index 324ec1e2ff..0000000000
--- a/packages/core-blockchain/__tests__/machines/actions/fork.test.js
+++ /dev/null
@@ -1,52 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers') // eslint-disable-line no-unused-vars
-
-const machine = require('../../../lib/machines/blockchain')
-
-describe('Blockchain machine > Fork', () => {
- it('should start with the `analysing` state', () => {
- expect(machine.states.fork).toHaveProperty('initial', 'analysing')
- })
-
- describe('state `analysing`', () => {
- it('should execute the `analyseFork` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'fork.analysing',
- actions: ['analyseFork'],
- })
- })
-
- it('should transition to `revertBlocks` on `REBUILD`', () => {
- expect(machine).toTransition({
- from: 'fork.analysing',
- on: 'REBUILD',
- to: 'fork.revertBlocks',
- })
- })
-
- it('should transition to `exit` on `NOFORK`', () => {
- expect(machine).toTransition({
- from: 'fork.analysing',
- on: 'NOFORK',
- to: 'fork.exit',
- })
- })
- })
-
- describe('state `network`', () => {
- it('should execute the `checkNetwork` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'fork.network',
- actions: ['checkNetwork'],
- })
- })
- })
-
- describe('state `exit`', () => {
- it('should execute the `forkRecovered` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'fork.exit',
- actions: ['forkRecovered'],
- })
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/machines/actions/fork.test.ts b/packages/core-blockchain/__tests__/machines/actions/fork.test.ts
new file mode 100644
index 0000000000..f9566de037
--- /dev/null
+++ b/packages/core-blockchain/__tests__/machines/actions/fork.test.ts
@@ -0,0 +1,52 @@
+import "@arkecosystem/core-test-utils/";
+
+import { blockchainMachine } from "../../../src/machines/blockchain";
+
+describe("Blockchain machine > Fork", () => {
+ it("should start with the `analysing` state", () => {
+ expect(blockchainMachine.states.fork).toHaveProperty("initial", "analysing");
+ });
+
+ describe("state `analysing`", () => {
+ it("should execute the `analyseFork` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "fork.analysing",
+ actions: ["analyseFork"],
+ });
+ });
+
+ it("should transition to `revertBlocks` on `REBUILD`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "fork.analysing",
+ on: "REBUILD",
+ to: "fork.revertBlocks",
+ });
+ });
+
+ it("should transition to `exit` on `NOFORK`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "fork.analysing",
+ on: "NOFORK",
+ to: "fork.exit",
+ });
+ });
+ });
+
+ describe("state `network`", () => {
+ it("should execute the `checkNetwork` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "fork.network",
+ actions: ["checkNetwork"],
+ });
+ });
+ });
+
+ describe("state `exit`", () => {
+ it("should execute the `forkRecovered` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "fork.exit",
+ actions: ["forkRecovered"],
+ });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.js b/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.js
deleted file mode 100644
index ceb699213c..0000000000
--- a/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.js
+++ /dev/null
@@ -1,155 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers') // eslint-disable-line no-unused-vars
-
-const machine = require('../../../lib/machines/blockchain')
-
-describe('Blockchain machine > Rebuilding', () => {
- it('should start with the `rebuilding` state', () => {
- expect(machine.states.rebuild).toHaveProperty('initial', 'rebuilding')
- })
-
- describe('state `rebuilding`', () => {
- it('should execute the `checkLastDownloadedBlockSynced` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.rebuilding',
- actions: ['checkLastDownloadedBlockSynced'],
- })
- })
-
- it('should transition to `waitingFinished` on `SYNCED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuilding',
- on: 'SYNCED',
- to: 'rebuild.waitingFinished',
- })
- })
-
- it('should transition to `revertBlocks` on `NOTSYNCED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuilding',
- on: 'NOTSYNCED',
- to: 'rebuild.rebuildBlocks',
- })
- })
-
- it('should transition to `rebuildPaused` on `PAUSED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuilding',
- on: 'PAUSED',
- to: 'rebuild.rebuildPaused',
- })
- })
- })
-
- describe('state `idle`', () => {
- it('should transition to `rebuildBlocks` on `DOWNLOADED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.idle',
- on: 'DOWNLOADED',
- to: 'rebuild.rebuildBlocks',
- })
- })
- })
-
- describe('state `rebuildBlocks`', () => {
- it('should execute the `rebuildBlocks` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.rebuildBlocks',
- actions: ['rebuildBlocks'],
- })
- })
-
- it('should transition to `rebuilding` on `DOWNLOADED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuildBlocks',
- on: 'DOWNLOADED',
- to: 'rebuild.rebuilding',
- })
- })
-
- it('should transition to `rebuilding` on `NOBLOCK`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuildBlocks',
- on: 'NOBLOCK',
- to: 'rebuild.rebuilding',
- })
- })
- })
-
- describe('state `waitingFinished`', () => {
- it('should transition to `rebuildFinished` on `REBUILDFINISHED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.waitingFinished',
- on: 'REBUILDFINISHED',
- to: 'rebuild.rebuildFinished',
- })
- })
- })
-
- describe('state `processFinished`', () => {
- it('should execute the `checkRebuildBlockSynced` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.processFinished',
- actions: ['checkRebuildBlockSynced'],
- })
- })
-
- it('should transition to `processFinished` on `SYNCED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.processFinished',
- on: 'SYNCED',
- to: 'rebuild.end',
- })
- })
-
- it('should transition to `processFinished` on `NOTSYNCED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.processFinished',
- on: 'NOTSYNCED',
- to: 'rebuild.rebuildBlocks',
- })
- })
- })
-
- describe('state `rebuildPaused`', () => {
- it('should execute the `downloadPaused` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.rebuildPaused',
- actions: ['downloadPaused'],
- })
- })
-
- it('should transition to `processFinished` on `REBUILDFINISHED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuildPaused',
- on: 'REBUILDFINISHED',
- to: 'rebuild.processFinished',
- })
- })
- })
-
- describe('state `rebuildFinished`', () => {
- it('should execute the `rebuildFinished` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.rebuildFinished',
- actions: ['rebuildFinished'],
- })
- })
-
- it('should transition to `processFinished` on `PROCESSFINISHED`', () => {
- expect(machine).toTransition({
- from: 'rebuild.rebuildFinished',
- on: 'PROCESSFINISHED',
- to: 'rebuild.processFinished',
- })
- })
- })
-
- describe('state `end`', () => {
- it('should execute the `rebuildingComplete` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'rebuild.end',
- actions: ['rebuildingComplete'],
- })
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.ts b/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.ts
new file mode 100644
index 0000000000..1821a7389b
--- /dev/null
+++ b/packages/core-blockchain/__tests__/machines/actions/rebuild-from-network.test.ts
@@ -0,0 +1,155 @@
+import "@arkecosystem/core-test-utils/";
+
+import { blockchainMachine as machine } from "../../../src/machines/blockchain";
+
+describe("Blockchain machine > Rebuilding", () => {
+ it("should start with the `rebuilding` state", () => {
+ expect(machine.states.rebuild).toHaveProperty("initial", "rebuilding");
+ });
+
+ describe("state `rebuilding`", () => {
+ it("should execute the `checkLastDownloadedBlockSynced` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.rebuilding",
+ actions: ["checkLastDownloadedBlockSynced"],
+ });
+ });
+
+ it("should transition to `waitingFinished` on `SYNCED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuilding",
+ on: "SYNCED",
+ to: "rebuild.waitingFinished",
+ });
+ });
+
+ it("should transition to `revertBlocks` on `NOTSYNCED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuilding",
+ on: "NOTSYNCED",
+ to: "rebuild.rebuildBlocks",
+ });
+ });
+
+ it("should transition to `rebuildPaused` on `PAUSED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuilding",
+ on: "PAUSED",
+ to: "rebuild.rebuildPaused",
+ });
+ });
+ });
+
+ describe("state `idle`", () => {
+ it("should transition to `rebuildBlocks` on `DOWNLOADED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.idle",
+ on: "DOWNLOADED",
+ to: "rebuild.rebuildBlocks",
+ });
+ });
+ });
+
+ describe("state `rebuildBlocks`", () => {
+ it("should execute the `rebuildBlocks` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.rebuildBlocks",
+ actions: ["rebuildBlocks"],
+ });
+ });
+
+ it("should transition to `rebuilding` on `DOWNLOADED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuildBlocks",
+ on: "DOWNLOADED",
+ to: "rebuild.rebuilding",
+ });
+ });
+
+ it("should transition to `rebuilding` on `NOBLOCK`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuildBlocks",
+ on: "NOBLOCK",
+ to: "rebuild.rebuilding",
+ });
+ });
+ });
+
+ describe("state `waitingFinished`", () => {
+ it("should transition to `rebuildFinished` on `REBUILDFINISHED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.waitingFinished",
+ on: "REBUILDFINISHED",
+ to: "rebuild.rebuildFinished",
+ });
+ });
+ });
+
+ describe("state `processFinished`", () => {
+ it("should execute the `checkRebuildBlockSynced` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.processFinished",
+ actions: ["checkRebuildBlockSynced"],
+ });
+ });
+
+ it("should transition to `processFinished` on `SYNCED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.processFinished",
+ on: "SYNCED",
+ to: "rebuild.end",
+ });
+ });
+
+ it("should transition to `processFinished` on `NOTSYNCED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.processFinished",
+ on: "NOTSYNCED",
+ to: "rebuild.rebuildBlocks",
+ });
+ });
+ });
+
+ describe("state `rebuildPaused`", () => {
+ it("should execute the `downloadPaused` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.rebuildPaused",
+ actions: ["downloadPaused"],
+ });
+ });
+
+ it("should transition to `processFinished` on `REBUILDFINISHED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuildPaused",
+ on: "REBUILDFINISHED",
+ to: "rebuild.processFinished",
+ });
+ });
+ });
+
+ describe("state `rebuildFinished`", () => {
+ it("should execute the `rebuildFinished` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.rebuildFinished",
+ actions: ["rebuildFinished"],
+ });
+ });
+
+ it("should transition to `processFinished` on `PROCESSFINISHED`", () => {
+ expect(machine).toTransition({
+ from: "rebuild.rebuildFinished",
+ on: "PROCESSFINISHED",
+ to: "rebuild.processFinished",
+ });
+ });
+ });
+
+ describe("state `end`", () => {
+ it("should execute the `rebuildingComplete` action when is entered", () => {
+ expect(machine).toExecuteOnEntry({
+ state: "rebuild.end",
+ actions: ["rebuildingComplete"],
+ });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.js b/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.js
deleted file mode 100644
index e46a59a922..0000000000
--- a/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.js
+++ /dev/null
@@ -1,153 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers') // eslint-disable-line no-unused-vars
-
-const machine = require('../../../lib/machines/blockchain')
-
-describe('Blockchain machine > SyncWithNetwork', () => {
- it('should start with the `syncing` state', () => {
- expect(machine.states.syncWithNetwork).toHaveProperty('initial', 'syncing')
- })
-
- describe('state `syncing`', () => {
- it('should execute the `checkLastDownloadedBlockSynced` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.syncing',
- actions: ['checkLastDownloadedBlockSynced'],
- })
- })
-
- it('should transition to `downloadFinished` on `SYNCED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.syncing',
- on: 'SYNCED',
- to: 'syncWithNetwork.downloadFinished',
- })
- })
-
- it('should transition to `downloadBlocks` on `NOTSYNCED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.syncing',
- on: 'NOTSYNCED',
- to: 'syncWithNetwork.downloadBlocks',
- })
- })
-
- it('should transition to `downloadPaused` on `PAUSED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.syncing',
- on: 'PAUSED',
- to: 'syncWithNetwork.downloadPaused',
- })
- })
-
- it('should transition to `end` on `NETWORKHALTED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.syncing',
- on: 'NETWORKHALTED',
- to: 'syncWithNetwork.end',
- })
- })
- })
-
- describe('state `idle`', () => {
- it('should transition to `downloadBlocks` on `DOWNLOADED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.idle',
- on: 'DOWNLOADED',
- to: 'syncWithNetwork.downloadBlocks',
- })
- })
- })
-
- describe('state `downloadBlocks`', () => {
- it('should execute the `downloadBlocks` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.downloadBlocks',
- actions: ['downloadBlocks'],
- })
- })
-
- it('should transition to `syncing` on `DOWNLOADED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.downloadBlocks',
- on: 'DOWNLOADED',
- to: 'syncWithNetwork.syncing',
- })
- })
-
- it('should transition to `syncing` on `NOBLOCK`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.downloadBlocks',
- on: 'NOBLOCK',
- to: 'syncWithNetwork.syncing',
- })
- })
- })
-
- describe('state `downloadFinished`', () => {
- it('should execute the `downloadFinished` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.downloadFinished',
- actions: ['downloadFinished'],
- })
- })
-
- it('should transition to `processFinished` on `PROCESSFINISHED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.downloadFinished',
- on: 'PROCESSFINISHED',
- to: 'syncWithNetwork.processFinished',
- })
- })
- })
-
- describe('state `downloadPaused`', () => {
- it('should execute the `downloadPaused` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.downloadPaused',
- actions: ['downloadPaused'],
- })
- })
-
- it('should transition to `processFinished` on `PROCESSFINISHED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.downloadPaused',
- on: 'PROCESSFINISHED',
- to: 'syncWithNetwork.processFinished',
- })
- })
- })
-
- describe('state `processFinished`', () => {
- it('should execute the `checkLastBlockSynced` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.processFinished',
- actions: ['checkLastBlockSynced'],
- })
- })
-
- it('should transition to `end` on `SYNCED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.processFinished',
- on: 'SYNCED',
- to: 'syncWithNetwork.end',
- })
- })
-
- it('should transition to `downloadBlocks` on `NOTSYNCED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork.processFinished',
- on: 'NOTSYNCED',
- to: 'syncWithNetwork.downloadBlocks',
- })
- })
- })
-
- describe('state `end`', () => {
- it('should execute the `syncingComplete` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'syncWithNetwork.end',
- actions: ['syncingComplete'],
- })
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.ts b/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.ts
new file mode 100644
index 0000000000..ca7d86b7d1
--- /dev/null
+++ b/packages/core-blockchain/__tests__/machines/actions/sync-with-network.test.ts
@@ -0,0 +1,153 @@
+import "@arkecosystem/core-test-utils/";
+
+import { blockchainMachine } from "../../../src/machines/blockchain";
+
+describe("Blockchain machine > SyncWithNetwork", () => {
+ it("should start with the `syncing` state", () => {
+ expect(blockchainMachine.states.syncWithNetwork).toHaveProperty("initial", "syncing");
+ });
+
+ describe("state `syncing`", () => {
+ it("should execute the `checkLastDownloadedBlockSynced` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.syncing",
+ actions: ["checkLastDownloadedBlockSynced"],
+ });
+ });
+
+ it("should transition to `downloadFinished` on `SYNCED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.syncing",
+ on: "SYNCED",
+ to: "syncWithNetwork.downloadFinished",
+ });
+ });
+
+ it("should transition to `downloadBlocks` on `NOTSYNCED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.syncing",
+ on: "NOTSYNCED",
+ to: "syncWithNetwork.downloadBlocks",
+ });
+ });
+
+ it("should transition to `downloadPaused` on `PAUSED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.syncing",
+ on: "PAUSED",
+ to: "syncWithNetwork.downloadPaused",
+ });
+ });
+
+ it("should transition to `end` on `NETWORKHALTED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.syncing",
+ on: "NETWORKHALTED",
+ to: "syncWithNetwork.end",
+ });
+ });
+ });
+
+ describe("state `idle`", () => {
+ it("should transition to `downloadBlocks` on `DOWNLOADED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.idle",
+ on: "DOWNLOADED",
+ to: "syncWithNetwork.downloadBlocks",
+ });
+ });
+ });
+
+ describe("state `downloadBlocks`", () => {
+ it("should execute the `downloadBlocks` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.downloadBlocks",
+ actions: ["downloadBlocks"],
+ });
+ });
+
+ it("should transition to `syncing` on `DOWNLOADED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.downloadBlocks",
+ on: "DOWNLOADED",
+ to: "syncWithNetwork.syncing",
+ });
+ });
+
+ it("should transition to `syncing` on `NOBLOCK`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.downloadBlocks",
+ on: "NOBLOCK",
+ to: "syncWithNetwork.syncing",
+ });
+ });
+ });
+
+ describe("state `downloadFinished`", () => {
+ it("should execute the `downloadFinished` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.downloadFinished",
+ actions: ["downloadFinished"],
+ });
+ });
+
+ it("should transition to `processFinished` on `PROCESSFINISHED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.downloadFinished",
+ on: "PROCESSFINISHED",
+ to: "syncWithNetwork.processFinished",
+ });
+ });
+ });
+
+ describe("state `downloadPaused`", () => {
+ it("should execute the `downloadPaused` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.downloadPaused",
+ actions: ["downloadPaused"],
+ });
+ });
+
+ it("should transition to `processFinished` on `PROCESSFINISHED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.downloadPaused",
+ on: "PROCESSFINISHED",
+ to: "syncWithNetwork.processFinished",
+ });
+ });
+ });
+
+ describe("state `processFinished`", () => {
+ it("should execute the `checkLastBlockSynced` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.processFinished",
+ actions: ["checkLastBlockSynced"],
+ });
+ });
+
+ it("should transition to `end` on `SYNCED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.processFinished",
+ on: "SYNCED",
+ to: "syncWithNetwork.end",
+ });
+ });
+
+ it("should transition to `downloadBlocks` on `NOTSYNCED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork.processFinished",
+ on: "NOTSYNCED",
+ to: "syncWithNetwork.downloadBlocks",
+ });
+ });
+ });
+
+ describe("state `end`", () => {
+ it("should execute the `syncingComplete` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "syncWithNetwork.end",
+ actions: ["syncingComplete"],
+ });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/machines/blockchain.test.js b/packages/core-blockchain/__tests__/machines/blockchain.test.js
deleted file mode 100644
index 5e850caaff..0000000000
--- a/packages/core-blockchain/__tests__/machines/blockchain.test.js
+++ /dev/null
@@ -1,188 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers') // eslint-disable-line no-unused-vars
-
-const machine = require('../../lib/machines/blockchain')
-
-describe('Blockchain machine', () => {
- it('should use `blockchain` as the key', () => {
- expect(machine).toHaveProperty('key', 'blockchain')
- })
-
- it('should start with the `uninitialised` state', () => {
- expect(machine.initialState).toHaveProperty('value', 'uninitialised')
- })
-
- describe('state `uninitialised`', () => {
- it('should transition to `init` on `START`', () => {
- expect(machine).toTransition({
- from: 'uninitialised',
- on: 'START',
- to: 'init',
- })
- })
- })
-
- describe('state `init`', () => {
- it('should execute the `init` action when is entered', () => {
- expect(machine).toExecuteOnEntry({ state: 'init', actions: ['init'] })
- })
-
- it('should transition to `rebuild` on `REBUILD`', () => {
- expect(machine).toTransition({
- from: 'init',
- on: 'REBUILD',
- to: 'rebuild',
- })
- })
-
- it('should transition to `rebuild` on `NETWORKSTART`', () => {
- expect(machine).toTransition({
- from: 'init',
- on: 'NETWORKSTART',
- to: 'idle',
- })
- })
-
- it('should transition to `rebuild` on `STARTED`', () => {
- expect(machine).toTransition({
- from: 'init',
- on: 'STARTED',
- to: 'syncWithNetwork',
- })
- })
-
- it('should transition to `rebuild` on `FAILURE`', () => {
- expect(machine).toTransition({ from: 'init', on: 'FAILURE', to: 'exit' })
- })
- })
-
- describe('state `rebuild`', () => {
- it('should transition to `syncWithNetwork` on `REBUILDCOMPLETE`', () => {
- expect(machine).toTransition({
- from: 'rebuild',
- on: 'REBUILDCOMPLETE',
- to: 'syncWithNetwork',
- })
- })
-
- it('should transition to `fork` on `FORK`', () => {
- expect(machine).toTransition({ from: 'rebuild', on: 'FORK', to: 'fork' })
- })
- })
-
- describe('state `syncWithNetwork`', () => {
- it('should transition to `idle` on `TEST`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork',
- on: 'TEST',
- to: 'idle',
- })
- })
-
- it('should transition to `idle` on `SYNCFINISHED`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork',
- on: 'SYNCFINISHED',
- to: 'idle',
- })
- })
-
- it('should transition to `fork` on `FORK`', () => {
- expect(machine).toTransition({
- from: 'syncWithNetwork',
- on: 'FORK',
- to: 'fork',
- })
- })
- })
-
- describe('state `idle`', () => {
- it('should execute the `checkLater` and `blockchainReady` actions when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'idle',
- actions: ['checkLater', 'blockchainReady'],
- })
- })
-
- it('should transition to `syncWithNetwork` on `WAKEUP`', () => {
- expect(machine).toTransition({
- from: 'idle',
- on: 'WAKEUP',
- to: 'syncWithNetwork',
- })
- })
-
- it('should transition to `newBlock` on `NEWBLOCK`', () => {
- expect(machine).toTransition({
- from: 'idle',
- on: 'NEWBLOCK',
- to: 'newBlock',
- })
- })
-
- it('should transition to `stopped` on `STOP`', () => {
- expect(machine).toTransition({ from: 'idle', on: 'STOP', to: 'stopped' })
- })
- })
-
- describe('state `newBlock`', () => {
- it('should transition to `idle` on `PROCESSFINISHED`', () => {
- expect(machine).toTransition({
- from: 'newBlock',
- on: 'PROCESSFINISHED',
- to: 'idle',
- })
- })
-
- it('should transition to `fork` on `FORK`', () => {
- expect(machine).toTransition({
- from: 'newBlock',
- on: 'FORK',
- to: 'fork',
- })
- })
-
- it('should transition to `stopped` on `STOP`', () => {
- expect(machine).toTransition({
- from: 'newBlock',
- on: 'STOP',
- to: 'stopped',
- })
- })
- })
-
- describe('state `fork`', () => {
- it('should execute the `processBlock` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'fork',
- actions: ['startForkRecovery'],
- })
- })
-
- it('should transition to `idle` on `SUCCESS`', () => {
- expect(machine).toTransition({
- from: 'fork',
- on: 'SUCCESS',
- to: 'syncWithNetwork',
- })
- })
-
- it('should transition to `fork` on `FAILURE`', () => {
- expect(machine).toTransition({ from: 'fork', on: 'FAILURE', to: 'exit' })
- })
- })
-
- describe('state `stopped`', () => {
- it('should execute the `stopped` action when is entered', () => {
- expect(machine).toExecuteOnEntry({
- state: 'stopped',
- actions: ['stopped'],
- })
- })
- })
-
- describe('state `exit`', () => {
- it('should execute the `exitApp` action when is entered', () => {
- expect(machine).toExecuteOnEntry({ state: 'exit', actions: ['exitApp'] })
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/machines/blockchain.test.ts b/packages/core-blockchain/__tests__/machines/blockchain.test.ts
new file mode 100644
index 0000000000..e7adfef260
--- /dev/null
+++ b/packages/core-blockchain/__tests__/machines/blockchain.test.ts
@@ -0,0 +1,188 @@
+import "@arkecosystem/core-test-utils/";
+
+import { blockchainMachine } from "../../src/machines/blockchain";
+
+describe("Blockchain machine", () => {
+ it("should use `blockchain` as the key", () => {
+ expect(blockchainMachine).toHaveProperty("key", "blockchain");
+ });
+
+ it("should start with the `uninitialised` state", () => {
+ expect(blockchainMachine.initialState).toHaveProperty("value", "uninitialised");
+ });
+
+ describe("state `uninitialised`", () => {
+ it("should transition to `init` on `START`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "uninitialised",
+ on: "START",
+ to: "init",
+ });
+ });
+ });
+
+ describe("state `init`", () => {
+ it("should execute the `init` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({ state: "init", actions: ["init"] });
+ });
+
+ it("should transition to `rebuild` on `REBUILD`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "init",
+ on: "REBUILD",
+ to: "rebuild",
+ });
+ });
+
+ it("should transition to `rebuild` on `NETWORKSTART`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "init",
+ on: "NETWORKSTART",
+ to: "idle",
+ });
+ });
+
+ it("should transition to `rebuild` on `STARTED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "init",
+ on: "STARTED",
+ to: "syncWithNetwork",
+ });
+ });
+
+ it("should transition to `rebuild` on `FAILURE`", () => {
+ expect(blockchainMachine).toTransition({ from: "init", on: "FAILURE", to: "exit" });
+ });
+ });
+
+ describe("state `rebuild`", () => {
+ it("should transition to `syncWithNetwork` on `REBUILDCOMPLETE`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "rebuild",
+ on: "REBUILDCOMPLETE",
+ to: "syncWithNetwork",
+ });
+ });
+
+ it("should transition to `fork` on `FORK`", () => {
+ expect(blockchainMachine).toTransition({ from: "rebuild", on: "FORK", to: "fork" });
+ });
+ });
+
+ describe("state `syncWithNetwork`", () => {
+ it("should transition to `idle` on `TEST`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork",
+ on: "TEST",
+ to: "idle",
+ });
+ });
+
+ it("should transition to `idle` on `SYNCFINISHED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork",
+ on: "SYNCFINISHED",
+ to: "idle",
+ });
+ });
+
+ it("should transition to `fork` on `FORK`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "syncWithNetwork",
+ on: "FORK",
+ to: "fork",
+ });
+ });
+ });
+
+ describe("state `idle`", () => {
+ it("should execute the `checkLater` and `blockchainReady` actions when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "idle",
+ actions: ["checkLater", "blockchainReady"],
+ });
+ });
+
+ it("should transition to `syncWithNetwork` on `WAKEUP`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "idle",
+ on: "WAKEUP",
+ to: "syncWithNetwork",
+ });
+ });
+
+ it("should transition to `newBlock` on `NEWBLOCK`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "idle",
+ on: "NEWBLOCK",
+ to: "newBlock",
+ });
+ });
+
+ it("should transition to `stopped` on `STOP`", () => {
+ expect(blockchainMachine).toTransition({ from: "idle", on: "STOP", to: "stopped" });
+ });
+ });
+
+ describe("state `newBlock`", () => {
+ it("should transition to `idle` on `PROCESSFINISHED`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "newBlock",
+ on: "PROCESSFINISHED",
+ to: "idle",
+ });
+ });
+
+ it("should transition to `fork` on `FORK`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "newBlock",
+ on: "FORK",
+ to: "fork",
+ });
+ });
+
+ it("should transition to `stopped` on `STOP`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "newBlock",
+ on: "STOP",
+ to: "stopped",
+ });
+ });
+ });
+
+ describe("state `fork`", () => {
+ it("should execute the `processBlock` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "fork",
+ actions: ["startForkRecovery"],
+ });
+ });
+
+ it("should transition to `idle` on `SUCCESS`", () => {
+ expect(blockchainMachine).toTransition({
+ from: "fork",
+ on: "SUCCESS",
+ to: "syncWithNetwork",
+ });
+ });
+
+ it("should transition to `fork` on `FAILURE`", () => {
+ expect(blockchainMachine).toTransition({ from: "fork", on: "FAILURE", to: "exit" });
+ });
+ });
+
+ describe("state `stopped`", () => {
+ it("should execute the `stopped` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({
+ state: "stopped",
+ actions: ["stopped"],
+ });
+ });
+ });
+
+ describe("state `exit`", () => {
+ it("should execute the `exitApp` action when is entered", () => {
+ expect(blockchainMachine).toExecuteOnEntry({ state: "exit", actions: ["exitApp"] });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/processor/block-processor.test.ts b/packages/core-blockchain/__tests__/processor/block-processor.test.ts
new file mode 100644
index 0000000000..f5249322c9
--- /dev/null
+++ b/packages/core-blockchain/__tests__/processor/block-processor.test.ts
@@ -0,0 +1,293 @@
+import "@arkecosystem/core-test-utils";
+import { fixtures, generators } from "@arkecosystem/core-test-utils";
+import genesisBlockTestnet from "@arkecosystem/core-test-utils/src/config/testnet/genesisBlock.json";
+import { models } from "@arkecosystem/crypto";
+import { Blockchain } from "../../src/blockchain";
+import { BlockProcessor, BlockProcessorResult } from "../../src/processor";
+import * as handlers from "../../src/processor/handlers";
+import { ExceptionHandler, VerificationFailedHandler } from "../../src/processor/handlers";
+import { setUpFull, tearDownFull } from "../__support__/setup";
+
+const { Block } = models;
+const { delegates } = fixtures;
+const { generateTransfers } = generators;
+
+let app;
+let blockchain: Blockchain;
+let blockProcessor: BlockProcessor;
+
+beforeAll(async () => {
+ app = await setUpFull();
+ blockchain = app.resolvePlugin("blockchain");
+ blockProcessor = new BlockProcessor(blockchain);
+});
+
+afterAll(async () => {
+ await tearDownFull();
+});
+
+const resetBlocks = async () => blockchain.removeBlocks(blockchain.getLastHeight() - 1); // reset to block height 1
+
+beforeEach(resetBlocks);
+afterEach(resetBlocks);
+
+describe("Block processor", () => {
+ const blockTemplate = {
+ id: "17882607875259085966",
+ version: 0,
+ timestamp: 46583330,
+ height: 2,
+ reward: 0,
+ previousBlock: genesisBlockTestnet.id,
+ numberOfTransactions: 1,
+ transactions: [],
+ totalAmount: 0,
+ totalFee: 0,
+ payloadLength: 0,
+ payloadHash: genesisBlockTestnet.payloadHash,
+ generatorPublicKey: delegates[0].publicKey,
+ blockSignature:
+ "3045022100e7385c6ea42bd950f7f6ab8c8619cf2f66a41d8f8f185b0bc99af032cb25f30d02200b6210176a6cedfdcbe483167fd91c21d740e0e4011d24d679c601fdd46b0de9",
+ createdAt: "2019-07-11T16:48:50.550Z",
+ };
+
+ describe("getHandler", () => {
+ it("should return ExceptionHandler if block is an exception", async () => {
+ const exceptionBlock = new Block(blockTemplate);
+ exceptionBlock.data.id = "998877";
+
+ const configManager = app.getConfig();
+
+ configManager.set("exceptions.blocks", ["998877"]);
+
+ expect(await blockProcessor.getHandler(exceptionBlock)).toBeInstanceOf(ExceptionHandler);
+ });
+
+ it("should return VerificationFailedHandler if block failed verification", async () => {
+ const failedVerifBlock = new Block(blockTemplate);
+ failedVerifBlock.verification.verified = false;
+
+ expect(await blockProcessor.getHandler(failedVerifBlock)).toBeInstanceOf(VerificationFailedHandler);
+ });
+ });
+
+ describe("process", () => {
+ const getBlock = transactions =>
+ Object.assign({}, blockTemplate, {
+ transactions,
+ totalAmount: transactions.reduce((acc, curr) => acc + curr.amount, 0),
+ totalFee: transactions.reduce((acc, curr) => acc + curr.fee, 0),
+ numberOfTransactions: transactions.length,
+ });
+ const processBlock = async transactions => {
+ const block = getBlock(transactions);
+ const blockVerified = new Block(block);
+ blockVerified.verification.verified = true;
+
+ await blockchain.processBlock(blockVerified, () => null);
+
+ return Object.assign(block, { id: blockVerified.data.id });
+ };
+
+ describe("should not accept replay transactions", () => {
+ it("should not validate an already forged transaction", async () => {
+ const transfers = generateTransfers(
+ "unitnet",
+ delegates[0].passphrase,
+ delegates[1].address,
+ 11,
+ 1,
+ true,
+ );
+ const block = await processBlock(transfers);
+ block.height = 3;
+ block.previousBlock = block.id;
+ block.id = "17882607875259085967";
+ block.timestamp += 1000;
+
+ const blockVerified = new Block(block);
+ blockVerified.verification.verified = true;
+
+ const handler = await blockProcessor.getHandler(blockVerified);
+ expect(handler instanceof handlers.AlreadyForgedHandler).toBeTrue();
+
+ const result = await blockProcessor.process(blockVerified);
+ expect(result).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ });
+
+ it("should not validate an already forged transaction - trying to tweak the tx id", async () => {
+ const transfers = generateTransfers(
+ "unitnet",
+ delegates[0].passphrase,
+ delegates[1].address,
+ 11,
+ 1,
+ true,
+ );
+ const block = await processBlock(transfers);
+ block.height = 3;
+ block.previousBlock = block.id;
+ block.id = "17882607875259085967";
+ block.timestamp += 1000;
+ block.transactions[0].id = "123456"; // change the tx id to try to make it accept as a new transaction
+
+ const blockVerified = new Block(block);
+ blockVerified.verification.verified = true;
+
+ const handler = await blockProcessor.getHandler(blockVerified);
+ expect(handler instanceof handlers.AlreadyForgedHandler).toBeTrue();
+
+ const result = await blockProcessor.process(blockVerified);
+ expect(result).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ });
+ });
+
+ describe("lastDownloadedBlock", () => {
+ it.each([
+ "AlreadyForgedHandler",
+ "InvalidGeneratorHandler",
+ "UnchainedHandler",
+ "VerificationFailedHandler",
+ ])(
+ "should not increment lastDownloadedBlock or lastBlock when processing block fails with %s",
+ async handler => {
+ const lastBlock = blockchain.getLastBlock();
+ const lastDownloadedBlock = blockchain.getLastDownloadedBlock();
+ const blockToProcess = new Block(blockTemplate);
+
+ const getHanderBackup = blockProcessor.getHandler; // save for restoring afterwards
+ blockProcessor.getHandler = jest.fn(() => new handlers[handler](blockchain, blockToProcess));
+
+ await blockProcessor.process(blockToProcess);
+
+ expect(blockchain.getLastBlock()).toEqual(lastBlock);
+ expect(blockchain.getLastDownloadedBlock()).toEqual(lastDownloadedBlock);
+
+ blockProcessor.getHandler = getHanderBackup; // restore original function
+ },
+ );
+ });
+
+ describe("Forging delegates", () => {
+ it("should use InvalidGeneratorHandler if forging delegate is invalid", async () => {
+ const database = app.resolvePlugin("database");
+ const getActiveDelegatesBackup = database.getActiveDelegates; // save for restoring afterwards
+ database.getActiveDelegates = jest.fn(() => [delegates[50]]);
+
+ const blockVerified = new Block(getBlock([]));
+ blockVerified.verification.verified = true;
+
+ const handler = await blockProcessor.getHandler(blockVerified);
+ expect(handler instanceof handlers.InvalidGeneratorHandler).toBeTrue();
+
+ const result = await blockProcessor.process(blockVerified);
+ expect(result).toBe(BlockProcessorResult.Rejected);
+
+ database.getActiveDelegates = getActiveDelegatesBackup; // restore the original function
+ });
+ });
+
+ describe("Unchained blocks", () => {
+ it("should 'discard but broadcast' when same block comes again", async () => {
+ /* We process a valid block then try processing the same block again.
+ Should detect as "double-forging" and reject the duplicate block. */
+ const blockVerified = new Block(getBlock([]));
+ blockVerified.verification.verified = true;
+
+ // accept a valid first block
+ const accepted = await blockProcessor.process(blockVerified);
+ expect(accepted).toBe(BlockProcessorResult.Accepted);
+
+ // get handler on same block, should be handled by UnchainedHandler
+ const handler = await blockProcessor.getHandler(blockVerified);
+ expect(handler instanceof handlers.UnchainedHandler).toBeTrue();
+
+ // if we try to process the block, it should be discarded but broadcasted
+ const rejected = await blockProcessor.process(blockVerified);
+ expect(rejected).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ });
+
+ it("should reject a double-forging block", async () => {
+ /* We process a valid block then try processing the same block again.
+ Should detect as "double-forging" and reject the duplicate block. */
+ const blockVerified = new Block(getBlock([]));
+ blockVerified.verification.verified = true;
+
+ // accept a valid first block
+ const accepted = await blockProcessor.process(blockVerified);
+ expect(accepted).toBe(BlockProcessorResult.Accepted);
+
+ // new block for double-forging : same height different id
+ const blockDoubleForging = new Block(getBlock([]));
+ blockDoubleForging.verification.verified = true;
+ blockDoubleForging.data.id = "123456";
+
+ // get handler on the "new" block, should be handled by UnchainedHandler
+ const handler = await blockProcessor.getHandler(blockDoubleForging);
+ expect(handler instanceof handlers.UnchainedHandler).toBeTrue();
+
+ // if we try to process the block, it should be rejected
+ const rejected = await blockProcessor.process(blockDoubleForging);
+ expect(rejected).toBe(BlockProcessorResult.Rejected);
+ });
+
+ it("should reject a block with invalid timestamp", async () => {
+ const database = app.resolvePlugin("database");
+ const getActiveDelegatesBackup = database.getActiveDelegates;
+ database.getActiveDelegates = jest.fn(() => [delegates[0]]);
+
+ const forkBlockBackup = blockchain.forkBlock;
+ blockchain.forkBlock = jest.fn();
+
+ const block = new Block(getBlock([]));
+ block.verification.verified = true;
+ block.data.timestamp = 46582922;
+
+ blockchain.getLastBlock().data.timestamp = 46583330;
+
+ const rejected = await blockProcessor.process(block);
+ expect(blockchain.forkBlock).not.toHaveBeenCalled();
+ expect(rejected).toBe(BlockProcessorResult.Rejected);
+
+ blockchain.getLastBlock().data.timestamp = 0;
+ blockchain.forkBlock = forkBlockBackup;
+ database.getActiveDelegates = getActiveDelegatesBackup;
+ });
+
+ it("should 'discard but broadcast' a block higher than current height + 1", async () => {
+ const blockVerified = new Block(getBlock([]));
+ blockVerified.verification.verified = true;
+ blockVerified.data.height = 3;
+
+ const handler = await blockProcessor.getHandler(blockVerified);
+ expect(handler instanceof handlers.UnchainedHandler).toBeTrue();
+
+ const result = await blockProcessor.process(blockVerified);
+ expect(result).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ });
+
+ it("should 'discard but broadcast' a block lower than current height", async () => {
+ const blockVerified = new Block(getBlock([]));
+ blockVerified.verification.verified = true;
+
+ // accept a valid first block
+ const accepted = await blockProcessor.process(blockVerified);
+ expect(accepted).toBe(BlockProcessorResult.Accepted);
+
+ // new block with height < current
+ const blockLowerHeight = new Block(getBlock([]));
+ blockLowerHeight.verification.verified = true;
+ blockLowerHeight.data.id = "123456";
+ blockLowerHeight.data.height = 1;
+
+ // get handler on the "new" block, should be handled by UnchainedHandler
+ const handler = await blockProcessor.getHandler(blockLowerHeight);
+ expect(handler instanceof handlers.UnchainedHandler).toBeTrue();
+
+ // if we try to process the block, it should be 'discarded but can be broadcasted'
+ const result = await blockProcessor.process(blockLowerHeight);
+ expect(result).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/processor/handlers/accept-handler.test.ts b/packages/core-blockchain/__tests__/processor/handlers/accept-handler.test.ts
new file mode 100644
index 0000000000..710be16e12
--- /dev/null
+++ b/packages/core-blockchain/__tests__/processor/handlers/accept-handler.test.ts
@@ -0,0 +1,68 @@
+import "@arkecosystem/core-test-utils";
+import { AcceptBlockHandler } from "../../../src/processor/handlers";
+
+import { models } from "@arkecosystem/crypto";
+import { blocks2to100 } from "../../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+import { Blockchain } from "../../../src/blockchain";
+import { BlockProcessorResult } from "../../../src/processor";
+import { setUpFull, tearDownFull } from "../../__support__/setup";
+
+const { Block } = models;
+let app;
+let blockchain: Blockchain;
+let logger;
+
+beforeAll(async () => {
+ app = await setUpFull();
+ blockchain = app.resolvePlugin("blockchain");
+ logger = app.resolvePlugin("logger");
+
+ // mock apply / saveBlock - we dont want to actually do anything to the db
+ jest.spyOn(blockchain.database, "applyBlock").mockReturnValue(true);
+ jest.spyOn(blockchain.database, "saveBlock").mockReturnValue(true);
+});
+
+afterAll(async () => {
+ await tearDownFull();
+});
+
+describe("Accept handler", () => {
+ describe("execute", () => {
+ it("should log message if we recovered from fork and update state.forkedBlock", async () => {
+ const handler = new AcceptBlockHandler(blockchain, new Block(blocks2to100[0]));
+
+ const loggerInfo = jest.spyOn(logger, "info");
+ blockchain.state.forkedBlock = new Block(blocks2to100[0]);
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Accepted);
+ expect(loggerInfo).toHaveBeenCalledWith("Successfully recovered from fork :star2:");
+ expect(blockchain.state.forkedBlock).toBe(null);
+ });
+
+ it("should log warning message if transactionPool accepChainedBlock threw an exception", async () => {
+ const handler = new AcceptBlockHandler(blockchain, new Block(blocks2to100[0]));
+
+ const loggerWarn = jest.spyOn(logger, "warn");
+ jest.spyOn(blockchain.transactionPool, "acceptChainedBlock").mockImplementationOnce(() => {
+ throw new Error("¯_(ツ)_/¯");
+ });
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Accepted);
+ expect(loggerWarn).toHaveBeenCalledWith("Issue applying block to transaction pool");
+ });
+
+ it("should log error message if an exception was thrown", async () => {
+ const block = new Block(blocks2to100[0]);
+ const handler = new AcceptBlockHandler(blockchain, block);
+
+ jest.restoreAllMocks();
+ const loggerError = jest.spyOn(logger, "error");
+ jest.spyOn(blockchain.database, "applyBlock").mockImplementationOnce(() => {
+ throw new Error("¯_(ツ)_/¯");
+ });
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Rejected);
+ expect(loggerError).toHaveBeenCalledWith(`Refused new block ${JSON.stringify(block.data)}`);
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/processor/handlers/exception-handler.test.ts b/packages/core-blockchain/__tests__/processor/handlers/exception-handler.test.ts
new file mode 100644
index 0000000000..034083c5bb
--- /dev/null
+++ b/packages/core-blockchain/__tests__/processor/handlers/exception-handler.test.ts
@@ -0,0 +1,39 @@
+import "@arkecosystem/core-test-utils";
+import { ExceptionHandler } from "../../../src/processor/handlers";
+
+import { models } from "@arkecosystem/crypto";
+import { blocks2to100 } from "../../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+import { Blockchain } from "../../../src/blockchain";
+import { BlockProcessorResult } from "../../../src/processor";
+import { setUpFull, tearDownFull } from "../../__support__/setup";
+
+const { Block } = models;
+let app;
+let blockchain: Blockchain;
+
+beforeAll(async () => {
+ app = await setUpFull();
+ blockchain = app.resolvePlugin("blockchain");
+});
+
+afterAll(async () => {
+ await tearDownFull();
+});
+
+describe("Exception handler", () => {
+ describe("execute", () => {
+ it("should reject if block has already been forged", async () => {
+ const handler = new ExceptionHandler(blockchain, new Block(blocks2to100[0]));
+
+ jest.spyOn(blockchain.database, "getBlock").mockReturnValueOnce(true);
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Rejected);
+ });
+
+ it("should accept if block has not already been forged", async () => {
+ const handler = new ExceptionHandler(blockchain, new Block(blocks2to100[0]));
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Accepted);
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/processor/handlers/unchained-handler.test.ts b/packages/core-blockchain/__tests__/processor/handlers/unchained-handler.test.ts
new file mode 100644
index 0000000000..e69061f43c
--- /dev/null
+++ b/packages/core-blockchain/__tests__/processor/handlers/unchained-handler.test.ts
@@ -0,0 +1,55 @@
+import "@arkecosystem/core-test-utils";
+import { UnchainedHandler } from "../../../src/processor/handlers";
+
+import { models } from "@arkecosystem/crypto";
+import { blocks2to100 } from "../../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+import { Blockchain } from "../../../src/blockchain";
+import { BlockProcessorResult } from "../../../src/processor";
+import { setUpFull, tearDownFull } from "../../__support__/setup";
+
+const { Block } = models;
+let app;
+let blockchain: Blockchain;
+
+beforeAll(async () => {
+ app = await setUpFull();
+ blockchain = app.resolvePlugin("blockchain");
+});
+
+afterAll(async () => {
+ await tearDownFull();
+});
+
+describe("Exception handler", () => {
+ describe("execute", () => {
+ it("should fork if double forging is detected", async () => {
+ jest.spyOn(blockchain, "getLastBlock").mockReturnValue(new Block(blocks2to100[0]));
+ jest.spyOn(blockchain.database, "getActiveDelegates").mockReturnValue([
+ {
+ publicKey: blocks2to100[0].generatorPublicKey,
+ },
+ ]);
+ const forkBlock = jest.spyOn(blockchain, "forkBlock").mockReturnValue(true);
+
+ const sameBlockDifferentId = new Block(blocks2to100[0]);
+ sameBlockDifferentId.data.id = "7536951";
+
+ const handler = new UnchainedHandler(blockchain, sameBlockDifferentId, true);
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.Rejected);
+ expect(forkBlock).toHaveBeenCalled();
+ });
+
+ it("should log that blocks are being discarded when discarding blocks with height > current + 1", async () => {
+ jest.spyOn(blockchain, "getLastBlock").mockReturnValue(new Block(blocks2to100[0]));
+ blockchain.processQueue.length = () => 5;
+
+ const loggerDebug = jest.spyOn(app.resolvePlugin("logger"), "debug");
+
+ const handler = new UnchainedHandler(blockchain, new Block(blocks2to100[5]), true);
+
+ expect(await handler.execute()).toBe(BlockProcessorResult.DiscardedButCanBeBroadcasted);
+ expect(loggerDebug).toHaveBeenCalledWith("Discarded 5 downloaded blocks.");
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/queue/interface.test.ts b/packages/core-blockchain/__tests__/queue/interface.test.ts
new file mode 100644
index 0000000000..9ed581b1f6
--- /dev/null
+++ b/packages/core-blockchain/__tests__/queue/interface.test.ts
@@ -0,0 +1,71 @@
+import "@arkecosystem/core-test-utils";
+import async from "async";
+import { asValue } from "awilix";
+import delay from "delay";
+import { Blockchain } from "../../src/blockchain";
+import { QueueInterface } from "../../src/queue/interface";
+import { setUp, tearDown } from "../__support__/setup";
+
+let fakeQueue;
+let container;
+let blockchain: Blockchain;
+
+class FakeQueue extends QueueInterface {
+ /**
+ * Create an instance of the process queue.
+ */
+ constructor(readonly blockchainInstance: Blockchain, readonly event: string) {
+ super(blockchainInstance, event);
+
+ this.queue = async.queue(async (item: any, cb) => {
+ await delay(1000);
+ return cb();
+ }, 1);
+ }
+}
+
+beforeAll(async () => {
+ container = await setUp();
+
+ process.env.CORE_SKIP_BLOCKCHAIN = "true";
+
+ // Manually register the blockchain
+ const plugin = require("../../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart: false,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+beforeEach(async () => {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+
+ fakeQueue = new FakeQueue(blockchain, "fake");
+});
+
+describe("FakeQueue", () => {
+ it("should remove successfully an item from the queue", async () => {
+ const cb = jest.fn();
+ fakeQueue.push(cb);
+
+ expect(fakeQueue.queue.length()).toBe(1);
+
+ fakeQueue.remove(obj => true); // removes everything, see async queue doc
+
+ expect(fakeQueue.queue.length()).toBe(0);
+ });
+});
diff --git a/packages/core-blockchain/__tests__/queue/process.test.ts b/packages/core-blockchain/__tests__/queue/process.test.ts
new file mode 100644
index 0000000000..3406f71ff7
--- /dev/null
+++ b/packages/core-blockchain/__tests__/queue/process.test.ts
@@ -0,0 +1,73 @@
+import "@arkecosystem/core-test-utils";
+import { asValue } from "awilix";
+import delay from "delay";
+import { blocks2to100 } from "../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+import { Blockchain } from "../../src/blockchain";
+import { setUp, tearDown } from "../__support__/setup";
+
+let processQueue;
+let container;
+let blockchain: Blockchain;
+
+beforeAll(async () => {
+ container = await setUp();
+
+ process.env.CORE_SKIP_BLOCKCHAIN = "true";
+
+ // Manually register the blockchain
+ const plugin = require("../../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart: false,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+});
+
+afterAll(async () => {
+ jest.restoreAllMocks();
+ await tearDown();
+});
+
+beforeEach(async () => {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+ jest.restoreAllMocks();
+
+ const ProcessQueue = require("../../src/queue").ProcessQueue;
+ processQueue = new ProcessQueue(blockchain, "processEvent");
+});
+
+describe("ProcessQueue", () => {
+ it("should call blockchain processBlock when pushing a block to the queue", async () => {
+ const processBlock = jest.spyOn(blockchain, "processBlock").mockReturnValue(true);
+
+ const cb = jest.fn();
+ processQueue.push(blocks2to100[3], cb);
+
+ await delay(200);
+ expect(processBlock).toHaveBeenCalled();
+ });
+
+ it("should log error and call callback when blockchain processBlock throws", async () => {
+ const processBlock = jest.spyOn(blockchain, "processBlock").mockImplementation(() => {
+ throw new Error("wooo");
+ });
+
+ const loggerError = jest.spyOn(container.resolvePlugin("logger"), "error");
+
+ const cb = jest.fn();
+ processQueue.push(blocks2to100[3], cb);
+
+ await delay(200);
+ expect(processBlock).toHaveBeenCalled();
+ expect(loggerError).toHaveBeenCalledWith(`Failed to process block in ProcessQueue: ${blocks2to100[3].height}`);
+ });
+});
diff --git a/packages/core-blockchain/__tests__/queue/rebuild.test.ts b/packages/core-blockchain/__tests__/queue/rebuild.test.ts
new file mode 100644
index 0000000000..419c2782ac
--- /dev/null
+++ b/packages/core-blockchain/__tests__/queue/rebuild.test.ts
@@ -0,0 +1,88 @@
+import "@arkecosystem/core-test-utils";
+import { asValue } from "awilix";
+import delay from "delay";
+import { blocks2to100 } from "../../../core-test-utils/src/fixtures/testnet/blocks2to100";
+import { Blockchain } from "../../src/blockchain";
+import { setUp, tearDown } from "../__support__/setup";
+
+let rebuildQueue;
+let container;
+let blockchain: Blockchain;
+
+beforeAll(async () => {
+ container = await setUp();
+
+ process.env.CORE_SKIP_BLOCKCHAIN = "true";
+
+ // Manually register the blockchain
+ const plugin = require("../../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart: false,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+});
+
+afterAll(async () => {
+ jest.restoreAllMocks();
+ await tearDown();
+});
+
+beforeEach(async () => {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+ jest.restoreAllMocks();
+
+ const RebuildQueue = require("../../src/queue").RebuildQueue;
+ rebuildQueue = new RebuildQueue(blockchain, "processEvent");
+});
+
+describe("RebuildQueue", () => {
+ it("should call blockchain rebuildBlock when pushing a block to the queue", async () => {
+ const rebuildBlock = jest.spyOn(blockchain, "rebuildBlock").mockReturnValue(true);
+
+ const cb = jest.fn();
+ rebuildQueue.push(blocks2to100[3], cb);
+
+ await delay(200);
+ expect(rebuildBlock).toHaveBeenCalled();
+ });
+
+ it.skip("should just call callback if queue is paused when pushing a block to the queue", async () => {
+ // should call callback, but doesn't seem so... TODO
+ const rebuildBlock = jest.spyOn(blockchain, "rebuildBlock").mockReturnValue(true);
+
+ const cb = jest.fn(() => {
+ throw new Error("uuuui");
+ });
+ rebuildQueue.queue.paused = true;
+ rebuildQueue.queue.push(blocks2to100[3], cb);
+
+ await delay(200);
+ expect(rebuildBlock).not.toHaveBeenCalled();
+ expect(cb).toHaveBeenCalled();
+ });
+
+ it("should log error and call callback when blockchain rebuildBlock throws", async () => {
+ const rebuildBlock = jest.spyOn(blockchain, "rebuildBlock").mockImplementation(() => {
+ throw new Error("wooo");
+ });
+
+ const loggerError = jest.spyOn(container.resolvePlugin("logger"), "error");
+
+ const cb = jest.fn(() => true);
+ rebuildQueue.push(blocks2to100[3], cb);
+
+ await delay(200);
+ expect(rebuildBlock).toHaveBeenCalled();
+ expect(loggerError).toHaveBeenCalledWith(`Failed to rebuild block in RebuildQueue: ${blocks2to100[3].height}`);
+ });
+});
diff --git a/packages/core-blockchain/__tests__/state-machine.test.js b/packages/core-blockchain/__tests__/state-machine.test.js
deleted file mode 100644
index c0ebdedff1..0000000000
--- a/packages/core-blockchain/__tests__/state-machine.test.js
+++ /dev/null
@@ -1,228 +0,0 @@
-require('@arkecosystem/core-test-utils/lib/matchers') // eslint-disable-line no-unused-vars
-
-const { asValue } = require('awilix')
-
-const app = require('./__support__/setup')
-
-let stateMachine
-let container
-let blockchain
-
-beforeAll(async () => {
- container = await app.setUp()
-
- process.env.ARK_SKIP_BLOCKCHAIN = true
-
- // Manually register the blockchain
- const plugin = require('../lib').plugin
-
- blockchain = await plugin.register(container, {
- networkStart: false,
- })
-
- await container.register(
- 'blockchain',
- asValue({
- name: 'blockchain',
- version: '0.1.0',
- plugin: blockchain,
- options: {},
- }),
- )
-
- stateMachine = require('../lib/state-machine')
-})
-
-afterAll(async () => {
- // Manually stop the blockchain
- await blockchain.stop()
-
- await app.tearDown()
-})
-
-beforeEach(async () => {
- process.env.ARK_SKIP_BLOCKCHAIN = false
- blockchain.resetState()
-})
-
-describe('State Machine', () => {
- it('should be an object', () => {
- expect(stateMachine).toBeObject()
- })
-
- describe('actionMap', () => {
- let actionMap
-
- beforeEach(() => {
- actionMap = stateMachine.actionMap(blockchain)
- })
-
- describe('checkLater', () => {
- it('should be a function', () => {
- expect(actionMap.checkLater).toBeFunction()
- })
-
- it('should dispatch the event "WAKEUP" after a delay', async () => {
- jest.useFakeTimers()
- blockchain.dispatch = jest.fn()
-
- actionMap.checkLater()
- expect(blockchain.dispatch).not.toBeCalled()
-
- jest.runAllTimers()
- expect(blockchain.dispatch).toHaveBeenCalled()
- expect(blockchain.dispatch).toHaveBeenCalledWith('WAKEUP')
-
- jest.useRealTimers() // restore standard timers
- })
- })
-
- describe('checkLastBlockSynced', () => {
- it('should be a function', () => {
- expect(actionMap.checkLastBlockSynced).toBeFunction()
- })
-
- it('should dispatch the event "SYNCED" if the blockchain is synced', () => {
- blockchain.isSynced = jest.fn(() => true)
- expect(actionMap.checkLastBlockSynced).toDispatch(blockchain, 'SYNCED')
- })
-
- it('should dispatch the event "NOTSYNCED" if the blockchain is not synced', () => {
- blockchain.isSynced = jest.fn(() => false)
- expect(() => actionMap.checkLastBlockSynced()).toDispatch(
- blockchain,
- 'NOTSYNCED',
- )
- })
- })
-
- describe('checkRebuildBlockSynced', () => {
- it('should be a function', () => {
- expect(actionMap.checkRebuildBlockSynced).toBeFunction()
- })
-
- it('should dispatch the event "SYNCED" if the blockchain is synced after a rebuild', () => {
- blockchain.isRebuildSynced = jest.fn(() => true)
- expect(() => actionMap.checkRebuildBlockSynced()).toDispatch(
- blockchain,
- 'SYNCED',
- )
- })
-
- it('should dispatch the event "NOTSYNCED" if the blockchain is not synced after a rebuild', () => {
- blockchain.isRebuildSynced = jest.fn(() => false)
- expect(() => actionMap.checkRebuildBlockSynced()).toDispatch(
- blockchain,
- 'NOTSYNCED',
- )
- })
- })
-
- describe('checkLastDownloadedBlockSynced', () => {
- it('should be a function', () => {
- expect(actionMap.checkLastDownloadedBlockSynced).toBeFunction()
- })
- })
-
- describe('downloadFinished', () => {
- it('should be a function', () => {
- expect(actionMap.downloadFinished).toBeFunction()
- })
-
- describe('if the network has started', () => {
- it('should dispatch the event "SYNCFINISHED"', () => {
- stateMachine.state.networkStart = true
- expect(actionMap.downloadFinished).toDispatch(
- blockchain,
- 'SYNCFINISHED',
- )
- })
-
- it('should toggle its state', () => {
- stateMachine.state.networkStart = true
- actionMap.downloadFinished()
- expect(stateMachine.state.networkStart).toBe(false)
- })
- })
-
- describe('if the network has not started', () => {
- it('should not do anything', () => {
- stateMachine.state.networkStart = false
- expect(() => actionMap.downloadFinished()).not.toDispatch([
- blockchain,
- 'SYNCFINISHED',
- ])
- expect(stateMachine.state.networkStart).toBe(false)
- })
- })
- })
-
- describe('rebuildFinished', () => {
- it('should be a function', () => {
- expect(actionMap.rebuildFinished).toBeFunction()
- })
- })
-
- describe('downloadPaused', () => {
- it('should be a function', () => {
- expect(actionMap.downloadPaused).toBeFunction()
- })
-
- it('should dispatch the event "SYNCFINISHED"', () => {
- expect(() => actionMap.syncingComplete()).toDispatch(
- blockchain,
- 'SYNCFINISHED',
- )
- })
- })
-
- describe('rebuildingComplete', () => {
- it('should be a function', () => {
- expect(actionMap.rebuildingComplete).toBeFunction()
- })
-
- it('should dispatch the event "REBUILDCOMPLETE"', () => {
- expect(() => actionMap.rebuildingComplete()).toDispatch(
- blockchain,
- 'REBUILDCOMPLETE',
- )
- })
- })
-
- describe('exitApp', () => {
- it('should be a function', () => {
- expect(actionMap.exitApp).toBeFunction()
- })
- })
-
- describe('init', () => {
- it('should be a function', () => {
- expect(actionMap.init).toBeFunction()
- })
- })
-
- describe('rebuildBlocks', () => {
- it('should be a function', () => {
- expect(actionMap.rebuildBlocks).toBeFunction()
- })
- })
-
- describe('downloadBlocks', () => {
- it('should be a function', () => {
- expect(actionMap.downloadBlocks).toBeFunction()
- })
- })
-
- describe('analyseFork', () => {
- it('should be a function', () => {
- expect(actionMap.analyseFork).toBeFunction()
- })
- })
-
- describe('startForkRecovery', () => {
- it('should be a function', () => {
- expect(actionMap.startForkRecovery).toBeFunction()
- })
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/state-machine.test.ts b/packages/core-blockchain/__tests__/state-machine.test.ts
new file mode 100644
index 0000000000..d9b37d4606
--- /dev/null
+++ b/packages/core-blockchain/__tests__/state-machine.test.ts
@@ -0,0 +1,623 @@
+import "@arkecosystem/core-test-utils";
+import { roundCalculator } from "@arkecosystem/core-utils";
+import { slots } from "@arkecosystem/crypto";
+import { Block } from "@arkecosystem/crypto/dist/models";
+import { asValue } from "awilix";
+import { Blockchain } from "../src/blockchain";
+import { stateStorage } from "../src/state-storage";
+import { config as localConfig } from "./../src/config";
+import { setUp, tearDown } from "./__support__/setup";
+
+let stateMachine;
+let container;
+let blockchain: Blockchain;
+
+beforeAll(async () => {
+ container = await setUp();
+
+ process.env.CORE_SKIP_BLOCKCHAIN = "true";
+ process.env.CORE_ENV = "";
+
+ // Manually register the blockchain
+ const plugin = require("../src").plugin;
+
+ blockchain = await plugin.register(container, {
+ networkStart: false,
+ });
+
+ await container.register(
+ "blockchain",
+ asValue({
+ name: "blockchain",
+ version: "0.1.0",
+ plugin: blockchain,
+ options: {},
+ }),
+ );
+
+ stateMachine = require("../src/state-machine").stateMachine;
+});
+
+afterAll(async () => {
+ // Manually stop the blockchain
+ await blockchain.stop();
+
+ await tearDown();
+});
+
+beforeEach(async () => {
+ process.env.CORE_SKIP_BLOCKCHAIN = "false";
+ blockchain.resetState();
+});
+
+describe("State Machine", () => {
+ describe("actionMap", () => {
+ let actionMap;
+
+ beforeEach(() => {
+ actionMap = stateMachine.actionMap(blockchain);
+ });
+
+ describe("checkLater", () => {
+ it('should dispatch the event "WAKEUP" after a delay', async () => {
+ jest.useFakeTimers();
+ blockchain.dispatch = jest.fn();
+
+ actionMap.checkLater();
+ expect(blockchain.dispatch).not.toBeCalled();
+
+ jest.runAllTimers();
+ expect(blockchain.dispatch).toHaveBeenCalled();
+ expect(blockchain.dispatch).toHaveBeenCalledWith("WAKEUP");
+
+ jest.useRealTimers(); // restore standard timers
+ });
+ });
+
+ describe("checkLastBlockSynced", () => {
+ it('should dispatch the event "SYNCED" if the blockchain is synced', () => {
+ blockchain.isSynced = jest.fn(() => true);
+ expect(actionMap.checkLastBlockSynced).toDispatch(blockchain, "SYNCED");
+ });
+
+ it('should dispatch the event "NOTSYNCED" if the blockchain is not synced', () => {
+ blockchain.isSynced = jest.fn(() => false);
+ expect(() => actionMap.checkLastBlockSynced()).toDispatch(blockchain, "NOTSYNCED");
+ });
+ });
+
+ describe("checkRebuildBlockSynced", () => {
+ it('should dispatch the event "SYNCED" if the blockchain is synced after a rebuild', () => {
+ blockchain.isRebuildSynced = jest.fn(() => true);
+ expect(() => actionMap.checkRebuildBlockSynced()).toDispatch(blockchain, "SYNCED");
+ });
+
+ it('should dispatch the event "NOTSYNCED" if the blockchain is not synced after a rebuild', () => {
+ blockchain.isRebuildSynced = jest.fn(() => false);
+ expect(() => actionMap.checkRebuildBlockSynced()).toDispatch(blockchain, "NOTSYNCED");
+ });
+ });
+
+ describe("checkLastDownloadedBlockSynced", () => {
+ it('should dispatch the event "NOTSYNCED" by default', async () => {
+ blockchain.isSynced = jest.fn(() => false);
+ blockchain.processQueue.length = jest.fn(() => 1);
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "NOTSYNCED");
+ });
+
+ it('should dispatch the event "PAUSED" if the blockchain rebuild / process queue is more than 10000 long', async () => {
+ blockchain.isSynced = jest.fn(() => false);
+ blockchain.rebuildQueue.length = jest.fn(() => 10001);
+ blockchain.processQueue.length = jest.fn(() => 1);
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "PAUSED");
+
+ blockchain.rebuildQueue.length = jest.fn(() => 1);
+ blockchain.processQueue.length = jest.fn(() => 10001);
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "PAUSED");
+ });
+
+ it('should dispatch the event "NETWORKHALTED" if stateStorage.noBlockCounter > 5 and process queue is empty', async () => {
+ blockchain.isSynced = jest.fn(() => false);
+ blockchain.processQueue.length = jest.fn(() => 0);
+ stateStorage.noBlockCounter = 6;
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "NETWORKHALTED");
+ });
+
+ it(`should dispatch the event "FORK" if
+ - stateStorage.noBlockCounter > 5 and process queue is empty
+ - stateStorage.p2pUpdateCounter + 1 > 3 (network keeps missing blocks)
+ - blockchain.p2p.updatePeersOnMissingBlocks() returns "rollback"`, async () => {
+ blockchain.isSynced = jest.fn(() => false);
+ blockchain.processQueue.length = jest.fn(() => 0);
+ stateStorage.noBlockCounter = 6;
+ stateStorage.p2pUpdateCounter = 3;
+ jest.spyOn(blockchain.p2p, "updatePeersOnMissingBlocks").mockImplementation(() => "rollback");
+
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "FORK");
+ });
+
+ it('should dispatch the event "SYNCED" if stateStorage.networkStart is true', async () => {
+ blockchain.isSynced = jest.fn(() => false);
+ stateStorage.noBlockCounter = 0;
+ stateStorage.networkStart = true;
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "SYNCED");
+ });
+
+ it('should dispatch the event "TEST" if process.env.CORE_ENV === "test"', async () => {
+ const coreEnv = process.env.CORE_ENV;
+ process.env.CORE_ENV = "test";
+ blockchain.isSynced = jest.fn(() => false);
+ await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "TEST");
+
+ process.env.CORE_ENV = coreEnv;
+ });
+ });
+
+ describe("downloadFinished", () => {
+ describe("if the network has started", () => {
+ it('should dispatch the event "SYNCFINISHED"', () => {
+ stateMachine.state.networkStart = true;
+ expect(actionMap.downloadFinished).toDispatch(blockchain, "SYNCFINISHED");
+ });
+
+ it("should toggle its state", () => {
+ stateMachine.state.networkStart = true;
+ actionMap.downloadFinished();
+ expect(stateMachine.state.networkStart).toBe(false);
+ });
+ });
+
+ describe("if the network has not started", () => {
+ it("should not do anything", () => {
+ stateMachine.state.networkStart = false;
+ expect(() => actionMap.downloadFinished()).not.toDispatch(blockchain, "SYNCFINISHED");
+ expect(stateMachine.state.networkStart).toBe(false);
+ });
+ });
+ });
+
+ describe("rebuildFinished", () => {
+ it('should dispatch the event "PROCESSFINISHED"', async () => {
+ localConfig.set("state.maxLastBlocks", 50);
+ const config = container.getConfig();
+ const genesisBlock = config.get("genesisBlock");
+
+ stateStorage.setLastBlock(new Block(genesisBlock));
+
+ await expect(actionMap.rebuildFinished).toDispatch(blockchain, "PROCESSFINISHED");
+ });
+
+ it('should dispatch the event "FAILURE" when some called method threw an exception', async () => {
+ jest.spyOn(blockchain.database, "commitQueuedQueries").mockImplementationOnce(() => {
+ throw new Error("oops");
+ });
+ await expect(actionMap.rebuildFinished).toDispatch(blockchain, "FAILURE");
+ });
+ });
+
+ describe("downloadPaused", () => {
+ it('should log the info message "Blockchain download paused"', () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+ actionMap.downloadPaused();
+ expect(loggerInfo).lastCalledWith("Blockchain download paused :clock1030:");
+ });
+ });
+
+ describe("syncingComplete", () => {
+ it('should dispatch the event "SYNCFINISHED"', () => {
+ expect(() => actionMap.syncingComplete()).toDispatch(blockchain, "SYNCFINISHED");
+ });
+ });
+
+ describe("rebuildingComplete", () => {
+ it('should dispatch the event "REBUILDCOMPLETE"', () => {
+ expect(() => actionMap.rebuildingComplete()).toDispatch(blockchain, "REBUILDCOMPLETE");
+ });
+ });
+
+ describe("stopped", () => {
+ it('should log the info message "The blockchain has been stopped"', () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+ actionMap.stopped();
+ expect(loggerInfo).lastCalledWith("The blockchain has been stopped :guitar:");
+ });
+ });
+
+ describe("exitApp", () => {
+ it("should call container forceExit with error message", () => {
+ const forceExit = jest.spyOn(container, "forceExit").mockImplementationOnce(() => null);
+ actionMap.exitApp();
+ expect(forceExit).lastCalledWith("Failed to startup blockchain. Exiting Ark Core! :rotating_light:");
+ });
+ });
+
+ describe("init", () => {
+ let databaseMocks: any = {};
+ let loggerInfo;
+ let loggerError;
+ let loggerWarn;
+
+ beforeAll(() => {
+ const logger = container.resolvePlugin("logger");
+ loggerInfo = jest.spyOn(logger, "info");
+ loggerError = jest.spyOn(logger, "error");
+ loggerWarn = jest.spyOn(logger, "warn");
+ });
+
+ beforeEach(() => {
+ databaseMocks = {
+ getLastBlock: jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue({
+ data: {
+ height: 1,
+ timestamp: slots.getTime(),
+ },
+ }),
+ saveBlock: jest.spyOn(blockchain.database, "saveBlock").mockReturnValue(true),
+ verifyBlockchain: jest.spyOn(blockchain.database, "verifyBlockchain").mockReturnValue({
+ valid: true,
+ }),
+ deleteRound: jest.spyOn(blockchain.database, "deleteRound").mockReturnValue(true),
+ buildWallets: jest.spyOn(blockchain.database, "buildWallets").mockReturnValue(true),
+ saveWallets: jest.spyOn(blockchain.database, "saveWallets").mockReturnValue(true),
+ applyRound: jest.spyOn(blockchain.database, "applyRound").mockReturnValue(true),
+ getActiveDelegates: jest.spyOn(blockchain.database, "getActiveDelegates").mockReturnValue(true),
+ };
+ });
+
+ afterEach(() => jest.resetAllMocks());
+ afterAll(() => {
+ jest.restoreAllMocks();
+
+ process.env.NODE_ENV = "TEST";
+ });
+
+ it("should get genesis block from config if there is no last block in database", async () => {
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue(null);
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(databaseMocks.saveBlock).toHaveBeenCalled();
+ });
+
+ it("should dispatch FAILURE if there is no last block in database and genesis block payload hash != configured nethash", async () => {
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue(null);
+ const config = container.getConfig();
+ const genesisBlock = config.get("genesisBlock");
+ const mockConfigGet = jest
+ .spyOn(config, "get")
+ .mockImplementation(key => (key === "genesisBlock" ? genesisBlock : ""));
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "FAILURE");
+
+ mockConfigGet.mockRestore();
+ });
+
+ it("should verify database integrity if database recovery was not successful (!restoredDatabaseIntegrity)", async () => {
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(loggerInfo).nthCalledWith(1, "Verifying database integrity :hourglass_flowing_sand:");
+ expect(loggerInfo).nthCalledWith(2, "Verified database integrity :smile_cat:");
+ });
+
+ it("should dispatch ROLLBACK if database recovery was not successful and verifyBlockchain failed", async () => {
+ jest.spyOn(blockchain.database, "verifyBlockchain").mockReturnValue({
+ valid: false,
+ });
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "ROLLBACK");
+ expect(loggerError).nthCalledWith(1, "FATAL: The database is corrupted :fire:");
+ });
+
+ it("should skip database integrity check if database recovery was successful (restoredDatabaseIntegrity)", async () => {
+ blockchain.database.restoredDatabaseIntegrity = true;
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(loggerInfo).nthCalledWith(
+ 1,
+ "Skipping database integrity check after successful database recovery :smile_cat:",
+ );
+ });
+
+ it("should dispatch STARTED if networkStart is enabled", async () => {
+ stateStorage.networkStart = true;
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(databaseMocks.buildWallets).toHaveBeenCalledWith(1);
+ expect(databaseMocks.saveWallets).toHaveBeenCalledWith(true);
+ expect(databaseMocks.applyRound).toHaveBeenCalledWith(1);
+
+ stateStorage.networkStart = false; // reset to default value
+ });
+
+ it('should dispatch STARTED if NODE_ENV === "test"', async () => {
+ process.env.NODE_ENV = "test";
+ const logger = container.resolvePlugin("logger");
+ const loggerVerbose = jest.spyOn(logger, "verbose");
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(databaseMocks.buildWallets).toHaveBeenCalledWith(1);
+ expect(loggerVerbose).toHaveBeenCalledWith(
+ "TEST SUITE DETECTED! SYNCING WALLETS AND STARTING IMMEDIATELY. :bangbang:",
+ );
+ });
+
+ it("should dispatch REBUILD if stateStorage.fastRebuild", async () => {
+ process.env.NODE_ENV = "";
+
+ // mock getLastBlock() timestamp and fastRebuild config to trigger stateStorage.fastRebuild = true
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue({
+ data: {
+ height: 1,
+ timestamp: 0,
+ },
+ });
+ const mockConfigGet = jest
+ .spyOn(localConfig, "get")
+ .mockImplementation(key => (key === "fastRebuild" ? true : ""));
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "REBUILD");
+
+ mockConfigGet.mockRestore();
+ });
+
+ it("should rollbackCurrentRound and dispatch STARTED if couldnt get activeDelegates", async () => {
+ process.env.NODE_ENV = "";
+ jest.spyOn(blockchain.database, "getActiveDelegates").mockReturnValue(undefined);
+ const spyRollbackCurrentRound = jest.spyOn(blockchain, "rollbackCurrentRound").mockReturnThis();
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(spyRollbackCurrentRound).toHaveBeenCalled();
+ });
+
+ it("should rebuild wallets table and dispatch STARTED if database.buildWallets() failed", async () => {
+ process.env.NODE_ENV = "";
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue({
+ data: {
+ height: 2,
+ timestamp: slots.getTime(),
+ },
+ });
+ jest.spyOn(blockchain.database, "buildWallets").mockReturnValue(false);
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(loggerWarn).toHaveBeenCalledWith(
+ "Rebuilding wallets table because of some inconsistencies. Most likely due to an unfortunate shutdown. :hammer:",
+ );
+ expect(databaseMocks.saveWallets).toHaveBeenCalledWith(true);
+ });
+
+ it("should clean round data if new round starts at block.height + 1 (and dispatch STARTED)", async () => {
+ process.env.NODE_ENV = "";
+ const spyIsNewRound = jest.spyOn(roundCalculator, "isNewRound").mockReturnValue(true);
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "STARTED");
+ expect(databaseMocks.deleteRound).toHaveBeenCalled();
+ expect(loggerInfo).toHaveBeenCalledWith(
+ "New round 1 detected. Cleaning calculated data before restarting!",
+ );
+
+ spyIsNewRound.mockRestore();
+ });
+
+ it("should log error and dispatch FAILURE if an exception was thrown", async () => {
+ jest.spyOn(blockchain.database, "getLastBlock").mockImplementation(() => {
+ throw new Error("oops");
+ });
+
+ await expect(() => actionMap.init()).toDispatch(blockchain, "FAILURE");
+ expect(loggerError.mock.calls[0][0]).toContain("Error: oops");
+ });
+ });
+
+ describe("rebuildBlocks", () => {
+ let genesisBlock;
+
+ beforeAll(() => {
+ const config = container.getConfig();
+ genesisBlock = config.get("genesisBlock");
+ });
+
+ it("should dispatch NOBLOCK if no new blocks were downloaded from peer", async () => {
+ stateStorage.lastDownloadedBlock = new Block(genesisBlock);
+
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([]);
+ await expect(() => actionMap.rebuildBlocks()).toDispatch(blockchain, "NOBLOCK");
+ expect(loggerInfo).toHaveBeenCalledWith("No new blocks found on this peer");
+ });
+
+ it("should dispatch DOWNLOADED if new blocks were successfully downloaded from peer", async () => {
+ stateStorage.lastDownloadedBlock = new Block(genesisBlock);
+
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([
+ {
+ numberOfTransactions: 2,
+ previousBlock: genesisBlock.id,
+ },
+ ]);
+ await expect(() => actionMap.rebuildBlocks()).toDispatch(blockchain, "DOWNLOADED");
+ expect(loggerInfo).toHaveBeenCalledWith(
+ "Downloaded 1 new block accounting for a total of 2 transactions",
+ );
+ });
+
+ it("should dispatch NOBLOCK if new blocks were downloaded from peer but didnt match last known block", async () => {
+ stateStorage.lastDownloadedBlock = new Block(genesisBlock);
+
+ const logger = container.resolvePlugin("logger");
+ const loggerWarn = jest.spyOn(logger, "warn");
+
+ const downloadedBlock = {
+ numberOfTransactions: 2,
+ previousBlock: "123456",
+ };
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([downloadedBlock]);
+ await expect(() => actionMap.rebuildBlocks()).toDispatch(blockchain, "NOBLOCK");
+ expect(loggerWarn).toHaveBeenCalledWith(
+ `Downloaded block not accepted: ${JSON.stringify(downloadedBlock)}`,
+ );
+ });
+ });
+
+ describe("downloadBlocks", () => {
+ let genesisBlock;
+ let loggerInfo;
+ let loggerWarn;
+
+ beforeAll(() => {
+ const config = container.getConfig();
+ genesisBlock = config.get("genesisBlock");
+
+ const logger = container.resolvePlugin("logger");
+ loggerInfo = jest.spyOn(logger, "info");
+ loggerWarn = jest.spyOn(logger, "warn");
+ });
+
+ beforeEach(() => {
+ stateStorage.lastDownloadedBlock = new Block(genesisBlock);
+ });
+
+ afterEach(() => jest.resetAllMocks());
+
+ it("should just return if blockchain isStopped", async () => {
+ blockchain.isStopped = true;
+ expect(await actionMap.downloadBlocks()).toBe(undefined);
+
+ blockchain.isStopped = false; // reset to original value
+ });
+
+ it("should dispatch DOWNLOADED if new blocks downloaded are chained", async () => {
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([
+ {
+ numberOfTransactions: 2,
+ previousBlock: genesisBlock.id,
+ height: 2,
+ timestamp: genesisBlock.timestamp + 115,
+ },
+ ]);
+ const enQueueBlocks = jest.spyOn(blockchain, "enqueueBlocks").mockReturnValue(true);
+
+ await expect(() => actionMap.downloadBlocks()).toDispatch(blockchain, "DOWNLOADED");
+ expect(loggerInfo).toHaveBeenCalledWith(
+ "Downloaded 1 new block accounting for a total of 2 transactions",
+ );
+ expect(enQueueBlocks).toHaveBeenCalled();
+
+ enQueueBlocks.mockRestore();
+ });
+
+ it("should dispatch NOBLOCK if new blocks downloaded are not chained", async () => {
+ const downloadedBlock = {
+ numberOfTransactions: 2,
+ previousBlock: genesisBlock.id,
+ height: 3,
+ timestamp: genesisBlock.timestamp + 115,
+ };
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([downloadedBlock]);
+ await expect(() => actionMap.downloadBlocks()).toDispatch(blockchain, "NOBLOCK");
+ expect(loggerWarn).toHaveBeenCalledWith(
+ `Downloaded block not accepted: ${JSON.stringify(downloadedBlock)}`,
+ );
+ });
+
+ it("should dispatch NOBLOCK if new blocks downloaded are empty", async () => {
+ jest.spyOn(blockchain.p2p, "downloadBlocks").mockReturnValue([]);
+ await expect(() => actionMap.downloadBlocks()).toDispatch(blockchain, "NOBLOCK");
+ expect(loggerInfo).toHaveBeenCalledWith("No new block found on this peer");
+ });
+ });
+
+ describe("analyseFork", () => {
+ it("should log 'analysing fork' message", () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ actionMap.analyseFork();
+
+ expect(loggerInfo).toHaveBeenCalledWith("Analysing fork :mag:");
+ });
+ });
+
+ describe("startForkRecovery", () => {
+ it("should proceed to fork recovery and dispatch SUCCESS", async () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ const methodsCalled = [
+ jest.spyOn(blockchain.database, "commitQueuedQueries").mockReturnValue(true),
+ jest.spyOn(blockchain.transactionPool, "buildWallets").mockReturnValue(true),
+ jest.spyOn(blockchain.p2p, "refreshPeersAfterFork").mockReturnValue(true),
+ jest.spyOn(blockchain, "clearAndStopQueue"),
+ jest.spyOn(blockchain, "removeBlocks").mockReturnValue(true),
+ ];
+ await expect(() => actionMap.startForkRecovery()).toDispatch(blockchain, "SUCCESS");
+
+ expect(loggerInfo).toHaveBeenCalledWith("Starting fork recovery :fork_and_knife:");
+ methodsCalled.forEach(method => {
+ expect(method).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe("rollbackDatabase", () => {
+ afterEach(() => jest.restoreAllMocks());
+
+ it("should try to remove X blocks based on databaseRollback config until database.verifyBlockchain() passes - and dispatch SUCCESS", async () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ jest.spyOn(localConfig, "get").mockReturnValue({
+ maxBlockRewind: 14,
+ steps: 3,
+ });
+ const removeTopBlocks = jest.spyOn(blockchain, "removeTopBlocks").mockReturnValue(true);
+ jest.spyOn(blockchain.database, "verifyBlockchain")
+ .mockReturnValue({ valid: true }) // default
+ .mockReturnValueOnce({ valid: false }) // first call
+ .mockReturnValueOnce({ valid: false }); // 2nd call
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue({
+ data: {
+ height: 1,
+ },
+ });
+
+ await expect(() => actionMap.rollbackDatabase()).toDispatch(blockchain, "SUCCESS");
+
+ expect(loggerInfo).toHaveBeenCalledWith(
+ "Database integrity verified again after rollback to height 1 :green_heart:",
+ );
+ expect(removeTopBlocks).toHaveBeenCalledTimes(3); // because the 3rd time verifyBlockchain returned true
+ });
+
+ it(`should try to remove X blocks based on databaseRollback config until database.verifyBlockchain() passes
+ and dispatch FAILURE as verifyBlockchain never passed`, async () => {
+ const logger = container.resolvePlugin("logger");
+ const loggerError = jest.spyOn(logger, "error");
+
+ jest.spyOn(localConfig, "get").mockReturnValue({
+ maxBlockRewind: 14,
+ steps: 3,
+ });
+ const removeTopBlocks = jest.spyOn(blockchain, "removeTopBlocks").mockReturnValue(true);
+ jest.spyOn(blockchain.database, "verifyBlockchain").mockReturnValue({ valid: false });
+ jest.spyOn(blockchain.database, "getLastBlock").mockReturnValue({
+ data: {
+ height: 1,
+ },
+ });
+
+ await expect(() => actionMap.rollbackDatabase()).toDispatch(blockchain, "FAILURE");
+
+ expect(loggerError).toHaveBeenCalledWith(
+ "FATAL: Failed to restore database integrity :skull: :skull: :skull:",
+ );
+ expect(removeTopBlocks).toHaveBeenCalledTimes(5); // because after 5 times we get past maxBlockRewind
+ });
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/state-storage.test.js b/packages/core-blockchain/__tests__/state-storage.test.js
deleted file mode 100644
index 8ea7aa96b9..0000000000
--- a/packages/core-blockchain/__tests__/state-storage.test.js
+++ /dev/null
@@ -1,326 +0,0 @@
-const { Block } = require('@arkecosystem/crypto').models
-const blocks1to100 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.2-100')
-const blocks101to155 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.101-155')
-
-const state = require('../lib/state-storage')
-const app = require('./__support__/setup')
-
-const blocks = blocks1to100
- .concat(blocks101to155)
- .map(block => new Block(block))
-
-beforeAll(async () => {
- await app.setUp()
-})
-
-afterAll(async () => {
- await app.tearDown()
-})
-
-beforeEach(() => {
- state.reset()
-})
-
-describe('State Storage', () => {
- it('should be an object', () => {
- expect(state).toBeObject()
- })
-
- describe('getLastBlock', () => {
- it('should be a function', () => {
- expect(state.getLastBlock).toBeFunction()
- })
-
- it('should return null when no last block', () => {
- expect(state.getLastBlock()).toBeNull()
- })
-
- it('should return the last block', () => {
- state.setLastBlock(blocks[0])
- state.setLastBlock(blocks[1])
-
- expect(state.getLastBlock()).toBe(blocks[1])
- })
- })
-
- describe('setLastBlock', () => {
- it('should be a function', () => {
- expect(state.setLastBlock).toBeFunction()
- })
-
- it('should set the last block', () => {
- state.setLastBlock(blocks[0])
- expect(state.getLastBlock()).toBe(blocks[0])
- })
-
- it('should not exceed the max last blocks', () => {
- for (let i = 0; i < 100; i++) {
- // 100 is default
- state.setLastBlock(blocks[i])
- }
-
- expect(state.getLastBlocks()).toHaveLength(100)
- expect(state.getLastBlock()).toBe(blocks[99])
- expect(state.getLastBlocks().slice(-1)[0]).toBe(blocks[0])
-
- // Push one more to remove the first last block.
- state.setLastBlock(blocks[100])
-
- expect(state.getLastBlocks()).toHaveLength(100)
- expect(state.getLastBlock()).toBe(blocks[100])
- expect(state.getLastBlocks().slice(-1)[0]).toBe(blocks[1])
- })
-
- it('should remove last blocks when going to lower height', () => {
- for (let i = 0; i < 100; i++) {
- // 100 is default
- state.setLastBlock(blocks[i])
- }
-
- expect(state.getLastBlocks()).toHaveLength(100)
- expect(state.getLastBlock()).toBe(blocks[99])
-
- // Set last height - 1
- state.setLastBlock(blocks[98])
-
- expect(state.getLastBlocks()).toHaveLength(99)
- expect(state.getLastBlock()).toBe(blocks[98])
-
- // Set to first block
- state.setLastBlock(blocks[0])
- expect(state.getLastBlocks()).toHaveLength(1)
- expect(state.getLastBlock()).toBe(blocks[0])
- })
- })
-
- describe('getLastBlocks', () => {
- it('should be a function', () => {
- expect(state.getLastBlocks).toBeFunction()
- })
-
- it('should return the last blocks', () => {
- for (let i = 0; i < 5; i++) {
- state.setLastBlock(blocks[i])
- }
-
- const lastBlocks = state.getLastBlocks()
- expect(lastBlocks).toHaveLength(5)
-
- for (let i = 0; i < 5; i++) {
- expect(lastBlocks[i]).toBeInstanceOf(Block)
- expect(lastBlocks[i].data.height).toBe(6 - i) // Height started at 2
- expect(lastBlocks[i]).toBe(blocks[4 - i])
- }
- })
- })
-
- describe('getLastBlocksData', () => {
- it('should be a function', () => {
- expect(state.getLastBlocksData).toBeFunction()
- })
-
- it('should return the last blocks data', () => {
- for (let i = 0; i < 5; i++) {
- state.setLastBlock(blocks[i])
- }
-
- const lastBlocksData = state.getLastBlocksData().toArray()
- expect(lastBlocksData).toHaveLength(5)
-
- for (let i = 0; i < 5; i++) {
- expect(lastBlocksData[0]).not.toBeInstanceOf(Block)
- expect(lastBlocksData[i].height).toBe(6 - i) // Height started at 2
- expect(lastBlocksData[i]).toHaveProperty('transactions')
- delete lastBlocksData[i].transactions
- expect(lastBlocksData[i]).toEqual(blocks[4 - i].data)
- }
- })
- })
-
- describe('getLastBlockIds', () => {
- it('should be a function', () => {
- expect(state.getLastBlockIds).toBeFunction()
- })
-
- it('should return the last blocks data', () => {
- for (let i = 0; i < 5; i++) {
- state.setLastBlock(blocks[i])
- }
-
- const lastBlockIds = state.getLastBlockIds()
- expect(lastBlockIds).toHaveLength(5)
-
- for (let i = 0; i < 5; i++) {
- expect(lastBlockIds[i]).toBe(blocks[4 - i].data.id)
- }
- })
- })
-
- describe('getLastBlocksByHeight', () => {
- it('should be a function', () => {
- expect(state.getLastBlocksByHeight).toBeFunction()
- })
-
- it('should return the last blocks data', () => {
- for (let i = 0; i < 100; i++) {
- state.setLastBlock(blocks[i])
- }
-
- const lastBlocksByHeight = state.getLastBlocksByHeight(0, 101)
- expect(lastBlocksByHeight).toHaveLength(100)
- expect(lastBlocksByHeight[0].height).toBe(blocks[0].data.height)
- })
-
- it('should return one last block if no end height', () => {
- for (let i = 0; i < 100; i++) {
- state.setLastBlock(blocks[i])
- }
-
- const lastBlocksByHeight = state.getLastBlocksByHeight(50)
- expect(lastBlocksByHeight).toHaveLength(1)
- expect(lastBlocksByHeight[0].height).toBe(50)
- })
- })
-
- describe('getCommonBlocks', () => {
- it('should be a function', () => {
- expect(state.getCommonBlocks).toBeFunction()
- })
-
- it('should get common blocks', () => {
- for (let i = 0; i < 100; i++) {
- state.setLastBlock(blocks[i])
- }
-
- // Heights 90 - 100
- const ids = blocks.slice(89, 99).map(block => block.data.id)
- const commonBlocks = state.getCommonBlocks(ids)
- expect(ids).toHaveLength(10)
- expect(commonBlocks).toHaveLength(10)
-
- for (let i = 0; i < commonBlocks.length; i++) {
- expect(commonBlocks[i].height).toBe(blocks[98 - i].data.height)
- }
- })
- })
-
- describe('cacheTransactions', () => {
- it('should be a function', () => {
- expect(state.cacheTransactions).toBeFunction()
- })
-
- it('should add transaction id', () => {
- expect(state.cacheTransactions([{ id: '1' }])).toEqual({
- added: [{ id: '1' }],
- notAdded: [],
- })
- expect(state.getCachedTransactionIds()).toHaveLength(1)
- })
-
- it('should not add duplicate transaction ids', () => {
- expect(state.cacheTransactions([{ id: '1' }])).toEqual({
- added: [{ id: '1' }],
- notAdded: [],
- })
- expect(state.cacheTransactions([{ id: '1' }])).toEqual({
- added: [],
- notAdded: [{ id: '1' }],
- })
- expect(state.getCachedTransactionIds()).toHaveLength(1)
- })
-
- it('should not add more than 10000 unique transaction ids', () => {
- const transactions = []
- for (let i = 0; i < 10000; i++) {
- transactions.push({ id: i.toString() })
- }
-
- expect(state.cacheTransactions(transactions)).toEqual({
- added: transactions,
- notAdded: [],
- })
-
- expect(state.getCachedTransactionIds()).toHaveLength(10000)
- expect(state.getCachedTransactionIds()[0]).toEqual('0')
-
- expect(state.cacheTransactions([{ id: '10000' }])).toEqual({
- added: [{ id: '10000' }],
- notAdded: [],
- })
- expect(state.getCachedTransactionIds()).toHaveLength(10000)
- expect(state.getCachedTransactionIds()[0]).toEqual('1')
- })
- })
-
- describe('removeCachedTransactionIds', () => {
- it('should be a function', () => {
- expect(state.removeCachedTransactionIds).toBeFunction()
- })
-
- it('should remove cached transaction ids', () => {
- const transactions = []
- for (let i = 0; i < 10; i++) {
- transactions.push({ id: i.toString() })
- }
-
- expect(state.cacheTransactions(transactions)).toEqual({
- added: transactions,
- notAdded: [],
- })
-
- expect(state.getCachedTransactionIds()).toHaveLength(10)
- state.removeCachedTransactionIds(transactions.map(tx => tx.id))
- expect(state.getCachedTransactionIds()).toHaveLength(0)
- })
- })
-
- describe('getCachedTransactionIds', () => {
- it('should be a function', () => {
- expect(state.getCachedTransactionIds).toBeFunction()
- })
- })
-
- describe('pingBlock', () => {
- it('should be a function', () => {
- expect(state.pingBlock).toBeFunction()
- })
- })
-
- describe('pushPingBlock', () => {
- it('should be a function', () => {
- expect(state.pushPingBlock).toBeFunction()
- })
- })
-
- describe('reset', () => {
- it('should be a function', () => {
- expect(state.reset).toBeFunction()
- })
-
- it('should reset the state', () => {
- for (let i = 0; i < 100; i++) {
- state.setLastBlock(blocks[i])
- }
-
- expect(state.getLastBlocks()).toHaveLength(100)
- state.reset()
- expect(state.getLastBlocks()).toHaveLength(0)
- })
- })
-
- describe('clear', () => {
- it('should be a function', () => {
- expect(state.clear).toBeFunction()
- })
-
- it('should clear the last blocks', () => {
- for (let i = 0; i < 100; i++) {
- state.setLastBlock(blocks[i])
- }
-
- expect(state.getLastBlocks()).toHaveLength(100)
- state.clear()
- expect(state.getLastBlocks()).toHaveLength(0)
- })
- })
-})
diff --git a/packages/core-blockchain/__tests__/state-storage.test.ts b/packages/core-blockchain/__tests__/state-storage.test.ts
new file mode 100644
index 0000000000..4be0ba851e
--- /dev/null
+++ b/packages/core-blockchain/__tests__/state-storage.test.ts
@@ -0,0 +1,338 @@
+import "@arkecosystem/core-test-utils";
+import { blocks101to155 } from "@arkecosystem/core-test-utils/src/fixtures/testnet/blocks101to155";
+import { blocks2to100 } from "@arkecosystem/core-test-utils/src/fixtures/testnet/blocks2to100";
+import { models } from "@arkecosystem/crypto";
+import delay from "delay";
+import { config } from "../src/config";
+import { defaults } from "../src/defaults";
+import { setUp, tearDown } from "./__support__/setup";
+
+const { Block } = models;
+const blocks = blocks2to100.concat(blocks101to155).map(block => new Block(block));
+let app;
+let stateStorage;
+
+beforeAll(async () => {
+ app = await setUp();
+ config.init(defaults);
+
+ stateStorage = require("../src").stateStorage;
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+beforeEach(() => {
+ stateStorage.reset();
+});
+
+describe("State Storage", () => {
+ describe("getLastBlock", () => {
+ it("should return null when no last block", () => {
+ expect(stateStorage.getLastBlock()).toBeNull();
+ });
+
+ it("should return the last block", () => {
+ stateStorage.setLastBlock(blocks[0]);
+ stateStorage.setLastBlock(blocks[1]);
+
+ expect(stateStorage.getLastBlock()).toBe(blocks[1]);
+ });
+ });
+
+ describe("setLastBlock", () => {
+ it("should set the last block", () => {
+ stateStorage.setLastBlock(blocks[0]);
+ expect(stateStorage.getLastBlock()).toBe(blocks[0]);
+ });
+
+ it("should not exceed the max last blocks", () => {
+ for (let i = 0; i < 100; i++) {
+ // 100 is default
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(100);
+ expect(stateStorage.getLastBlock()).toBe(blocks[99]);
+ expect(stateStorage.getLastBlocks().slice(-1)[0]).toBe(blocks[0]);
+
+ // Push one more to remove the first last block.
+ stateStorage.setLastBlock(blocks[100]);
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(100);
+ expect(stateStorage.getLastBlock()).toBe(blocks[100]);
+ expect(stateStorage.getLastBlocks().slice(-1)[0]).toBe(blocks[1]);
+ });
+
+ it("should remove last blocks when going to lower height", () => {
+ for (let i = 0; i < 100; i++) {
+ // 100 is default
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(100);
+ expect(stateStorage.getLastBlock()).toBe(blocks[99]);
+
+ // Set last height - 1
+ stateStorage.setLastBlock(blocks[98]);
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(99);
+ expect(stateStorage.getLastBlock()).toBe(blocks[98]);
+
+ // Set to first block
+ stateStorage.setLastBlock(blocks[0]);
+ expect(stateStorage.getLastBlocks()).toHaveLength(1);
+ expect(stateStorage.getLastBlock()).toBe(blocks[0]);
+ });
+ });
+
+ describe("getLastBlocks", () => {
+ it("should return the last blocks", () => {
+ for (let i = 0; i < 5; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ const lastBlocks = stateStorage.getLastBlocks();
+ expect(lastBlocks).toHaveLength(5);
+
+ for (let i = 0; i < 5; i++) {
+ expect(lastBlocks[i]).toBeInstanceOf(Block);
+ expect(lastBlocks[i].data.height).toBe(6 - i); // Height started at 2
+ expect(lastBlocks[i]).toBe(blocks[4 - i]);
+ }
+ });
+ });
+
+ describe("getLastBlocksData", () => {
+ it("should return the last blocks data", () => {
+ for (let i = 0; i < 5; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ const lastBlocksData = stateStorage.getLastBlocksData().toArray() as models.IBlockData[];
+ expect(lastBlocksData).toHaveLength(5);
+
+ for (let i = 0; i < 5; i++) {
+ expect(lastBlocksData[0]).not.toBeInstanceOf(Block);
+ expect(lastBlocksData[i].height).toBe(6 - i); // Height started at 2
+ expect(lastBlocksData[i]).toHaveProperty("transactions");
+ delete lastBlocksData[i].transactions;
+ expect(lastBlocksData[i]).toEqual(blocks[4 - i].data);
+ }
+ });
+ });
+
+ describe("getLastBlockIds", () => {
+ it("should return the last blocks data", () => {
+ for (let i = 0; i < 5; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ const lastBlockIds = stateStorage.getLastBlockIds();
+ expect(lastBlockIds).toHaveLength(5);
+
+ for (let i = 0; i < 5; i++) {
+ expect(lastBlockIds[i]).toBe(blocks[4 - i].data.id);
+ }
+ });
+ });
+
+ describe("getLastBlocksByHeight", () => {
+ it("should return the last blocks data", () => {
+ for (let i = 0; i < 100; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ const lastBlocksByHeight = stateStorage.getLastBlocksByHeight(0, 101);
+ expect(lastBlocksByHeight).toHaveLength(100);
+ expect(lastBlocksByHeight[0].height).toBe(blocks[0].data.height);
+ });
+
+ it("should return one last block if no end height", () => {
+ for (let i = 0; i < 100; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ const lastBlocksByHeight = stateStorage.getLastBlocksByHeight(50);
+ expect(lastBlocksByHeight).toHaveLength(1);
+ expect(lastBlocksByHeight[0].height).toBe(50);
+ });
+ });
+
+ describe("getCommonBlocks", () => {
+ it("should get common blocks", () => {
+ for (let i = 0; i < 100; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ // Heights 90 - 100
+ const ids = blocks.slice(89, 99).map(block => block.data.id);
+ const commonBlocks = stateStorage.getCommonBlocks(ids);
+ expect(ids).toHaveLength(10);
+ expect(commonBlocks).toHaveLength(10);
+
+ for (let i = 0; i < commonBlocks.length; i++) {
+ expect(commonBlocks[i].height).toBe(blocks[98 - i].data.height);
+ }
+ });
+ });
+
+ describe("cacheTransactions", () => {
+ it("should add transaction id", () => {
+ expect(stateStorage.cacheTransactions([{ id: "1" } as models.ITransactionData])).toEqual({
+ added: [{ id: "1" }],
+ notAdded: [],
+ });
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(1);
+ });
+
+ it("should not add duplicate transaction ids", () => {
+ expect(stateStorage.cacheTransactions([{ id: "1" } as models.ITransactionData])).toEqual({
+ added: [{ id: "1" }],
+ notAdded: [],
+ });
+ expect(stateStorage.cacheTransactions([{ id: "1" } as models.ITransactionData])).toEqual({
+ added: [],
+ notAdded: [{ id: "1" }],
+ });
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(1);
+ });
+
+ it("should not add more than 10000 unique transaction ids", () => {
+ const transactions = [];
+ for (let i = 0; i < 10000; i++) {
+ transactions.push({ id: i.toString() });
+ }
+
+ expect(stateStorage.cacheTransactions(transactions)).toEqual({
+ added: transactions,
+ notAdded: [],
+ });
+
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(10000);
+ expect(stateStorage.getCachedTransactionIds()[0]).toEqual("0");
+
+ expect(stateStorage.cacheTransactions([{ id: "10000" } as any])).toEqual({
+ added: [{ id: "10000" }],
+ notAdded: [],
+ });
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(10000);
+ expect(stateStorage.getCachedTransactionIds()[0]).toEqual("1");
+ });
+ });
+
+ describe("removeCachedTransactionIds", () => {
+ it("should remove cached transaction ids", () => {
+ const transactions = [];
+ for (let i = 0; i < 10; i++) {
+ transactions.push({ id: i.toString() });
+ }
+
+ expect(stateStorage.cacheTransactions(transactions)).toEqual({
+ added: transactions,
+ notAdded: [],
+ });
+
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(10);
+ stateStorage.removeCachedTransactionIds(transactions.map(tx => tx.id));
+ expect(stateStorage.getCachedTransactionIds()).toHaveLength(0);
+ });
+ });
+
+ describe("reset", () => {
+ it("should reset the state", () => {
+ for (let i = 0; i < 100; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(100);
+ stateStorage.reset();
+ expect(stateStorage.getLastBlocks()).toHaveLength(0);
+ });
+ });
+
+ describe("clear", () => {
+ it("should clear the last blocks", () => {
+ for (let i = 0; i < 100; i++) {
+ stateStorage.setLastBlock(blocks[i]);
+ }
+
+ expect(stateStorage.getLastBlocks()).toHaveLength(100);
+ stateStorage.clear();
+ expect(stateStorage.getLastBlocks()).toHaveLength(0);
+ });
+ });
+
+ describe("pingBlock", () => {
+ it("should return false if there is no blockPing", () => {
+ stateStorage.blockPing = null;
+ expect(stateStorage.pingBlock(blocks2to100[5])).toBeFalse();
+ });
+
+ it("should return true if block pinged == current blockPing and should update stats", async () => {
+ const currentTime = new Date().getTime();
+ stateStorage.blockPing = {
+ count: 1,
+ first: currentTime,
+ last: currentTime,
+ block: blocks2to100[5],
+ };
+ await delay(20);
+
+ expect(stateStorage.pingBlock(blocks2to100[5])).toBeTrue();
+ expect(stateStorage.blockPing.count).toBe(2);
+ expect(stateStorage.blockPing.block).toBe(blocks2to100[5]);
+ expect(stateStorage.blockPing.last).toBeGreaterThan(currentTime);
+ expect(stateStorage.blockPing.first).toBe(currentTime);
+ });
+
+ it("should return false if block pinged != current blockPing", () => {
+ const currentTime = new Date().getTime();
+ stateStorage.blockPing = {
+ count: 1,
+ first: currentTime,
+ last: currentTime,
+ block: blocks2to100[3],
+ };
+ expect(stateStorage.pingBlock(blocks2to100[5])).toBeFalse();
+ expect(stateStorage.blockPing.count).toBe(1);
+ expect(stateStorage.blockPing.block).toBe(blocks2to100[3]);
+ expect(stateStorage.blockPing.last).toBe(currentTime);
+ expect(stateStorage.blockPing.first).toBe(currentTime);
+ });
+ });
+
+ describe("pushPingBlock", () => {
+ it("should push the block provided as blockPing", () => {
+ stateStorage.blockPing = null;
+
+ stateStorage.pushPingBlock(blocks2to100[5]);
+
+ expect(stateStorage.blockPing).toBeObject();
+ expect(stateStorage.blockPing.block).toBe(blocks2to100[5]);
+ expect(stateStorage.blockPing.count).toBe(1);
+ });
+
+ it("should log info message if there is already a blockPing", async () => {
+ stateStorage.blockPing = {
+ count: 1,
+ first: new Date().getTime(),
+ last: new Date().getTime(),
+ block: blocks2to100[3],
+ };
+
+ const logger = app.resolvePlugin("logger");
+ const loggerInfo = jest.spyOn(logger, "info");
+
+ stateStorage.pushPingBlock(blocks2to100[5]);
+
+ expect(loggerInfo).toHaveBeenCalledWith(
+ `Block ${blocks2to100[3].height.toLocaleString()} pinged blockchain 1 times`,
+ );
+ expect(stateStorage.blockPing).toBeObject();
+ expect(stateStorage.blockPing.block).toBe(blocks2to100[5]);
+ expect(stateStorage.blockPing.count).toBe(1);
+ });
+ });
+});
diff --git a/packages/core-blockchain/__tests__/utils/is-blocked-chained.test.ts b/packages/core-blockchain/__tests__/utils/is-blocked-chained.test.ts
new file mode 100644
index 0000000000..295e567452
--- /dev/null
+++ b/packages/core-blockchain/__tests__/utils/is-blocked-chained.test.ts
@@ -0,0 +1,116 @@
+import "jest-extended";
+
+import { models, slots } from "@arkecosystem/crypto";
+import { isBlockChained } from "../../src/utils";
+
+describe("isChained", () => {
+ it("should be ok", () => {
+ const previousBlock = {
+ data: {
+ id: "1",
+ timestamp: slots.getSlotTime(0),
+ height: 1,
+ previousBlock: null,
+ },
+ } as models.IBlock;
+
+ const nextBlock = {
+ data: {
+ id: "2",
+ timestamp: slots.getSlotTime(1),
+ height: 2,
+ previousBlock: "1",
+ },
+ } as models.IBlock;
+
+ expect(isBlockChained(previousBlock, nextBlock)).toBeTrue();
+ });
+
+ it("should not chain when previous block does not match", () => {
+ const previousBlock = {
+ data: {
+ id: "2",
+ timestamp: slots.getSlotTime(0),
+ height: 2,
+ previousBlock: null,
+ },
+ } as models.IBlock;
+
+ const nextBlock = {
+ data: {
+ id: "1",
+ timestamp: slots.getSlotTime(1),
+ height: 3,
+ previousBlock: "1",
+ },
+ } as models.IBlock;
+
+ expect(isBlockChained(previousBlock, nextBlock)).toBeFalse();
+ });
+
+ it("should not chain when next height is not plus 1", () => {
+ const previousBlock = {
+ data: {
+ id: "1",
+ timestamp: slots.getSlotTime(0),
+ height: 1,
+ previousBlock: null,
+ },
+ } as models.IBlock;
+
+ const nextBlock = {
+ data: {
+ id: "2",
+ timestamp: slots.getSlotTime(1),
+ height: 3,
+ previousBlock: "1",
+ },
+ } as models.IBlock;
+
+ expect(isBlockChained(previousBlock, nextBlock)).toBeFalse();
+ });
+
+ it("should not chain when same slot", () => {
+ const previousBlock = {
+ data: {
+ id: "1",
+ timestamp: slots.getSlotTime(0),
+ height: 1,
+ previousBlock: null,
+ },
+ } as models.IBlock;
+
+ const nextBlock = {
+ data: {
+ id: "2",
+ timestamp: slots.getSlotTime(0),
+ height: 3,
+ previousBlock: "1",
+ },
+ } as models.IBlock;
+
+ expect(isBlockChained(previousBlock, nextBlock)).toBeFalse();
+ });
+
+ it("should not chain when lower slot", () => {
+ const previousBlock = {
+ data: {
+ id: "1",
+ timestamp: slots.getSlotTime(1),
+ height: 1,
+ previousBlock: null,
+ },
+ } as models.IBlock;
+
+ const nextBlock = {
+ data: {
+ id: "2",
+ timestamp: slots.getSlotTime(0),
+ height: 3,
+ previousBlock: "1",
+ },
+ } as models.IBlock;
+
+ expect(isBlockChained(previousBlock, nextBlock)).toBeFalse();
+ });
+});
diff --git a/packages/core-blockchain/__tests__/utils/tick-sync-tracker.test.ts b/packages/core-blockchain/__tests__/utils/tick-sync-tracker.test.ts
new file mode 100644
index 0000000000..6c41cf973a
--- /dev/null
+++ b/packages/core-blockchain/__tests__/utils/tick-sync-tracker.test.ts
@@ -0,0 +1,63 @@
+import "jest-extended";
+
+const info = jest.fn();
+jest.mock("@arkecosystem/core-container", () => {
+ return {
+ app: {
+ resolvePlugin: name => ({
+ info,
+ getNetworkHeight: () => 120,
+ }),
+ },
+ };
+});
+
+let tickSyncTracker;
+const DateBackup = Date;
+
+describe("tickSyncTracker", () => {
+ beforeEach(() => {
+ global.Date = DateBackup;
+ tickSyncTracker = require("../../src/utils").tickSyncTracker;
+ });
+
+ it("print tracker stats when percent < 100", () => {
+ const now = new Date();
+ const nowMinus8Ms = new Date();
+ nowMinus8Ms.setMilliseconds(now.getMilliseconds() - 8);
+
+ // mocking Date to return nowMinus8Ms then now
+ global.Date = jest
+ .fn()
+ .mockImplementationOnce(() => nowMinus8Ms)
+ .mockImplementationOnce(() => now) as any;
+ global.Date.UTC = DateBackup.UTC;
+ global.Date.parse = DateBackup.parse;
+ global.Date.now = DateBackup.now;
+
+ tickSyncTracker(8, 52);
+
+ expect(info).toHaveBeenLastCalledWith("Synchronising In Progress (60 of 120 blocks - Est. 60ms)");
+ });
+
+ it("should stop tracker when percent == 100", () => {
+ tickSyncTracker(60, 60); // should set tracker = null, so when we call again tickSyncTracker it is reset
+
+ const now = new Date();
+ const nowMinus8Ms = new Date();
+ nowMinus8Ms.setMilliseconds(now.getMilliseconds() - 8);
+
+ // mocking Date to return nowMinus8Ms then now
+ global.Date = jest
+ .fn()
+ .mockImplementationOnce(() => nowMinus8Ms)
+ .mockImplementationOnce(() => now) as any;
+ global.Date.UTC = DateBackup.UTC;
+ global.Date.parse = DateBackup.parse;
+ global.Date.now = DateBackup.now;
+
+ tickSyncTracker(8, 12);
+
+ expect(info).toHaveBeenLastCalledWith("Synchronising In Progress (20 of 120 blocks - Est. 100ms)");
+ });
+});
diff --git a/packages/core-blockchain/jest.config.js b/packages/core-blockchain/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-blockchain/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-blockchain/lib/blockchain.js b/packages/core-blockchain/lib/blockchain.js
deleted file mode 100644
index 880e7916ae..0000000000
--- a/packages/core-blockchain/lib/blockchain.js
+++ /dev/null
@@ -1,742 +0,0 @@
-/* eslint max-len: "off" */
-/* eslint no-await-in-loop: "off" */
-
-const { slots } = require('@arkecosystem/crypto')
-const { Block } = require('@arkecosystem/crypto').models
-const app = require('@arkecosystem/core-container')
-
-const logger = app.resolvePlugin('logger')
-const config = app.resolvePlugin('config')
-const emitter = app.resolvePlugin('event-emitter')
-const delay = require('delay')
-const pluralize = require('pluralize')
-const stateMachine = require('./state-machine')
-const Queue = require('./queue')
-
-module.exports = class Blockchain {
- /**
- * Create a new blockchain manager instance.
- * @param {Boolean} networkStart
- * @return {void}
- */
- constructor(networkStart) {
- // flag to force a network start
- this.state.networkStart = !!networkStart
-
- if (this.state.networkStart) {
- logger.warn(
- 'Ark Core is launched in Genesis Start mode. This is usually for starting the first node on the blockchain. Unless you know what you are doing, this is likely wrong. :warning:',
- )
- logger.info('Starting Ark Core for a new world, welcome aboard :rocket:')
- }
-
- this.actions = stateMachine.actionMap(this)
-
- this.__registerQueue()
- }
-
- /**
- * Dispatch an event to transition the state machine.
- * @param {String} event
- * @return {void}
- */
- dispatch(event) {
- const nextState = stateMachine.transition(this.state.blockchain, event)
-
- if (nextState.actions.length > 0) {
- logger.debug(
- `event '${event}': ${JSON.stringify(
- this.state.blockchain.value,
- )} -> ${JSON.stringify(
- nextState.value,
- )} -> actions: [${nextState.actions.map(a => a.type).join(', ')}]`,
- )
- }
-
- this.state.blockchain = nextState
-
- nextState.actions.forEach(actionKey => {
- const action = this.actions[actionKey]
-
- if (action) {
- return setTimeout(() => action.call(this, event), 0)
- }
-
- logger.error(`No action '${actionKey}' found :interrobang:`)
- })
-
- return nextState
- }
-
- /**
- * Start the blockchain and wait for it to be ready.
- * @return {void}
- */
- async start(skipStartedCheck = false) {
- logger.info('Starting Blockchain Manager :chains:')
-
- this.dispatch('START')
-
- emitter.once('shutdown', () => {
- this.stop()
- })
-
- if (skipStartedCheck || process.env.ARK_SKIP_BLOCKCHAIN_STARTED_CHECK) {
- return true
- }
-
- // TODO: this state needs to be set after state.getLastBlock() is available if ARK_ENV=test
- while (!this.state.started && !this.isStopped) {
- await delay(1000)
- }
-
- return true
- }
-
- async stop() {
- if (!this.isStopped) {
- logger.info('Stopping Blockchain Manager :chains:')
-
- this.isStopped = true
- this.state.clearCheckLater()
-
- this.dispatch('STOP')
-
- this.queue.destroy()
- }
- }
-
- checkNetwork() {
- throw new Error('Method [checkNetwork] not implemented!')
- }
-
- /**
- * Update network status.
- * @return {void}
- */
- async updateNetworkStatus() {
- return this.p2p.updateNetworkStatus()
- }
-
- /**
- * Rebuild N blocks in the blockchain.
- * @param {Number} nblocks
- * @return {void}
- */
- rebuild(nblocks) {
- throw new Error('Method [rebuild] not implemented!')
- }
-
- /**
- * Reset the state of the blockchain.
- * @return {void}
- */
- resetState() {
- this.queue.pause()
- this.queue.clear()
-
- this.state.reset()
- }
-
- /**
- * Hand the given transactions to the transaction handler.
- * @param {Array} transactions
- * @return {void}
- */
- async postTransactions(transactions) {
- logger.info(
- `Received ${transactions.length} new ${pluralize(
- 'transaction',
- transactions.length,
- )} :moneybag:`,
- )
-
- await this.transactionPool.addTransactions(transactions)
- }
-
- /**
- * Push a block to the process queue.
- * @param {Block} block
- * @return {void}
- */
- queueBlock(block) {
- logger.info(
- `Received new block at height ${block.height.toLocaleString()} with ${pluralize(
- 'transaction',
- block.numberOfTransactions,
- true,
- )} from ${block.ip}`,
- )
-
- const currentSlot = slots.getSlotNumber();
- const blockSlot = slots.getSlotNumber(block.timestamp);
-
- if (blockSlot > currentSlot) {
- logger.info(`Block disregarded because the block takes a future slot.`)
- return;
- }
-
- if (
- this.state.started &&
- this.state.blockchain.value === 'idle' &&
- !this.state.forked
- ) {
- this.dispatch('NEWBLOCK')
-
- this.processQueue.push(block)
- this.state.lastDownloadedBlock = new Block(block)
- } else {
- logger.info(
- `Block disregarded because blockchain is ${
- this.state.forked ? 'forked' : 'not ready'
- } :exclamation:`,
- )
- }
- }
-
- /**
- * Rollback all blocks up to the previous round.
- * @return {void}
- */
- async rollbackCurrentRound() {
- const height = this.state.getLastBlock().data.height
- const maxDelegates = config.getConstants(height).activeDelegates
- const previousRound = Math.floor((height - 1) / maxDelegates)
-
- if (previousRound < 2) {
- return
- }
-
- const newHeight = previousRound * maxDelegates
- const blocksToRemove = await this.database.getBlocks(
- newHeight,
- height - newHeight - 1,
- )
- const deleteLastBlock = async () => {
- const lastBlock = this.state.getLastBlock()
- await this.database.enqueueDeleteBlock(lastBlock)
-
- const newLastBlock = new Block(blocksToRemove.pop())
-
- this.state.setLastBlock(newLastBlock)
- this.state.lastDownloadedBlock = newLastBlock
- }
-
- logger.info(
- `Removing ${pluralize(
- 'block',
- height - newHeight,
- true,
- )} to reset current round :warning:`,
- )
-
- let count = 0
- const max = this.state.getLastBlock().data.height - newHeight
-
- while (this.state.getLastBlock().data.height >= newHeight + 1) {
- const removalBlockId = this.state.getLastBlock().data.id
- const removalBlockHeight = this.state
- .getLastBlock()
- .data.height.toLocaleString()
- logger.printTracker(
- 'Removing block',
- count++,
- max,
- `ID: ${removalBlockId}, height: ${removalBlockHeight}`,
- )
-
- await deleteLastBlock()
- }
-
- // Commit delete blocks
- await this.database.commitQueuedQueries()
-
- logger.stopTracker(`${pluralize('block', max, true)} removed`, count, max)
-
- await this.database.deleteRound(previousRound + 1)
- }
-
- /**
- * Remove N number of blocks.
- * @param {Number} nblocks
- * @return {void}
- */
- async removeBlocks(nblocks) {
- const blocksToRemove = await this.database.getBlocks(
- this.state.getLastBlock().data.height - nblocks,
- nblocks - 1,
- )
-
- const revertLastBlock = async () => {
- const lastBlock = this.state.getLastBlock()
-
- // TODO: if revertBlock Failed, it might corrupt the database because one block could be left stored
- await this.database.revertBlock(lastBlock)
- this.database.enqueueDeleteBlock(lastBlock)
-
- if (this.transactionPool) {
- await this.transactionPool.addTransactions(lastBlock.transactions)
- }
-
- const newLastBlock = new Block(blocksToRemove.pop())
-
- this.state.setLastBlock(newLastBlock)
- this.state.lastDownloadedBlock = newLastBlock
- }
-
- const __removeBlocks = async numberOfBlocks => {
- if (numberOfBlocks < 1) {
- return
- }
-
- logger.info(
- `Undoing block ${this.state
- .getLastBlock()
- .data.height.toLocaleString()}`,
- )
-
- await revertLastBlock()
- await __removeBlocks(numberOfBlocks - 1)
- }
-
- const lastBlock = this.state.getLastBlock()
- if (nblocks >= lastBlock.data.height) {
- nblocks = lastBlock.data.height - 1
- }
-
- const resetHeight = lastBlock.data.height - nblocks
- logger.info(
- `Removing ${pluralize(
- 'block',
- nblocks,
- true,
- )}. Reset to height ${resetHeight.toLocaleString()}`,
- )
-
- this.queue.pause()
- this.queue.clear()
-
- this.state.lastDownloadedBlock = lastBlock
-
- await __removeBlocks(nblocks)
-
- // Commit delete blocks
- await this.database.commitQueuedQueries()
-
- this.queue.resume()
- }
-
- /**
- * Remove the top blocks from database.
- * NOTE: Only used when trying to restore database integrity.
- * @param {Number} count
- * @return {void}
- */
- async removeTopBlocks(count) {
- const blocks = await this.database.getTopBlocks(count)
-
- logger.info(
- `Removing ${pluralize(
- 'block',
- blocks.length,
- true,
- )} from height ${blocks[0].height.toLocaleString()}`,
- )
-
- for (let block of blocks) {
- block = new Block(block)
-
- this.database.enqueueDeleteRound(block.data.height)
- this.database.enqueueDeleteBlock(block)
- }
-
- await this.database.commitQueuedQueries()
- }
-
- /**
- * Hande a block during a rebuild.
- * NOTE: We should be sure this is fail safe (ie callback() is being called only ONCE)
- * @param {Block} block
- * @param {Function} callback
- * @return {Object}
- */
- async rebuildBlock(block, callback) {
- const lastBlock = this.state.getLastBlock()
-
- if (block.verification.verified) {
- if (this.__isChained(lastBlock, block)) {
- // save block on database
- this.database.enqueueSaveBlock(block)
-
- // committing to db every 20,000 blocks
- if (block.data.height % 20000 === 0) {
- await this.database.commitQueuedQueries()
- }
-
- this.state.setLastBlock(block)
-
- return callback()
- }
- if (block.data.height > lastBlock.data.height + 1) {
- this.state.lastDownloadedBlock = lastBlock
- return callback()
- }
- if (
- block.data.height < lastBlock.data.height ||
- (block.data.height === lastBlock.data.height &&
- block.data.id === lastBlock.data.id)
- ) {
- this.state.lastDownloadedBlock = lastBlock
- return callback()
- }
- this.state.lastDownloadedBlock = lastBlock
- logger.info(
- `Block ${block.data.height.toLocaleString()} disregarded because on a fork :knife_fork_plate:`,
- )
- return callback()
- }
- logger.warn(
- `Block ${block.data.height.toLocaleString()} disregarded because verification failed :scroll:`,
- )
- logger.warn(block.verification)
- return callback()
- }
-
- /**
- * Process the given block.
- * NOTE: We should be sure this is fail safe (ie callback() is being called only ONCE)
- * @param {Block} block
- * @param {Function} callback
- * @return {(Function|void)}
- */
- async processBlock(block, callback) {
- if (!block.verification.verified) {
- logger.warn(
- `Block ${block.data.height.toLocaleString()} disregarded because verification failed :scroll:`,
- )
-
- this.transactionPool.purgeSendersWithInvalidTransactions(block)
- this.state.lastDownloadedBlock = this.state.getLastBlock()
- return callback()
- }
-
- try {
- if (this.__isChained(this.state.getLastBlock(), block)) {
- await this.acceptChainedBlock(block)
- this.state.setLastBlock(block)
- } else {
- await this.manageUnchainedBlock(block)
- }
- } catch (error) {
- logger.error(`Refused new block ${JSON.stringify(block.data)}`)
- logger.debug(error.stack)
-
- this.transactionPool.purgeBlock(block)
-
- // Only fork when the block generator is an active delegate
- if (error.message !== "inactive generator") {
- this.dispatch('FORK');
- }
-
- return callback()
- }
-
- try {
- // broadcast only current block
- const blocktime = config.getConstants(block.data.height).blocktime
- if (slots.getSlotNumber() * blocktime <= block.data.timestamp) {
- this.p2p.broadcastBlock(block)
- }
- } catch (error) {
- logger.warn(
- `Can't properly broadcast block ${block.data.height.toLocaleString()}`,
- )
- logger.debug(error.stack)
- }
-
- return callback()
- }
-
- /**
- * Accept a new chained block.
- * @param {Block} block
- * @param {Object} state
- * @return {void}
- */
- async acceptChainedBlock(block) {
- await this.database.applyBlock(block)
- await this.database.saveBlock(block)
-
- // Check if we recovered from a fork
- if (
- this.state.forked &&
- this.state.forkedBlock.height === block.data.height
- ) {
- logger.info('Successfully recovered from fork :star2:')
- this.state.forked = false
- this.state.forkedBlock = null
- }
-
- if (this.transactionPool) {
- try {
- this.transactionPool.acceptChainedBlock(block)
- } catch (error) {
- logger.warn('Issue applying block to transaction pool')
- logger.debug(error.stack)
- }
- }
- }
-
- /**
- * Manage a block that is out of order.
- * @param {Block} block
- * @param {Object} state
- * @return {void}
- */
- async manageUnchainedBlock(block) {
- const lastBlock = this.state.getLastBlock()
-
- if (block.data.height > lastBlock.data.height + 1) {
- logger.debug(
- `Blockchain not ready to accept new block at height ${block.data.height.toLocaleString()}. Last block: ${lastBlock.data.height.toLocaleString()} :warning:`,
- )
- this.state.lastDownloadedBlock = lastBlock
- } else if (block.data.height < lastBlock.data.height) {
- logger.debug(
- `Block ${block.data.height.toLocaleString()} disregarded because already in blockchain :warning:`,
- )
- } else if (
- block.data.height === lastBlock.data.height &&
- block.data.id === lastBlock.data.id
- ) {
- logger.debug(
- `Block ${block.data.height.toLocaleString()} just received :chains:`,
- )
- } else if (block.data.timestamp < lastBlock.data.timestamp) {
- logger.debug(
- `Block ${block.data.height.toLocaleString()} disregarded because it has a lower timestamp than the last block :bangbang:`
- )
- } else {
- const isValid = await this.database.validateForkedBlock(block)
-
- if (isValid) {
- this.dispatch('FORK')
- } else {
- this.state.lastDownloadedBlock = lastBlock;
-
- logger.info(
- `Forked block disregarded because it is not allowed to forge. Caused by delegate: ${
- block.data.generatorPublicKey
- } :bangbang:`,
- )
- }
- }
- }
-
- /**
- * Called by forger to wake up and sync with the network.
- * It clears the checkLaterTimeout if set.
- * @param {Number} blockSize
- * @param {Boolean} forForging
- * @return {Object}
- */
- forceWakeup() {
- this.state.clearCheckLater()
- this.dispatch('WAKEUP')
- }
-
- /**
- * Get unconfirmed transactions for the specified block size.
- * @param {Number} blockSize
- * @param {Boolean} forForging
- * @return {Object}
- */
- getUnconfirmedTransactions(blockSize) {
- const transactions = this.transactionPool.getTransactionsForForging(
- blockSize,
- )
-
- return {
- transactions,
- poolSize: this.transactionPool.getPoolSize(),
- count: transactions ? transactions.length : -1,
- }
- }
-
- /**
- * Determine if the blockchain is synced.
- * @param {Block} [block=getLastBlock()] block
- * @return {Boolean}
- */
- isSynced(block) {
- if (!this.p2p.hasPeers()) {
- return true
- }
-
- block = block || this.getLastBlock()
-
- return (
- slots.getTime() - block.data.timestamp <
- 3 * config.getConstants(block.data.height).blocktime
- )
- }
-
- /**
- * Determine if the blockchain is synced after a rebuild.
- * @param {Block} block
- * @return {Boolean}
- */
- isRebuildSynced(block) {
- if (!this.p2p.hasPeers()) {
- return true
- }
-
- block = block || this.getLastBlock()
-
- const remaining = slots.getTime() - block.data.timestamp
- logger.info(`Remaining block timestamp ${remaining} :hourglass:`)
-
- // stop fast rebuild 7 days before the last network block
- return slots.getTime() - block.data.timestamp < 3600 * 24 * 7
- // return slots.getTime() - block.data.timestamp < 100 * config.getConstants(block.data.height).blocktime
- }
-
- /**
- * Get the last block of the blockchain.
- * @return {Object}
- */
- getLastBlock() {
- return this.state.getLastBlock()
- }
-
- /**
- * Get the last height of the blockchain.
- * @return {Object}
- */
- getLastHeight() {
- return this.state.getLastBlock().data.height
- }
-
- /**
- * Get the last downloaded block of the blockchain.
- * @return {Object}
- */
- getLastDownloadedBlock() {
- return this.state.lastDownloadedBlock
- }
-
- /**
- * Get the block ping.
- * @return {Object}
- */
- getBlockPing() {
- return this.state.blockPing
- }
-
- /**
- * Ping a block.
- * @return {Object}
- */
- pingBlock(incomingBlock) {
- return this.state.pingBlock(incomingBlock)
- }
-
- /**
- * Push ping block.
- * @return {Object}
- */
- pushPingBlock(block) {
- this.state.pushPingBlock(block)
- }
-
- /**
- * Get the list of events that are available.
- * @return {Array}
- */
- getEvents() {
- return [
- 'block.applied',
- 'block.forged',
- 'block.reverted',
- 'delegate.registered',
- 'delegate.resigned',
- 'forger.failed',
- 'forger.missing',
- 'forger.started',
- 'peer.added',
- 'peer.removed',
- 'round.created',
- 'state:started',
- 'transaction.applied',
- 'transaction.expired',
- 'transaction.forged',
- 'transaction.reverted',
- 'wallet.saved',
- 'wallet.created.cold',
- ]
- }
-
- /**
- * Get the state of the blockchain.
- * @return {StateStorage}
- */
- get state() {
- return stateMachine.state
- }
-
- /**
- * Get the network (p2p) interface.
- * @return {P2PInterface}
- */
- get p2p() {
- return app.resolvePlugin('p2p')
- }
-
- /**
- * Get the transaction handler.
- * @return {TransactionPool}
- */
- get transactionPool() {
- return app.resolvePlugin('transactionPool')
- }
-
- /**
- * Get the database connection.
- * @return {ConnectionInterface}
- */
- get database() {
- return app.resolvePlugin('database')
- }
-
- /**
- * Check if the given block is in order.
- * @param {Block} previousBlock
- * @param {Block} nextBlock
- * @return {Boolean}
- */
- __isChained(previousBlock, nextBlock) {
- const followsPrevious =
- nextBlock.data.previousBlock === previousBlock.data.id
- const isPlusOne = nextBlock.data.height === previousBlock.data.height + 1
-
- const previousSlot = slots.getSlotNumber(previousBlock.data.timestamp);
- const nextSlot = slots.getSlotNumber(nextBlock.data.timestamp);
- const isAfterLastSlot = previousSlot < nextSlot;
-
- return followsPrevious && isPlusOne && isAfterLastSlot
- }
-
- /**
- * Register the block queue.
- * @return {void}
- */
- __registerQueue() {
- this.queue = new Queue(this, {
- process: 'PROCESSFINISHED',
- rebuild: 'REBUILDFINISHED',
- })
-
- this.processQueue = this.queue.process
- this.rebuildQueue = this.queue.rebuild
- }
-}
diff --git a/packages/core-blockchain/lib/defaults.js b/packages/core-blockchain/lib/defaults.js
deleted file mode 100644
index 24f90de245..0000000000
--- a/packages/core-blockchain/lib/defaults.js
+++ /dev/null
@@ -1,11 +0,0 @@
-module.exports = {
- fastRebuild: false,
- databaseRollback: {
- maxBlockRewind: 10000,
- steps: 1000,
- },
- state: {
- maxLastBlocks: 100,
- maxLastTransactionIds: 10000,
- },
-}
diff --git a/packages/core-blockchain/lib/index.js b/packages/core-blockchain/lib/index.js
deleted file mode 100644
index 54d667b686..0000000000
--- a/packages/core-blockchain/lib/index.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const { asValue } = require('awilix')
-const Blockchain = require('./blockchain')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'blockchain',
- async register(container, options) {
- const blockchain = new Blockchain(options.networkStart)
-
- container.register('state', asValue(require('./state-storage')))
-
- if (!process.env.ARK_SKIP_BLOCKCHAIN) {
- await blockchain.start()
- }
-
- return blockchain
- },
- async deregister(container, options) {
- await container.resolvePlugin('blockchain').stop()
- },
-}
-
-/**
- * Access to the state.
- * @type {StateStorage}
- */
-exports.state = require('./state-storage')
diff --git a/packages/core-blockchain/lib/machines/actions/fork.js b/packages/core-blockchain/lib/machines/actions/fork.js
deleted file mode 100644
index 87d90d65f1..0000000000
--- a/packages/core-blockchain/lib/machines/actions/fork.js
+++ /dev/null
@@ -1,61 +0,0 @@
-module.exports = {
- initial: 'analysing',
- states: {
- analysing: {
- onEntry: ['analyseFork'],
- on: {
- REBUILD: 'revertBlocks',
- NOFORK: 'exit',
- },
- },
- network: {
- onEntry: ['checkNetwork'],
- /* these transitions are not used yet (TODO?)
- on: {
- SUCCESS: 'blockchain',
- FAILURE: 'reset'
- }
- */
- },
- revertBlocks: {},
- exit: {
- onEntry: ['forkRecovered'],
- },
- },
-}
-
-// const fork = {
-// initial: 'network',
-// states: {
-// network: {
-// onEntry: ['checkNetwork'],
-// on: {
-// SUCCESS: 'blockchain',
-// FAILURE: 'reset'
-// }
-// },
-// blockchain: {
-// onEntry: ['removeBlocks'],
-// on: {
-// SUCCESS: 'wallets',
-// FAILURE: 'reset'
-// }
-// },
-// wallets: {
-// onEntry: ['rebuildWallets'],
-// on: {
-// SUCCESS: 'success',
-// FAILURE: 'reset'
-// }
-// },
-// reset: {
-// onEntry: ['resetNode'],
-// on: {
-// RESET: 'success',
-// FAILURE: 'reset'
-// }
-// },
-// success: {
-// }
-// }
-// }
diff --git a/packages/core-blockchain/lib/machines/actions/rebuild-from-network.js b/packages/core-blockchain/lib/machines/actions/rebuild-from-network.js
deleted file mode 100644
index afd3749fc4..0000000000
--- a/packages/core-blockchain/lib/machines/actions/rebuild-from-network.js
+++ /dev/null
@@ -1,52 +0,0 @@
-module.exports = {
- initial: 'rebuilding',
- states: {
- rebuilding: {
- onEntry: ['checkLastDownloadedBlockSynced'],
- on: {
- SYNCED: 'waitingFinished',
- NOTSYNCED: 'rebuildBlocks',
- PAUSED: 'rebuildPaused',
- },
- },
- idle: {
- on: {
- DOWNLOADED: 'rebuildBlocks',
- },
- },
- rebuildBlocks: {
- onEntry: ['rebuildBlocks'],
- on: {
- DOWNLOADED: 'rebuilding',
- NOBLOCK: 'rebuilding',
- },
- },
- waitingFinished: {
- on: {
- REBUILDFINISHED: 'rebuildFinished',
- },
- },
- rebuildFinished: {
- onEntry: ['rebuildFinished'],
- on: {
- PROCESSFINISHED: 'processFinished',
- },
- },
- rebuildPaused: {
- onEntry: ['downloadPaused'],
- on: {
- REBUILDFINISHED: 'processFinished',
- },
- },
- processFinished: {
- onEntry: ['checkRebuildBlockSynced'],
- on: {
- SYNCED: 'end',
- NOTSYNCED: 'rebuildBlocks',
- },
- },
- end: {
- onEntry: ['rebuildingComplete'],
- },
- },
-}
diff --git a/packages/core-blockchain/lib/machines/actions/sync-with-network.js b/packages/core-blockchain/lib/machines/actions/sync-with-network.js
deleted file mode 100644
index eb81479e88..0000000000
--- a/packages/core-blockchain/lib/machines/actions/sync-with-network.js
+++ /dev/null
@@ -1,48 +0,0 @@
-module.exports = {
- initial: 'syncing',
- states: {
- syncing: {
- onEntry: ['checkLastDownloadedBlockSynced'],
- on: {
- SYNCED: 'downloadFinished',
- NOTSYNCED: 'downloadBlocks',
- PAUSED: 'downloadPaused',
- NETWORKHALTED: 'end',
- },
- },
- idle: {
- on: {
- DOWNLOADED: 'downloadBlocks',
- },
- },
- downloadBlocks: {
- onEntry: ['downloadBlocks'],
- on: {
- DOWNLOADED: 'syncing',
- NOBLOCK: 'syncing',
- },
- },
- downloadFinished: {
- onEntry: ['downloadFinished'],
- on: {
- PROCESSFINISHED: 'processFinished',
- },
- },
- downloadPaused: {
- onEntry: ['downloadPaused'],
- on: {
- PROCESSFINISHED: 'processFinished',
- },
- },
- processFinished: {
- onEntry: ['checkLastBlockSynced'],
- on: {
- SYNCED: 'end',
- NOTSYNCED: 'downloadBlocks',
- },
- },
- end: {
- onEntry: ['syncingComplete'],
- },
- },
-}
diff --git a/packages/core-blockchain/lib/machines/blockchain.js b/packages/core-blockchain/lib/machines/blockchain.js
deleted file mode 100644
index ed8a167e7a..0000000000
--- a/packages/core-blockchain/lib/machines/blockchain.js
+++ /dev/null
@@ -1,89 +0,0 @@
-const Machine = require('xstate').Machine
-const syncWithNetwork = require('./actions/sync-with-network')
-const rebuildFromNetwork = require('./actions/rebuild-from-network')
-const fork = require('./actions/fork')
-
-module.exports = Machine({
- key: 'blockchain',
- initial: 'uninitialised',
- states: {
- uninitialised: {
- on: {
- START: 'init',
- STOP: 'stopped',
- },
- },
- init: {
- onEntry: ['init'],
- on: {
- REBUILD: 'rebuild',
- NETWORKSTART: 'idle',
- STARTED: 'syncWithNetwork',
- ROLLBACK: 'rollback',
- FAILURE: 'exit',
- STOP: 'stopped',
- },
- },
- rebuild: {
- on: {
- REBUILDCOMPLETE: 'syncWithNetwork',
- FORK: 'fork',
- TEST: 'syncWithNetwork',
- STOP: 'stopped',
- },
- ...rebuildFromNetwork,
- },
- syncWithNetwork: {
- on: {
- TEST: 'idle',
- SYNCFINISHED: 'idle',
- FORK: 'fork',
- STOP: 'stopped',
- },
- ...syncWithNetwork,
- },
- idle: {
- onEntry: ['checkLater', 'blockchainReady'],
- on: {
- WAKEUP: 'syncWithNetwork',
- NEWBLOCK: 'newBlock',
- STOP: 'stopped',
- },
- },
- newBlock: {
- on: {
- PROCESSFINISHED: 'idle',
- FORK: 'fork',
- STOP: 'stopped',
- },
- },
- fork: {
- onEntry: ['startForkRecovery'],
- on: {
- SUCCESS: 'syncWithNetwork',
- FAILURE: 'exit',
- STOP: 'stopped',
- },
- ...fork,
- },
- rollback: {
- onEntry: ['rollbackDatabase'],
- on: {
- SUCCESS: 'init',
- FAILURE: 'exit',
- STOP: 'stopped',
- },
- },
- /**
- * This state should be used for stopping the blockchain on purpose, not as
- * a result of critical errors. In those cases, using the `exit` state would
- * be a better option
- */
- stopped: {
- onEntry: ['stopped'],
- },
- exit: {
- onEntry: ['exitApp'],
- },
- },
-})
diff --git a/packages/core-blockchain/lib/queue/index.js b/packages/core-blockchain/lib/queue/index.js
deleted file mode 100644
index e132cfebee..0000000000
--- a/packages/core-blockchain/lib/queue/index.js
+++ /dev/null
@@ -1,47 +0,0 @@
-const ProcessQueue = require('./process')
-const RebuildQueue = require('./rebuild')
-
-module.exports = class Queue {
- /**
- * Create an instance of the queue.
- * @param {Blockchain} blockchain
- * @param {Object} events
- * @return {void}
- */
- constructor(blockchain, events) {
- this.process = new ProcessQueue(blockchain, events.process)
- this.rebuild = new RebuildQueue(blockchain, events.rebuild)
- }
-
- /**
- * Pause all queues.
- * @return {void}
- */
- pause() {
- this.rebuild.pause()
- this.process.pause()
- }
-
- /**
- * Flush all queues.
- * @return {void}
- */
- clear() {
- this.rebuild.clear()
- this.process.clear()
- }
-
- /**
- * Resue all queues.
- * @return {void}
- */
- resume() {
- this.rebuild.resume()
- this.process.resume()
- }
-
- destroy() {
- this.rebuild.destroy()
- this.process.destroy()
- }
-}
diff --git a/packages/core-blockchain/lib/queue/interface.js b/packages/core-blockchain/lib/queue/interface.js
deleted file mode 100644
index e2731b7498..0000000000
--- a/packages/core-blockchain/lib/queue/interface.js
+++ /dev/null
@@ -1,73 +0,0 @@
-module.exports = class QueueInterface {
- /**
- * Create an instance of the process queue.
- * @param {Blockchain} blockchain
- * @param {String} event
- * @return {void}
- */
- constructor(blockchain, event) {
- this.blockchain = blockchain
- this.event = event
- }
-
- /**
- * Drain the queue.
- * @return {void}
- */
- drain() {
- this.queue.drain = () => this.blockchain.dispatch(this.event)
- }
-
- /**
- * Pause the queue.
- * @return {void}
- */
- pause() {
- return this.queue.pause()
- }
-
- /**
- * Flush the queue.
- * @return {void}
- */
- clear() {
- return this.queue.remove(() => true)
- }
-
- /**
- * Resume the queue.
- * @return {void}
- */
- resume() {
- return this.queue.resume()
- }
-
- /**
- * Remove the item from the queue.
- * @return {void}
- */
- remove(item) {
- return this.queue.remove(item)
- }
-
- /**
- * Push the item to the queue.
- * @param {Function} callback
- * @return {void}
- */
- push(callback) {
- return this.queue.push(callback)
- }
-
- /**
- * Get the length of the queue.
- * @return {void}
- */
- length() {
- return this.queue.length()
- }
-
- destroy() {
- return this.queue.kill()
- }
-}
diff --git a/packages/core-blockchain/lib/queue/process.js b/packages/core-blockchain/lib/queue/process.js
deleted file mode 100644
index b2e3fc8d13..0000000000
--- a/packages/core-blockchain/lib/queue/process.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const async = require('async')
-const logger = require('@arkecosystem/core-container').resolvePlugin('logger')
-const { Block } = require('@arkecosystem/crypto').models
-const QueueInterface = require('./interface')
-
-module.exports = class ProcessQueue extends QueueInterface {
- /**
- * Create an instance of the process queue.
- * @param {Blockchain} blockchain
- * @return {void}
- */
- constructor(blockchain, event) {
- super(blockchain, event)
-
- this.queue = async.queue((block, cb) => {
- try {
- return blockchain.processBlock(new Block(block), cb)
- } catch (error) {
- logger.error(
- `Failed to process block in ProcessQueue: ${block.height.toLocaleString()}`,
- )
- logger.error(error.stack)
- return cb()
- }
- }, 1)
-
- this.drain()
- }
-}
diff --git a/packages/core-blockchain/lib/queue/rebuild.js b/packages/core-blockchain/lib/queue/rebuild.js
deleted file mode 100644
index 5d75f81ec3..0000000000
--- a/packages/core-blockchain/lib/queue/rebuild.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const async = require('async')
-const logger = require('@arkecosystem/core-container').resolvePlugin('logger')
-const { Block } = require('@arkecosystem/crypto').models
-const QueueInterface = require('./interface')
-
-module.exports = class RebuildQueue extends QueueInterface {
- /**
- * Create an instance of the process queue.
- * @param {Blockchain} blockchain
- * @return {void}
- */
- constructor(blockchain, event) {
- super(blockchain, event)
-
- this.queue = async.queue((block, cb) => {
- if (this.queue.paused) return cb()
- try {
- return blockchain.rebuildBlock(new Block(block), cb)
- } catch (error) {
- logger.error(
- `Failed to rebuild block in RebuildQueue: ${block.height.toLocaleString()}`,
- )
- return cb()
- }
- }, 1)
-
- this.drain()
- }
-}
diff --git a/packages/core-blockchain/lib/state-machine.js b/packages/core-blockchain/lib/state-machine.js
deleted file mode 100644
index 2e7ccf5979..0000000000
--- a/packages/core-blockchain/lib/state-machine.js
+++ /dev/null
@@ -1,448 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const emitter = app.resolvePlugin('event-emitter')
-const logger = app.resolvePlugin('logger')
-
-const { slots } = require('@arkecosystem/crypto')
-const { Block } = require('@arkecosystem/crypto').models
-const { roundCalculator } = require('@arkecosystem/core-utils')
-
-const pluralize = require('pluralize')
-const tickSyncTracker = require('./utils/tick-sync-tracker')
-const blockchainMachine = require('./machines/blockchain')
-const state = require('./state-storage')
-
-/**
- * @type {StateStorage}
- */
-blockchainMachine.state = state
-
-/**
- * The blockchain actions.
- * @param {Blockchain} blockchain
- * @return {Object}
- */
-blockchainMachine.actionMap = blockchain => ({
- blockchainReady: () => {
- if (!state.started) {
- state.started = true
- emitter.emit('state:started', true)
- }
- },
-
- checkLater() {
- if (!blockchain.isStopped && !state.checkLaterTimeout) {
- state.checkLaterTimeout = setTimeout(() => {
- state.checkLaterTimeout = null
- return blockchain.dispatch('WAKEUP')
- }, 60000)
- }
- },
-
- checkLastBlockSynced() {
- return blockchain.dispatch(blockchain.isSynced() ? 'SYNCED' : 'NOTSYNCED')
- },
-
- checkRebuildBlockSynced() {
- return blockchain.dispatch(
- blockchain.isRebuildSynced() ? 'SYNCED' : 'NOTSYNCED',
- )
- },
-
- async checkLastDownloadedBlockSynced() {
- let event = 'NOTSYNCED'
- logger.debug(
- `Queued blocks (rebuild: ${blockchain.rebuildQueue.length()} process: ${blockchain.processQueue.length()})`,
- )
-
- if (
- blockchain.rebuildQueue.length() > 10000 ||
- blockchain.processQueue.length() > 10000
- ) {
- event = 'PAUSED'
- }
-
- // tried to download but no luck after 5 tries (looks like network missing blocks)
- if (state.noBlockCounter > 5) {
- // TODO: make this dynamic in 2.1
- logger.info(
- 'Tried to sync 5 times to different nodes, looks like the network is missing blocks :umbrella:',
- )
-
- state.noBlockCounter = 0
- event = 'NETWORKHALTED'
-
- if (state.p2pUpdateCounter + 1 > 3) {
- logger.info('Network keeps missing blocks. :umbrella:')
-
- const result = await blockchain.p2p.updatePeersOnMissingBlocks()
- if (result === 'rollback') {
- event = 'FORK'
- }
-
- state.p2pUpdateCounter = 0
- } else {
- state.p2pUpdateCounter++
- }
- }
-
- if (blockchain.isSynced(state.lastDownloadedBlock)) {
- state.noBlockCounter = 0
- state.p2pUpdateCounter = 0
-
- event = 'SYNCED'
- }
-
- if (state.networkStart) {
- event = 'SYNCED'
- }
-
- if (process.env.ARK_ENV === 'test') {
- event = 'TEST'
- }
-
- blockchain.dispatch(event)
- },
-
- downloadFinished() {
- logger.info('Block download finished :rocket:')
-
- if (state.networkStart) {
- // next time we will use normal behaviour
- state.networkStart = false
-
- blockchain.dispatch('SYNCFINISHED')
- } else if (blockchain.rebuildQueue.length() === 0) {
- blockchain.dispatch('PROCESSFINISHED')
- }
- },
-
- async rebuildFinished() {
- try {
- logger.info('Blockchain rebuild finished :chains:')
-
- state.rebuild = false
-
- await blockchain.database.commitQueuedQueries()
- await blockchain.rollbackCurrentRound()
- await blockchain.database.buildWallets(state.getLastBlock().data.height)
- await blockchain.database.saveWallets(true)
- await blockchain.transactionPool.buildWallets()
-
- return blockchain.dispatch('PROCESSFINISHED')
- } catch (error) {
- logger.error(error.stack)
- return blockchain.dispatch('FAILURE')
- }
- },
-
- downloadPaused: () => logger.info('Blockchain download paused :clock1030:'),
-
- syncingComplete() {
- logger.info('Blockchain 100% in sync :100:')
- blockchain.dispatch('SYNCFINISHED')
- },
-
- rebuildingComplete() {
- logger.info('Blockchain rebuild complete :unicorn_face:')
- blockchain.dispatch('REBUILDCOMPLETE')
- },
-
- stopped() {
- logger.info('The blockchain has been stopped :guitar:')
- },
-
- exitApp() {
- app.forceExit(
- 'Failed to startup blockchain. Exiting Ark Core! :rotating_light:',
- )
- },
-
- async init() {
- try {
- let block = await blockchain.database.getLastBlock()
-
- if (!block) {
- logger.warn('No block found in database :hushed:')
-
- block = new Block(config.genesisBlock)
-
- if (block.data.payloadHash !== config.network.nethash) {
- logger.error(
- 'FATAL: The genesis block payload hash is different from configured the nethash :rotating_light:',
- )
-
- return blockchain.dispatch('FAILURE')
- }
-
- await blockchain.database.saveBlock(block)
- }
-
- if (!blockchain.restoredDatabaseIntegrity) {
- logger.info('Verifying database integrity :hourglass_flowing_sand:')
-
- const blockchainAudit = await blockchain.database.verifyBlockchain()
- if (!blockchainAudit.valid) {
- logger.error('FATAL: The database is corrupted :fire:')
- logger.error(JSON.stringify(blockchainAudit.errors, null, 4))
-
- return blockchain.dispatch('ROLLBACK')
- }
-
- logger.info('Verified database integrity :smile_cat:')
- } else {
- logger.info(
- 'Skipping database integrity check after successful database recovery :smile_cat:',
- )
- }
-
- // only genesis block? special case of first round needs to be dealt with
- if (block.data.height === 1) {
- await blockchain.database.deleteRound(1)
- }
-
- /** *******************************
- * state machine data init *
- ******************************* */
- const constants = config.getConstants(block.data.height)
- state.setLastBlock(block)
- state.lastDownloadedBlock = block
-
- if (state.networkStart) {
- await blockchain.database.buildWallets(block.data.height)
- await blockchain.database.saveWallets(true)
- await blockchain.database.applyRound(block.data.height)
- await blockchain.transactionPool.buildWallets()
-
- return blockchain.dispatch('STARTED')
- }
-
- state.rebuild =
- slots.getTime() - block.data.timestamp >
- (constants.activeDelegates + 1) * constants.blocktime
- // no fast rebuild if in last week
- state.fastRebuild =
- slots.getTime() - block.data.timestamp > 3600 * 24 * 7 &&
- !!app.resolveOptions('blockchain').fastRebuild
-
- if (process.env.NODE_ENV === 'test') {
- logger.verbose(
- 'TEST SUITE DETECTED! SYNCING WALLETS AND STARTING IMMEDIATELY. :bangbang:',
- )
-
- state.setLastBlock(new Block(config.genesisBlock))
- await blockchain.database.buildWallets(block.data.height)
-
- return blockchain.dispatch('STARTED')
- }
-
- logger.info(`Fast rebuild: ${state.fastRebuild}`)
- logger.info(
- `Last block in database: ${block.data.height.toLocaleString()}`,
- )
-
- if (state.fastRebuild) {
- return blockchain.dispatch('REBUILD')
- }
-
- // removing blocks up to the last round to compute active delegate list later if needed
- const activeDelegates = await blockchain.database.getActiveDelegates(
- block.data.height,
- )
-
- if (!activeDelegates) {
- await blockchain.rollbackCurrentRound()
- }
-
- /** *******************************
- * database init *
- ******************************* */
- // SPV rebuild
- const verifiedWalletsIntegrity = await blockchain.database.buildWallets(
- block.data.height,
- )
- if (!verifiedWalletsIntegrity && block.data.height > 1) {
- logger.warn(
- 'Rebuilding wallets table because of some inconsistencies. Most likely due to an unfortunate shutdown. :hammer:',
- )
- await blockchain.database.saveWallets(true)
- }
-
- // NOTE: if the node is shutdown between round, the round has already been applied
- if (roundCalculator.isNewRound(block.data.height + 1)) {
- const { round } = roundCalculator.calculateRound(block.data.height + 1)
-
- logger.info(
- `New round ${round.toLocaleString()} detected. Cleaning calculated data before restarting!`,
- )
-
- await blockchain.database.deleteRound(round)
- }
-
- await blockchain.database.applyRound(block.data.height)
- await blockchain.transactionPool.buildWallets()
-
- return blockchain.dispatch('STARTED')
- } catch (error) {
- logger.error(error.stack)
-
- return blockchain.dispatch('FAILURE')
- }
- },
-
- async rebuildBlocks() {
- const lastBlock = state.lastDownloadedBlock || state.getLastBlock()
- const blocks = await blockchain.p2p.downloadBlocks(lastBlock.data.height)
-
- tickSyncTracker(blocks.length, lastBlock.data.height)
-
- if (!blocks || blocks.length === 0) {
- logger.info('No new blocks found on this peer')
-
- blockchain.dispatch('NOBLOCK')
- } else {
- logger.info(
- `Downloaded ${blocks.length} new ${pluralize(
- 'block',
- blocks.length,
- )} accounting for a total of ${pluralize(
- 'transaction',
- blocks.reduce((sum, b) => sum + b.numberOfTransactions, 0),
- true,
- )}`,
- )
-
- if (blocks.length && blocks[0].previousBlock === lastBlock.data.id) {
- state.lastDownloadedBlock = { data: blocks.slice(-1)[0] }
- blockchain.rebuildQueue.push(blocks)
- blockchain.dispatch('DOWNLOADED')
- } else {
- logger.warn(
- `Downloaded block not accepted: ${JSON.stringify(blocks[0])}`,
- )
- logger.warn(`Last block: ${JSON.stringify(lastBlock.data)}`)
-
- // disregard the whole block list
- blockchain.dispatch('NOBLOCK')
- }
- }
- },
-
- async downloadBlocks() {
- const lastBlock = state.lastDownloadedBlock || state.getLastBlock()
- const blocks = await blockchain.p2p.downloadBlocks(lastBlock.data.height)
-
- if (blockchain.isStopped) {
- return
- }
-
- if (!blocks || blocks.length === 0) {
- logger.info('No new block found on this peer')
-
- state.noBlockCounter++
-
- blockchain.dispatch('NOBLOCK')
- } else {
- logger.info(
- `Downloaded ${blocks.length} new ${pluralize(
- 'block',
- blocks.length,
- )} accounting for a total of ${pluralize(
- 'transaction',
- blocks.reduce((sum, b) => sum + b.numberOfTransactions, 0),
- true,
- )}`,
- )
-
- if (blocks.length && blocks[0].previousBlock === lastBlock.data.id) {
- state.noBlockCounter = 0
- state.p2pUpdateCounter = 0
- state.lastDownloadedBlock = { data: blocks.slice(-1)[0] }
-
- blockchain.processQueue.push(blocks)
-
- blockchain.dispatch('DOWNLOADED')
- } else {
- state.lastDownloadedBlock = lastBlock
-
- logger.warn(
- `Downloaded block not accepted: ${JSON.stringify(blocks[0])}`,
- )
- logger.warn(`Last block: ${JSON.stringify(lastBlock.data)}`)
-
- state.forked = true
- state.forkedBlock = blocks[0]
-
- // disregard the whole block list
- blockchain.dispatch('FORK')
- }
- }
- },
-
- async analyseFork() {
- logger.info('Analysing fork :mag:')
- },
-
- async startForkRecovery() {
- logger.info('Starting fork recovery :fork_and_knife:')
-
- await blockchain.database.commitQueuedQueries()
-
- let random = Math.floor(4 / Math.random())
-
- if (random > 102) {
- random = 102
- }
-
- await blockchain.removeBlocks(random)
-
- logger.info(`Removed ${pluralize('block', random, true)} :wastebasket:`)
-
- await blockchain.transactionPool.buildWallets()
- await blockchain.p2p.refreshPeersAfterFork()
-
- blockchain.dispatch('SUCCESS')
- },
-
- async rollbackDatabase() {
- logger.info('Trying to restore database integrity :fire_engine:')
-
- const { maxBlockRewind, steps } = app.resolveOptions(
- 'blockchain',
- ).databaseRollback
- let blockchainAudit
-
- for (let i = maxBlockRewind; i >= 0; i -= steps) {
- await blockchain.removeTopBlocks(steps)
-
- blockchainAudit = await blockchain.database.verifyBlockchain()
- if (blockchainAudit.valid) {
- break
- }
- }
-
- if (!blockchainAudit.valid) {
- // TODO: multiple attempts? rewind further? restore snapshot?
- logger.error(
- 'FATAL: Failed to restore database integrity :skull: :skull: :skull:',
- )
- logger.error(JSON.stringify(blockchainAudit.errors, null, 4))
- blockchain.dispatch('FAILURE')
- return
- }
-
- blockchain.restoredDatabaseIntegrity = true
-
- const lastBlock = await blockchain.database.getLastBlock()
- logger.info(
- `Database integrity verified again after rollback to height ${lastBlock.data.height.toLocaleString()} :green_heart:`,
- )
-
- blockchain.dispatch('SUCCESS')
- },
-})
-
-module.exports = blockchainMachine
diff --git a/packages/core-blockchain/lib/state-storage.js b/packages/core-blockchain/lib/state-storage.js
deleted file mode 100644
index 92e9ddc0e3..0000000000
--- a/packages/core-blockchain/lib/state-storage.js
+++ /dev/null
@@ -1,254 +0,0 @@
-/* eslint max-len: "off" */
-
-const app = require('@arkecosystem/core-container')
-
-const logger = app.resolvePlugin('logger')
-const immutable = require('immutable')
-const assert = require('assert')
-const blockchainMachine = require('./machines/blockchain')
-
-// Stores the last n blocks in ascending height. The amount of last blocks
-// can be configured with the option `state.maxLastBlocks`.
-let _lastBlocks = immutable.OrderedMap()
-
-// Stores the last n incoming transaction ids. The amount of transaction ids
-// can be configred with the option `state.maxLastTransactionIds`.
-let _cachedTransactionIds = immutable.OrderedSet()
-
-// Map Block instances to block data.
-const _mapToBlockData = blocks =>
- blocks.map(block => ({ ...block.data, transactions: block.transactions }))
-
-/**
- * Represents an in-memory storage for state machine data.
- */
-class StateStorage {
- constructor() {
- this.reset()
- }
-
- /**
- * Resets the state.
- * @returns {void}
- */
- reset() {
- this.blockchain = blockchainMachine.initialState
- this.lastDownloadedBlock = null
- this.blockPing = null
- this.started = false
- this.forked = false
- this.forkedBlock = null
- this.rebuild = true
- this.fastRebuild = false
- this.checkLaterTimeout = null
- this.noBlockCounter = 0
- this.p2pUpdateCounter = 0
- this.networkStart = false
-
- this.clear()
- }
-
- /**
- * Clear last blocks.
- * @returns {void}
- */
- clear() {
- _lastBlocks = _lastBlocks.clear()
- _cachedTransactionIds = _cachedTransactionIds.clear()
- }
-
- /**
- * Clear check later timeout.
- * @returns {void}
- */
- clearCheckLater() {
- if (this.checkLaterTimeout) {
- clearTimeout(this.checkLaterTimeout)
- this.checkLaterTimeout = null
- }
- }
-
- /**
- * Get the last block.
- * @returns {Block|null}
- */
- getLastBlock() {
- return _lastBlocks.last() || null
- }
-
- /**
- * Sets the last block.
- * @returns {void}
- */
- setLastBlock(block) {
- // Only keep blocks which are below the new block height (i.e. rollback)
- if (
- _lastBlocks.last() &&
- _lastBlocks.last().data.height !== block.data.height - 1
- ) {
- assert(block.data.height - 1 <= _lastBlocks.last().data.height)
- _lastBlocks = _lastBlocks.filter(b => b.data.height < block.data.height)
- }
-
- _lastBlocks = _lastBlocks.set(block.data.height, block)
-
- // Delete oldest block if size exceeds the maximum
- if (
- _lastBlocks.size >
- app.resolveOptions('blockchain').state.maxLastBlocks
- ) {
- _lastBlocks = _lastBlocks.delete(_lastBlocks.first().data.height)
- }
- }
-
- /**
- * Get the last blocks.
- * @returns {Array}
- */
- getLastBlocks() {
- return _lastBlocks
- .valueSeq()
- .reverse()
- .toArray()
- }
-
- /**
- * Get the last blocks data.
- * @returns {Seq}
- */
- getLastBlocksData() {
- return _mapToBlockData(_lastBlocks.valueSeq().reverse())
- }
-
- /**
- * Get the last block ids.
- * @returns {Array}
- */
- getLastBlockIds() {
- return _lastBlocks
- .valueSeq()
- .reverse()
- .map(b => b.data.id)
- .toArray()
- }
-
- /**
- * Get last blocks in the given height range in ascending order.
- * @param {Number} start
- * @param {Number} end
- */
- getLastBlocksByHeight(start, end) {
- end = end || start
-
- const blocks = _lastBlocks
- .valueSeq()
- .filter(block => block.data.height >= start && block.data.height <= end)
-
- return _mapToBlockData(blocks).toArray()
- }
-
- /**
- * Get common blocks for the given IDs.
- * @returns {Array}
- */
- getCommonBlocks(ids) {
- return this.getLastBlocksData()
- .filter(block => ids.includes(block.id))
- .toArray()
- }
-
- /**
- * Cache the ids of the given transactions.
- * @param {Array} transactions
- * @return Object {
- * added: array of added transactions,
- * notAdded: array of previously added transactions
- * }
- */
- cacheTransactions(transactions) {
- const notAdded = []
- const added = transactions.filter(tx => {
- if (_cachedTransactionIds.has(tx.id)) {
- notAdded.push(tx)
- return false
- }
- return true
- })
-
- _cachedTransactionIds = _cachedTransactionIds.withMutations(cache => {
- added.forEach(tx => cache.add(tx.id))
- })
-
- // Cap the Set of last transaction ids to maxLastTransactionIds
- const limit = app.resolveOptions('blockchain').state
- .maxLastTransactionIds
- if (_cachedTransactionIds.size > limit) {
- _cachedTransactionIds = _cachedTransactionIds.takeLast(limit)
- }
-
- return { added, notAdded }
- }
-
- /**
- * Remove the given transaction ids from the cache.
- * @param {Array} transactionIds
- * @returns {void}
- */
- removeCachedTransactionIds(transactionIds) {
- _cachedTransactionIds = _cachedTransactionIds.subtract(transactionIds)
- }
-
- /**
- * Get cached transaction ids.
- * @returns {Array}
- */
- getCachedTransactionIds() {
- return _cachedTransactionIds.toArray()
- }
-
- /**
- * Ping a block.
- * @param {Block} incomingBlock
- * @returns {Boolean}
- */
- pingBlock(incomingBlock) {
- if (!this.blockPing) return false
-
- if (
- this.blockPing.block.height === incomingBlock.height &&
- this.blockPing.block.id === incomingBlock.id
- ) {
- this.blockPing.count++
- this.blockPing.last = new Date().getTime()
-
- return true
- }
-
- return false
- }
-
- /**
- * Push ping block
- * @param {Block} block
- * @returns {void}
- */
- pushPingBlock(block) {
- // logging for stats about network health
- if (this.blockPing) {
- logger.info(
- `Block ${this.blockPing.block.height.toLocaleString()} pinged blockchain ${
- this.blockPing.count
- } times`,
- )
- }
-
- this.blockPing = {
- count: 1,
- first: new Date().getTime(),
- last: new Date().getTime(),
- block,
- }
- }
-}
-
-module.exports = Object.seal(new StateStorage())
diff --git a/packages/core-blockchain/lib/utils/tick-sync-tracker.js b/packages/core-blockchain/lib/utils/tick-sync-tracker.js
deleted file mode 100644
index e80ed908e8..0000000000
--- a/packages/core-blockchain/lib/utils/tick-sync-tracker.js
+++ /dev/null
@@ -1,65 +0,0 @@
-const prettyMs = require('pretty-ms')
-const app = require('@arkecosystem/core-container')
-
-const logger = app.resolvePlugin('logger')
-let tracker = null
-
-module.exports = async (blockCount, count) => {
- if (!tracker) {
- tracker = {
- start: new Date().getTime(),
- networkHeight: app.resolvePlugin('p2p').getNetworkHeight(),
- blocksInitial: +count,
- blocksDownloaded: +count,
- blocksSession: 0,
- blocksPerMillisecond: 0,
- remainingInMilliseconds: 0,
- percent: 0,
- }
- }
-
- // The total amount of downloaded blocks equals the current height
- tracker.blocksDownloaded += +blockCount
-
- // The total amount of downloaded blocks downloaded since start of the current session
- tracker.blocksSession = tracker.blocksDownloaded - tracker.blocksInitial
-
- // The number of blocks the node can download per millisecond
- const diffSinceStart = new Date().getTime() - tracker.start
- tracker.blocksPerMillisecond = tracker.blocksSession / diffSinceStart
-
- // The time left to download the missing blocks in milliseconds
- tracker.remainingInMilliseconds =
- (tracker.networkHeight - tracker.blocksDownloaded) /
- tracker.blocksPerMillisecond
- tracker.remainingInMilliseconds = Math.abs(
- Math.trunc(tracker.remainingInMilliseconds),
- )
-
- // The percentage of total blocks that has been downloaded
- tracker.percent = (tracker.blocksDownloaded * 100) / tracker.networkHeight
-
- if (
- tracker.percent < 100 &&
- Number.isFinite(tracker.remainingInMilliseconds)
- ) {
- const blocksDownloaded = tracker.blocksDownloaded.toLocaleString()
- const networkHeight = tracker.networkHeight.toLocaleString()
- const timeLeft = prettyMs(tracker.remainingInMilliseconds, {
- secDecimalDigits: 0,
- })
-
- logger.printTracker(
- 'Fast Sync',
- tracker.percent,
- 100,
- `(${blocksDownloaded} of ${networkHeight} blocks - Est. ${timeLeft})`,
- )
- }
-
- if (tracker.percent === 100) {
- tracker = null
-
- logger.stopTracker('Fast Sync', 100, 100)
- }
-}
diff --git a/packages/core-blockchain/package.json b/packages/core-blockchain/package.json
index ed37563365..ed89405356 100644
--- a/packages/core-blockchain/package.json
+++ b/packages/core-blockchain/package.json
@@ -1,44 +1,65 @@
{
- "name": "@arkecosystem/core-blockchain",
- "description": "Blockchain Manager for Ark Core",
- "version": "0.2.1",
- "contributors": [
- "François-Xavier Thoorens ",
- "Kristjan Košič ",
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/core-container": "~0.2",
- "@arkecosystem/core-utils": "~0.2",
- "@arkecosystem/crypto": "~0.2",
- "async": "^2.6.1",
- "awilix": "^4.0.1",
- "delay": "^4.1.0",
- "pretty-ms": "^4.0.0",
- "xstate": "^4.2.1",
- "immutable": "^4.0.0-rc.12",
- "pluralize": "^7.0.0"
- },
- "devDependencies": {
- "@arkecosystem/core-p2p": "~0.2",
- "@arkecosystem/core-test-utils": "~0.2",
- "axios": "^0.18.0",
- "axios-mock-adapter": "^1.15.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-blockchain",
+ "description": "Blockchain Manager for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "François-Xavier Thoorens ",
+ "Kristjan Košič ",
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index",
+ "types": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "bash ../../scripts/pre-test.sh",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/core-container": "^2.1.0",
+ "@arkecosystem/core-utils": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/lodash.get": "^4.4.4",
+ "async": "^2.6.1",
+ "awilix": "^4.0.1",
+ "delay": "^4.1.0",
+ "immutable": "^4.0.0-rc.12",
+ "lodash.get": "^4.4.2",
+ "pluralize": "^7.0.0",
+ "pretty-ms": "^4.0.0",
+ "xstate": "^4.2.2"
+ },
+ "devDependencies": {
+ "@arkecosystem/core-p2p": "^2.1.0",
+ "@arkecosystem/core-test-utils": "^2.1.0",
+ "@types/async": "^2.0.50",
+ "@types/pluralize": "^0.0.29",
+ "@types/pretty-ms": "^4.0.0",
+ "axios": "^0.18.0",
+ "axios-mock-adapter": "^1.15.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-blockchain/src/blockchain.ts b/packages/core-blockchain/src/blockchain.ts
new file mode 100644
index 0000000000..9e2dea2f8e
--- /dev/null
+++ b/packages/core-blockchain/src/blockchain.ts
@@ -0,0 +1,619 @@
+/* tslint:disable:max-line-length */
+import { app } from "@arkecosystem/core-container";
+import {
+ Blockchain as blockchain,
+ Database,
+ EventEmitter,
+ Logger,
+ P2P,
+ TransactionPool,
+} from "@arkecosystem/core-interfaces";
+import { models, slots } from "@arkecosystem/crypto";
+
+import delay from "delay";
+import pluralize from "pluralize";
+import { BlockProcessor, BlockProcessorResult } from "./processor";
+import { ProcessQueue, Queue, RebuildQueue } from "./queue";
+import { stateMachine } from "./state-machine";
+import { StateStorage } from "./state-storage";
+import { isBlockChained } from "./utils";
+
+const logger = app.resolvePlugin("logger");
+const config = app.getConfig();
+const emitter = app.resolvePlugin("event-emitter");
+const { Block } = models;
+
+export class Blockchain implements blockchain.IBlockchain {
+ /**
+ * Get the state of the blockchain.
+ * @return {IStateStorage}
+ */
+ get state(): StateStorage {
+ return stateMachine.state;
+ }
+
+ /**
+ * Get the network (p2p) interface.
+ * @return {P2PInterface}
+ */
+ get p2p() {
+ return app.resolvePlugin("p2p");
+ }
+
+ /**
+ * Get the transaction handler.
+ * @return {TransactionPool}
+ */
+ get transactionPool() {
+ return app.resolvePlugin("transactionPool");
+ }
+
+ /**
+ * Get the database connection.
+ * @return {ConnectionInterface}
+ */
+ get database() {
+ return app.resolvePlugin("database");
+ }
+
+ public isStopped: boolean;
+ public options: any;
+ public processQueue: ProcessQueue;
+ public rebuildQueue: RebuildQueue;
+ private actions: any;
+ private queue: Queue;
+ private blockProcessor: BlockProcessor;
+
+ /**
+ * Create a new blockchain manager instance.
+ * @param {Object} options
+ * @return {void}
+ */
+ constructor(options) {
+ // flag to force a network start
+ this.state.networkStart = !!options.networkStart;
+
+ if (this.state.networkStart) {
+ logger.warn(
+ "Ark Core is launched in Genesis Start mode. This is usually for starting the first node on the blockchain. Unless you know what you are doing, this is likely wrong. :warning:",
+ );
+ logger.info("Starting Ark Core for a new world, welcome aboard :rocket:");
+ }
+
+ this.actions = stateMachine.actionMap(this);
+ this.blockProcessor = new BlockProcessor(this);
+
+ this.__registerQueue();
+ }
+
+ /**
+ * Dispatch an event to transition the state machine.
+ * @param {String} event
+ * @return {void}
+ */
+ public dispatch(event) {
+ const nextState = stateMachine.transition(this.state.blockchain, event);
+
+ if (nextState.actions.length > 0) {
+ logger.debug(
+ `event '${event}': ${JSON.stringify(this.state.blockchain.value)} -> ${JSON.stringify(
+ nextState.value,
+ )} -> actions: [${nextState.actions.map(a => a.type).join(", ")}]`,
+ );
+ }
+
+ this.state.blockchain = nextState;
+
+ nextState.actions.forEach(actionKey => {
+ const action = this.actions[actionKey];
+
+ if (action) {
+ setTimeout(() => action.call(this, event), 0);
+ } else {
+ logger.error(`No action '${actionKey}' found :interrobang:`);
+ }
+ });
+
+ return nextState;
+ }
+
+ /**
+ * Start the blockchain and wait for it to be ready.
+ * @return {void}
+ */
+ public async start(skipStartedCheck = false) {
+ logger.info("Starting Blockchain Manager :chains:");
+
+ this.dispatch("START");
+
+ emitter.once("shutdown", () => {
+ this.stop();
+ });
+
+ if (skipStartedCheck || process.env.CORE_SKIP_BLOCKCHAIN_STARTED_CHECK) {
+ return true;
+ }
+
+ // TODO: this state needs to be set after state.getLastBlock() is available if CORE_ENV=test
+ while (!this.state.started && !this.isStopped) {
+ await delay(1000);
+ }
+
+ return true;
+ }
+
+ public async stop() {
+ if (!this.isStopped) {
+ logger.info("Stopping Blockchain Manager :chains:");
+
+ this.isStopped = true;
+ this.state.clearWakeUpTimeout();
+
+ this.dispatch("STOP");
+
+ this.queue.destroy();
+ }
+ }
+
+ public checkNetwork() {
+ throw new Error("Method [checkNetwork] not implemented!");
+ }
+
+ /**
+ * Set wakeup timeout to check the network for new blocks.
+ */
+ public setWakeUp() {
+ this.state.wakeUpTimeout = setTimeout(() => {
+ this.state.wakeUpTimeout = null;
+ return this.dispatch("WAKEUP");
+ }, 60000);
+ }
+
+ /**
+ * Reset the wakeup timeout.
+ */
+ public resetWakeUp() {
+ this.state.clearWakeUpTimeout();
+ this.setWakeUp();
+ }
+
+ /**
+ * Update network status.
+ * @return {void}
+ */
+ public async updateNetworkStatus() {
+ return this.p2p.updateNetworkStatus();
+ }
+
+ /**
+ * Rebuild N blocks in the blockchain.
+ * @param {Number} nblocks
+ * @return {void}
+ */
+ public rebuild(nblocks?: number) {
+ throw new Error("Method [rebuild] not implemented!");
+ }
+
+ /**
+ * Reset the state of the blockchain.
+ * @return {void}
+ */
+ public resetState() {
+ this.clearAndStopQueue();
+ this.state.reset();
+ }
+
+ /**
+ * Clear and stop the queue.
+ * @return {void}
+ */
+ public clearAndStopQueue() {
+ this.queue.pause();
+ this.queue.clear();
+ }
+
+ /**
+ * Hand the given transactions to the transaction handler.
+ * @param {Array} transactions
+ * @return {void}
+ */
+ public async postTransactions(transactions) {
+ logger.info(`Received ${transactions.length} new ${pluralize("transaction", transactions.length)} :moneybag:`);
+
+ await this.transactionPool.addTransactions(transactions);
+ }
+
+ /**
+ * Push a block to the process queue.
+ * @param {Block} block
+ * @return {void}
+ */
+ public handleIncomingBlock(block) {
+ logger.info(
+ `Received new block at height ${block.height.toLocaleString()} with ${pluralize(
+ "transaction",
+ block.numberOfTransactions,
+ true,
+ )} from ${block.ip}`,
+ );
+
+ const currentSlot = slots.getSlotNumber();
+ const receivedSlot = slots.getSlotNumber(block.timestamp);
+ if (receivedSlot > currentSlot) {
+ logger.info(`Discarded block ${block.height.toLocaleString()} because it takes a future slot.`);
+ return;
+ }
+
+ if (this.state.started && this.state.blockchain.value === "idle") {
+ this.dispatch("NEWBLOCK");
+ this.enqueueBlocks([block]);
+ } else {
+ logger.info(`Block disregarded because blockchain is not ready :exclamation:`);
+ }
+ }
+
+ /**
+ * Enqueue blocks in process queue and set last downloaded block to last item in list.
+ */
+ public enqueueBlocks(blocks: any[]) {
+ if (blocks.length === 0) {
+ return;
+ }
+
+ this.processQueue.push(blocks);
+ this.state.lastDownloadedBlock = new Block(blocks.slice(-1)[0]);
+ }
+
+ /**
+ * Rollback all blocks up to the previous round.
+ * @return {void}
+ */
+ public async rollbackCurrentRound() {
+ const height = this.state.getLastBlock().data.height;
+ const maxDelegates = config.getMilestone(height).activeDelegates;
+ const previousRound = Math.floor((height - 1) / maxDelegates);
+
+ if (previousRound < 2) {
+ return;
+ }
+
+ const newHeight = previousRound * maxDelegates;
+ // If the current chain height is H and we will be removing blocks [N, H],
+ // then blocksToRemove[] will contain blocks [N - 1, H - 1].
+ const blocksToRemove = await this.database.getBlocks(newHeight, height - newHeight);
+ const deleteLastBlock = async () => {
+ const lastBlock = this.state.getLastBlock();
+ await this.database.enqueueDeleteBlock(lastBlock);
+
+ const newLastBlock = new Block(blocksToRemove.pop());
+
+ this.state.setLastBlock(newLastBlock);
+ this.state.lastDownloadedBlock = newLastBlock;
+ };
+
+ logger.info(`Removing ${pluralize("block", height - newHeight, true)} to reset current round :warning:`);
+
+ let count = 0;
+ const max = this.state.getLastBlock().data.height - newHeight;
+
+ while (this.state.getLastBlock().data.height >= newHeight + 1) {
+ const removalBlockId = this.state.getLastBlock().data.id;
+ const removalBlockHeight = this.state.getLastBlock().data.height.toLocaleString();
+
+ logger.info(`Removing block ${count++} of ${max} - ID: ${removalBlockId}, height: ${removalBlockHeight}`);
+
+ await deleteLastBlock();
+ }
+
+ // Commit delete blocks
+ await this.database.commitQueuedQueries();
+
+ logger.info(`Removed ${count} ${pluralize("block", max, true)}`);
+
+ await this.database.deleteRound(previousRound + 1);
+ }
+
+ /**
+ * Remove N number of blocks.
+ * @param {Number} nblocks
+ * @return {void}
+ */
+ public async removeBlocks(nblocks) {
+ this.clearAndStopQueue();
+
+ // If the current chain height is H and we will be removing blocks [N, H],
+ // then blocksToRemove[] will contain blocks [N - 1, H - 1].
+ const blocksToRemove = await this.database.getBlocks(this.state.getLastBlock().data.height - nblocks, nblocks);
+
+ const revertLastBlock = async () => {
+ // tslint:disable-next-line:no-shadowed-variable
+ const lastBlock = this.state.getLastBlock();
+
+ // TODO: if revertBlock Failed, it might corrupt the database because one block could be left stored
+ await this.database.revertBlock(lastBlock);
+ this.database.enqueueDeleteBlock(lastBlock);
+
+ if (this.transactionPool) {
+ await this.transactionPool.addTransactions(lastBlock.transactions);
+ }
+
+ const newLastBlock = new Block(blocksToRemove.pop());
+
+ this.state.setLastBlock(newLastBlock);
+ this.state.lastDownloadedBlock = newLastBlock;
+ };
+
+ // tslint:disable-next-line:variable-name
+ const __removeBlocks = async numberOfBlocks => {
+ if (numberOfBlocks < 1) {
+ return;
+ }
+
+ logger.info(`Undoing block ${this.state.getLastBlock().data.height.toLocaleString()}`);
+
+ await revertLastBlock();
+ await __removeBlocks(numberOfBlocks - 1);
+ };
+
+ const lastBlock = this.state.getLastBlock();
+ if (nblocks >= lastBlock.data.height) {
+ nblocks = lastBlock.data.height - 1;
+ }
+
+ const resetHeight = lastBlock.data.height - nblocks;
+ logger.info(`Removing ${pluralize("block", nblocks, true)}. Reset to height ${resetHeight.toLocaleString()}`);
+
+ this.state.lastDownloadedBlock = lastBlock;
+
+ await __removeBlocks(nblocks);
+
+ // Commit delete blocks
+ await this.database.commitQueuedQueries();
+
+ this.queue.resume();
+ }
+
+ /**
+ * Remove the top blocks from database.
+ * NOTE: Only used when trying to restore database integrity.
+ * @param {Number} count
+ * @return {void}
+ */
+ public async removeTopBlocks(count) {
+ const blocks = await this.database.getTopBlocks(count);
+
+ logger.info(
+ `Removing ${pluralize(
+ "block",
+ blocks.length,
+ true,
+ )} from height ${(blocks[0] as any).height.toLocaleString()}`,
+ );
+
+ for (let block of blocks) {
+ block = new Block(block);
+
+ this.database.enqueueDeleteRound(block.data.height);
+ this.database.enqueueDeleteBlock(block);
+ }
+
+ await this.database.commitQueuedQueries();
+ await this.database.loadBlocksFromCurrentRound();
+ }
+
+ /**
+ * Hande a block during a rebuild.
+ * NOTE: We should be sure this is fail safe (ie callback() is being called only ONCE)
+ * @param {Block} block
+ * @param {Function} callback
+ * @return {Object}
+ */
+ public async rebuildBlock(block, callback) {
+ const lastBlock = this.state.getLastBlock();
+
+ if (block.verification.verified) {
+ if (isBlockChained(lastBlock, block)) {
+ // save block on database
+ this.database.enqueueSaveBlock(block);
+
+ // committing to db every 20,000 blocks
+ if (block.data.height % 20000 === 0) {
+ await this.database.commitQueuedQueries();
+ }
+
+ this.state.setLastBlock(block);
+
+ return callback();
+ }
+ if (block.data.height > lastBlock.data.height + 1) {
+ this.state.lastDownloadedBlock = lastBlock;
+ return callback();
+ }
+ if (
+ block.data.height < lastBlock.data.height ||
+ (block.data.height === lastBlock.data.height && block.data.id === lastBlock.data.id)
+ ) {
+ this.state.lastDownloadedBlock = lastBlock;
+ return callback();
+ }
+ this.state.lastDownloadedBlock = lastBlock;
+ logger.info(`Block ${block.data.height.toLocaleString()} disregarded because on a fork :knife_fork_plate:`);
+ return callback();
+ }
+ logger.warn(`Block ${block.data.height.toLocaleString()} disregarded because verification failed :scroll:`);
+ logger.warn(JSON.stringify(block.verification, null, 4));
+ return callback();
+ }
+
+ /**
+ * Process the given block.
+ */
+ public async processBlock(block: models.Block, callback) {
+ const result = await this.blockProcessor.process(block);
+
+ if (result === BlockProcessorResult.Accepted || result === BlockProcessorResult.DiscardedButCanBeBroadcasted) {
+ // broadcast only current block
+ const blocktime = config.getMilestone(block.data.height).blocktime;
+ if (slots.getSlotNumber() * blocktime <= block.data.timestamp) {
+ this.p2p.broadcastBlock(block);
+ }
+ }
+
+ return callback();
+ }
+
+ /**
+ * Reset the last downloaded block to last chained block.
+ */
+ public resetLastDownloadedBlock() {
+ this.state.lastDownloadedBlock = this.getLastBlock();
+ }
+
+ /**
+ * Called by forger to wake up and sync with the network.
+ * It clears the wakeUpTimeout if set.
+ */
+ public forceWakeup() {
+ this.state.clearWakeUpTimeout();
+ this.dispatch("WAKEUP");
+ }
+
+ /**
+ * Fork the chain at the given block.
+ */
+ public forkBlock(block: models.Block): void {
+ this.state.forkedBlock = block;
+
+ this.dispatch("FORK");
+ }
+
+ /**
+ * Get unconfirmed transactions for the specified block size.
+ * @param {Number} blockSize
+ * @param {Boolean} forForging
+ * @return {Object}
+ */
+ public getUnconfirmedTransactions(blockSize) {
+ const transactions = this.transactionPool.getTransactionsForForging(blockSize);
+
+ return {
+ transactions,
+ poolSize: this.transactionPool.getPoolSize(),
+ count: transactions ? transactions.length : -1,
+ };
+ }
+
+ /**
+ * Determine if the blockchain is synced.
+ */
+ public isSynced(block?: models.IBlock): boolean {
+ if (!this.p2p.hasPeers()) {
+ return true;
+ }
+
+ block = block || this.getLastBlock();
+
+ return slots.getTime() - block.data.timestamp < 3 * config.getMilestone(block.data.height).blocktime;
+ }
+
+ /**
+ * Determine if the blockchain is synced after a rebuild.
+ */
+ public isRebuildSynced(block?: models.IBlock): boolean {
+ if (!this.p2p.hasPeers()) {
+ return true;
+ }
+
+ block = block || this.getLastBlock();
+
+ const remaining = slots.getTime() - block.data.timestamp;
+ logger.info(`Remaining block timestamp ${remaining} :hourglass:`);
+
+ // stop fast rebuild 7 days before the last network block
+ return slots.getTime() - block.data.timestamp < 3600 * 24 * 7;
+ // return slots.getTime() - block.data.timestamp < 100 * config.getMilestone(block.data.height).blocktime
+ }
+
+ /**
+ * Get the last block of the blockchain.
+ */
+ public getLastBlock(): models.Block {
+ return this.state.getLastBlock();
+ }
+
+ /**
+ * Get the last height of the blockchain.
+ */
+ public getLastHeight(): number {
+ return this.state.getLastBlock().data.height;
+ }
+
+ /**
+ * Get the last downloaded block of the blockchain.
+ */
+ public getLastDownloadedBlock(): { data: models.IBlockData } {
+ return this.state.lastDownloadedBlock;
+ }
+
+ /**
+ * Get the block ping.
+ */
+ public getBlockPing() {
+ return this.state.blockPing;
+ }
+
+ /**
+ * Ping a block.
+ */
+ public pingBlock(incomingBlock: models.IBlockData): boolean {
+ return this.state.pingBlock(incomingBlock);
+ }
+
+ /**
+ * Push ping block.
+ */
+ public pushPingBlock(block: models.IBlockData) {
+ this.state.pushPingBlock(block);
+ }
+
+ /**
+ * Get the list of events that are available.
+ * @return {Array}
+ */
+ public getEvents() {
+ return [
+ "block.applied",
+ "block.forged",
+ "block.reverted",
+ "delegate.registered",
+ "delegate.resigned",
+ "forger.failed",
+ "forger.missing",
+ "forger.started",
+ "peer.added",
+ "peer.removed",
+ "round.created",
+ "state:started",
+ "transaction.applied",
+ "transaction.expired",
+ "transaction.forged",
+ "transaction.reverted",
+ "wallet.saved",
+ "wallet.created.cold",
+ ];
+ }
+
+ /**
+ * Register the block queue.
+ * @return {void}
+ */
+ public __registerQueue() {
+ this.queue = new Queue(this, {
+ process: "PROCESSFINISHED",
+ rebuild: "REBUILDFINISHED",
+ });
+
+ this.processQueue = this.queue.process;
+ this.rebuildQueue = this.queue.rebuild;
+ }
+}
diff --git a/packages/core-blockchain/src/config.ts b/packages/core-blockchain/src/config.ts
new file mode 100644
index 0000000000..1b95990a82
--- /dev/null
+++ b/packages/core-blockchain/src/config.ts
@@ -0,0 +1,2 @@
+import { Shared } from "@arkecosystem/core-interfaces";
+export const config = new Shared.Config();
diff --git a/packages/core-blockchain/src/defaults.ts b/packages/core-blockchain/src/defaults.ts
new file mode 100644
index 0000000000..2838157dd8
--- /dev/null
+++ b/packages/core-blockchain/src/defaults.ts
@@ -0,0 +1,11 @@
+export const defaults = {
+ fastRebuild: false,
+ databaseRollback: {
+ maxBlockRewind: 10000,
+ steps: 1000,
+ },
+ state: {
+ maxLastBlocks: 100,
+ maxLastTransactionIds: 10000,
+ },
+};
diff --git a/packages/core-blockchain/src/index.ts b/packages/core-blockchain/src/index.ts
new file mode 100644
index 0000000000..727dae7ba7
--- /dev/null
+++ b/packages/core-blockchain/src/index.ts
@@ -0,0 +1,5 @@
+export * from "./defaults";
+export * from "./config";
+export * from "./blockchain";
+export * from "./state-storage";
+export * from "./plugin";
diff --git a/packages/core-blockchain/src/machines/actions/fork.ts b/packages/core-blockchain/src/machines/actions/fork.ts
new file mode 100644
index 0000000000..0b205c99e4
--- /dev/null
+++ b/packages/core-blockchain/src/machines/actions/fork.ts
@@ -0,0 +1,61 @@
+export const fork = {
+ initial: "analysing",
+ states: {
+ analysing: {
+ onEntry: ["analyseFork"],
+ on: {
+ REBUILD: "revertBlocks",
+ NOFORK: "exit",
+ },
+ },
+ network: {
+ onEntry: ["checkNetwork"],
+ /* these transitions are not used yet (TODO?)
+ on: {
+ SUCCESS: 'blockchain',
+ FAILURE: 'reset'
+ }
+ */
+ },
+ revertBlocks: {},
+ exit: {
+ onEntry: ["forkRecovered"],
+ },
+ },
+};
+
+// const fork = {
+// initial: 'network',
+// states: {
+// network: {
+// onEntry: ['checkNetwork'],
+// on: {
+// SUCCESS: 'blockchain',
+// FAILURE: 'reset'
+// }
+// },
+// blockchain: {
+// onEntry: ['removeBlocks'],
+// on: {
+// SUCCESS: 'wallets',
+// FAILURE: 'reset'
+// }
+// },
+// wallets: {
+// onEntry: ['rebuildWallets'],
+// on: {
+// SUCCESS: 'success',
+// FAILURE: 'reset'
+// }
+// },
+// reset: {
+// onEntry: ['resetNode'],
+// on: {
+// RESET: 'success',
+// FAILURE: 'reset'
+// }
+// },
+// success: {
+// }
+// }
+// }
diff --git a/packages/core-blockchain/src/machines/actions/rebuild-from-network.ts b/packages/core-blockchain/src/machines/actions/rebuild-from-network.ts
new file mode 100644
index 0000000000..70ca9e2a7e
--- /dev/null
+++ b/packages/core-blockchain/src/machines/actions/rebuild-from-network.ts
@@ -0,0 +1,52 @@
+export const rebuildFromNetwork = {
+ initial: "rebuilding",
+ states: {
+ rebuilding: {
+ onEntry: ["checkLastDownloadedBlockSynced"],
+ on: {
+ SYNCED: "waitingFinished",
+ NOTSYNCED: "rebuildBlocks",
+ PAUSED: "rebuildPaused",
+ },
+ },
+ idle: {
+ on: {
+ DOWNLOADED: "rebuildBlocks",
+ },
+ },
+ rebuildBlocks: {
+ onEntry: ["rebuildBlocks"],
+ on: {
+ DOWNLOADED: "rebuilding",
+ NOBLOCK: "rebuilding",
+ },
+ },
+ waitingFinished: {
+ on: {
+ REBUILDFINISHED: "rebuildFinished",
+ },
+ },
+ rebuildFinished: {
+ onEntry: ["rebuildFinished"],
+ on: {
+ PROCESSFINISHED: "processFinished",
+ },
+ },
+ rebuildPaused: {
+ onEntry: ["downloadPaused"],
+ on: {
+ REBUILDFINISHED: "processFinished",
+ },
+ },
+ processFinished: {
+ onEntry: ["checkRebuildBlockSynced"],
+ on: {
+ SYNCED: "end",
+ NOTSYNCED: "rebuildBlocks",
+ },
+ },
+ end: {
+ onEntry: ["rebuildingComplete"],
+ },
+ },
+};
diff --git a/packages/core-blockchain/src/machines/actions/sync-with-network.ts b/packages/core-blockchain/src/machines/actions/sync-with-network.ts
new file mode 100644
index 0000000000..f8683ad344
--- /dev/null
+++ b/packages/core-blockchain/src/machines/actions/sync-with-network.ts
@@ -0,0 +1,48 @@
+export const syncWithNetwork = {
+ initial: "syncing",
+ states: {
+ syncing: {
+ onEntry: ["checkLastDownloadedBlockSynced"],
+ on: {
+ SYNCED: "downloadFinished",
+ NOTSYNCED: "downloadBlocks",
+ PAUSED: "downloadPaused",
+ NETWORKHALTED: "end",
+ },
+ },
+ idle: {
+ on: {
+ DOWNLOADED: "downloadBlocks",
+ },
+ },
+ downloadBlocks: {
+ onEntry: ["downloadBlocks"],
+ on: {
+ DOWNLOADED: "syncing",
+ NOBLOCK: "syncing",
+ },
+ },
+ downloadFinished: {
+ onEntry: ["downloadFinished"],
+ on: {
+ PROCESSFINISHED: "processFinished",
+ },
+ },
+ downloadPaused: {
+ onEntry: ["downloadPaused"],
+ on: {
+ PROCESSFINISHED: "processFinished",
+ },
+ },
+ processFinished: {
+ onEntry: ["checkLastBlockSynced"],
+ on: {
+ SYNCED: "end",
+ NOTSYNCED: "downloadBlocks",
+ },
+ },
+ end: {
+ onEntry: ["syncingComplete"],
+ },
+ },
+};
diff --git a/packages/core-blockchain/src/machines/blockchain.ts b/packages/core-blockchain/src/machines/blockchain.ts
new file mode 100644
index 0000000000..779bfb5b5d
--- /dev/null
+++ b/packages/core-blockchain/src/machines/blockchain.ts
@@ -0,0 +1,89 @@
+import { Machine } from "xstate";
+import { fork } from "./actions/fork";
+import { rebuildFromNetwork } from "./actions/rebuild-from-network";
+import { syncWithNetwork } from "./actions/sync-with-network";
+
+export const blockchainMachine: any = Machine({
+ key: "blockchain",
+ initial: "uninitialised",
+ states: {
+ uninitialised: {
+ on: {
+ START: "init",
+ STOP: "stopped",
+ },
+ },
+ init: {
+ onEntry: ["init"],
+ on: {
+ REBUILD: "rebuild",
+ NETWORKSTART: "idle",
+ STARTED: "syncWithNetwork",
+ ROLLBACK: "rollback",
+ FAILURE: "exit",
+ STOP: "stopped",
+ },
+ },
+ rebuild: {
+ on: {
+ REBUILDCOMPLETE: "syncWithNetwork",
+ FORK: "fork",
+ TEST: "syncWithNetwork",
+ STOP: "stopped",
+ },
+ ...rebuildFromNetwork,
+ },
+ syncWithNetwork: {
+ on: {
+ TEST: "idle",
+ SYNCFINISHED: "idle",
+ FORK: "fork",
+ STOP: "stopped",
+ },
+ ...syncWithNetwork,
+ },
+ idle: {
+ onEntry: ["checkLater", "blockchainReady"],
+ on: {
+ WAKEUP: "syncWithNetwork",
+ NEWBLOCK: "newBlock",
+ STOP: "stopped",
+ },
+ },
+ newBlock: {
+ on: {
+ PROCESSFINISHED: "idle",
+ FORK: "fork",
+ STOP: "stopped",
+ },
+ },
+ fork: {
+ onEntry: ["startForkRecovery"],
+ on: {
+ SUCCESS: "syncWithNetwork",
+ FAILURE: "exit",
+ STOP: "stopped",
+ },
+ ...fork,
+ },
+ rollback: {
+ onEntry: ["rollbackDatabase"],
+ on: {
+ SUCCESS: "init",
+ FAILURE: "exit",
+ STOP: "stopped",
+ },
+ },
+ /**
+ * This state should be used for stopping the blockchain on purpose, not as
+ * a result of critical errors. In those cases, using the `exit` state would
+ * be a better option
+ */
+ stopped: {
+ onEntry: ["stopped"],
+ },
+ exit: {
+ onEntry: ["exitApp"],
+ },
+ },
+});
diff --git a/packages/core-blockchain/src/plugin.ts b/packages/core-blockchain/src/plugin.ts
new file mode 100644
index 0000000000..57771cefc7
--- /dev/null
+++ b/packages/core-blockchain/src/plugin.ts
@@ -0,0 +1,32 @@
+import { Container } from "@arkecosystem/core-interfaces";
+import { asValue } from "awilix";
+import { Blockchain } from "./blockchain";
+import { config } from "./config";
+import { defaults } from "./defaults";
+import { stateStorage } from "./state-storage";
+
+/**
+ * The struct used by the plugin container.
+ * @type {Object}
+ */
+export const plugin: Container.PluginDescriptor = {
+ pkg: require("../package.json"),
+ defaults,
+ alias: "blockchain",
+ async register(container: Container.IContainer, options) {
+ const blockchain = new Blockchain(options);
+
+ config.init(options);
+
+ container.register("state", asValue(stateStorage));
+
+ if (!process.env.CORE_SKIP_BLOCKCHAIN) {
+ await blockchain.start();
+ }
+
+ return blockchain;
+ },
+ async deregister(container: Container.IContainer, options) {
+ await container.resolvePlugin("blockchain").stop();
+ },
+};
diff --git a/packages/core-blockchain/src/processor/block-processor.ts b/packages/core-blockchain/src/processor/block-processor.ts
new file mode 100644
index 0000000000..29d51782db
--- /dev/null
+++ b/packages/core-blockchain/src/processor/block-processor.ts
@@ -0,0 +1,102 @@
+// tslint:disable:max-classes-per-file
+
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import { isException, models } from "@arkecosystem/crypto";
+import { Blockchain } from "../blockchain";
+import { isBlockChained } from "../utils/is-block-chained";
+import { validateGenerator } from "../utils/validate-generator";
+
+import {
+ AcceptBlockHandler,
+ AlreadyForgedHandler,
+ BlockHandler,
+ ExceptionHandler,
+ InvalidGeneratorHandler,
+ UnchainedHandler,
+ VerificationFailedHandler,
+} from "./handlers";
+
+export enum BlockProcessorResult {
+ Accepted,
+ DiscardedButCanBeBroadcasted,
+ Rejected,
+}
+
+export class BlockProcessor {
+ private logger: Logger.ILogger;
+
+ public constructor(private blockchain: Blockchain) {
+ this.logger = app.resolvePlugin("logger");
+ }
+
+ public async process(block: models.Block): Promise {
+ const handler = await this.getHandler(block);
+ return handler.execute();
+ }
+
+ public async getHandler(block: models.Block): Promise {
+ if (isException(block.data)) {
+ return new ExceptionHandler(this.blockchain, block);
+ }
+
+ if (!this.verifyBlock(block)) {
+ return new VerificationFailedHandler(this.blockchain, block);
+ }
+
+ const isValidGenerator = await validateGenerator(block);
+ const isChained = isBlockChained(this.blockchain.getLastBlock(), block);
+ if (!isChained) {
+ return new UnchainedHandler(this.blockchain, block, isValidGenerator);
+ }
+
+ if (!isValidGenerator) {
+ return new InvalidGeneratorHandler(this.blockchain, block);
+ }
+
+ const containsForgedTransactions = await this.checkBlockContainsForgedTransactions(block);
+ if (containsForgedTransactions) {
+ return new AlreadyForgedHandler(this.blockchain, block);
+ }
+
+ return new AcceptBlockHandler(this.blockchain, block);
+ }
+
+ /**
+ * Checks if the given block is verified or an exception.
+ */
+ private verifyBlock(block: models.Block): boolean {
+ const verified = block.verification.verified;
+ if (!verified) {
+ this.logger.warn(
+ `Block ${block.data.height.toLocaleString()} (${
+ block.data.id
+ }) disregarded because verification failed :scroll:`,
+ );
+ this.logger.warn(JSON.stringify(block.verification, null, 4));
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Checks if the given block contains an already forged transaction.
+ */
+ private async checkBlockContainsForgedTransactions(block: models.Block): Promise {
+ if (block.transactions.length > 0) {
+ const forgedIds = await this.blockchain.database.getForgedTransactionsIds(
+ block.transactions.map(tx => tx.id),
+ );
+ if (forgedIds.length > 0) {
+ this.logger.warn(
+ `Block ${block.data.height.toLocaleString()} disregarded, because it contains already forged transactions :scroll:`,
+ );
+ this.logger.debug(`${JSON.stringify(forgedIds, null, 4)}`);
+ return true;
+ }
+ }
+
+ return false;
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/accept-block-handler.ts b/packages/core-blockchain/src/processor/handlers/accept-block-handler.ts
new file mode 100644
index 0000000000..91f9472a82
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/accept-block-handler.ts
@@ -0,0 +1,52 @@
+import { BlockProcessorResult } from "../block-processor";
+import { BlockHandler } from "./block-handler";
+
+export class AcceptBlockHandler extends BlockHandler {
+ public async execute(): Promise {
+ const { database, state, transactionPool } = this.blockchain;
+
+ try {
+ await database.applyBlock(this.block);
+ await database.saveBlock(this.block);
+
+ // Check if we recovered from a fork
+ if (state.forkedBlock && state.forkedBlock.data.height === this.block.data.height) {
+ this.logger.info("Successfully recovered from fork :star2:");
+ state.forkedBlock = null;
+ }
+
+ if (transactionPool) {
+ try {
+ transactionPool.acceptChainedBlock(this.block);
+ } catch (error) {
+ this.logger.warn("Issue applying block to transaction pool");
+ this.logger.debug(error.stack);
+ }
+ }
+
+ // Reset wake-up timer after chaining a block, since there's no need to
+ // wake up at all if blocks arrive periodically. Only wake up when there are
+ // no new blocks.
+ if (state.started) {
+ this.blockchain.resetWakeUp();
+ }
+
+ state.setLastBlock(this.block);
+
+ // Ensure the lastDownloadedBlock is never behind the last accepted block.
+ if (state.lastDownloadedBlock && state.lastDownloadedBlock.data.height < this.block.data.height) {
+ state.lastDownloadedBlock = this.block;
+ }
+
+ return BlockProcessorResult.Accepted;
+ } catch (error) {
+ this.logger.error(`Refused new block ${JSON.stringify(this.block.data)}`);
+ this.logger.debug(error.stack);
+
+ this.blockchain.transactionPool.purgeBlock(this.block);
+ this.blockchain.forkBlock(this.block);
+
+ return super.execute();
+ }
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/already-forged-handler.ts b/packages/core-blockchain/src/processor/handlers/already-forged-handler.ts
new file mode 100644
index 0000000000..b1185cb74e
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/already-forged-handler.ts
@@ -0,0 +1,9 @@
+import { BlockProcessorResult } from "../block-processor";
+import { BlockHandler } from "./block-handler";
+
+export class AlreadyForgedHandler extends BlockHandler {
+ public async execute(): Promise {
+ super.execute();
+ return BlockProcessorResult.DiscardedButCanBeBroadcasted;
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/block-handler.ts b/packages/core-blockchain/src/processor/handlers/block-handler.ts
new file mode 100644
index 0000000000..5a5ffd0494
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/block-handler.ts
@@ -0,0 +1,18 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+import { Blockchain } from "../../blockchain";
+import { BlockProcessorResult } from "../block-processor";
+
+export abstract class BlockHandler {
+ protected logger: Logger.ILogger;
+
+ public constructor(protected blockchain: Blockchain, protected block: models.Block) {
+ this.logger = app.resolvePlugin("logger");
+ }
+
+ public async execute(): Promise {
+ this.blockchain.resetLastDownloadedBlock();
+ return BlockProcessorResult.Rejected;
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/exception-handler.ts b/packages/core-blockchain/src/processor/handlers/exception-handler.ts
new file mode 100644
index 0000000000..52f05e8bec
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/exception-handler.ts
@@ -0,0 +1,20 @@
+import { BlockProcessorResult } from "../block-processor";
+import { AcceptBlockHandler } from "./accept-block-handler";
+import { BlockHandler } from "./block-handler";
+
+export class ExceptionHandler extends BlockHandler {
+ public async execute(): Promise {
+ // Ensure the block has not been forged yet, as an exceptional
+ // block bypasses all other checks.
+ const forgedBlock = await this.blockchain.database.getBlock(this.block.data.id);
+ if (forgedBlock) {
+ return super.execute();
+ }
+
+ this.logger.warn(
+ `Block ${this.block.data.height.toLocaleString()} (${this.block.data.id}) forcibly accepted. :exclamation:`,
+ );
+
+ return new AcceptBlockHandler(this.blockchain, this.block).execute();
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/index.ts b/packages/core-blockchain/src/processor/handlers/index.ts
new file mode 100644
index 0000000000..924075ccec
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/index.ts
@@ -0,0 +1,7 @@
+export * from "./accept-block-handler";
+export * from "./already-forged-handler";
+export * from "./block-handler";
+export * from "./exception-handler";
+export * from "./invalid-generator-handler";
+export * from "./unchained-handler";
+export * from "./verification-failed-handler";
diff --git a/packages/core-blockchain/src/processor/handlers/invalid-generator-handler.ts b/packages/core-blockchain/src/processor/handlers/invalid-generator-handler.ts
new file mode 100644
index 0000000000..2e2906480c
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/invalid-generator-handler.ts
@@ -0,0 +1,3 @@
+import { BlockHandler } from "./block-handler";
+
+export class InvalidGeneratorHandler extends BlockHandler {}
diff --git a/packages/core-blockchain/src/processor/handlers/unchained-handler.ts b/packages/core-blockchain/src/processor/handlers/unchained-handler.ts
new file mode 100644
index 0000000000..f706288a43
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/unchained-handler.ts
@@ -0,0 +1,98 @@
+import { app } from "@arkecosystem/core-container";
+import { models } from "@arkecosystem/crypto";
+import { Blockchain } from "../../blockchain";
+import { BlockProcessorResult } from "../block-processor";
+import { BlockHandler } from "./block-handler";
+
+enum UnchainedBlockStatus {
+ NotReadyToAcceptNewHeight,
+ AlreadyInBlockchain,
+ EqualToLastBlock,
+ GeneratorMismatch,
+ DoubleForging,
+ InvalidTimestamp,
+}
+
+export class UnchainedHandler extends BlockHandler {
+ public constructor(
+ protected blockchain: Blockchain,
+ protected block: models.Block,
+ private isValidGenerator: boolean,
+ ) {
+ super(blockchain, block);
+ }
+
+ public async execute(): Promise {
+ super.execute();
+
+ this.blockchain.processQueue.clear();
+
+ const status = this.checkUnchainedBlock();
+ switch (status) {
+ case UnchainedBlockStatus.DoubleForging: {
+ const database = app.resolvePlugin("database");
+ const delegates = await database.getActiveDelegates(this.block.data.height);
+ if (delegates.some(delegate => delegate.publicKey === this.block.data.generatorPublicKey)) {
+ this.blockchain.forkBlock(this.block);
+ }
+
+ return BlockProcessorResult.Rejected;
+ }
+
+ case UnchainedBlockStatus.GeneratorMismatch:
+ case UnchainedBlockStatus.InvalidTimestamp: {
+ return BlockProcessorResult.Rejected;
+ }
+
+ default: {
+ return BlockProcessorResult.DiscardedButCanBeBroadcasted;
+ }
+ }
+ }
+
+ private checkUnchainedBlock(): UnchainedBlockStatus {
+ const lastBlock = this.blockchain.getLastBlock();
+ if (this.block.data.height > lastBlock.data.height + 1) {
+ this.logger.debug(
+ `Blockchain not ready to accept new block at height ${this.block.data.height.toLocaleString()}. Last block: ${lastBlock.data.height.toLocaleString()} :warning:`,
+ );
+
+ // Also remove all remaining queued blocks. Since blocks are downloaded in batches,
+ // it is very likely that all blocks will be disregarded at this point anyway.
+ // NOTE: This isn't really elegant, but still better than spamming the log with
+ // useless `not ready to accept` messages.
+ if (this.blockchain.processQueue.length() > 0) {
+ this.logger.debug(`Discarded ${this.blockchain.processQueue.length()} downloaded blocks.`);
+ }
+
+ return UnchainedBlockStatus.NotReadyToAcceptNewHeight;
+ } else if (this.block.data.height < lastBlock.data.height) {
+ this.logger.debug(
+ `Block ${this.block.data.height.toLocaleString()} disregarded because already in blockchain :warning:`,
+ );
+
+ return UnchainedBlockStatus.AlreadyInBlockchain;
+ } else if (this.block.data.height === lastBlock.data.height && this.block.data.id === lastBlock.data.id) {
+ this.logger.debug(`Block ${this.block.data.height.toLocaleString()} just received :chains:`);
+ return UnchainedBlockStatus.EqualToLastBlock;
+ } else if (this.block.data.timestamp < lastBlock.data.timestamp) {
+ this.logger.debug(
+ `Block ${this.block.data.height.toLocaleString()} disregarded, because the timestamp is lower than the previous timestamp.`,
+ );
+ return UnchainedBlockStatus.InvalidTimestamp;
+ } else {
+ if (this.isValidGenerator) {
+ this.logger.warn(`Detect double forging by ${this.block.data.generatorPublicKey} :chains:`);
+ return UnchainedBlockStatus.DoubleForging;
+ }
+
+ this.logger.info(
+ `Forked block disregarded because it is not allowed to be forged. Caused by delegate: ${
+ this.block.data.generatorPublicKey
+ } :bangbang:`,
+ );
+
+ return UnchainedBlockStatus.GeneratorMismatch;
+ }
+ }
+}
diff --git a/packages/core-blockchain/src/processor/handlers/verification-failed-handler.ts b/packages/core-blockchain/src/processor/handlers/verification-failed-handler.ts
new file mode 100644
index 0000000000..874e907408
--- /dev/null
+++ b/packages/core-blockchain/src/processor/handlers/verification-failed-handler.ts
@@ -0,0 +1,9 @@
+import { BlockProcessorResult } from "../block-processor";
+import { BlockHandler } from "./block-handler";
+
+export class VerificationFailedHandler extends BlockHandler {
+ public async execute(): Promise {
+ this.blockchain.transactionPool.purgeSendersWithInvalidTransactions(this.block);
+ return super.execute();
+ }
+}
diff --git a/packages/core-blockchain/src/processor/index.ts b/packages/core-blockchain/src/processor/index.ts
new file mode 100644
index 0000000000..95c35dacb5
--- /dev/null
+++ b/packages/core-blockchain/src/processor/index.ts
@@ -0,0 +1 @@
+export * from "../processor/block-processor";
diff --git a/packages/core-blockchain/src/queue/index.ts b/packages/core-blockchain/src/queue/index.ts
new file mode 100644
index 0000000000..031522c6c0
--- /dev/null
+++ b/packages/core-blockchain/src/queue/index.ts
@@ -0,0 +1,53 @@
+import { ProcessQueue } from "./process";
+import { RebuildQueue } from "./rebuild";
+
+export { ProcessQueue };
+export { RebuildQueue };
+
+export class Queue {
+ public process: ProcessQueue;
+ public rebuild: RebuildQueue;
+
+ /**
+ * Create an instance of the queue.
+ * @param {Blockchain} blockchain
+ * @param {Object} events
+ * @return {void}
+ */
+ constructor(blockchain, events) {
+ this.process = new ProcessQueue(blockchain, events.process);
+ this.rebuild = new RebuildQueue(blockchain, events.rebuild);
+ }
+
+ /**
+ * Pause all queues.
+ * @return {void}
+ */
+ public pause() {
+ this.rebuild.pause();
+ this.process.pause();
+ }
+
+ /**
+ * Flush all queues.
+ * @return {void}
+ */
+ public clear() {
+ this.rebuild.clear();
+ this.process.clear();
+ }
+
+ /**
+ * Resume all queues.
+ * @return {void}
+ */
+ public resume() {
+ this.rebuild.resume();
+ this.process.resume();
+ }
+
+ public destroy() {
+ this.rebuild.destroy();
+ this.process.destroy();
+ }
+}
diff --git a/packages/core-blockchain/src/queue/interface.ts b/packages/core-blockchain/src/queue/interface.ts
new file mode 100644
index 0000000000..45b863c9a4
--- /dev/null
+++ b/packages/core-blockchain/src/queue/interface.ts
@@ -0,0 +1,71 @@
+import async from "async";
+import { Blockchain } from "../blockchain";
+
+export abstract class QueueInterface {
+ protected queue: any;
+
+ /**
+ * Create an instance of the queue.
+ */
+ constructor(readonly blockchain: Blockchain, readonly event: string) {}
+
+ /**
+ * Drain the queue.
+ */
+ public drain() {
+ this.queue.drain = () => this.blockchain.dispatch(this.event);
+ }
+
+ /**
+ * Pause the queue.
+ * @return {void}
+ */
+ public pause() {
+ return this.queue.pause();
+ }
+
+ /**
+ * Flush the queue.
+ * @return {void}
+ */
+ public clear() {
+ return this.queue.remove(() => true);
+ }
+
+ /**
+ * Resume the queue.
+ * @return {void}
+ */
+ public resume() {
+ return this.queue.resume();
+ }
+
+ /**
+ * Remove the item from the queue.
+ * @return {void}
+ */
+ public remove(item) {
+ return this.queue.remove(item);
+ }
+
+ /**
+ * Push the item to the queue.
+ * @param {Function} callback
+ * @return {void}
+ */
+ public push(callback) {
+ return this.queue.push(callback);
+ }
+
+ /**
+ * Get the length of the queue.
+ * @return {void}
+ */
+ public length() {
+ return this.queue.length();
+ }
+
+ public destroy() {
+ return this.queue.kill();
+ }
+}
diff --git a/packages/core-blockchain/src/queue/process.ts b/packages/core-blockchain/src/queue/process.ts
new file mode 100644
index 0000000000..bd4afc25ba
--- /dev/null
+++ b/packages/core-blockchain/src/queue/process.ts
@@ -0,0 +1,29 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+import async from "async";
+import { Blockchain } from "../blockchain";
+import { QueueInterface } from "./interface";
+
+const logger = app.resolvePlugin("logger");
+
+export class ProcessQueue extends QueueInterface {
+ /**
+ * Create an instance of the process queue.
+ */
+ constructor(readonly blockchain: Blockchain, readonly event: string) {
+ super(blockchain, event);
+
+ this.queue = async.queue((block: models.IBlockData, cb) => {
+ try {
+ return blockchain.processBlock(new models.Block(block), cb);
+ } catch (error) {
+ logger.error(`Failed to process block in ProcessQueue: ${block.height.toLocaleString()}`);
+ logger.error(error.stack);
+ return cb();
+ }
+ }, 1);
+
+ this.drain();
+ }
+}
diff --git a/packages/core-blockchain/src/queue/rebuild.ts b/packages/core-blockchain/src/queue/rebuild.ts
new file mode 100644
index 0000000000..786d3dc4bf
--- /dev/null
+++ b/packages/core-blockchain/src/queue/rebuild.ts
@@ -0,0 +1,31 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+import async from "async";
+import { Blockchain } from "../blockchain";
+import { QueueInterface } from "./interface";
+
+const logger = app.resolvePlugin("logger");
+
+export class RebuildQueue extends QueueInterface {
+ /**
+ * Create an instance of the rebuild queue.
+ */
+ constructor(readonly blockchain: Blockchain, readonly event: string) {
+ super(blockchain, event);
+
+ this.queue = async.queue((block: models.IBlockData, cb) => {
+ if (this.queue.paused) {
+ return cb();
+ }
+ try {
+ return blockchain.rebuildBlock(new models.Block(block), cb);
+ } catch (error) {
+ logger.error(`Failed to rebuild block in RebuildQueue: ${block.height.toLocaleString()}`);
+ return cb();
+ }
+ }, 1);
+
+ this.drain();
+ }
+}
diff --git a/packages/core-blockchain/src/state-machine.ts b/packages/core-blockchain/src/state-machine.ts
new file mode 100644
index 0000000000..b5f5754475
--- /dev/null
+++ b/packages/core-blockchain/src/state-machine.ts
@@ -0,0 +1,415 @@
+/* tslint:disable:jsdoc-format max-line-length */
+
+import { app } from "@arkecosystem/core-container";
+import { EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+
+import { roundCalculator } from "@arkecosystem/core-utils";
+import { isException, models, slots } from "@arkecosystem/crypto";
+
+import pluralize from "pluralize";
+import { config as localConfig } from "./config";
+import { blockchainMachine } from "./machines/blockchain";
+import { stateStorage } from "./state-storage";
+import { isBlockChained, tickSyncTracker } from "./utils";
+
+import { Blockchain } from "./blockchain";
+
+const { Block } = models;
+const config = app.getConfig();
+const emitter = app.resolvePlugin("event-emitter");
+const logger = app.resolvePlugin("logger");
+
+/**
+ * @type {IStateStorage}
+ */
+blockchainMachine.state = stateStorage;
+
+/**
+ * The blockchain actions.
+ * @param {Blockchain} blockchain
+ * @return {Object}
+ */
+blockchainMachine.actionMap = (blockchain: Blockchain) => ({
+ blockchainReady: () => {
+ if (!stateStorage.started) {
+ stateStorage.started = true;
+ emitter.emit("state:started", true);
+ }
+ },
+
+ checkLater() {
+ if (!blockchain.isStopped && !stateStorage.wakeUpTimeout) {
+ blockchain.setWakeUp();
+ }
+ },
+
+ checkLastBlockSynced() {
+ return blockchain.dispatch(blockchain.isSynced() ? "SYNCED" : "NOTSYNCED");
+ },
+
+ checkRebuildBlockSynced() {
+ return blockchain.dispatch(blockchain.isRebuildSynced() ? "SYNCED" : "NOTSYNCED");
+ },
+
+ async checkLastDownloadedBlockSynced() {
+ let event = "NOTSYNCED";
+ logger.debug(
+ `Queued blocks (rebuild: ${blockchain.rebuildQueue.length()} process: ${blockchain.processQueue.length()})`,
+ );
+
+ if (blockchain.rebuildQueue.length() > 10000 || blockchain.processQueue.length() > 10000) {
+ event = "PAUSED";
+ }
+
+ // tried to download but no luck after 5 tries (looks like network missing blocks)
+ if (stateStorage.noBlockCounter > 5 && blockchain.processQueue.length() === 0) {
+ // TODO: make this dynamic in 2.1
+ logger.info(
+ "Tried to sync 5 times to different nodes, looks like the network is missing blocks :umbrella:",
+ );
+
+ stateStorage.noBlockCounter = 0;
+ event = "NETWORKHALTED";
+
+ if (stateStorage.p2pUpdateCounter + 1 > 3) {
+ logger.info("Network keeps missing blocks. :umbrella:");
+
+ const result = await blockchain.p2p.updatePeersOnMissingBlocks();
+ if (result === "rollback") {
+ event = "FORK";
+ }
+
+ stateStorage.p2pUpdateCounter = 0;
+ } else {
+ stateStorage.p2pUpdateCounter++;
+ }
+ }
+
+ if (blockchain.isSynced(stateStorage.lastDownloadedBlock)) {
+ stateStorage.noBlockCounter = 0;
+ stateStorage.p2pUpdateCounter = 0;
+
+ event = "SYNCED";
+ }
+
+ if (stateStorage.networkStart) {
+ event = "SYNCED";
+ }
+
+ if (process.env.CORE_ENV === "test") {
+ event = "TEST";
+ }
+
+ blockchain.dispatch(event);
+ },
+
+ downloadFinished() {
+ logger.info("Block download finished :rocket:");
+
+ if (stateStorage.networkStart) {
+ // next time we will use normal behaviour
+ stateStorage.networkStart = false;
+
+ blockchain.dispatch("SYNCFINISHED");
+ } else if (blockchain.rebuildQueue.length() === 0) {
+ blockchain.dispatch("PROCESSFINISHED");
+ }
+ },
+
+ async rebuildFinished() {
+ try {
+ logger.info("Blockchain rebuild finished :chains:");
+
+ stateStorage.rebuild = false;
+
+ await blockchain.database.commitQueuedQueries();
+ await blockchain.rollbackCurrentRound();
+ await blockchain.database.buildWallets(stateStorage.getLastBlock().data.height);
+ await blockchain.database.saveWallets(true);
+ await blockchain.transactionPool.buildWallets();
+
+ return blockchain.dispatch("PROCESSFINISHED");
+ } catch (error) {
+ logger.error(error.stack);
+ return blockchain.dispatch("FAILURE");
+ }
+ },
+
+ downloadPaused: () => logger.info("Blockchain download paused :clock1030:"),
+
+ syncingComplete() {
+ logger.info("Blockchain 100% in sync :100:");
+ blockchain.dispatch("SYNCFINISHED");
+ },
+
+ rebuildingComplete() {
+ logger.info("Blockchain rebuild complete :unicorn_face:");
+ blockchain.dispatch("REBUILDCOMPLETE");
+ },
+
+ stopped() {
+ logger.info("The blockchain has been stopped :guitar:");
+ },
+
+ exitApp() {
+ app.forceExit("Failed to startup blockchain. Exiting Ark Core! :rotating_light:");
+ },
+
+ async init() {
+ try {
+ let block = await blockchain.database.getLastBlock();
+
+ if (!block) {
+ logger.warn("No block found in database :hushed:");
+
+ block = new Block(config.get("genesisBlock"));
+
+ if (block.data.payloadHash !== config.get("network.nethash")) {
+ logger.error(
+ "FATAL: The genesis block payload hash is different from configured the nethash :rotating_light:",
+ );
+
+ return blockchain.dispatch("FAILURE");
+ }
+
+ await blockchain.database.saveBlock(block);
+ }
+
+ if (!blockchain.database.restoredDatabaseIntegrity) {
+ logger.info("Verifying database integrity :hourglass_flowing_sand:");
+
+ const blockchainAudit = await blockchain.database.verifyBlockchain();
+ if (!blockchainAudit.valid) {
+ logger.error("FATAL: The database is corrupted :fire:");
+ logger.error(JSON.stringify(blockchainAudit.errors, null, 4));
+
+ return blockchain.dispatch("ROLLBACK");
+ }
+
+ logger.info("Verified database integrity :smile_cat:");
+ } else {
+ logger.info("Skipping database integrity check after successful database recovery :smile_cat:");
+ }
+
+ // only genesis block? special case of first round needs to be dealt with
+ if (block.data.height === 1) {
+ await blockchain.database.deleteRound(1);
+ }
+
+ /** *******************************
+ * state machine data init *
+ ******************************* */
+ const constants = config.getMilestone(block.data.height);
+ stateStorage.setLastBlock(block);
+ stateStorage.lastDownloadedBlock = block;
+
+ if (stateStorage.networkStart) {
+ await blockchain.database.buildWallets(block.data.height);
+ await blockchain.database.saveWallets(true);
+ await blockchain.database.applyRound(block.data.height);
+ await blockchain.transactionPool.buildWallets();
+
+ return blockchain.dispatch("STARTED");
+ }
+
+ stateStorage.rebuild =
+ slots.getTime() - block.data.timestamp > (constants.activeDelegates + 1) * constants.blocktime;
+ // no fast rebuild if in last week
+ stateStorage.fastRebuild =
+ slots.getTime() - block.data.timestamp > 3600 * 24 * 7 && !!localConfig.get("fastRebuild");
+
+ if (process.env.NODE_ENV === "test") {
+ logger.verbose("TEST SUITE DETECTED! SYNCING WALLETS AND STARTING IMMEDIATELY. :bangbang:");
+
+ stateStorage.setLastBlock(new Block(config.get("genesisBlock")));
+ await blockchain.database.buildWallets(block.data.height);
+
+ return blockchain.dispatch("STARTED");
+ }
+
+ logger.info(`Fast rebuild: ${stateStorage.fastRebuild}`);
+ logger.info(`Last block in database: ${block.data.height.toLocaleString()}`);
+
+ if (stateStorage.fastRebuild) {
+ return blockchain.dispatch("REBUILD");
+ }
+
+ // removing blocks up to the last round to compute active delegate list later if needed
+ const activeDelegates = await blockchain.database.getActiveDelegates(block.data.height);
+
+ if (!activeDelegates) {
+ await blockchain.rollbackCurrentRound();
+ }
+
+ /** *******************************
+ * database init *
+ ******************************* */
+ // SPV rebuild
+ const verifiedWalletsIntegrity = await blockchain.database.buildWallets(block.data.height);
+ if (!verifiedWalletsIntegrity && block.data.height > 1) {
+ logger.warn(
+ "Rebuilding wallets table because of some inconsistencies. Most likely due to an unfortunate shutdown. :hammer:",
+ );
+ await blockchain.database.saveWallets(true);
+ }
+
+ // NOTE: if the node is shutdown between round, the round has already been applied
+ if (roundCalculator.isNewRound(block.data.height + 1)) {
+ const { round } = roundCalculator.calculateRound(block.data.height + 1);
+
+ logger.info(
+ `New round ${round.toLocaleString()} detected. Cleaning calculated data before restarting!`,
+ );
+
+ await blockchain.database.deleteRound(round);
+ }
+
+ await blockchain.database.applyRound(block.data.height);
+ await blockchain.transactionPool.buildWallets();
+
+ return blockchain.dispatch("STARTED");
+ } catch (error) {
+ logger.error(error.stack);
+
+ return blockchain.dispatch("FAILURE");
+ }
+ },
+
+ async rebuildBlocks() {
+ const lastBlock = stateStorage.lastDownloadedBlock || stateStorage.getLastBlock();
+ const blocks = await blockchain.p2p.downloadBlocks(lastBlock.data.height);
+
+ tickSyncTracker(blocks.length, lastBlock.data.height);
+
+ if (!blocks || blocks.length === 0) {
+ logger.info("No new blocks found on this peer");
+
+ blockchain.dispatch("NOBLOCK");
+ } else {
+ logger.info(
+ `Downloaded ${blocks.length} new ${pluralize(
+ "block",
+ blocks.length,
+ )} accounting for a total of ${pluralize(
+ "transaction",
+ blocks.reduce((sum, b) => sum + b.numberOfTransactions, 0),
+ true,
+ )}`,
+ );
+
+ if (blocks.length && blocks[0].previousBlock === lastBlock.data.id) {
+ stateStorage.lastDownloadedBlock = { data: blocks.slice(-1)[0] };
+ blockchain.rebuildQueue.push(blocks);
+ blockchain.dispatch("DOWNLOADED");
+ } else {
+ logger.warn(`Downloaded block not accepted: ${JSON.stringify(blocks[0])}`);
+ logger.warn(`Last block: ${JSON.stringify(lastBlock.data)}`);
+
+ // disregard the whole block list
+ blockchain.dispatch("NOBLOCK");
+ }
+ }
+ },
+
+ async downloadBlocks() {
+ const lastDownloadedBlock = stateStorage.lastDownloadedBlock || stateStorage.getLastBlock();
+ const blocks = await blockchain.p2p.downloadBlocks(lastDownloadedBlock.data.height);
+
+ if (blockchain.isStopped) {
+ return;
+ }
+
+ const empty = !blocks || blocks.length === 0;
+ const chained = !empty && (isBlockChained(lastDownloadedBlock, { data: blocks[0] }) || isException(blocks[0]));
+
+ if (chained) {
+ logger.info(
+ `Downloaded ${blocks.length} new ${pluralize(
+ "block",
+ blocks.length,
+ )} accounting for a total of ${pluralize(
+ "transaction",
+ blocks.reduce((sum, b) => sum + b.numberOfTransactions, 0),
+ true,
+ )}`,
+ );
+
+ stateStorage.noBlockCounter = 0;
+ stateStorage.p2pUpdateCounter = 0;
+
+ blockchain.enqueueBlocks(blocks);
+ blockchain.dispatch("DOWNLOADED");
+ } else {
+ if (empty) {
+ logger.info("No new block found on this peer");
+ } else {
+ logger.warn(`Downloaded block not accepted: ${JSON.stringify(blocks[0])}`);
+ logger.warn(`Last downloaded block: ${JSON.stringify(lastDownloadedBlock.data)}`);
+ blockchain.processQueue.clear();
+ }
+
+ stateStorage.noBlockCounter++;
+ stateStorage.lastDownloadedBlock = stateStorage.getLastBlock();
+
+ blockchain.dispatch("NOBLOCK");
+ }
+ },
+
+ async analyseFork() {
+ logger.info("Analysing fork :mag:");
+ },
+
+ async startForkRecovery() {
+ logger.info("Starting fork recovery :fork_and_knife:");
+ blockchain.clearAndStopQueue();
+
+ await blockchain.database.commitQueuedQueries();
+
+ const random = 4 + Math.floor(Math.random() * 99); // random int inside [4, 102] range
+
+ await blockchain.removeBlocks(random);
+
+ logger.info(`Removed ${pluralize("block", random, true)} :wastebasket:`);
+
+ await blockchain.transactionPool.buildWallets();
+ await blockchain.p2p.refreshPeersAfterFork();
+
+ blockchain.dispatch("SUCCESS");
+ },
+
+ async rollbackDatabase() {
+ logger.info("Trying to restore database integrity :fire_engine:");
+
+ const { maxBlockRewind, steps } = localConfig.get("databaseRollback");
+ let blockchainAudit;
+
+ for (let i = maxBlockRewind; i >= 0; i -= steps) {
+ await blockchain.removeTopBlocks(steps);
+
+ blockchainAudit = await blockchain.database.verifyBlockchain();
+ if (blockchainAudit.valid) {
+ break;
+ }
+ }
+
+ if (!blockchainAudit.valid) {
+ // TODO: multiple attempts? rewind further? restore snapshot?
+ logger.error("FATAL: Failed to restore database integrity :skull: :skull: :skull:");
+ logger.error(JSON.stringify(blockchainAudit.errors, null, 4));
+ blockchain.dispatch("FAILURE");
+ return;
+ }
+
+ blockchain.database.restoredDatabaseIntegrity = true;
+
+ const lastBlock = await blockchain.database.getLastBlock();
+ logger.info(
+ `Database integrity verified again after rollback to height ${lastBlock.data.height.toLocaleString()} :green_heart:`,
+ );
+
+ blockchain.dispatch("SUCCESS");
+ },
+});
+
+const stateMachine = blockchainMachine;
+export { stateMachine };
diff --git a/packages/core-blockchain/src/state-storage.ts b/packages/core-blockchain/src/state-storage.ts
new file mode 100644
index 0000000000..0274dc7006
--- /dev/null
+++ b/packages/core-blockchain/src/state-storage.ts
@@ -0,0 +1,240 @@
+// tslint:disable:variable-name
+
+import { app } from "@arkecosystem/core-container";
+import { Blockchain, Logger } from "@arkecosystem/core-interfaces";
+import { configManager, models } from "@arkecosystem/crypto";
+import assert from "assert";
+import immutable from "immutable";
+import { config } from "./config";
+import { blockchainMachine } from "./machines/blockchain";
+
+const logger = app.resolvePlugin("logger");
+
+// Stores the last n blocks in ascending height. The amount of last blocks
+// can be configured with the option `state.maxLastBlocks`.
+let _lastBlocks: immutable.OrderedMap = immutable.OrderedMap();
+
+// Stores the last n incoming transaction ids. The amount of transaction ids
+// can be configured with the option `state.maxLastTransactionIds`.
+let _cachedTransactionIds: immutable.OrderedSet = immutable.OrderedSet();
+
+// Map Block instances to block data.
+const _mapToBlockData = (blocks: immutable.Seq): immutable.Seq =>
+ blocks.map(block => ({ ...block.data, transactions: block.transactions }));
+
+/**
+ * Represents an in-memory storage for state machine data.
+ */
+export class StateStorage implements Blockchain.IStateStorage {
+ public blockchain: any;
+ public lastDownloadedBlock: models.IBlock | null;
+ public blockPing: any;
+ public started: boolean;
+ public forkedBlock: models.Block | null;
+ public rebuild: boolean;
+ public fastRebuild: boolean;
+ public wakeUpTimeout: any;
+ public noBlockCounter: number;
+ public p2pUpdateCounter: number;
+ public networkStart: boolean;
+
+ constructor() {
+ this.reset();
+ }
+
+ /**
+ * Resets the state.
+ */
+ public reset(): void {
+ this.blockchain = blockchainMachine.initialState;
+ this.lastDownloadedBlock = null;
+ this.blockPing = null;
+ this.started = false;
+ this.forkedBlock = null;
+ this.rebuild = true;
+ this.fastRebuild = false;
+ this.wakeUpTimeout = null;
+ this.noBlockCounter = 0;
+ this.p2pUpdateCounter = 0;
+ this.networkStart = false;
+
+ this.clear();
+ }
+
+ /**
+ * Clear last blocks.
+ */
+ public clear(): void {
+ _lastBlocks = _lastBlocks.clear();
+ _cachedTransactionIds = _cachedTransactionIds.clear();
+ }
+
+ /**
+ * Clear check later timeout.
+ */
+ public clearWakeUpTimeout(): void {
+ if (this.wakeUpTimeout) {
+ clearTimeout(this.wakeUpTimeout);
+ this.wakeUpTimeout = null;
+ }
+ }
+
+ /**
+ * Get the last block.
+ */
+ public getLastBlock(): models.Block | null {
+ return _lastBlocks.last() || null;
+ }
+
+ /**
+ * Sets the last block.
+ */
+ public setLastBlock(block: models.Block): void {
+ // Only keep blocks which are below the new block height (i.e. rollback)
+ if (_lastBlocks.last() && _lastBlocks.last().data.height !== block.data.height - 1) {
+ assert(block.data.height - 1 <= _lastBlocks.last().data.height);
+ _lastBlocks = _lastBlocks.filter(b => b.data.height < block.data.height);
+ }
+
+ _lastBlocks = _lastBlocks.set(block.data.height, block);
+ configManager.setHeight(block.data.height);
+
+ // Delete oldest block if size exceeds the maximum
+ if (_lastBlocks.size > config.get("state.maxLastBlocks")) {
+ _lastBlocks = _lastBlocks.delete(_lastBlocks.first().data.height);
+ }
+ }
+
+ /**
+ * Get the last blocks.
+ */
+ public getLastBlocks(): models.Block[] {
+ return _lastBlocks
+ .valueSeq()
+ .reverse()
+ .toArray();
+ }
+
+ /**
+ * Get the last blocks data.
+ */
+ public getLastBlocksData(): immutable.Seq {
+ return _mapToBlockData(_lastBlocks.valueSeq().reverse());
+ }
+
+ /**
+ * Get the last block ids.
+ */
+ public getLastBlockIds(): string[] {
+ return _lastBlocks
+ .valueSeq()
+ .reverse()
+ .map(b => b.data.id)
+ .toArray();
+ }
+
+ /**
+ * Get last blocks in the given height range in ascending order.
+ * @param {Number} start
+ * @param {Number} end
+ */
+ public getLastBlocksByHeight(start, end?): models.IBlockData[] {
+ end = end || start;
+
+ const blocks = _lastBlocks.valueSeq().filter(block => block.data.height >= start && block.data.height <= end);
+
+ return _mapToBlockData(blocks).toArray() as models.IBlockData[];
+ }
+
+ /**
+ * Get common blocks for the given IDs.
+ */
+ public getCommonBlocks(ids): models.IBlockData[] {
+ const idsHash = {};
+ ids.forEach(id => (idsHash[id] = true));
+ return this.getLastBlocksData()
+ .filter(block => idsHash[block.id])
+ .toArray() as models.IBlockData[];
+ }
+
+ /**
+ * Cache the ids of the given transactions.
+ */
+ public cacheTransactions(
+ transactions: models.ITransactionData[],
+ ): { added: models.ITransactionData[]; notAdded: models.ITransactionData[] } {
+ const notAdded = [];
+ const added = transactions.filter(tx => {
+ if (_cachedTransactionIds.has(tx.id)) {
+ notAdded.push(tx);
+ return false;
+ }
+ return true;
+ });
+
+ _cachedTransactionIds = _cachedTransactionIds.withMutations(cache => {
+ added.forEach(tx => cache.add(tx.id));
+ });
+
+ // Cap the Set of last transaction ids to maxLastTransactionIds
+ const limit = config.get("state.maxLastTransactionIds");
+ if (_cachedTransactionIds.size > limit) {
+ _cachedTransactionIds = _cachedTransactionIds.takeLast(limit);
+ }
+
+ return { added, notAdded };
+ }
+
+ /**
+ * Remove the given transaction ids from the cache.
+ */
+ public removeCachedTransactionIds(transactionIds: string[]): void {
+ _cachedTransactionIds = _cachedTransactionIds.subtract(transactionIds);
+ }
+
+ /**
+ * Get cached transaction ids.
+ */
+ public getCachedTransactionIds(): string[] {
+ return _cachedTransactionIds.toArray();
+ }
+
+ /**
+ * Ping a block.
+ */
+ public pingBlock(incomingBlock: models.IBlockData): boolean {
+ if (!this.blockPing) {
+ return false;
+ }
+
+ if (this.blockPing.block.height === incomingBlock.height && this.blockPing.block.id === incomingBlock.id) {
+ this.blockPing.count++;
+ this.blockPing.last = new Date().getTime();
+
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Push ping block.
+ */
+ public pushPingBlock(block: models.IBlockData) {
+ // logging for stats about network health
+ if (this.blockPing) {
+ logger.info(
+ `Block ${this.blockPing.block.height.toLocaleString()} pinged blockchain ${this.blockPing.count} times`,
+ );
+ }
+
+ this.blockPing = {
+ count: 1,
+ first: new Date().getTime(),
+ last: new Date().getTime(),
+ block,
+ };
+ }
+}
+
+export const stateStorage = Object.seal(new StateStorage());
diff --git a/packages/core-blockchain/src/utils/index.ts b/packages/core-blockchain/src/utils/index.ts
new file mode 100644
index 0000000000..5ef9a13ed4
--- /dev/null
+++ b/packages/core-blockchain/src/utils/index.ts
@@ -0,0 +1,3 @@
+export * from "./is-block-chained";
+export * from "./tick-sync-tracker";
+export * from "./validate-generator";
diff --git a/packages/core-blockchain/src/utils/is-block-chained.ts b/packages/core-blockchain/src/utils/is-block-chained.ts
new file mode 100644
index 0000000000..5f3396a3e2
--- /dev/null
+++ b/packages/core-blockchain/src/utils/is-block-chained.ts
@@ -0,0 +1,12 @@
+import { models, slots } from "@arkecosystem/crypto";
+
+export const isBlockChained = (previousBlock: models.IBlock, nextBlock: models.IBlock): boolean => {
+ const followsPrevious = nextBlock.data.previousBlock === previousBlock.data.id;
+ const isPlusOne = nextBlock.data.height === previousBlock.data.height + 1;
+
+ const previousSlot = slots.getSlotNumber(previousBlock.data.timestamp);
+ const nextSlot = slots.getSlotNumber(nextBlock.data.timestamp);
+ const isAfterPreviousSlot = previousSlot < nextSlot;
+
+ return followsPrevious && isPlusOne && isAfterPreviousSlot;
+};
diff --git a/packages/core-blockchain/src/utils/tick-sync-tracker.ts b/packages/core-blockchain/src/utils/tick-sync-tracker.ts
new file mode 100644
index 0000000000..ae104568ca
--- /dev/null
+++ b/packages/core-blockchain/src/utils/tick-sync-tracker.ts
@@ -0,0 +1,52 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger, P2P } from "@arkecosystem/core-interfaces";
+import prettyMs from "pretty-ms";
+
+const logger = app.resolvePlugin("logger");
+let tracker = null;
+
+export function tickSyncTracker(blockCount, count) {
+ if (!tracker) {
+ tracker = {
+ start: new Date().getTime(),
+ networkHeight: app.resolvePlugin("p2p").getNetworkHeight(),
+ blocksInitial: +count,
+ blocksDownloaded: +count,
+ blocksSession: 0,
+ blocksPerMillisecond: 0,
+ remainingInMilliseconds: 0,
+ percent: 0,
+ };
+ }
+
+ // The total amount of downloaded blocks equals the current height
+ tracker.blocksDownloaded += +blockCount;
+
+    // The total amount of blocks downloaded since the start of the current session
+ tracker.blocksSession = tracker.blocksDownloaded - tracker.blocksInitial;
+
+ // The number of blocks the node can download per millisecond
+ const diffSinceStart = new Date().getTime() - tracker.start;
+ tracker.blocksPerMillisecond = tracker.blocksSession / diffSinceStart;
+
+ // The time left to download the missing blocks in milliseconds
+ tracker.remainingInMilliseconds = (tracker.networkHeight - tracker.blocksDownloaded) / tracker.blocksPerMillisecond;
+ tracker.remainingInMilliseconds = Math.abs(Math.trunc(tracker.remainingInMilliseconds));
+
+ // The percentage of total blocks that has been downloaded
+ tracker.percent = (tracker.blocksDownloaded * 100) / tracker.networkHeight;
+
+ if (tracker.percent < 100 && Number.isFinite(tracker.remainingInMilliseconds)) {
+ const blocksDownloaded = tracker.blocksDownloaded.toLocaleString();
+ const networkHeight = tracker.networkHeight.toLocaleString();
+ const timeLeft = prettyMs(tracker.remainingInMilliseconds, {
+ secDecimalDigits: 0,
+ });
+
+ logger.info(`Synchronising In Progress (${blocksDownloaded} of ${networkHeight} blocks - Est. ${timeLeft})`);
+ }
+
+ if (tracker.percent === 100) {
+ tracker = null;
+ }
+}
diff --git a/packages/core-blockchain/src/utils/validate-generator.ts b/packages/core-blockchain/src/utils/validate-generator.ts
new file mode 100644
index 0000000000..f517b58948
--- /dev/null
+++ b/packages/core-blockchain/src/utils/validate-generator.ts
@@ -0,0 +1,40 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import { models, slots } from "@arkecosystem/crypto";
+
+export const validateGenerator = async (block: models.Block): Promise => {
+ const database = app.resolvePlugin("database");
+ const logger = app.resolvePlugin("logger");
+
+ const delegates = await database.getActiveDelegates(block.data.height);
+ const slot = slots.getSlotNumber(block.data.timestamp);
+ const forgingDelegate = delegates[slot % delegates.length];
+
+ const generatorUsername = database.walletManager.findByPublicKey(block.data.generatorPublicKey).username;
+
+ if (!forgingDelegate) {
+ logger.debug(
+ `Could not decide if delegate ${generatorUsername} (${
+ block.data.generatorPublicKey
+ }) is allowed to forge block ${block.data.height.toLocaleString()} :grey_question:`,
+ );
+ } else if (forgingDelegate.publicKey !== block.data.generatorPublicKey) {
+ const forgingUsername = database.walletManager.findByPublicKey(forgingDelegate.publicKey).username;
+
+ logger.warn(
+ `Delegate ${generatorUsername} (${
+ block.data.generatorPublicKey
+ }) not allowed to forge, should be ${forgingUsername} (${forgingDelegate.publicKey}) :-1:`,
+ );
+
+ return false;
+ }
+
+ logger.debug(
+ `Delegate ${generatorUsername} (${
+ block.data.generatorPublicKey
+ }) allowed to forge block ${block.data.height.toLocaleString()} :+1:`,
+ );
+
+ return true;
+};
diff --git a/packages/core-blockchain/tsconfig.json b/packages/core-blockchain/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-blockchain/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-config/CHANGELOG.md b/packages/core-config/CHANGELOG.md
deleted file mode 100644
index 1158e8a0d3..0000000000
--- a/packages/core-config/CHANGELOG.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.0 - 2018-12-03
-
-### Fixed
-
-- Stricter regular expression to avoid picking wrong config files
-
-### Changed
-
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-## 0.1.1 - 2018-06-14
-
-### Added
-
-- initial release
diff --git a/packages/core-config/LICENSE b/packages/core-config/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-config/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-config/README.md b/packages/core-config/README.md
deleted file mode 100644
index 49ea1474a3..0000000000
--- a/packages/core-config/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Ark Core - Configuration
-
-
-
-
-
-## Documentation
-
-You can find installation instructions and detailed instructions on how to use this package at the [dedicated documentation site](https://docs.ark.io/guidebook/core/plugins/core-config.html).
-
-## Security
-
-If you discover a security vulnerability within this package, please send an e-mail to security@ark.io. All security vulnerabilities will be promptly addressed.
-
-## Credits
-
-- [François-Xavier Thoorens](https://github.com/fix)
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
-
-## License
-
-[MIT](LICENSE) © [ArkEcosystem](https://ark.io)
diff --git a/packages/core-config/__tests__/__stubs__/delegates.json b/packages/core-config/__tests__/__stubs__/delegates.json
deleted file mode 100644
index c615b110c3..0000000000
--- a/packages/core-config/__tests__/__stubs__/delegates.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "secrets": ["this is a test"]
-}
diff --git a/packages/core-config/__tests__/__stubs__/genesisBlock.json b/packages/core-config/__tests__/__stubs__/genesisBlock.json
deleted file mode 100644
index 1f6b5c1bf0..0000000000
--- a/packages/core-config/__tests__/__stubs__/genesisBlock.json
+++ /dev/null
@@ -1,896 +0,0 @@
-{
- "version": 0,
- "totalAmount": 12500000000000000,
- "totalFee": 0,
- "reward": 0,
- "payloadHash": "578e820911f24e039733b45e4882b73e301f813a0d2c31330dafda84534ffa23",
- "timestamp": 0,
- "numberOfTransactions": 52,
- "payloadLength": 11401,
- "previousBlock": null,
- "generatorPublicKey": "024c8247388a02ecd1de2a3e3fd5b7c61ecc2797fa3776599d558333ef1802d231",
- "transactions": [
- {
- "type": 0,
- "amount": 12500000000000000,
- "fee": 0,
- "recipientId": "DGihocTkwDygiFvmg6aG8jThYTic47GzU9",
- "timestamp": 0,
- "asset": {},
- "senderPublicKey": "03cb7bca143376721d0e9e3f3ccb0dc2e7e8470c06e630c3cef73f03e309b558ad",
- "signature": "3044022016ecdf3039e69514c7d75861b22fc076496b61c07a1fcf793dc4f5c76fa0532b0220579c4c0c9d13720f9db5d9df29ed8ceab0adc266c6c160d612d4894dc5867eb1",
- "id": "e40ce11cab82736da1cc91191716f3c1f446ca7b6a9f4f93b7120ef105ba06e8",
- "senderId": "DUFeXjJmYt1mWY3auywA1EQSqfCv5kYYfP"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d1",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_1",
- "publicKey": "03e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d1"
- }
- },
- "signature": "3045022100e3e38811778023e6f17fefd447f179d45ab92c398c7cfb1e34e2f6e1b167c95a022070c36439ecec0fc3c43850070f29515910435d389e059579878d61b5ff2ea337",
- "id": "eb0146ac79afc228f0474a5ae1c4771970ae7880450b998c401029f522cd8a21",
- "senderId": "DNL81CT6WNG1PHjobBmLvKwLV3UUscBymB"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "031137050d5fed0b5229b150257da2ac9c135efdf4bcb382b0ad0c197d7be458f4",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_3",
- "publicKey": "031137050d5fed0b5229b150257da2ac9c135efdf4bcb382b0ad0c197d7be458f4"
- }
- },
- "signature": "30440220124baaa04491287d0abbf5a167c9b0f5ac95c22b196f42ff3d275cc9a213c2fd02206e6ebada85f67063e642dbcde6b956f8c99c05f4b9c55f1551d3eebba6375043",
- "id": "c9c554056b3428951633a7059dd64dfcbd776fef7f4a156ea362b37ee6ce74c7",
- "senderId": "DG9LYv5rqX67wuGvGVa9is5k1r86LKCVTA"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "037def83d085778d7767a182a179f345207953441089081f5bc13f86d3891308aa",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_4",
- "publicKey": "037def83d085778d7767a182a179f345207953441089081f5bc13f86d3891308aa"
- }
- },
- "signature": "3045022100900cea3c2df393414899c9d74db57d89c9f311c70d08b974d0fd4a98bfae2fc902204a2aa51a1ec71da27c26afc033de6bd2d15978813c120c95e1a4dafca75ce876",
- "id": "c82ccaa16be0e3c7ff4a53e2807968b71a0d88115223c3af2eb320f32449ac32",
- "senderId": "DMSwarrHg5N9ZAZ6nsqPuUjyAU6gdRAM9d"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "033f28ad2e9b897d46f1e67c7c52070e9ca46b04c0679ebb21fb236719e38aade3",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_5",
- "publicKey": "033f28ad2e9b897d46f1e67c7c52070e9ca46b04c0679ebb21fb236719e38aade3"
- }
- },
- "signature": "30440220285188d8900cd3cffccf5e1de305b18856451dd04d2ed21165dffe9a7ce4afc1022009457be6bfe536971697105d47ad1f829738a5cacdb27a23c5d1e8a8dddf3ebd",
- "id": "ee6a19fff622ab4e6e96d159396de56d6034b4b18a9cf5c99efcf4e61b28e15a",
- "senderId": "DFcYHfCwhGWcBNy6cp48wy5SfXbQmfBYgT"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "023e577a7b3362e0aba70e6911d230e86d729b4cb640f0e0b25637b812a3e38b53",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_6",
- "publicKey": "023e577a7b3362e0aba70e6911d230e86d729b4cb640f0e0b25637b812a3e38b53"
- }
- },
- "signature": "3045022100afa56542dd473c424b36d4d9f24da68180cfd90527681ab84098f415b2544a8702201e8ebdd619a2dd200e37a57c39a4529afe76d35f6089c00f6dffba6bf7b8a836",
- "id": "0dcd6e380bd7eaef8724f64f4b86104ce7497308dacf775afbe6ec0d401007fe",
- "senderId": "D5e2FzTPqdEHridjzpFZCCVyepAu6Vpmk4"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "02af5e6341efc14f4ba39a9ff65e151cc7304fc742ce7b2678d9aa446c555ee9c1",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_7",
- "publicKey": "02af5e6341efc14f4ba39a9ff65e151cc7304fc742ce7b2678d9aa446c555ee9c1"
- }
- },
- "signature": "3045022100c8980155c8f8964d76baf3e8d690075708f1a84757c1de52e311772466382da2022012599acfc7839fa1ef6bbd445ab34555fb718491db3089f40d4842b1bc2d3178",
- "id": "8af6abb117c69c130e388970d595b741374b1bbca709d9e91459e9e3c721397b",
- "senderId": "DDLbnve6XK48cGsQiFhesUJQRQdKkZTfPh"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "02845161cfca4d6ddde8e0d53538b6f881fb3ad9383cd77cebc55375dd6fd17663",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_8",
- "publicKey": "02845161cfca4d6ddde8e0d53538b6f881fb3ad9383cd77cebc55375dd6fd17663"
- }
- },
- "signature": "30450221009bce7c5c10a4b6306cebe5724adfd3de049a425c44dd314a10154774764c11090220070fb775e71dda6a68f7fc9e0c762fbf96021908911f0de0ca8e9b0c613cb896",
- "id": "bd346035d4516b85fb3a2cce6260fdcc6f1c434999e586978e065de3bf98e02a",
- "senderId": "DDAHPjVTTV3uur653TB27fcLGh7XXWnvxW"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03f264a6d2ebb62279313a6fd7fec4e2244785839b625a0b0c261e689ce5401d87",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_9",
- "publicKey": "03f264a6d2ebb62279313a6fd7fec4e2244785839b625a0b0c261e689ce5401d87"
- }
- },
- "signature": "30450221009f74425c2ec50dbee462e735dee3e7917c8433fd5250ff09af4506c38d2df05902206a14a19b9a5defe3c8c59c77d52c182ea34d81d2e0b05dc5925133f2829a1960",
- "id": "b48068fb7c848ffd57e82a4d381f53bb69916f3943e0e8935971a028ba245564",
- "senderId": "DFHdEBuVCz5zfj8yeo3BmKEdsEKpMaYRRw"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03efd265a086c2a099cda4f4fd202adbac07567e1229ce5e6fe39963b714c1e2d5",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_10",
- "publicKey": "03efd265a086c2a099cda4f4fd202adbac07567e1229ce5e6fe39963b714c1e2d5"
- }
- },
- "signature": "3044022004df492965ed328134aa6443d38ac4dd951a640e00330da9aa4e80c1577af41a0220588f030f5f9584959647898bb977a1ffe6bba639b1c64a728880f2cd3fd7aa3c",
- "id": "73b3b4375e39aabe51ec205559cd728a18c987dabaa0599c611b3076c38c7a49",
- "senderId": "DL7Y6smfHHs3Ms3hAYmSYYd5PZukmtDY1i"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "027d616d20f03c375067676c79ff9787e8e42991fbd9e878501d704d23d246d9b0",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_11",
- "publicKey": "027d616d20f03c375067676c79ff9787e8e42991fbd9e878501d704d23d246d9b0"
- }
- },
- "signature": "3044022051c2f8af62163ca621eeb3087a35bfaca0d679f7be8b19a25972f5a4b24ad8c90220422f3e0e480bf1bf2211e871a102edc15a957c0f97a553d9d707418e6538df26",
- "id": "80f1d01158452da31d44f0c24f464a0ade37da51d2f61356ad75a019a91a1ff5",
- "senderId": "DBVoRSXBHBPPvssBXrswv22r4dUSpN1fbA"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "038918951152a37b74dfe61115f83e4b5e3521145065650c4a6d3e94add57d9a9b",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_12",
- "publicKey": "038918951152a37b74dfe61115f83e4b5e3521145065650c4a6d3e94add57d9a9b"
- }
- },
- "signature": "3045022100facf6ed992c28d41595419666b006800fcb33c6bad4b522e013b4d688e51dc8502207695e968059f7a35486389c430d6a3037e69d3e5f1d4f0a294d8818e4750cf0d",
- "id": "86d76b0aad8f496d8c20926bfdeb50ad10db242ea6152b68266680c48e1e1aca",
- "senderId": "DHsSK81gRWjgNx1A9gtHgkRsEwshsog7AM"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03231d8f2f39925fa79efc8f8561e6a8d29b95164a753cbb604a46e8a2e96606fc",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_13",
- "publicKey": "03231d8f2f39925fa79efc8f8561e6a8d29b95164a753cbb604a46e8a2e96606fc"
- }
- },
- "signature": "304402204c627ec3d24fb7b4f86709c0566cee9909ebddb26039e87a2fa673f1f7227362022003be5aa3303b8f4cdab768f80b4699440a61814950cab0fd983526771c4c52ec",
- "id": "464614909ac7531a016a0489d78defe262dc0934324f41199975ad42a86f37ac",
- "senderId": "DDr7UTGQuPTjxLDWZ8RMjWJMKNXAMj3Bor"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "021e6d971e5885a3147ddf1e45bf5c8d0887ad9fc659e24bdf95c2c9607e7e3fe8",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_14",
- "publicKey": "021e6d971e5885a3147ddf1e45bf5c8d0887ad9fc659e24bdf95c2c9607e7e3fe8"
- }
- },
- "signature": "3045022100898e59efe518745d3eb3f2b16f7b6192e3289bb4289d43013224549f2015aa4902204e7be92cbba37a05551151e46224da4e5d0ad86ee2106d3a9c0b9afee5f1c4cf",
- "id": "9559866ff439959529f69b0947ad2e72d739511ee1f6533c0bca2ebd6dd4ae4a",
- "senderId": "DRXNNQ9gQXh6VNUVKaAn9xHAViyiHKtBHZ"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03d5b3efbe98631443c5cdf4de8a610dd2655b86427bf70aa209451b54256f6758",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_15",
- "publicKey": "03d5b3efbe98631443c5cdf4de8a610dd2655b86427bf70aa209451b54256f6758"
- }
- },
- "signature": "3044022037fa085e37a582b2e0b3734d44b813bb18be939f73100c5b6f977d4f53ae708f022064ae54f6a1b17b193ab6b6d633f7b7a7b8171a158cdba7480afe380f383930dc",
- "id": "7bab92d5397a4ad291c5d01b8d681e480d19b437a7ab5cbd4c6807c96ef2716f",
- "senderId": "DT12wf9erZyNJbBQrpbPDmfH3J8txiDgTE"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0297f2e8e609b2a6799214481e7573a043a197f8adf7b8bb306576fc3da83d2aaa",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_16",
- "publicKey": "0297f2e8e609b2a6799214481e7573a043a197f8adf7b8bb306576fc3da83d2aaa"
- }
- },
- "signature": "304402202eee94bc3b53c64f8dee7790fe3eed8639da8faf0aa1f785e921cf139df0fb7e02200224efb0c07ae3972287c12a32143c1356adb93e00ac9e04a1358c8245a24cab",
- "id": "1e59740fa596b615231660974d0b656122b799a8b13102ade8c1b779aa5de7b5",
- "senderId": "DKGYWPSqa4m4z6h3433rNFbWPDdvHj5wwd"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0361b914fd5823bf39ae467e95d99e9f6ddb7d85cc6df3055ce00274b8e4a976cc",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_17",
- "publicKey": "0361b914fd5823bf39ae467e95d99e9f6ddb7d85cc6df3055ce00274b8e4a976cc"
- }
- },
- "signature": "3044022002ad92b9b9d81dabf96ac7d90034debc55eeeae879b3fe6ffc026bde86bb7ad902205c57d31c5e5e0099b504ba4c49e220a00ff325dceb64c46aefbb7a0ad8570099",
- "id": "bf305776da902802923c19b9d2c7f1a809b0847992131cfa578d5e5518c924bf",
- "senderId": "DJshaeFyHcFTjiGJnVPaDmFXhnJ9bp96i5"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03fa6bc09bd2ff348b304e0cfbc2d2ec50aa3b9aee0de6a66c13fcd8ee5ac891cd",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_18",
- "publicKey": "03fa6bc09bd2ff348b304e0cfbc2d2ec50aa3b9aee0de6a66c13fcd8ee5ac891cd"
- }
- },
- "signature": "3045022100be50b19c17a9ff221aae20394a45d92ea47e8c1072b6d5a302937d2fc48cba8002205e9bcb3471a734c07ceff0083ad9ba1570507a29e5014e889ba42a85e797cb5e",
- "id": "44e48364b5b8cff3c68ae03de7dfde8d7ba6bcb99bf82b32fdc8bc3d0d9adeca",
- "senderId": "DSuNttSb1UvCWg8iormfwPwi67EA84P5Mu"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03241957edca9ed28308e35cbf36762d22de706ebbd7c6a3a2d235d905d660c5c7",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_19",
- "publicKey": "03241957edca9ed28308e35cbf36762d22de706ebbd7c6a3a2d235d905d660c5c7"
- }
- },
- "signature": "3045022100c11f8b863133535192e6c3fff20253a2695a2df74cdf1445d4ca0966803f708c0220200d4c2723d84f6334ba5d1cc1a0d45854867f4523fbcc9d09b3d53dd1972950",
- "id": "5cba288f9ffc1361ba8f7f19f28347ffd917f37df8cf46ba1e0816725f288528",
- "senderId": "DCZt1ozEVvPdYVvkHmUKK6k7gnyNNQDpMq"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "035ae2364c838bc21edf4c04a99c85799f26fb02cc0740c5a1c67d4dc1748ff913",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_20",
- "publicKey": "035ae2364c838bc21edf4c04a99c85799f26fb02cc0740c5a1c67d4dc1748ff913"
- }
- },
- "signature": "304402203066f06a1c165795d8a069499a8c0998913ec93e689219f14145754aa3e26e4e02206e9f88da16f1f8a8ebaf481eff798452487738714fe9b5694fec6a5ef8c152a5",
- "id": "ada1696532f7faad1dda594bc6db7bfc029a1759402c924348b74222873a3a27",
- "senderId": "D7JyqWMPKhhRNQcKTAvrPGBjEjjBcGgPca"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "023aff4a16c3876e885aea70e5bce9734ce5acc95a2c41c9783f5acd617f7c7533",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_21",
- "publicKey": "023aff4a16c3876e885aea70e5bce9734ce5acc95a2c41c9783f5acd617f7c7533"
- }
- },
- "signature": "3045022100f5150c23596b9479c8b277401ab9e7da9b2275436f3927dabd70395e52c3ea7c02204e318d498b0176b5f05bb96418c49da3375a8d9b47b3b1e72a6f4db30b3f8c34",
- "id": "e186a679f2e47300ec2f24c670192bcede1cb12f359cb8e827374b22f41fbe12",
- "senderId": "D6itxYJr4n7ZZk2bd9cZbJE1xaDmpfkNFL"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0217d7ce9c3754f7fc7e5b4c64a1ff397dc75931cd6c92e32d8b42068ad50fe4eb",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_22",
- "publicKey": "0217d7ce9c3754f7fc7e5b4c64a1ff397dc75931cd6c92e32d8b42068ad50fe4eb"
- }
- },
- "signature": "3045022100b84f69a7ff67ed147fc0a750c3b7b2ecabd582b6d0cb698c0bb4a531daa6ca46022039d2722e486e1674d0db422078d63fcdb90b21bed0dcc1265adff72d0c2bf8b9",
- "id": "86d9d146b62dbafe212aba5ec9764223b67f72c3c1aa93e54a270e3a528a8b20",
- "senderId": "DDy4aKhF3cMadGhjFZnjaA1tx2rwnSEWcc"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "024019207f50dcb3e8aeb9ac1b00993d2bf131346e7e6d296429ea813a8373818e",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_23",
- "publicKey": "024019207f50dcb3e8aeb9ac1b00993d2bf131346e7e6d296429ea813a8373818e"
- }
- },
- "signature": "3045022100aa83596b740639ee8947aa6d0f0ee123e4a5b87c39a4c6dd8a50304d4a7c97d102205fd45f85f5bdb076585a77888ef880bea52ade689731dff694d777de34913efc",
- "id": "6301b791844e02116df528b1ea46d788e91521189c3828ce224e45a1b72cda59",
- "senderId": "D6BwyDJkNFkaDLedcJTE4rPUw5bRtb4K8f"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0275db912c21dca0f0213a76f4544137d7c741b47f281cfd4f8b7cb8187e7ce3cc",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_24",
- "publicKey": "0275db912c21dca0f0213a76f4544137d7c741b47f281cfd4f8b7cb8187e7ce3cc"
- }
- },
- "signature": "3045022100c7eda0d9cd7ef522615643d1b985c73add2d3612344bdcc0117779fa4f4f54d302203e33fb5d185f5174e9cb7634a3d307b74d3bb56cc2354024ce69c74905a85203",
- "id": "eee776fcb8024469eacab3e4b23c3d14185326431369aa84f17921abab8ad0ad",
- "senderId": "DHQSmrRdfYAp9Y6CuebKnkoQNzuN7Pk2oQ"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0331c615ca4bc89d4eeb8d7a9cfbb5c0d4ce49d2f480afbce499b0c7f8c6a24f2f",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_25",
- "publicKey": "0331c615ca4bc89d4eeb8d7a9cfbb5c0d4ce49d2f480afbce499b0c7f8c6a24f2f"
- }
- },
- "signature": "304402203e69be3a73c5917d89d58f3c0ae18febbbf364d3f9dfbec6b526a5294f9c435902201750bcf6368c181aabc53c73fd271a2967a6f215e1d0506eded5dd1800fea1c8",
- "id": "ec3d17c6d38c0b9848c7cb57b968efd1f3872b1d1b8bcfb74bae2b0aaa15877c",
- "senderId": "D6EVFQx5Z7M2X9DWXHtfX51CtVekuKPMQF"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0338ca9b719f8047580eed23b64a40aecad3803a12c0dde83e3ec2c2a9bfaa8147",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_26",
- "publicKey": "0338ca9b719f8047580eed23b64a40aecad3803a12c0dde83e3ec2c2a9bfaa8147"
- }
- },
- "signature": "3045022100e0bf90949739012b641793da162b3daa88b34c8753ee31b26850729e9df579810220439a3f2f1b8e719767ee68df46f4bc1f18c8c3b2da4118edff22396616d319fb",
- "id": "14cd65c5f28f4cefc7c0157518a24f90c2260eb7166105b6b3358d91164ddf39",
- "senderId": "DLCQ1jPsYbBCV7JfUJTasKbKoyGbK4a4HG"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03127001718bee76f14133272f0f4a928ffa8c2b38cafd94d7100253dac732c644",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_2",
- "publicKey": "03127001718bee76f14133272f0f4a928ffa8c2b38cafd94d7100253dac732c644"
- }
- },
- "signature": "3044022003d2e76aca2848aedfe25415c11b9368dc72f687b66bef4527b40e2997b86b8c022076f7f82cbeb282d26535a2c1f0af0f02b48025d42c1bd56ac687fba1a3adb706",
- "id": "0daff3992b54b1384f52f751c933c727cbaaf4fac435eba88a1817a425753614",
- "senderId": "D9rv3h61heDYHQ3b3Xk3V5epHSTTC6Vn1d"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0241734825ba45b6de29d6f26242c25ae1ef125b82615ee89a9fdd5b0f3c6b5132",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_28",
- "publicKey": "0241734825ba45b6de29d6f26242c25ae1ef125b82615ee89a9fdd5b0f3c6b5132"
- }
- },
- "signature": "3045022100bb2903424bcd0a72da531470779144d60286191bea1b200c5617ae4f92229ba6022046a876e3e6cb85469a16f34d2f937e2eef787011c6a313ee50258f15116148ac",
- "id": "bd17dbd23f8dbba2736688702ac185a87c88c43b24ee6d7764a5b4138b2f38b7",
- "senderId": "DAcQPbKa8zBWwDHbxj37N13C61iseMDWM9"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03b9409203d7091e3f4d49168529b749e942ed18f21beddd236d57d692f09a8f86",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_29",
- "publicKey": "03b9409203d7091e3f4d49168529b749e942ed18f21beddd236d57d692f09a8f86"
- }
- },
- "signature": "3044022016d7ecfa776930a6f83464548e7a686735fde752903539a38eb9da0ce2488bbd02203c5e23a4072c8de35a90b296145cce3156a31cc0d754b8a37d363fb088bc7387",
- "id": "16e02d3ef24dca4b03a1e489e20335224f18d888ed04f7e3512572f8e0cf92ae",
- "senderId": "D5mmTaDAMSyPNKiDKrqwTFGWzWrZA3xaF8"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "02c7b92a2d0027309e21855cf9c42a432b21ad13925e9dfc206f9c01e18fefa08a",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_30",
- "publicKey": "02c7b92a2d0027309e21855cf9c42a432b21ad13925e9dfc206f9c01e18fefa08a"
- }
- },
- "signature": "30450221009de8828a7ad87cb5d52900e09d5beb680f9edc7640a3707d08a379511a7ba0f102202aa1d9294f9631f1325f252adb87c0d866e7398ce410037a42dc861d94308e15",
- "id": "fece556bee4de2c7f1bb3099a05a84a33d0c963979fe1a222a899c13b7abb1fc",
- "senderId": "DJ3NywAwQh4srbooLH1jTs9ma1hJE79v3z"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0221297804a26a93bb441a9d20a2916abf27fa7b29967678ef1a7a58062f73f40d",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_31",
- "publicKey": "0221297804a26a93bb441a9d20a2916abf27fa7b29967678ef1a7a58062f73f40d"
- }
- },
- "signature": "3045022100b969611ef532557fa3da8a0325b2c88f3ebec954d64f158431d86b8e07929ea50220520affdcd0728cb7c5f63a58a1200d44133e90b1f7a6a9e28744ad6b0dcc2a75",
- "id": "ee086317ea2fdc522f5eb502a0db9f3d4955b2318559e40a1f22a3f5f8d6344b",
- "senderId": "D5P7eti7FUY4Tk5KXoxdf2tDAVQrRVCESA"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "027f504f6f20648e3bf171952629c7b868a2f799aa4b60f8eb3fe96afff16bbef0",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_32",
- "publicKey": "027f504f6f20648e3bf171952629c7b868a2f799aa4b60f8eb3fe96afff16bbef0"
- }
- },
- "signature": "3044022006be7cbaa74089cabe47d02621f756762587d210a3f211ee941b5fcd0650908f02207d4040408bd25a2de03e5724362735ee8ad36c099b0c16efd4716e1dd7ec62ae",
- "id": "764dd21aa4d0e2e0fa17bb2ff5e7ca304995d9e3593542badecc8ed24d5ea3ea",
- "senderId": "D9q26yBTrEYuxHg7bbfZphv6129KvLu4v2"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "031954315b84db8f49ab7ee21357270450bb68d06b34472e5e93ddfa5710edc0c9",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_33",
- "publicKey": "031954315b84db8f49ab7ee21357270450bb68d06b34472e5e93ddfa5710edc0c9"
- }
- },
- "signature": "3045022100859f93df994d86995fdf834bfe86b41eebaa04e5ab7d09f0b37acb50d313cd9802203c8993b793602c96d305fa795a9f2459f4706b340993584f3c56579392c0995c",
- "id": "efd9e7c638afe62bec9be61783193ea52eea7b335053bd5af6c758d5b0e5847c",
- "senderId": "D9iPFb5kAVnuDdomehRP9LncJj5ng2vrsr"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0267b310eac2bb0d6594de382a1ab74ac75b91e9d64a590b6249247b10fd9be829",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_34",
- "publicKey": "0267b310eac2bb0d6594de382a1ab74ac75b91e9d64a590b6249247b10fd9be829"
- }
- },
- "signature": "3045022100a678978ab899e3903e760ee98640e3f658792a096a8d771c575944af6536cfdb0220428c312f1e0eb4be73ce4b256a754447570176200cfb6c09b3eb55f66526dd80",
- "id": "70edcce5df67a250b6ba3567879bae6379ce4c688597fcedfbfd0313da6998e8",
- "senderId": "D6xZmtyBzZKCEkK29JNPAD581TJ8XXrXYn"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "028f32320c66a89779756b04946d2aa256dff6cd547349d46e1938710063e387c4",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_35",
- "publicKey": "028f32320c66a89779756b04946d2aa256dff6cd547349d46e1938710063e387c4"
- }
- },
- "signature": "304402206bc95876897527b39eacf4c961f9c036a9c8a0e53a17ce925c592d079fa643030220096e115d7fbd54aca4af7f621d64178dfcf2c13361106a3e3b5025dca97b44ee",
- "id": "7f23f44157f3a677e81514fa431227410a27442e5fd1f2491b177c0f580f296d",
- "senderId": "D9dW4eXJjABDQXSQB9GtvY5UBuRWWWejWb"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0218b889a24988527ab3948d80f97cfc37b923082e1f0398bc162190fd66ec4dee",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_36",
- "publicKey": "0218b889a24988527ab3948d80f97cfc37b923082e1f0398bc162190fd66ec4dee"
- }
- },
- "signature": "3045022100c40a3f4cf15f9274e2b25ca8608cb965316aa0f00fa77817b79620ad8ccbdd5902206203a1043b03ba58aa9b7399694f8215cf45d30eb0caa748cc06f1a85a8faea9",
- "id": "a65244ed17a9280aa694abdf6804b1a0b78dfc052b4845abcd3c89380159b29e",
- "senderId": "DFHK7SdmPdjxNZ9uweqLZAv6v5GQ1NnBNe"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "035392ee88c60617764b4fe89ae2cc96560dfa5f992b03be31ce5680db9b863f73",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_37",
- "publicKey": "035392ee88c60617764b4fe89ae2cc96560dfa5f992b03be31ce5680db9b863f73"
- }
- },
- "signature": "3044022036200c3191f8f01b77676644b9b94728b5afb2ab2de8c5c7c5582e795465661c02207848f1f2f0ab378d8906fd45aa048f354d5dbac4cb87c15973ffa86fe84ff0cd",
- "id": "219e0942afe5f65c548ec2118a1c49febb7ec03fca4334ac16649062db9d146b",
- "senderId": "DSh7AAC9KahXU2JZ539HAqEa5sHafxsxDQ"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03e75127d1deccf65844a1761bd26611b6c65c5b51a52eba27e3ee20a539fd63f1",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_38",
- "publicKey": "03e75127d1deccf65844a1761bd26611b6c65c5b51a52eba27e3ee20a539fd63f1"
- }
- },
- "signature": "304402201a2990b2baae72f5cc8f2d1890f328e4082af0cf2a787d8f05208c3424ce089d0220790dbc7606dd6c03568fd0a771e9e8e89557257238ae90cfcb3bb8f3b475987b",
- "id": "ee9ad2a66e9b2009a9fc671f80d0493803fc422161140169c7bc1fd401cd9ad6",
- "senderId": "D85WuxGZrFs1QUYTvnRpmc6dd8rmBbpnaX"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0329fc1580906307ac9f2f55cec66e47983f8287d542408fb19f473a305d3638d8",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_39",
- "publicKey": "0329fc1580906307ac9f2f55cec66e47983f8287d542408fb19f473a305d3638d8"
- }
- },
- "signature": "30450221008f66e89ec4c7af4b77e5b7ff36c542cc02672c8df70806b5a0fab7a7e8c7067502200d99ba19ceb1b471c39c4e95107ad6f8b978a623a790080b16f863347fe06b4f",
- "id": "dd3077ed04a76343d340074270ce9826354802bd99e08cb864c1c5ad09f367df",
- "senderId": "D85kwsBJKZ4pw5uQpc81eRj95f6a536AP6"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "035ec848e9388877dac88f121d19c8f5e870ac90d8ccb0116be9f734e4bd1a9405",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_40",
- "publicKey": "035ec848e9388877dac88f121d19c8f5e870ac90d8ccb0116be9f734e4bd1a9405"
- }
- },
- "signature": "304402202b220d6c028bc23213edddaf303f18eef059551891aadbf7a4b4d7d3287457bb0220245678354bb8960b42ba2f2ceb12f926e82ff0d027b44988d799c8c0d8d7d9f2",
- "id": "3afc6ea52b8edc7df0230ceac71baf45460f3bd761c5e75fe796bc7415063220",
- "senderId": "DGBJdDadBwJD2xY8VsdAykdd6vPakMMUt6"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0356f5885306e45402aeb354a74d13c104699b3b53da46a5e922e4a6d6132a67e8",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_41",
- "publicKey": "0356f5885306e45402aeb354a74d13c104699b3b53da46a5e922e4a6d6132a67e8"
- }
- },
- "signature": "3045022100f18bf2e013f2d9dcac013a76037d787f79baaa65f4f31ffe2b4ed8de249bdc8902202abcf77e809599d3e3a96225363c8e760ed4b4e20f97645547b381dba830c3da",
- "id": "aea1fc173a2f4a9233b0fe59a5f6804167bee5658cb3e4e19dfe2be20f5772cd",
- "senderId": "DG4VbapL3H39NJLB3DqQEefU47EMVqtxVw"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03ff8ab980434516ca28c982d0ecc8fc3107116d6c8b3e09c7ee5033f32adbd2ff",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_42",
- "publicKey": "03ff8ab980434516ca28c982d0ecc8fc3107116d6c8b3e09c7ee5033f32adbd2ff"
- }
- },
- "signature": "3045022100e938d9901afeaa5a56d18abd9292ace93be03c84c09a6c4cb58fca96dfb54bc502201e921d27f9886d189f803b14d93655a42c4e095d49ee61051a4e70c7a173f3f1",
- "id": "f18426d3ef81d4b7bf0337d70afcecddbd6db2206a2f139f1ca5823c381c7817",
- "senderId": "DC3oNWedP48ypGxAeKbFC7gMjWxcNc2JhL"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0393f1590771a8ad1cf2baa086858f3029c4444cb82243917a7011f1f66cf8fd05",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_43",
- "publicKey": "0393f1590771a8ad1cf2baa086858f3029c4444cb82243917a7011f1f66cf8fd05"
- }
- },
- "signature": "304502210095745c36a8af07e21546bd064f1ed1bd90e6c2a8db9c0c8e4853d0a8255443db0220259d2ce3677abb42f08b9d22aa13bbe383fd882ed38911b738ebaefc04589694",
- "id": "6c51bea35b5e3270dcf7b7dfae8d984e19f476ea7e0435f157c4e0d22b7e7ea1",
- "senderId": "DJm2sfcUKhyxakowY9TjyAytkdq7JrFgVj"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0234e24ff1dbc447c804eb385cd05bbd1dc59ef03b44a3346b13e7cccf00b61075",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_44",
- "publicKey": "0234e24ff1dbc447c804eb385cd05bbd1dc59ef03b44a3346b13e7cccf00b61075"
- }
- },
- "signature": "3045022100ee1e8df480f2be042386d383d776b3fd6bd2d3f5a9035071153f23dbfdceeaae02203a0834aae4834da3ca7858779d474b9255ead754867d5b4a18873e9ecaa5045e",
- "id": "66fb3e36233f4577ba585ccd7daf83e62d8df262d3d832b806479ac67c1ef35d",
- "senderId": "D5oS8xfNebiPsjpwPWoZS6sA9qcYjTGT5h"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03a5789a4486f20f1fdca78a52b528b3bf9952e7c057de71a22adcfb444ba4c5d3",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_45",
- "publicKey": "03a5789a4486f20f1fdca78a52b528b3bf9952e7c057de71a22adcfb444ba4c5d3"
- }
- },
- "signature": "3045022100cb037530bcff9a4d19899431648747022c28aa3239563379d96692bd525eb38902205f3cabb8dd470d9eb3d425e333ad1bc9f0643d489c600a811748fb5f4a203f7f",
- "id": "5df9c5e350136571af4b86697bc9d4cfca3ff8b669e254b36f00be1dbde063f7",
- "senderId": "D5SzHHdPdGqYUkH7BGNkmGHEUqfZrWb17r"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0347f692345fa7bf90e944eb55246da5f9f595d3f5a20ad50aeb6f9b973aaae17e",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_46",
- "publicKey": "0347f692345fa7bf90e944eb55246da5f9f595d3f5a20ad50aeb6f9b973aaae17e"
- }
- },
- "signature": "3045022100c377efe5ffab58017473699cd7c839dcf48fa5b20b5ddf9bdc4801e22a579b2b02204d35c1a1416069544e3ec01d2ce21bb409f9f2fa4adedc8c03d6417c034a3fec",
- "id": "da4cfad78e37d56421dd6676e5618a507340ef1e496831d1968c509e35ef9202",
- "senderId": "DCLdibuZB6UsJP8KmdzcDLWzizrDtJQuxt"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "028a32b441377a69aa76e867026f3109b2f0aef8651fe91e2a4ab01eff102a6b98",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_47",
- "publicKey": "028a32b441377a69aa76e867026f3109b2f0aef8651fe91e2a4ab01eff102a6b98"
- }
- },
- "signature": "3045022100e098672958be15989bb125d9018adb4a54e95ab664e64a673997e617e28b39df02206e8459997074d5976b77f90eb9d7180e9d4a0e0efdf433958ffeb2f04d9de382",
- "id": "85bafcd07e7ba47ec95cb5b5a6759d4f9f87e036bb7660c7717504e845ef975e",
- "senderId": "DSkivgRyimdAVqmm2ZAKwKmKN39WEbbPnL"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "0223ac52179903e79865b9a98cf0b52ddc1ab46180c157e8f6bd1e63e7f14fcf31",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_48",
- "publicKey": "0223ac52179903e79865b9a98cf0b52ddc1ab46180c157e8f6bd1e63e7f14fcf31"
- }
- },
- "signature": "304402200a005716f67d6cd3963a3c752c95f1bca01aa127c91ab1a632eb3022d11e3e67022024c4746078e440da441bcb366ee8999ffd2419e9a6f9cbf971d696d5b7f8733b",
- "id": "0df1ed07d3f95ddf0385bad83a17b3a8fde6bd6532cd3479e48668064672b34f",
- "senderId": "DDgKyKqdA6SuamB1eW77WvFu6RQFMZoU36"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "039de0390f28c7731d86ae7006a31888f12856cde3cc3c2619d4d4a42b6dfd6c51",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_49",
- "publicKey": "039de0390f28c7731d86ae7006a31888f12856cde3cc3c2619d4d4a42b6dfd6c51"
- }
- },
- "signature": "3045022100efa5d51ca79992be4a87af049b3e9ec1b796576e4d937ea9e3760ab0bdcd301e022027e22a6c3395df155bd399643c241e4cc317eaead1f273fd7a709339dfa9dc99",
- "id": "436bebc107fad38e944fd14785e09f0600df4d75d31cf3eac53f850462d0be74",
- "senderId": "DKCaoaXApw1xE7K1BJcVkr1KGzjKmFWyTk"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "02b87b0e70a7ae10613390f405620e24c495ba2b0cfcdbc67688e9b483dea564ee",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_50",
- "publicKey": "02b87b0e70a7ae10613390f405620e24c495ba2b0cfcdbc67688e9b483dea564ee"
- }
- },
- "signature": "304502210096cdd35f803a37730ac73a97a23061dceac96319c67bfb1ddcfbac737febe96102202fa0b279f697da3afc043ffd3ecc838789be07ff119b5527a5c13468cecf66e9",
- "id": "bb65f9dbe6272fd07a555fc86762d6a487f538b972f2926ff7698cdc906a32df",
- "senderId": "DJQXFKEguZVabsAs46JbXXnQJ5jFhUtN9m"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "03c9f8f4001216603c152b4b4429c2ead322ac34672999e808d567a7d1140e46be",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_51",
- "publicKey": "03c9f8f4001216603c152b4b4429c2ead322ac34672999e808d567a7d1140e46be"
- }
- },
- "signature": "3045022100ee961089d02d7bb68fe2257f6a972eeaf6e2c1a1ad2f491c417e161fedbb556b02204c834644e5b5cde9a0b3f92fa23bade7670efab0a067597f6c151ee633932706",
- "id": "cef44df9684f05dab67c0568a2c5295bb50cbb3c88f5cfbe672365bda274620f",
- "senderId": "DKpt7cm2tZk4RPLyQ5ugwEH7gkriRaA7ov"
- },
- {
- "type": 2,
- "amount": 0,
- "fee": 0,
- "recipientId": null,
- "senderPublicKey": "02cf70f73328d490cfb03ee822d3fc0cf9259d67c0564e843491e739501809d657",
- "timestamp": 0,
- "asset": {
- "delegate": {
- "username": "genesis_27",
- "publicKey": "02cf70f73328d490cfb03ee822d3fc0cf9259d67c0564e843491e739501809d657"
- }
- },
- "signature": "30440220645b912b60f829c0bce58bfe9890ef9253418b6898416aaead663bdf158a99f2022061abbbabd454ec7f7e3f4b502216eec28110e945a4b9b913b1fc0b9758e7e6e4",
- "id": "09408dbcf3e3e0835bf92a05330c023a7d6471f3825301a34efa094e0fd4fc30",
- "senderId": "DQfjSqDuKr5YZaLAF8rWpFMqMYwEbPtGKg"
- }
- ],
- "height": 1,
- "id": "13149578060728881902",
- "blockSignature": "3045022100a6605198e0f590c88798405bc76748d84e280d179bcefed2c993e70cded2a5dd022008c7f915b89fc4f3250fc4b481abb753c68f30ac351871c50bd6cfaf151370e8"
-}
diff --git a/packages/core-config/__tests__/__stubs__/network.json b/packages/core-config/__tests__/__stubs__/network.json
deleted file mode 100644
index d57a1c1694..0000000000
--- a/packages/core-config/__tests__/__stubs__/network.json
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "name": "devnet",
- "messagePrefix": "ARK message:\n",
- "bip32": {
- "public": 46090600,
- "private": 46089520
- },
- "pubKeyHash": 30,
- "nethash": "578e820911f24e039733b45e4882b73e301f813a0d2c31330dafda84534ffa23",
- "wif": 170,
- "client": {
- "token": "DARK",
- "symbol": "DѦ",
- "explorer": "https://dexplorer.ark.io"
- },
- "constants": [
- {
- "height": 1,
- "reward": 0,
- "activeDelegates": 51,
- "blocktime": 8,
- "block": {
- "version": 0,
- "maxTransactions": 150,
- "maxPayload": 2097152
- },
- "epoch": "2017-03-21T13:00:00.000Z",
- "fees": {
- "dynamic": false,
- "dynamicFees": {
- "minFeePool": 1000,
- "minFeeBroadcast": 1000,
- "addonBytes": {
- "transfer": 100,
- "secondSignature": 250,
- "delegateRegistration": 500,
- "vote": 100,
- "multiSignature": 500,
- "ipfs": 250,
- "timelockTransfer": 500,
- "multiPayment": 500,
- "delegateResignation": 500
- }
- },
- "staticFees": {
- "transfer": 10000000,
- "secondSignature": 500000000,
- "delegateRegistration": 2500000000,
- "vote": 100000000,
- "multiSignature": 500000000,
- "ipfs": 0,
- "timelockTransfer": 0,
- "multiPayment": 0,
- "delegateResignation": 0
- }
- }
- },
- {
- "height": 75600,
- "reward": 200000000
- }
- ],
- "exceptions": {}
-}
diff --git a/packages/core-config/__tests__/__stubs__/peers.json b/packages/core-config/__tests__/__stubs__/peers.json
deleted file mode 100644
index f1f6038735..0000000000
--- a/packages/core-config/__tests__/__stubs__/peers.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "blackList": [],
- "list": [
- {
- "ip": "127.0.0.1",
- "port": 4102
- },
- {
- "ip": "127.0.0.1",
- "port": 4202
- }
- ]
-}
diff --git a/packages/core-config/__tests__/loader.test.js b/packages/core-config/__tests__/loader.test.js
deleted file mode 100644
index 8db2ae1f63..0000000000
--- a/packages/core-config/__tests__/loader.test.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const path = require('path')
-const configLoader = require('../lib/loader')
-
-const stubConfigPath = path.resolve(__dirname, './__stubs__')
-
-const stubConfig = {
- delegates: require('./__stubs__/delegates'),
- genesisBlock: require('./__stubs__/genesisBlock'),
- network: require('./__stubs__/network'),
-}
-
-beforeEach(() => {
- process.env.ARK_PATH_CONFIG = stubConfigPath
- process.env.ARK_NETWORK = JSON.stringify(stubConfig.network)
-})
-
-afterEach(() => {
- delete process.env.ARK_PATH_CONFIG
-})
-
-describe('Config Loader', () => {
- it('should fail without a config', async () => {
- try {
- await configLoader.setUp()
- } catch (error) {
- expect(error.message).toEqual('undefined (object) is required')
- }
- })
-
- it('should succeed with a config from a string', async () => {
- const result = await configLoader.setUp()
-
- expect(result.delegates).toEqual(stubConfig.delegates)
- expect(result.genesisBlock).toEqual(stubConfig.genesisBlock)
- expect(result.network).toEqual(stubConfig.network)
- })
-
- it('should succeed with a config from an object', async () => {
- const result = await configLoader.setUp()
-
- expect(result.delegates).toEqual(stubConfig.delegates)
- expect(result.genesisBlock).toEqual(stubConfig.genesisBlock)
- expect(result.network).toEqual(stubConfig.network)
- })
-})
diff --git a/packages/core-config/jest.config.js b/packages/core-config/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-config/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-config/lib/index.js b/packages/core-config/lib/index.js
deleted file mode 100644
index 86e71cb89a..0000000000
--- a/packages/core-config/lib/index.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const { client } = require('@arkecosystem/crypto')
-const loader = require('./loader')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- alias: 'config',
- async register(container, options) {
- const config = await loader.setUp(options)
-
- client.setConfig(config.network)
-
- return config
- },
-}
diff --git a/packages/core-config/lib/loader.js b/packages/core-config/lib/loader.js
deleted file mode 100644
index b98060672d..0000000000
--- a/packages/core-config/lib/loader.js
+++ /dev/null
@@ -1,167 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const axios = require('axios')
-const dirTree = require('directory-tree')
-const fs = require('fs-extra')
-const ow = require('ow')
-const path = require('path')
-const { configManager } = require('@arkecosystem/crypto')
-
-class ConfigLoader {
- /**
- * Make the config instance.
- * @param {Object} options
- * @return {ConfigLoader}
- */
- async setUp(options) {
- this.options = options
- this.network = JSON.parse(process.env.ARK_NETWORK)
-
- await this.__createFromDirectory()
-
- this._validateConfig()
-
- this.configureCrypto()
-
- return this
- }
-
- /**
- * Get constants for the specified height.
- * @param {Number} height
- * @return {void}
- */
- getConstants(height) {
- return configManager.getConstants(height)
- }
-
- /**
- * Configure the crypto package.
- * @return {void}
- */
- configureCrypto() {
- configManager.setConfig(this.network)
- }
-
- /**
- * Copy the config files to the given destination.
- * @param {String} dest
- * @return {Promise}
- */
- async copyFiles(dest) {
- if (!dest) {
- dest = `${process.env.ARK_PATH_DATA}/config`
- }
-
- await fs.ensureDir(dest)
-
- return fs.copy(process.env.ARK_PATH_CONFIG, dest)
- }
-
- /**
- * Load and bind the config.
- * @return {void}
- */
- async __createFromDirectory() {
- const files = this.__getFiles()
-
- this.__createBindings(files)
-
- await this.__buildPeers(files.peers)
- }
-
- /**
- * Bind the config values to the instance.
- * @param {Array} files
- * @return {void}
- */
- __createBindings(files) {
- for (const [key, value] of Object.entries(files)) {
- this[key] = require(value)
- }
- }
-
- /**
- * Get all config files.
- * @return {Object}
- */
- __getFiles() {
- const basePath = path.resolve(process.env.ARK_PATH_CONFIG)
-
- if (!fs.existsSync(basePath)) {
- throw new Error(
- "An invalid configuration was provided or is inaccessible due to it's security settings.",
- )
- process.exit(1) // eslint-disable-line no-unreachable
- }
-
- const formatName = file => path.basename(file.name, path.extname(file.name))
-
- const configTree = {}
-
- dirTree(basePath, { extensions: /\.(js|json)$/ }).children.forEach(
- entry => {
- if (entry.type === 'file') {
- configTree[formatName(entry)] = entry.path
- }
- },
- )
-
- return configTree
- }
-
- /**
- * Build the peer list either from a local file, remote file or object.
- * @param {String} configFile
- * @return {void}
- */
- async __buildPeers(configFile) {
- if (!this.peers.sources) {
- return
- }
-
- const output = require(configFile)
-
- for (const source of this.peers.sources) {
- // Local File...
- if (source.startsWith('/')) {
- output.list = require(source)
-
- fs.writeFileSync(configFile, JSON.stringify(output, null, 2))
-
- break
- }
-
- // URL...
- try {
- const response = await axios.get(source)
-
- output.list = response.data
-
- fs.writeFileSync(configFile, JSON.stringify(output, null, 2))
-
- break
- } catch (error) {
- console.error(error.message)
- }
- }
- }
-
- /**
- * Validate crucial parts of the configuration.
- * @return {void}
- */
- _validateConfig() {
- try {
- ow(this.network.pubKeyHash, ow.number)
- ow(this.network.nethash, ow.string.length(64))
- ow(this.network.wif, ow.number)
- } catch (error) {
- console.error('Invalid configuration. Shutting down :rotating_light:')
- throw Error(error.message)
- process.exit(1) // eslint-disable-line no-unreachable
- }
- }
-}
-
-module.exports = new ConfigLoader()
diff --git a/packages/core-config/package.json b/packages/core-config/package.json
deleted file mode 100644
index 1a62e558d9..0000000000
--- a/packages/core-config/package.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "name": "@arkecosystem/core-config",
- "version": "0.2.0",
- "description": "Configuration Loader for Ark Core",
- "contributors": [
- "François-Xavier Thoorens ",
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/crypto": "~0.2",
- "axios": "^0.18.0",
- "directory-tree": "^2.1.1",
- "fs-extra": "^7.0.1",
- "ow": "^0.8.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
-}
diff --git a/packages/core-container/.gitignore b/packages/core-container/.gitignore
index 5d1942c190..1269488f7f 100644
--- a/packages/core-container/.gitignore
+++ b/packages/core-container/.gitignore
@@ -1,2 +1 @@
-config
data
diff --git a/packages/core-container/CHANGELOG.md b/packages/core-container/CHANGELOG.md
deleted file mode 100644
index 5a2008de88..0000000000
--- a/packages/core-container/CHANGELOG.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- Support plugin extensions
-- More graceful handling of shutdown
-- Silent shutdown to hide output
-- Configuration through a remote peer
-- Expose the git commit hash on development networks
-
-### Fixed
-
-- Cast numerical strings to numbers
-
-### Changed
-
-- No longer load the `.env` file in test environments
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-## 0.1.1 - 2018-06-14
-
-### Added
-
-- initial release
diff --git a/packages/core-container/LICENSE b/packages/core-container/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-container/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-container/README.md b/packages/core-container/README.md
index dc6a8b990b..91d6c0666e 100644
--- a/packages/core-container/README.md
+++ b/packages/core-container/README.md
@@ -14,8 +14,9 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [Joshua Noack](https://github.com/supaiku0)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-container/__tests__/__stubs__/config/delegates.json b/packages/core-container/__tests__/__stubs__/config/delegates.json
new file mode 100644
index 0000000000..cb2b4899cd
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/delegates.json
@@ -0,0 +1,3 @@
+{
+ "secrets": ["this is a test"]
+}
diff --git a/packages/core-container/__tests__/__stubs__/config/exceptions.json b/packages/core-container/__tests__/__stubs__/config/exceptions.json
new file mode 100644
index 0000000000..0967ef424b
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/exceptions.json
@@ -0,0 +1 @@
+{}
diff --git a/packages/core-container/__tests__/__stubs__/config/genesisBlock.json b/packages/core-container/__tests__/__stubs__/config/genesisBlock.json
new file mode 100644
index 0000000000..83801fa8e0
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/genesisBlock.json
@@ -0,0 +1,896 @@
+{
+ "version": 0,
+ "totalAmount": 12500000000000000,
+ "totalFee": 0,
+ "reward": 0,
+ "payloadHash": "578e820911f24e039733b45e4882b73e301f813a0d2c31330dafda84534ffa23",
+ "timestamp": 0,
+ "numberOfTransactions": 52,
+ "payloadLength": 11401,
+ "previousBlock": null,
+ "generatorPublicKey": "024c8247388a02ecd1de2a3e3fd5b7c61ecc2797fa3776599d558333ef1802d231",
+ "transactions": [
+ {
+ "type": 0,
+ "amount": 12500000000000000,
+ "fee": 0,
+ "recipientId": "DGihocTkwDygiFvmg6aG8jThYTic47GzU9",
+ "timestamp": 0,
+ "asset": {},
+ "senderPublicKey": "03cb7bca143376721d0e9e3f3ccb0dc2e7e8470c06e630c3cef73f03e309b558ad",
+ "signature": "3044022016ecdf3039e69514c7d75861b22fc076496b61c07a1fcf793dc4f5c76fa0532b0220579c4c0c9d13720f9db5d9df29ed8ceab0adc266c6c160d612d4894dc5867eb1",
+ "id": "e40ce11cab82736da1cc91191716f3c1f446ca7b6a9f4f93b7120ef105ba06e8",
+ "senderId": "DUFeXjJmYt1mWY3auywA1EQSqfCv5kYYfP"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d1",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_1",
+ "publicKey": "03e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d1"
+ }
+ },
+ "signature": "3045022100e3e38811778023e6f17fefd447f179d45ab92c398c7cfb1e34e2f6e1b167c95a022070c36439ecec0fc3c43850070f29515910435d389e059579878d61b5ff2ea337",
+ "id": "eb0146ac79afc228f0474a5ae1c4771970ae7880450b998c401029f522cd8a21",
+ "senderId": "DNL81CT6WNG1PHjobBmLvKwLV3UUscBymB"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "031137050d5fed0b5229b150257da2ac9c135efdf4bcb382b0ad0c197d7be458f4",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_3",
+ "publicKey": "031137050d5fed0b5229b150257da2ac9c135efdf4bcb382b0ad0c197d7be458f4"
+ }
+ },
+ "signature": "30440220124baaa04491287d0abbf5a167c9b0f5ac95c22b196f42ff3d275cc9a213c2fd02206e6ebada85f67063e642dbcde6b956f8c99c05f4b9c55f1551d3eebba6375043",
+ "id": "c9c554056b3428951633a7059dd64dfcbd776fef7f4a156ea362b37ee6ce74c7",
+ "senderId": "DG9LYv5rqX67wuGvGVa9is5k1r86LKCVTA"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "037def83d085778d7767a182a179f345207953441089081f5bc13f86d3891308aa",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_4",
+ "publicKey": "037def83d085778d7767a182a179f345207953441089081f5bc13f86d3891308aa"
+ }
+ },
+ "signature": "3045022100900cea3c2df393414899c9d74db57d89c9f311c70d08b974d0fd4a98bfae2fc902204a2aa51a1ec71da27c26afc033de6bd2d15978813c120c95e1a4dafca75ce876",
+ "id": "c82ccaa16be0e3c7ff4a53e2807968b71a0d88115223c3af2eb320f32449ac32",
+ "senderId": "DMSwarrHg5N9ZAZ6nsqPuUjyAU6gdRAM9d"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "033f28ad2e9b897d46f1e67c7c52070e9ca46b04c0679ebb21fb236719e38aade3",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_5",
+ "publicKey": "033f28ad2e9b897d46f1e67c7c52070e9ca46b04c0679ebb21fb236719e38aade3"
+ }
+ },
+ "signature": "30440220285188d8900cd3cffccf5e1de305b18856451dd04d2ed21165dffe9a7ce4afc1022009457be6bfe536971697105d47ad1f829738a5cacdb27a23c5d1e8a8dddf3ebd",
+ "id": "ee6a19fff622ab4e6e96d159396de56d6034b4b18a9cf5c99efcf4e61b28e15a",
+ "senderId": "DFcYHfCwhGWcBNy6cp48wy5SfXbQmfBYgT"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "023e577a7b3362e0aba70e6911d230e86d729b4cb640f0e0b25637b812a3e38b53",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_6",
+ "publicKey": "023e577a7b3362e0aba70e6911d230e86d729b4cb640f0e0b25637b812a3e38b53"
+ }
+ },
+ "signature": "3045022100afa56542dd473c424b36d4d9f24da68180cfd90527681ab84098f415b2544a8702201e8ebdd619a2dd200e37a57c39a4529afe76d35f6089c00f6dffba6bf7b8a836",
+ "id": "0dcd6e380bd7eaef8724f64f4b86104ce7497308dacf775afbe6ec0d401007fe",
+ "senderId": "D5e2FzTPqdEHridjzpFZCCVyepAu6Vpmk4"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "02af5e6341efc14f4ba39a9ff65e151cc7304fc742ce7b2678d9aa446c555ee9c1",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_7",
+ "publicKey": "02af5e6341efc14f4ba39a9ff65e151cc7304fc742ce7b2678d9aa446c555ee9c1"
+ }
+ },
+ "signature": "3045022100c8980155c8f8964d76baf3e8d690075708f1a84757c1de52e311772466382da2022012599acfc7839fa1ef6bbd445ab34555fb718491db3089f40d4842b1bc2d3178",
+ "id": "8af6abb117c69c130e388970d595b741374b1bbca709d9e91459e9e3c721397b",
+ "senderId": "DDLbnve6XK48cGsQiFhesUJQRQdKkZTfPh"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "02845161cfca4d6ddde8e0d53538b6f881fb3ad9383cd77cebc55375dd6fd17663",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_8",
+ "publicKey": "02845161cfca4d6ddde8e0d53538b6f881fb3ad9383cd77cebc55375dd6fd17663"
+ }
+ },
+ "signature": "30450221009bce7c5c10a4b6306cebe5724adfd3de049a425c44dd314a10154774764c11090220070fb775e71dda6a68f7fc9e0c762fbf96021908911f0de0ca8e9b0c613cb896",
+ "id": "bd346035d4516b85fb3a2cce6260fdcc6f1c434999e586978e065de3bf98e02a",
+ "senderId": "DDAHPjVTTV3uur653TB27fcLGh7XXWnvxW"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03f264a6d2ebb62279313a6fd7fec4e2244785839b625a0b0c261e689ce5401d87",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_9",
+ "publicKey": "03f264a6d2ebb62279313a6fd7fec4e2244785839b625a0b0c261e689ce5401d87"
+ }
+ },
+ "signature": "30450221009f74425c2ec50dbee462e735dee3e7917c8433fd5250ff09af4506c38d2df05902206a14a19b9a5defe3c8c59c77d52c182ea34d81d2e0b05dc5925133f2829a1960",
+ "id": "b48068fb7c848ffd57e82a4d381f53bb69916f3943e0e8935971a028ba245564",
+ "senderId": "DFHdEBuVCz5zfj8yeo3BmKEdsEKpMaYRRw"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03efd265a086c2a099cda4f4fd202adbac07567e1229ce5e6fe39963b714c1e2d5",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_10",
+ "publicKey": "03efd265a086c2a099cda4f4fd202adbac07567e1229ce5e6fe39963b714c1e2d5"
+ }
+ },
+ "signature": "3044022004df492965ed328134aa6443d38ac4dd951a640e00330da9aa4e80c1577af41a0220588f030f5f9584959647898bb977a1ffe6bba639b1c64a728880f2cd3fd7aa3c",
+ "id": "73b3b4375e39aabe51ec205559cd728a18c987dabaa0599c611b3076c38c7a49",
+ "senderId": "DL7Y6smfHHs3Ms3hAYmSYYd5PZukmtDY1i"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "027d616d20f03c375067676c79ff9787e8e42991fbd9e878501d704d23d246d9b0",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_11",
+ "publicKey": "027d616d20f03c375067676c79ff9787e8e42991fbd9e878501d704d23d246d9b0"
+ }
+ },
+ "signature": "3044022051c2f8af62163ca621eeb3087a35bfaca0d679f7be8b19a25972f5a4b24ad8c90220422f3e0e480bf1bf2211e871a102edc15a957c0f97a553d9d707418e6538df26",
+ "id": "80f1d01158452da31d44f0c24f464a0ade37da51d2f61356ad75a019a91a1ff5",
+ "senderId": "DBVoRSXBHBPPvssBXrswv22r4dUSpN1fbA"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "038918951152a37b74dfe61115f83e4b5e3521145065650c4a6d3e94add57d9a9b",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_12",
+ "publicKey": "038918951152a37b74dfe61115f83e4b5e3521145065650c4a6d3e94add57d9a9b"
+ }
+ },
+ "signature": "3045022100facf6ed992c28d41595419666b006800fcb33c6bad4b522e013b4d688e51dc8502207695e968059f7a35486389c430d6a3037e69d3e5f1d4f0a294d8818e4750cf0d",
+ "id": "86d76b0aad8f496d8c20926bfdeb50ad10db242ea6152b68266680c48e1e1aca",
+ "senderId": "DHsSK81gRWjgNx1A9gtHgkRsEwshsog7AM"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03231d8f2f39925fa79efc8f8561e6a8d29b95164a753cbb604a46e8a2e96606fc",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_13",
+ "publicKey": "03231d8f2f39925fa79efc8f8561e6a8d29b95164a753cbb604a46e8a2e96606fc"
+ }
+ },
+ "signature": "304402204c627ec3d24fb7b4f86709c0566cee9909ebddb26039e87a2fa673f1f7227362022003be5aa3303b8f4cdab768f80b4699440a61814950cab0fd983526771c4c52ec",
+ "id": "464614909ac7531a016a0489d78defe262dc0934324f41199975ad42a86f37ac",
+ "senderId": "DDr7UTGQuPTjxLDWZ8RMjWJMKNXAMj3Bor"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "021e6d971e5885a3147ddf1e45bf5c8d0887ad9fc659e24bdf95c2c9607e7e3fe8",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_14",
+ "publicKey": "021e6d971e5885a3147ddf1e45bf5c8d0887ad9fc659e24bdf95c2c9607e7e3fe8"
+ }
+ },
+ "signature": "3045022100898e59efe518745d3eb3f2b16f7b6192e3289bb4289d43013224549f2015aa4902204e7be92cbba37a05551151e46224da4e5d0ad86ee2106d3a9c0b9afee5f1c4cf",
+ "id": "9559866ff439959529f69b0947ad2e72d739511ee1f6533c0bca2ebd6dd4ae4a",
+ "senderId": "DRXNNQ9gQXh6VNUVKaAn9xHAViyiHKtBHZ"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03d5b3efbe98631443c5cdf4de8a610dd2655b86427bf70aa209451b54256f6758",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_15",
+ "publicKey": "03d5b3efbe98631443c5cdf4de8a610dd2655b86427bf70aa209451b54256f6758"
+ }
+ },
+ "signature": "3044022037fa085e37a582b2e0b3734d44b813bb18be939f73100c5b6f977d4f53ae708f022064ae54f6a1b17b193ab6b6d633f7b7a7b8171a158cdba7480afe380f383930dc",
+ "id": "7bab92d5397a4ad291c5d01b8d681e480d19b437a7ab5cbd4c6807c96ef2716f",
+ "senderId": "DT12wf9erZyNJbBQrpbPDmfH3J8txiDgTE"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0297f2e8e609b2a6799214481e7573a043a197f8adf7b8bb306576fc3da83d2aaa",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_16",
+ "publicKey": "0297f2e8e609b2a6799214481e7573a043a197f8adf7b8bb306576fc3da83d2aaa"
+ }
+ },
+ "signature": "304402202eee94bc3b53c64f8dee7790fe3eed8639da8faf0aa1f785e921cf139df0fb7e02200224efb0c07ae3972287c12a32143c1356adb93e00ac9e04a1358c8245a24cab",
+ "id": "1e59740fa596b615231660974d0b656122b799a8b13102ade8c1b779aa5de7b5",
+ "senderId": "DKGYWPSqa4m4z6h3433rNFbWPDdvHj5wwd"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0361b914fd5823bf39ae467e95d99e9f6ddb7d85cc6df3055ce00274b8e4a976cc",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_17",
+ "publicKey": "0361b914fd5823bf39ae467e95d99e9f6ddb7d85cc6df3055ce00274b8e4a976cc"
+ }
+ },
+ "signature": "3044022002ad92b9b9d81dabf96ac7d90034debc55eeeae879b3fe6ffc026bde86bb7ad902205c57d31c5e5e0099b504ba4c49e220a00ff325dceb64c46aefbb7a0ad8570099",
+ "id": "bf305776da902802923c19b9d2c7f1a809b0847992131cfa578d5e5518c924bf",
+ "senderId": "DJshaeFyHcFTjiGJnVPaDmFXhnJ9bp96i5"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03fa6bc09bd2ff348b304e0cfbc2d2ec50aa3b9aee0de6a66c13fcd8ee5ac891cd",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_18",
+ "publicKey": "03fa6bc09bd2ff348b304e0cfbc2d2ec50aa3b9aee0de6a66c13fcd8ee5ac891cd"
+ }
+ },
+ "signature": "3045022100be50b19c17a9ff221aae20394a45d92ea47e8c1072b6d5a302937d2fc48cba8002205e9bcb3471a734c07ceff0083ad9ba1570507a29e5014e889ba42a85e797cb5e",
+ "id": "44e48364b5b8cff3c68ae03de7dfde8d7ba6bcb99bf82b32fdc8bc3d0d9adeca",
+ "senderId": "DSuNttSb1UvCWg8iormfwPwi67EA84P5Mu"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03241957edca9ed28308e35cbf36762d22de706ebbd7c6a3a2d235d905d660c5c7",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_19",
+ "publicKey": "03241957edca9ed28308e35cbf36762d22de706ebbd7c6a3a2d235d905d660c5c7"
+ }
+ },
+ "signature": "3045022100c11f8b863133535192e6c3fff20253a2695a2df74cdf1445d4ca0966803f708c0220200d4c2723d84f6334ba5d1cc1a0d45854867f4523fbcc9d09b3d53dd1972950",
+ "id": "5cba288f9ffc1361ba8f7f19f28347ffd917f37df8cf46ba1e0816725f288528",
+ "senderId": "DCZt1ozEVvPdYVvkHmUKK6k7gnyNNQDpMq"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "035ae2364c838bc21edf4c04a99c85799f26fb02cc0740c5a1c67d4dc1748ff913",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_20",
+ "publicKey": "035ae2364c838bc21edf4c04a99c85799f26fb02cc0740c5a1c67d4dc1748ff913"
+ }
+ },
+ "signature": "304402203066f06a1c165795d8a069499a8c0998913ec93e689219f14145754aa3e26e4e02206e9f88da16f1f8a8ebaf481eff798452487738714fe9b5694fec6a5ef8c152a5",
+ "id": "ada1696532f7faad1dda594bc6db7bfc029a1759402c924348b74222873a3a27",
+ "senderId": "D7JyqWMPKhhRNQcKTAvrPGBjEjjBcGgPca"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "023aff4a16c3876e885aea70e5bce9734ce5acc95a2c41c9783f5acd617f7c7533",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_21",
+ "publicKey": "023aff4a16c3876e885aea70e5bce9734ce5acc95a2c41c9783f5acd617f7c7533"
+ }
+ },
+ "signature": "3045022100f5150c23596b9479c8b277401ab9e7da9b2275436f3927dabd70395e52c3ea7c02204e318d498b0176b5f05bb96418c49da3375a8d9b47b3b1e72a6f4db30b3f8c34",
+ "id": "e186a679f2e47300ec2f24c670192bcede1cb12f359cb8e827374b22f41fbe12",
+ "senderId": "D6itxYJr4n7ZZk2bd9cZbJE1xaDmpfkNFL"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0217d7ce9c3754f7fc7e5b4c64a1ff397dc75931cd6c92e32d8b42068ad50fe4eb",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_22",
+ "publicKey": "0217d7ce9c3754f7fc7e5b4c64a1ff397dc75931cd6c92e32d8b42068ad50fe4eb"
+ }
+ },
+ "signature": "3045022100b84f69a7ff67ed147fc0a750c3b7b2ecabd582b6d0cb698c0bb4a531daa6ca46022039d2722e486e1674d0db422078d63fcdb90b21bed0dcc1265adff72d0c2bf8b9",
+ "id": "86d9d146b62dbafe212aba5ec9764223b67f72c3c1aa93e54a270e3a528a8b20",
+ "senderId": "DDy4aKhF3cMadGhjFZnjaA1tx2rwnSEWcc"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "024019207f50dcb3e8aeb9ac1b00993d2bf131346e7e6d296429ea813a8373818e",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_23",
+ "publicKey": "024019207f50dcb3e8aeb9ac1b00993d2bf131346e7e6d296429ea813a8373818e"
+ }
+ },
+ "signature": "3045022100aa83596b740639ee8947aa6d0f0ee123e4a5b87c39a4c6dd8a50304d4a7c97d102205fd45f85f5bdb076585a77888ef880bea52ade689731dff694d777de34913efc",
+ "id": "6301b791844e02116df528b1ea46d788e91521189c3828ce224e45a1b72cda59",
+ "senderId": "D6BwyDJkNFkaDLedcJTE4rPUw5bRtb4K8f"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0275db912c21dca0f0213a76f4544137d7c741b47f281cfd4f8b7cb8187e7ce3cc",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_24",
+ "publicKey": "0275db912c21dca0f0213a76f4544137d7c741b47f281cfd4f8b7cb8187e7ce3cc"
+ }
+ },
+ "signature": "3045022100c7eda0d9cd7ef522615643d1b985c73add2d3612344bdcc0117779fa4f4f54d302203e33fb5d185f5174e9cb7634a3d307b74d3bb56cc2354024ce69c74905a85203",
+ "id": "eee776fcb8024469eacab3e4b23c3d14185326431369aa84f17921abab8ad0ad",
+ "senderId": "DHQSmrRdfYAp9Y6CuebKnkoQNzuN7Pk2oQ"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0331c615ca4bc89d4eeb8d7a9cfbb5c0d4ce49d2f480afbce499b0c7f8c6a24f2f",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_25",
+ "publicKey": "0331c615ca4bc89d4eeb8d7a9cfbb5c0d4ce49d2f480afbce499b0c7f8c6a24f2f"
+ }
+ },
+ "signature": "304402203e69be3a73c5917d89d58f3c0ae18febbbf364d3f9dfbec6b526a5294f9c435902201750bcf6368c181aabc53c73fd271a2967a6f215e1d0506eded5dd1800fea1c8",
+ "id": "ec3d17c6d38c0b9848c7cb57b968efd1f3872b1d1b8bcfb74bae2b0aaa15877c",
+ "senderId": "D6EVFQx5Z7M2X9DWXHtfX51CtVekuKPMQF"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0338ca9b719f8047580eed23b64a40aecad3803a12c0dde83e3ec2c2a9bfaa8147",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_26",
+ "publicKey": "0338ca9b719f8047580eed23b64a40aecad3803a12c0dde83e3ec2c2a9bfaa8147"
+ }
+ },
+ "signature": "3045022100e0bf90949739012b641793da162b3daa88b34c8753ee31b26850729e9df579810220439a3f2f1b8e719767ee68df46f4bc1f18c8c3b2da4118edff22396616d319fb",
+ "id": "14cd65c5f28f4cefc7c0157518a24f90c2260eb7166105b6b3358d91164ddf39",
+ "senderId": "DLCQ1jPsYbBCV7JfUJTasKbKoyGbK4a4HG"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03127001718bee76f14133272f0f4a928ffa8c2b38cafd94d7100253dac732c644",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_2",
+ "publicKey": "03127001718bee76f14133272f0f4a928ffa8c2b38cafd94d7100253dac732c644"
+ }
+ },
+ "signature": "3044022003d2e76aca2848aedfe25415c11b9368dc72f687b66bef4527b40e2997b86b8c022076f7f82cbeb282d26535a2c1f0af0f02b48025d42c1bd56ac687fba1a3adb706",
+ "id": "0daff3992b54b1384f52f751c933c727cbaaf4fac435eba88a1817a425753614",
+ "senderId": "D9rv3h61heDYHQ3b3Xk3V5epHSTTC6Vn1d"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0241734825ba45b6de29d6f26242c25ae1ef125b82615ee89a9fdd5b0f3c6b5132",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_28",
+ "publicKey": "0241734825ba45b6de29d6f26242c25ae1ef125b82615ee89a9fdd5b0f3c6b5132"
+ }
+ },
+ "signature": "3045022100bb2903424bcd0a72da531470779144d60286191bea1b200c5617ae4f92229ba6022046a876e3e6cb85469a16f34d2f937e2eef787011c6a313ee50258f15116148ac",
+ "id": "bd17dbd23f8dbba2736688702ac185a87c88c43b24ee6d7764a5b4138b2f38b7",
+ "senderId": "DAcQPbKa8zBWwDHbxj37N13C61iseMDWM9"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03b9409203d7091e3f4d49168529b749e942ed18f21beddd236d57d692f09a8f86",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_29",
+ "publicKey": "03b9409203d7091e3f4d49168529b749e942ed18f21beddd236d57d692f09a8f86"
+ }
+ },
+ "signature": "3044022016d7ecfa776930a6f83464548e7a686735fde752903539a38eb9da0ce2488bbd02203c5e23a4072c8de35a90b296145cce3156a31cc0d754b8a37d363fb088bc7387",
+ "id": "16e02d3ef24dca4b03a1e489e20335224f18d888ed04f7e3512572f8e0cf92ae",
+ "senderId": "D5mmTaDAMSyPNKiDKrqwTFGWzWrZA3xaF8"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "02c7b92a2d0027309e21855cf9c42a432b21ad13925e9dfc206f9c01e18fefa08a",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_30",
+ "publicKey": "02c7b92a2d0027309e21855cf9c42a432b21ad13925e9dfc206f9c01e18fefa08a"
+ }
+ },
+ "signature": "30450221009de8828a7ad87cb5d52900e09d5beb680f9edc7640a3707d08a379511a7ba0f102202aa1d9294f9631f1325f252adb87c0d866e7398ce410037a42dc861d94308e15",
+ "id": "fece556bee4de2c7f1bb3099a05a84a33d0c963979fe1a222a899c13b7abb1fc",
+ "senderId": "DJ3NywAwQh4srbooLH1jTs9ma1hJE79v3z"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0221297804a26a93bb441a9d20a2916abf27fa7b29967678ef1a7a58062f73f40d",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_31",
+ "publicKey": "0221297804a26a93bb441a9d20a2916abf27fa7b29967678ef1a7a58062f73f40d"
+ }
+ },
+ "signature": "3045022100b969611ef532557fa3da8a0325b2c88f3ebec954d64f158431d86b8e07929ea50220520affdcd0728cb7c5f63a58a1200d44133e90b1f7a6a9e28744ad6b0dcc2a75",
+ "id": "ee086317ea2fdc522f5eb502a0db9f3d4955b2318559e40a1f22a3f5f8d6344b",
+ "senderId": "D5P7eti7FUY4Tk5KXoxdf2tDAVQrRVCESA"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "027f504f6f20648e3bf171952629c7b868a2f799aa4b60f8eb3fe96afff16bbef0",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_32",
+ "publicKey": "027f504f6f20648e3bf171952629c7b868a2f799aa4b60f8eb3fe96afff16bbef0"
+ }
+ },
+ "signature": "3044022006be7cbaa74089cabe47d02621f756762587d210a3f211ee941b5fcd0650908f02207d4040408bd25a2de03e5724362735ee8ad36c099b0c16efd4716e1dd7ec62ae",
+ "id": "764dd21aa4d0e2e0fa17bb2ff5e7ca304995d9e3593542badecc8ed24d5ea3ea",
+ "senderId": "D9q26yBTrEYuxHg7bbfZphv6129KvLu4v2"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "031954315b84db8f49ab7ee21357270450bb68d06b34472e5e93ddfa5710edc0c9",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_33",
+ "publicKey": "031954315b84db8f49ab7ee21357270450bb68d06b34472e5e93ddfa5710edc0c9"
+ }
+ },
+ "signature": "3045022100859f93df994d86995fdf834bfe86b41eebaa04e5ab7d09f0b37acb50d313cd9802203c8993b793602c96d305fa795a9f2459f4706b340993584f3c56579392c0995c",
+ "id": "efd9e7c638afe62bec9be61783193ea52eea7b335053bd5af6c758d5b0e5847c",
+ "senderId": "D9iPFb5kAVnuDdomehRP9LncJj5ng2vrsr"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0267b310eac2bb0d6594de382a1ab74ac75b91e9d64a590b6249247b10fd9be829",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_34",
+ "publicKey": "0267b310eac2bb0d6594de382a1ab74ac75b91e9d64a590b6249247b10fd9be829"
+ }
+ },
+ "signature": "3045022100a678978ab899e3903e760ee98640e3f658792a096a8d771c575944af6536cfdb0220428c312f1e0eb4be73ce4b256a754447570176200cfb6c09b3eb55f66526dd80",
+ "id": "70edcce5df67a250b6ba3567879bae6379ce4c688597fcedfbfd0313da6998e8",
+ "senderId": "D6xZmtyBzZKCEkK29JNPAD581TJ8XXrXYn"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "028f32320c66a89779756b04946d2aa256dff6cd547349d46e1938710063e387c4",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_35",
+ "publicKey": "028f32320c66a89779756b04946d2aa256dff6cd547349d46e1938710063e387c4"
+ }
+ },
+ "signature": "304402206bc95876897527b39eacf4c961f9c036a9c8a0e53a17ce925c592d079fa643030220096e115d7fbd54aca4af7f621d64178dfcf2c13361106a3e3b5025dca97b44ee",
+ "id": "7f23f44157f3a677e81514fa431227410a27442e5fd1f2491b177c0f580f296d",
+ "senderId": "D9dW4eXJjABDQXSQB9GtvY5UBuRWWWejWb"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0218b889a24988527ab3948d80f97cfc37b923082e1f0398bc162190fd66ec4dee",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_36",
+ "publicKey": "0218b889a24988527ab3948d80f97cfc37b923082e1f0398bc162190fd66ec4dee"
+ }
+ },
+ "signature": "3045022100c40a3f4cf15f9274e2b25ca8608cb965316aa0f00fa77817b79620ad8ccbdd5902206203a1043b03ba58aa9b7399694f8215cf45d30eb0caa748cc06f1a85a8faea9",
+ "id": "a65244ed17a9280aa694abdf6804b1a0b78dfc052b4845abcd3c89380159b29e",
+ "senderId": "DFHK7SdmPdjxNZ9uweqLZAv6v5GQ1NnBNe"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "035392ee88c60617764b4fe89ae2cc96560dfa5f992b03be31ce5680db9b863f73",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_37",
+ "publicKey": "035392ee88c60617764b4fe89ae2cc96560dfa5f992b03be31ce5680db9b863f73"
+ }
+ },
+ "signature": "3044022036200c3191f8f01b77676644b9b94728b5afb2ab2de8c5c7c5582e795465661c02207848f1f2f0ab378d8906fd45aa048f354d5dbac4cb87c15973ffa86fe84ff0cd",
+ "id": "219e0942afe5f65c548ec2118a1c49febb7ec03fca4334ac16649062db9d146b",
+ "senderId": "DSh7AAC9KahXU2JZ539HAqEa5sHafxsxDQ"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03e75127d1deccf65844a1761bd26611b6c65c5b51a52eba27e3ee20a539fd63f1",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_38",
+ "publicKey": "03e75127d1deccf65844a1761bd26611b6c65c5b51a52eba27e3ee20a539fd63f1"
+ }
+ },
+ "signature": "304402201a2990b2baae72f5cc8f2d1890f328e4082af0cf2a787d8f05208c3424ce089d0220790dbc7606dd6c03568fd0a771e9e8e89557257238ae90cfcb3bb8f3b475987b",
+ "id": "ee9ad2a66e9b2009a9fc671f80d0493803fc422161140169c7bc1fd401cd9ad6",
+ "senderId": "D85WuxGZrFs1QUYTvnRpmc6dd8rmBbpnaX"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0329fc1580906307ac9f2f55cec66e47983f8287d542408fb19f473a305d3638d8",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_39",
+ "publicKey": "0329fc1580906307ac9f2f55cec66e47983f8287d542408fb19f473a305d3638d8"
+ }
+ },
+ "signature": "30450221008f66e89ec4c7af4b77e5b7ff36c542cc02672c8df70806b5a0fab7a7e8c7067502200d99ba19ceb1b471c39c4e95107ad6f8b978a623a790080b16f863347fe06b4f",
+ "id": "dd3077ed04a76343d340074270ce9826354802bd99e08cb864c1c5ad09f367df",
+ "senderId": "D85kwsBJKZ4pw5uQpc81eRj95f6a536AP6"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "035ec848e9388877dac88f121d19c8f5e870ac90d8ccb0116be9f734e4bd1a9405",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_40",
+ "publicKey": "035ec848e9388877dac88f121d19c8f5e870ac90d8ccb0116be9f734e4bd1a9405"
+ }
+ },
+ "signature": "304402202b220d6c028bc23213edddaf303f18eef059551891aadbf7a4b4d7d3287457bb0220245678354bb8960b42ba2f2ceb12f926e82ff0d027b44988d799c8c0d8d7d9f2",
+ "id": "3afc6ea52b8edc7df0230ceac71baf45460f3bd761c5e75fe796bc7415063220",
+ "senderId": "DGBJdDadBwJD2xY8VsdAykdd6vPakMMUt6"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0356f5885306e45402aeb354a74d13c104699b3b53da46a5e922e4a6d6132a67e8",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_41",
+ "publicKey": "0356f5885306e45402aeb354a74d13c104699b3b53da46a5e922e4a6d6132a67e8"
+ }
+ },
+ "signature": "3045022100f18bf2e013f2d9dcac013a76037d787f79baaa65f4f31ffe2b4ed8de249bdc8902202abcf77e809599d3e3a96225363c8e760ed4b4e20f97645547b381dba830c3da",
+ "id": "aea1fc173a2f4a9233b0fe59a5f6804167bee5658cb3e4e19dfe2be20f5772cd",
+ "senderId": "DG4VbapL3H39NJLB3DqQEefU47EMVqtxVw"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03ff8ab980434516ca28c982d0ecc8fc3107116d6c8b3e09c7ee5033f32adbd2ff",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_42",
+ "publicKey": "03ff8ab980434516ca28c982d0ecc8fc3107116d6c8b3e09c7ee5033f32adbd2ff"
+ }
+ },
+ "signature": "3045022100e938d9901afeaa5a56d18abd9292ace93be03c84c09a6c4cb58fca96dfb54bc502201e921d27f9886d189f803b14d93655a42c4e095d49ee61051a4e70c7a173f3f1",
+ "id": "f18426d3ef81d4b7bf0337d70afcecddbd6db2206a2f139f1ca5823c381c7817",
+ "senderId": "DC3oNWedP48ypGxAeKbFC7gMjWxcNc2JhL"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0393f1590771a8ad1cf2baa086858f3029c4444cb82243917a7011f1f66cf8fd05",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_43",
+ "publicKey": "0393f1590771a8ad1cf2baa086858f3029c4444cb82243917a7011f1f66cf8fd05"
+ }
+ },
+ "signature": "304502210095745c36a8af07e21546bd064f1ed1bd90e6c2a8db9c0c8e4853d0a8255443db0220259d2ce3677abb42f08b9d22aa13bbe383fd882ed38911b738ebaefc04589694",
+ "id": "6c51bea35b5e3270dcf7b7dfae8d984e19f476ea7e0435f157c4e0d22b7e7ea1",
+ "senderId": "DJm2sfcUKhyxakowY9TjyAytkdq7JrFgVj"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0234e24ff1dbc447c804eb385cd05bbd1dc59ef03b44a3346b13e7cccf00b61075",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_44",
+ "publicKey": "0234e24ff1dbc447c804eb385cd05bbd1dc59ef03b44a3346b13e7cccf00b61075"
+ }
+ },
+ "signature": "3045022100ee1e8df480f2be042386d383d776b3fd6bd2d3f5a9035071153f23dbfdceeaae02203a0834aae4834da3ca7858779d474b9255ead754867d5b4a18873e9ecaa5045e",
+ "id": "66fb3e36233f4577ba585ccd7daf83e62d8df262d3d832b806479ac67c1ef35d",
+ "senderId": "D5oS8xfNebiPsjpwPWoZS6sA9qcYjTGT5h"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03a5789a4486f20f1fdca78a52b528b3bf9952e7c057de71a22adcfb444ba4c5d3",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_45",
+ "publicKey": "03a5789a4486f20f1fdca78a52b528b3bf9952e7c057de71a22adcfb444ba4c5d3"
+ }
+ },
+ "signature": "3045022100cb037530bcff9a4d19899431648747022c28aa3239563379d96692bd525eb38902205f3cabb8dd470d9eb3d425e333ad1bc9f0643d489c600a811748fb5f4a203f7f",
+ "id": "5df9c5e350136571af4b86697bc9d4cfca3ff8b669e254b36f00be1dbde063f7",
+ "senderId": "D5SzHHdPdGqYUkH7BGNkmGHEUqfZrWb17r"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0347f692345fa7bf90e944eb55246da5f9f595d3f5a20ad50aeb6f9b973aaae17e",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_46",
+ "publicKey": "0347f692345fa7bf90e944eb55246da5f9f595d3f5a20ad50aeb6f9b973aaae17e"
+ }
+ },
+ "signature": "3045022100c377efe5ffab58017473699cd7c839dcf48fa5b20b5ddf9bdc4801e22a579b2b02204d35c1a1416069544e3ec01d2ce21bb409f9f2fa4adedc8c03d6417c034a3fec",
+ "id": "da4cfad78e37d56421dd6676e5618a507340ef1e496831d1968c509e35ef9202",
+ "senderId": "DCLdibuZB6UsJP8KmdzcDLWzizrDtJQuxt"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "028a32b441377a69aa76e867026f3109b2f0aef8651fe91e2a4ab01eff102a6b98",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_47",
+ "publicKey": "028a32b441377a69aa76e867026f3109b2f0aef8651fe91e2a4ab01eff102a6b98"
+ }
+ },
+ "signature": "3045022100e098672958be15989bb125d9018adb4a54e95ab664e64a673997e617e28b39df02206e8459997074d5976b77f90eb9d7180e9d4a0e0efdf433958ffeb2f04d9de382",
+ "id": "85bafcd07e7ba47ec95cb5b5a6759d4f9f87e036bb7660c7717504e845ef975e",
+ "senderId": "DSkivgRyimdAVqmm2ZAKwKmKN39WEbbPnL"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "0223ac52179903e79865b9a98cf0b52ddc1ab46180c157e8f6bd1e63e7f14fcf31",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_48",
+ "publicKey": "0223ac52179903e79865b9a98cf0b52ddc1ab46180c157e8f6bd1e63e7f14fcf31"
+ }
+ },
+ "signature": "304402200a005716f67d6cd3963a3c752c95f1bca01aa127c91ab1a632eb3022d11e3e67022024c4746078e440da441bcb366ee8999ffd2419e9a6f9cbf971d696d5b7f8733b",
+ "id": "0df1ed07d3f95ddf0385bad83a17b3a8fde6bd6532cd3479e48668064672b34f",
+ "senderId": "DDgKyKqdA6SuamB1eW77WvFu6RQFMZoU36"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "039de0390f28c7731d86ae7006a31888f12856cde3cc3c2619d4d4a42b6dfd6c51",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_49",
+ "publicKey": "039de0390f28c7731d86ae7006a31888f12856cde3cc3c2619d4d4a42b6dfd6c51"
+ }
+ },
+ "signature": "3045022100efa5d51ca79992be4a87af049b3e9ec1b796576e4d937ea9e3760ab0bdcd301e022027e22a6c3395df155bd399643c241e4cc317eaead1f273fd7a709339dfa9dc99",
+ "id": "436bebc107fad38e944fd14785e09f0600df4d75d31cf3eac53f850462d0be74",
+ "senderId": "DKCaoaXApw1xE7K1BJcVkr1KGzjKmFWyTk"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "02b87b0e70a7ae10613390f405620e24c495ba2b0cfcdbc67688e9b483dea564ee",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_50",
+ "publicKey": "02b87b0e70a7ae10613390f405620e24c495ba2b0cfcdbc67688e9b483dea564ee"
+ }
+ },
+ "signature": "304502210096cdd35f803a37730ac73a97a23061dceac96319c67bfb1ddcfbac737febe96102202fa0b279f697da3afc043ffd3ecc838789be07ff119b5527a5c13468cecf66e9",
+ "id": "bb65f9dbe6272fd07a555fc86762d6a487f538b972f2926ff7698cdc906a32df",
+ "senderId": "DJQXFKEguZVabsAs46JbXXnQJ5jFhUtN9m"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "03c9f8f4001216603c152b4b4429c2ead322ac34672999e808d567a7d1140e46be",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_51",
+ "publicKey": "03c9f8f4001216603c152b4b4429c2ead322ac34672999e808d567a7d1140e46be"
+ }
+ },
+ "signature": "3045022100ee961089d02d7bb68fe2257f6a972eeaf6e2c1a1ad2f491c417e161fedbb556b02204c834644e5b5cde9a0b3f92fa23bade7670efab0a067597f6c151ee633932706",
+ "id": "cef44df9684f05dab67c0568a2c5295bb50cbb3c88f5cfbe672365bda274620f",
+ "senderId": "DKpt7cm2tZk4RPLyQ5ugwEH7gkriRaA7ov"
+ },
+ {
+ "type": 2,
+ "amount": 0,
+ "fee": 0,
+ "recipientId": null,
+ "senderPublicKey": "02cf70f73328d490cfb03ee822d3fc0cf9259d67c0564e843491e739501809d657",
+ "timestamp": 0,
+ "asset": {
+ "delegate": {
+ "username": "genesis_27",
+ "publicKey": "02cf70f73328d490cfb03ee822d3fc0cf9259d67c0564e843491e739501809d657"
+ }
+ },
+ "signature": "30440220645b912b60f829c0bce58bfe9890ef9253418b6898416aaead663bdf158a99f2022061abbbabd454ec7f7e3f4b502216eec28110e945a4b9b913b1fc0b9758e7e6e4",
+ "id": "09408dbcf3e3e0835bf92a05330c023a7d6471f3825301a34efa094e0fd4fc30",
+ "senderId": "DQfjSqDuKr5YZaLAF8rWpFMqMYwEbPtGKg"
+ }
+ ],
+ "height": 1,
+ "id": "13149578060728881902",
+ "blockSignature": "3045022100a6605198e0f590c88798405bc76748d84e280d179bcefed2c993e70cded2a5dd022008c7f915b89fc4f3250fc4b481abb753c68f30ac351871c50bd6cfaf151370e8"
+}
diff --git a/packages/core-container/__tests__/__stubs__/config/milestones.json b/packages/core-container/__tests__/__stubs__/config/milestones.json
new file mode 100644
index 0000000000..51c32313cf
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/milestones.json
@@ -0,0 +1,31 @@
+[
+ {
+ "height": 1,
+ "reward": 0,
+ "activeDelegates": 51,
+ "blocktime": 8,
+ "block": {
+ "version": 0,
+ "maxTransactions": 150,
+ "maxPayload": 2097152
+ },
+ "epoch": "2017-03-21T13:00:00.000Z",
+ "fees": {
+ "staticFees": {
+ "transfer": 10000000,
+ "secondSignature": 500000000,
+ "delegateRegistration": 2500000000,
+ "vote": 100000000,
+ "multiSignature": 500000000,
+ "ipfs": 0,
+ "timelockTransfer": 0,
+ "multiPayment": 0,
+ "delegateResignation": 0
+ }
+ }
+ },
+ {
+ "height": 75600,
+ "reward": 200000000
+ }
+]
diff --git a/packages/core-container/__tests__/__stubs__/config/network.json b/packages/core-container/__tests__/__stubs__/config/network.json
new file mode 100644
index 0000000000..0f373e462e
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/network.json
@@ -0,0 +1,17 @@
+{
+ "name": "testnet",
+ "messagePrefix": "TEST message:\n",
+ "bip32": {
+ "public": 70617039,
+ "private": 70615956
+ },
+ "pubKeyHash": 23,
+ "nethash": "d9acd04bde4234a81addb8482333b4ac906bed7be5a9970ce8ada428bd083192",
+ "wif": 186,
+ "aip20": 0,
+ "client": {
+ "token": "TARK",
+ "symbol": "TѦ",
+ "explorer": "http://texplorer.ark.io"
+ }
+}
diff --git a/packages/core-container/__tests__/__stubs__/config/peers.json b/packages/core-container/__tests__/__stubs__/config/peers.json
new file mode 100644
index 0000000000..e212ecdd33
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/peers.json
@@ -0,0 +1,12 @@
+{
+ "list": [
+ {
+ "ip": "127.0.0.1",
+ "port": 4102
+ },
+ {
+ "ip": "127.0.0.1",
+ "port": 4202
+ }
+ ]
+}
diff --git a/packages/core-container/__tests__/__stubs__/config/plugins.js b/packages/core-container/__tests__/__stubs__/config/plugins.js
new file mode 100644
index 0000000000..2647260229
--- /dev/null
+++ b/packages/core-container/__tests__/__stubs__/config/plugins.js
@@ -0,0 +1,12 @@
+module.exports = {
+ "./plugin-a": {
+ enabled: true,
+ },
+ "./plugin-b": {
+ enabled: true,
+ property: "value",
+ },
+ "./plugin-c": {
+ enabled: true,
+ },
+};
diff --git a/packages/core-container/__tests__/__stubs__/plugin-a.js b/packages/core-container/__tests__/__stubs__/plugin-a.js
index c7595c7c40..cd92b85ba0 100644
--- a/packages/core-container/__tests__/__stubs__/plugin-a.js
+++ b/packages/core-container/__tests__/__stubs__/plugin-a.js
@@ -1,14 +1,14 @@
exports.plugin = {
- pkg: {
- name: 'stub/plugin-a',
- version: '1.0.0',
- },
- alias: 'stub-plugin-a',
- register(container, options) {
- return {
- container,
- options,
- }
- },
- deregister() {},
-}
+ pkg: {
+ name: "stub/plugin-a",
+ version: "1.0.0",
+ },
+ alias: "stub-plugin-a",
+ register(container, options) {
+ return {
+ container,
+ options,
+ };
+ },
+ deregister() {},
+};
diff --git a/packages/core-container/__tests__/__stubs__/plugin-b.js b/packages/core-container/__tests__/__stubs__/plugin-b.js
index 6023222e04..dc1dedccd5 100644
--- a/packages/core-container/__tests__/__stubs__/plugin-b.js
+++ b/packages/core-container/__tests__/__stubs__/plugin-b.js
@@ -1,14 +1,14 @@
exports.plugin = {
- pkg: {
- name: 'stub/plugin-b',
- version: '1.0.0',
- },
- alias: 'stub-plugin-b',
- register(container, options) {
- return {
- container,
- options,
- }
- },
- deregister() {},
-}
+ pkg: {
+ name: "stub/plugin-b",
+ version: "1.0.0",
+ },
+ alias: "stub-plugin-b",
+ register(container, options) {
+ return {
+ container,
+ options,
+ };
+ },
+ deregister() {},
+};
diff --git a/packages/core-container/__tests__/__stubs__/plugin-c.js b/packages/core-container/__tests__/__stubs__/plugin-c.js
index 7d262d985b..b303b97ce3 100644
--- a/packages/core-container/__tests__/__stubs__/plugin-c.js
+++ b/packages/core-container/__tests__/__stubs__/plugin-c.js
@@ -1,13 +1,13 @@
exports.plugin = {
- pkg: {
- name: 'stub/plugin-c',
- version: '1.0.0',
- },
- alias: 'stub-plugin-c',
- register(container, options) {
- return {
- container,
- options,
- }
- },
-}
+ pkg: {
+ name: "stub/plugin-c",
+ version: "1.0.0",
+ },
+ alias: "stub-plugin-c",
+ register(container, options) {
+ return {
+ container,
+ options,
+ };
+ },
+};
diff --git a/packages/core-container/__tests__/__stubs__/plugins.js b/packages/core-container/__tests__/__stubs__/plugins.js
index ad2a319c0c..2647260229 100644
--- a/packages/core-container/__tests__/__stubs__/plugins.js
+++ b/packages/core-container/__tests__/__stubs__/plugins.js
@@ -1,12 +1,12 @@
module.exports = {
- './plugin-a': {
- enabled: true,
- },
- './plugin-b': {
- enabled: true,
- property: 'value',
- },
- './plugin-c': {
- enabled: true,
- },
-}
+ "./plugin-a": {
+ enabled: true,
+ },
+ "./plugin-b": {
+ enabled: true,
+ property: "value",
+ },
+ "./plugin-c": {
+ enabled: true,
+ },
+};
diff --git a/packages/core-container/__tests__/config/loaders/file-loader.test.ts b/packages/core-container/__tests__/config/loaders/file-loader.test.ts
new file mode 100644
index 0000000000..50c41588de
--- /dev/null
+++ b/packages/core-container/__tests__/config/loaders/file-loader.test.ts
@@ -0,0 +1,34 @@
+import "jest-extended";
+
+import { resolve } from "path";
+import { fileLoader } from "../../../src/config/loaders";
+import { Network } from "../../../src/config/network";
+
+const stubConfigPath = resolve(__dirname, "../../__stubs__/config");
+
+const stubConfig = {
+ delegates: require(resolve(__dirname, "../../__stubs__/config/delegates")),
+ peers: require(resolve(__dirname, "../../__stubs__/config/peers")),
+ plugins: require(resolve(__dirname, "../../__stubs__/config/plugins")),
+};
+
+beforeEach(() => {
+ process.env.CORE_PATH_CONFIG = stubConfigPath;
+});
+
+afterEach(() => {
+ delete process.env.CORE_PATH_CONFIG;
+});
+
+describe("File Loader", () => {
+ it("should fail without a config", async () => {
+ await expect(fileLoader.setUp(null)).rejects.toThrowError("Invalid network configuration provided.");
+ });
+
+ it("should succeed with a config", async () => {
+ const { config } = await fileLoader.setUp(Network.setUp({}));
+
+ expect(config.delegates).toEqual(stubConfig.delegates);
+ expect(config.peers).toEqual(stubConfig.peers);
+ });
+});
diff --git a/packages/core-container/__tests__/config/loaders/remote-loader.test.ts b/packages/core-container/__tests__/config/loaders/remote-loader.test.ts
new file mode 100644
index 0000000000..7033b2eaf0
--- /dev/null
+++ b/packages/core-container/__tests__/config/loaders/remote-loader.test.ts
@@ -0,0 +1,139 @@
+import "jest-extended";
+
+import { existsSync, pathExistsSync, removeSync } from "fs-extra";
+import * as mockProcess from "jest-mock-process";
+
+import axios from "axios";
+import MockAdapter from "axios-mock-adapter";
+import { RemoteLoader } from "../../../src/config/loaders";
+
+const axiosMock = new MockAdapter(axios);
+const configDir = "./__test-remote-config__";
+
+let testSubject;
+
+afterAll(() => {
+ removeSync(configDir);
+});
+
+beforeEach(() => {
+ testSubject = new RemoteLoader({
+ remote: "127.0.0.1:4002",
+ config: configDir,
+ data: "./data",
+ });
+});
+
+afterEach(() => {
+ axiosMock.reset();
+});
+
+describe.skip("Remote Loader", () => {
+ it("should ensure the config directory exists", () => {
+ expect(pathExistsSync(testSubject.config)).toBeTrue();
+ });
+
+ describe("__configureNetwork", () => {
+ it("should not be OK", async () => {
+ const mockExit = mockProcess.mockProcessExit();
+
+ axiosMock.onGet("http://127.0.0.1:4002/config/network").reply(() => [404, {}]);
+
+ await testSubject.__configureNetwork();
+
+ expect(mockExit).toHaveBeenCalledWith(1);
+ });
+
+ it("should be OK", async () => {
+ axiosMock.onGet("http://127.0.0.1:4002/config/network").reply(() => [
+ 200,
+ {
+ data: require("../../crypto/src/networks/devnet.json"),
+ },
+ ]);
+
+ await testSubject.__configureNetwork();
+
+ expect(existsSync(`${configDir}/network.json`)).toBeTrue();
+ });
+ });
+
+ describe("__configureGenesisBlock", () => {
+ it("should not be OK", async () => {
+ axiosMock.onGet("http://127.0.0.1:4002/config/genesis-block").reply(() => [404, {}]);
+
+ await expect(testSubject.__configureGenesisBlock()).rejects.toThrowError();
+ });
+
+ it("should be OK", async () => {
+ axiosMock.onGet("http://127.0.0.1:4002/config/genesis-block").reply(() => [
+ 200,
+ {
+ data: require("../../core/src/config/devnet/genesisBlock.json"),
+ },
+ ]);
+
+ await testSubject.__configureGenesisBlock();
+
+ expect(existsSync(`${configDir}/genesisBlock.json`)).toBeTrue();
+ });
+ });
+
+ describe("__configurePeers", () => {
+ it("should not be OK", async () => {
+ const mockExit = mockProcess.mockProcessExit();
+
+ axiosMock.onGet("http://127.0.0.1:4002/config/peers").reply(() => [404, {}]);
+
+ await testSubject.__configurePeers();
+
+ expect(mockExit).toHaveBeenCalledWith(1);
+ });
+
+ it("should be OK", async () => {
+ axiosMock.onGet("http://127.0.0.1:4002/config/peers").reply(() => [
+ 200,
+ {
+ data: require("../../core/src/config/devnet/peers.json"),
+ },
+ ]);
+
+ await testSubject.__configurePeers();
+
+ expect(existsSync(`${configDir}/peers.json`)).toBeTrue();
+ });
+ });
+
+ describe("__configureDelegates", () => {
+ it("should not be OK", async () => {
+ const mockExit = mockProcess.mockProcessExit();
+
+ axiosMock.onGet("http://127.0.0.1:4002/config/delegates").reply(() => [404, {}]);
+
+ await testSubject.__configureDelegates();
+
+ expect(mockExit).toHaveBeenCalledWith(1);
+ });
+
+ it("should be OK", async () => {
+ axiosMock.onGet("http://127.0.0.1:4002/config/delegates").reply(() => [
+ 200,
+ {
+ data: require("../../core/src/config/devnet/delegates.json"),
+ },
+ ]);
+
+ await testSubject.__configureDelegates();
+
+ expect(existsSync(`${configDir}/delegates.json`)).toBeTrue();
+ });
+ });
+
+ describe("__configurePlugins", () => {
+ it("should be OK", async () => {
+ await testSubject.__configurePlugins({ name: "devnet" });
+
+ expect(existsSync(`${configDir}/plugins.js`)).toBeTrue();
+ });
+ });
+});
diff --git a/packages/core-container/__tests__/container.test.js b/packages/core-container/__tests__/container.test.js
deleted file mode 100644
index f0ac16b9f1..0000000000
--- a/packages/core-container/__tests__/container.test.js
+++ /dev/null
@@ -1,48 +0,0 @@
-const path = require('path')
-const { asValue } = require('awilix')
-
-let app
-beforeEach(async () => {
- app = require('../lib')
-
- await app.setUp(
- '2.0.0',
- {
- data: 'fake-path',
- config: path.resolve(__dirname, '../../core/lib/config/testnet'),
- token: 'ark',
- network: 'testnet',
- },
- {
- skipPlugins: true,
- },
- )
-})
-
-describe('Container', () => {
- it('should be an object', () => {
- expect(app).toBeObject()
- })
-
- it('should add a new registration', () => {
- app.register('fake', asValue('value'))
-
- expect(app.container.registrations.fake).toBeTruthy()
- })
-
- it('should resolve a registration', () => {
- app.register('fake', asValue('value'))
-
- expect(app.resolve('fake')).toBe('value')
- })
-
- it('should determine if a registration exists', () => {
- app.register('fake', asValue('value'))
-
- expect(app.has('fake')).toBeTrue()
- })
-
- it('should resolve and export paths', () => {
- expect(process.env.ARK_PATH_DATA).toEqual(path.resolve('fake-path'))
- })
-})
diff --git a/packages/core-container/__tests__/container.test.ts b/packages/core-container/__tests__/container.test.ts
new file mode 100644
index 0000000000..bdd57ad12c
--- /dev/null
+++ b/packages/core-container/__tests__/container.test.ts
@@ -0,0 +1,86 @@
+import "jest-extended";
+
+import { asValue } from "awilix";
+import { resolve } from "path";
+import { app } from "../src";
+
+const dummyPlugin = {
+ name: "dummy",
+ version: "0.1.0",
+ plugin: { key: "value" },
+ options: { key: "value" },
+};
+
+beforeEach(async () => {
+ await app.setUp(
+ "2.0.0",
+ {
+ data: "fake-path",
+ config: resolve(__dirname, "../../core/src/config/testnet"),
+ token: "ark",
+ network: "testnet",
+ },
+ {
+ skipPlugins: true,
+ },
+ );
+});
+
+describe("Container", () => {
+ it("should add a new registration", () => {
+ app.register("fake", asValue("value"));
+
+ expect(app.has("fake")).toBeTruthy();
+ expect(app.has("unregistered")).toBeFalsy();
+ });
+
+ it("should resolve a registration", () => {
+ app.register("fake", asValue("value"));
+
+ expect(app.resolve("fake")).toBe("value");
+ });
+
+ it("should resolve a plugin", () => {
+ app.register("fake", asValue(dummyPlugin));
+
+ expect(app.resolvePlugin("fake")).toEqual(dummyPlugin.plugin);
+ });
+
+ it("should resolve the options of a plugin", () => {
+ app.register("fake", asValue(dummyPlugin));
+
+ expect(app.resolveOptions("fake")).toEqual(dummyPlugin.options);
+ });
+
+ it("should determine if a registration exists", () => {
+ app.register("fake", asValue("value"));
+
+ expect(app.has("fake")).toBeTrue();
+ });
+
+ it("should determine if a registration exists", () => {
+ app.register("fake", asValue("value"));
+
+ expect(app.has("fake")).toBeTrue();
+ });
+
+ it("should get the hashid", () => {
+ expect(app.getHashid()).toBeString();
+ });
+
+ it("should get the version", () => {
+ expect(app.getVersion()).toBe("2.0.0");
+ });
+
+ it("should set the version", () => {
+ expect(app.getVersion()).toBe("2.0.0");
+
+ app.setVersion("3.0.0");
+
+ expect(app.getVersion()).toBe("3.0.0");
+ });
+
+ it("should resolve and export paths", () => {
+ expect(process.env.CORE_PATH_DATA).toEqual(resolve("fake-path"));
+ });
+});
diff --git a/packages/core-container/__tests__/registrars/plugin.test.js b/packages/core-container/__tests__/registrars/plugin.test.js
deleted file mode 100644
index 7e09eb1cdb..0000000000
--- a/packages/core-container/__tests__/registrars/plugin.test.js
+++ /dev/null
@@ -1,121 +0,0 @@
-const path = require('path')
-const Container = require('../../lib/container')
-const PluginRegistrar = require('../../lib/registrars/plugin')
-
-const stubPluginPath = path.resolve(__dirname, '../__stubs__')
-
-let instance
-beforeEach(() => {
- process.env.ARK_PATH_CONFIG = stubPluginPath
-
- instance = new PluginRegistrar(new Container())
-})
-
-describe('Plugin Registrar', () => {
- it('should be an object', () => {
- expect(instance).toBeObject()
- })
-
- it('should load the plugins and their options', () => {
- ;['a', 'b', 'c'].forEach(char => {
- const pluginName = `./plugin-${char}`
- expect(instance.plugins[pluginName]).toBeObject()
- })
-
- expect(instance.plugins['./plugin-b']).toHaveProperty('property', 'value')
- })
-
- describe('register', () => {
- it('should be a function', () => {
- expect(instance.setUp).toBeFunction()
- })
-
- it('should register plugins with relative paths', async () => {
- const pluginName = './plugin-a'
-
- await instance.register(pluginName, { enabled: false })
-
- expect(instance.container.has('stub-plugin-a')).toBeTrue()
- })
-
- it.skip('should register plugins with @ paths', () => {})
- })
-
- describe('setUp', () => {
- it('should be a function', () => {
- expect(instance.setUp).toBeFunction()
- })
-
- it('should register each plugin', async () => {
- await instance.setUp()
- ;['a', 'b', 'c'].forEach(char => {
- expect(instance.container.has(`stub-plugin-${char}`)).toBeTrue()
- })
- })
-
- describe('with a plugin name as the value of the `exit` option', () => {
- it('should register the plugins but ignore the rest', async () => {
- instance.options.exit = './plugin-a'
-
- await instance.setUp()
-
- expect(instance.container.has('stub-plugin-a')).toBeTrue()
- ;['b', 'c'].forEach(char => {
- expect(instance.container.has(`stub-plugin-${char}`)).toBeFalse()
- })
- })
- })
- })
-
- describe('tearDown', () => {
- const plugins = {}
-
- beforeEach(async () => {
- await instance.setUp()
- ;['a', 'b', 'c'].forEach(char => {
- expect(instance.container.has(`stub-plugin-${char}`)).toBeTrue()
- })
- ;['a', 'b', 'c'].forEach(char => {
- plugins[char] = require(`${stubPluginPath}/plugin-${char}`)
- })
- })
-
- it('should deregister plugins supporting deregister', async () => {
- ;['a', 'b'].forEach(char => {
- plugins[char].plugin.deregister = jest.fn()
- })
-
- await instance.tearDown()
- ;['a', 'b'].forEach(char => {
- expect(plugins[char].plugin.deregister).toHaveBeenCalled()
- })
-
- expect(plugins.c.deregister).not.toBeDefined()
- })
-
- it('should deregister all the plugins in inverse order', async () => {
- const spy = jest.fn()
- ;['a', 'b'].forEach(char => {
- plugins[char].plugin.deregister = () => spy(char)
- })
-
- await instance.tearDown()
-
- expect(spy).toHaveBeenNthCalledWith(1, 'b')
- expect(spy).toHaveBeenNthCalledWith(2, 'a')
- })
- })
-
- describe('__castOptions', () => {
- it('should cast options', async () => {
- const options = {
- number: '1',
- notANumber: '0.0.0.0',
- }
-
- instance.__castOptions(options)
- expect(options.number).toEqual(1)
- expect(options.notANumber).toEqual('0.0.0.0')
- })
- })
-})
diff --git a/packages/core-container/__tests__/registrars/plugin.test.ts b/packages/core-container/__tests__/registrars/plugin.test.ts
new file mode 100644
index 0000000000..657e0b5198
--- /dev/null
+++ b/packages/core-container/__tests__/registrars/plugin.test.ts
@@ -0,0 +1,114 @@
+import "jest-extended";
+
+import { resolve } from "path";
+import { Container } from "../../src/container";
+import { PluginRegistrar } from "../../src/registrars/plugin";
+
+const stubPluginPath = resolve(__dirname, "../__stubs__");
+
+let instance;
+beforeEach(() => {
+ process.env.CORE_PATH_CONFIG = stubPluginPath;
+
+ instance = new PluginRegistrar(new Container());
+});
+
+describe("Plugin Registrar", () => {
+ it("should load the plugins and their options", () => {
+ ["a", "b", "c"].forEach(char => {
+ const pluginName = `./plugin-${char}`;
+ expect(instance.plugins[pluginName]).toBeObject();
+ });
+
+ expect(instance.plugins["./plugin-b"]).toHaveProperty("property", "value");
+ });
+
+ describe("register", () => {
+ it("should register plugins with relative paths", async () => {
+ const pluginName = "./plugin-a";
+
+ await instance.register(pluginName, { enabled: false });
+
+ expect(instance.container.has("stub-plugin-a")).toBeTrue();
+ });
+ });
+
+ describe("setUp", () => {
+ it("should register each plugin", async () => {
+ await instance.setUp();
+ const plugins = ["a", "b", "c"];
+ plugins.forEach(char => {
+ expect(instance.container.has(`stub-plugin-${char}`)).toBeTrue();
+ });
+ });
+
+ describe("with a plugin name as the value of the `exit` option", () => {
+ it("should register the plugins but ignore the rest", async () => {
+ instance.options.exit = "./plugin-a";
+
+ await instance.setUp();
+
+ expect(instance.container.has("stub-plugin-a")).toBeTrue();
+ const plugins = ["b", "c"];
+ plugins.forEach(char => {
+ expect(instance.container.has(`stub-plugin-${char}`)).toBeFalse();
+ });
+ });
+ });
+ });
+
+ describe("tearDown", () => {
+ const plugins: any = {};
+
+ beforeEach(async () => {
+ await instance.setUp();
+ const dummyPlugins = ["a", "b", "c"];
+ dummyPlugins.forEach(char => {
+ expect(instance.container.has(`stub-plugin-${char}`)).toBeTrue();
+ });
+ dummyPlugins.forEach(char => {
+ plugins[char] = require(`${stubPluginPath}/plugin-${char}`);
+ });
+ });
+
+ it("should deregister plugins supporting deregister", async () => {
+ const dummyPlugins = ["a", "b"];
+ dummyPlugins.forEach(char => {
+ plugins[char].plugin.deregister = jest.fn();
+ });
+
+ await instance.tearDown();
+ dummyPlugins.forEach(char => {
+ expect(plugins[char].plugin.deregister).toHaveBeenCalled();
+ });
+
+ expect(plugins.c.deregister).not.toBeDefined();
+ });
+
+ it("should deregister all the plugins in inverse order", async () => {
+ const spy = jest.fn();
+ const dummyPlugins = ["a", "b"];
+ dummyPlugins.forEach(char => {
+ plugins[char].plugin.deregister = () => spy(char);
+ });
+
+ await instance.tearDown();
+
+ expect(spy).toHaveBeenNthCalledWith(1, "b");
+ expect(spy).toHaveBeenNthCalledWith(2, "a");
+ });
+ });
+
+ describe("__castOptions", () => {
+ it("should cast options", async () => {
+ const options = {
+ number: "1",
+ notANumber: "0.0.0.0",
+ };
+
+ instance.__castOptions(options);
+ expect(options.number).toEqual(1);
+ expect(options.notANumber).toEqual("0.0.0.0");
+ });
+ });
+});
diff --git a/packages/core-container/__tests__/remote-loader.test.js b/packages/core-container/__tests__/remote-loader.test.js
deleted file mode 100644
index ca4d002486..0000000000
--- a/packages/core-container/__tests__/remote-loader.test.js
+++ /dev/null
@@ -1,171 +0,0 @@
-const fs = require('fs-extra')
-const mockProcess = require('jest-mock-process')
-
-const axios = require('axios')
-const MockAdapter = require('axios-mock-adapter')
-const RemoteLoader = require('../lib/remote-loader')
-
-const axiosMock = new MockAdapter(axios)
-const configDir = './__test-remote-config__'
-
-let testSubject
-
-afterAll(() => {
- fs.removeSync(configDir)
-})
-
-beforeEach(() => {
- testSubject = new RemoteLoader({
- remote: '127.0.0.1:4002',
- config: configDir,
- data: './data',
- })
-})
-
-afterEach(() => {
- axiosMock.reset()
-})
-
-describe('Remote Loader', () => {
- it('should be an object', () => {
- expect(testSubject).toBeObject()
- })
-
- it('should ensure the config directory exists', () => {
- expect(fs.pathExistsSync(testSubject.config)).toBeTrue()
- })
-
- describe('__configureNetwork', () => {
- it('should be a function', () => {
- expect(testSubject.__configureNetwork).toBeFunction()
- })
-
- it('should not be OK', async () => {
- const mockExit = mockProcess.mockProcessExit()
-
- axiosMock
- .onGet('http://127.0.0.1:4002/config/network')
- .reply(() => [404, {}])
-
- await testSubject.__configureNetwork()
-
- expect(mockExit).toHaveBeenCalledWith(1)
- })
-
- it('should be OK', async () => {
- axiosMock.onGet('http://127.0.0.1:4002/config/network').reply(() => [
- 200,
- {
- data: require('../../crypto/lib/networks/ark/devnet.json'),
- },
- ])
-
- await testSubject.__configureNetwork()
-
- expect(fs.existsSync(`${configDir}/network.json`)).toBeTrue()
- })
- })
-
- describe('__configureGenesisBlock', () => {
- it('should be a function', () => {
- expect(testSubject.__configureGenesisBlock).toBeFunction()
- })
-
- it('should not be OK', async () => {
- axiosMock
- .onGet('http://127.0.0.1:4002/config/genesis-block')
- .reply(() => [404, {}])
-
- await expect(testSubject.__configureGenesisBlock()).rejects.toThrowError()
- })
-
- it('should be OK', async () => {
- axiosMock
- .onGet('http://127.0.0.1:4002/config/genesis-block')
- .reply(() => [
- 200,
- {
- data: require('../../core/lib/config/devnet/genesisBlock.json'),
- },
- ])
-
- await testSubject.__configureGenesisBlock()
-
- expect(fs.existsSync(`${configDir}/genesisBlock.json`)).toBeTrue()
- })
- })
-
- describe('__configurePeers', () => {
- it('should be a function', () => {
- expect(testSubject.__configurePeers).toBeFunction()
- })
-
- it('should not be OK', async () => {
- const mockExit = mockProcess.mockProcessExit()
-
- axiosMock
- .onGet('http://127.0.0.1:4002/config/peers')
- .reply(() => [404, {}])
-
- await testSubject.__configurePeers()
-
- expect(mockExit).toHaveBeenCalledWith(1)
- })
-
- it('should be OK', async () => {
- axiosMock.onGet('http://127.0.0.1:4002/config/peers').reply(() => [
- 200,
- {
- data: require('../../core/lib/config/devnet/peers.json'),
- },
- ])
-
- await testSubject.__configurePeers()
-
- expect(fs.existsSync(`${configDir}/peers.json`)).toBeTrue()
- })
- })
-
- describe('__configureDelegates', () => {
- it('should be a function', () => {
- expect(testSubject.__configureDelegates).toBeFunction()
- })
-
- it('should not be OK', async () => {
- const mockExit = mockProcess.mockProcessExit()
-
- axiosMock
- .onGet('http://127.0.0.1:4002/config/delegates')
- .reply(() => [404, {}])
-
- await testSubject.__configureDelegates()
-
- expect(mockExit).toHaveBeenCalledWith(1)
- })
-
- it('should be OK', async () => {
- axiosMock.onGet('http://127.0.0.1:4002/config/delegates').reply(() => [
- 200,
- {
- data: require('../../core/lib/config/devnet/delegates.json'),
- },
- ])
-
- await testSubject.__configureDelegates()
-
- expect(fs.existsSync(`${configDir}/delegates.json`)).toBeTrue()
- })
- })
-
- describe('__configurePlugins', () => {
- it('should be a function', () => {
- expect(testSubject.__configurePlugins).toBeFunction()
- })
-
- it('should be OK', async () => {
- await testSubject.__configurePlugins({ name: 'devnet' })
-
- expect(fs.existsSync(`${configDir}/plugins.js`)).toBeTrue()
- })
- })
-})
diff --git a/packages/core-container/jest.config.js b/packages/core-container/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-container/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-container/lib/container.js b/packages/core-container/lib/container.js
deleted file mode 100644
index f5d25cf845..0000000000
--- a/packages/core-container/lib/container.js
+++ /dev/null
@@ -1,247 +0,0 @@
-const { createContainer } = require('awilix')
-const semver = require('semver')
-const delay = require('delay')
-const PluginRegistrar = require('./registrars/plugin')
-const Environment = require('./environment')
-const RemoteLoader = require('./remote-loader')
-
-module.exports = class Container {
- /**
- * Create a new container instance.
- * @constructor
- */
- constructor() {
- this.container = createContainer()
- this.exitEvents = ['SIGINT', 'exit']
-
- /**
- * May be used by CLI programs to suppress the shutdown
- * messages.
- */
- this.silentShutdown = false
-
- /**
- * The git commit hash of the repository. Used during development to
- * easily idenfity nodes based on their commit hash and version.
- */
- try {
- this.hashid = require('child_process')
- .execSync('git rev-parse --short=8 HEAD')
- .toString()
- .trim()
- } catch (e) {
- this.hashid = 'unknown'
- }
- }
-
- /**
- * Set up the app.
- * @param {String} version
- * @param {Object} variables
- * @param {Object} options
- * @return {void}
- */
- async setUp(version, variables, options = {}) {
- this.__registerExitHandler()
-
- this.setVersion(version)
-
- if (variables.remote) {
- const remoteLoader = new RemoteLoader(variables)
- await remoteLoader.setUp()
- }
-
- this.env = new Environment(variables)
- this.env.setUp()
-
- if (options.skipPlugins) {
- return
- }
-
- // TODO: Move this out eventually - not really the responsibility of the container
- this.plugins = new PluginRegistrar(this, options)
- await this.plugins.setUp()
- }
-
- /**
- * Tear down the app.
- * @return {Promise}
- */
- async tearDown() {
- return this.plugins.tearDown()
- }
-
- /**
- * Add a new registration.
- * @param {string} key
- * @return {Object}
- * @throws {Error}
- */
- register(name, resolver) {
- try {
- return this.container.register(name, resolver)
- } catch (err) {
- throw new Error(err.message)
- }
- }
-
- /**
- * Resolve a registration.
- * @param {string} key
- * @return {Object}
- * @throws {Error}
- */
- resolve(key) {
- try {
- return this.container.resolve(key)
- } catch (err) {
- throw new Error(err.message)
- }
- }
-
- /**
- * Resolve a plugin.
- * @param {string} key
- * @return {Object}
- * @throws {Error}
- */
- resolvePlugin(key) {
- try {
- return this.container.resolve(key).plugin
- } catch (err) {
- return null
- }
- }
-
- /**
- * Resolve the options of a plugin. Available before a plugin mounts.
- * @param {string} key
- * @return {Object}
- * @throws {Error}
- */
- resolveOptions(key) {
- return this.plugins.resolveOptions(key)
- }
-
- /**
- * Determine if the given registration exists.
- * @param {String} key
- * @return {Boolean}
- */
- has(key) {
- try {
- this.container.resolve(key)
-
- return true
- } catch (err) {
- return false
- }
- }
-
- /**
- * Force the container to exit and print the given message and associated error.
- * @param {String} message
- * @param {Error} error
- * @return {void}
- */
- forceExit(message, error = null) {
- this.exit(1, message, error)
- }
-
- /**
- * Exit the container with the given exitCode, message and associated error.
- * @param {Number} exitCode
- * @param {String} message
- * @param {Error} error
- * @return {void}
- */
- exit(exitCode, message, error = null) {
- this.shuttingDown = true
-
- const logger = this.resolvePlugin('logger')
- logger.error(':boom: Container force shutdown :boom:')
- logger.error(message)
-
- if (error) {
- logger.error(error.stack)
- }
-
- process.exit(exitCode)
- }
-
- /**
- * Get the application git commit hash.
- * @throws {String}
- */
- getHashid() {
- return this.hashid
- }
-
- /**
- * Get the application version.
- * @throws {String}
- */
- getVersion() {
- return this.version
- }
-
- /**
- * Set the application version.
- * @param {String} version
- * @return {void}
- */
- setVersion(version) {
- if (!semver.valid(version)) {
- this.forceExit(
- `The provided version ("${version}") is invalid. Please check https://semver.org/ and make sure you follow the spec.`,
- )
- }
-
- this.version = version
- }
-
- /**
- * Handle any exit signals.
- * @return {void}
- */
- __registerExitHandler() {
- const handleExit = async () => {
- if (this.shuttingDown) {
- return
- }
-
- this.shuttingDown = true
-
- const logger = this.resolvePlugin('logger')
- logger.suppressConsoleOutput(this.silentShutdown)
- logger.info(
- 'Ark Core is trying to gracefully shut down to avoid data corruption :pizza:',
- )
-
- try {
- const database = this.resolvePlugin('database')
- if (database) {
- const emitter = this.resolvePlugin('event-emitter')
-
- // Notify plugins about shutdown
- emitter.emit('shutdown')
-
- // Wait for event to be emitted and give time to finish
- await delay(1000)
-
- // Save dirty wallets
- await database.saveWallets(false)
- }
- } catch (error) {
- console.error(error.stack)
- }
-
- await this.plugins.tearDown()
-
- process.exit()
- }
-
- // Handle exit events
- this.exitEvents.forEach(eventType => process.on(eventType, handleExit))
- }
-}
diff --git a/packages/core-container/lib/environment.js b/packages/core-container/lib/environment.js
deleted file mode 100644
index 67aa61f0c7..0000000000
--- a/packages/core-container/lib/environment.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const fs = require('fs-extra')
-const path = require('path')
-const expandHomeDir = require('expand-home-dir')
-const { NetworkManager } = require('@arkecosystem/crypto')
-
-module.exports = class Environment {
- /**
- * Create a new environment instance.
- * @param {Object} variables
- * @return {void}
- */
- constructor(variables) {
- this.variables = variables
- }
-
- /**
- * Set up the environment variables.
- */
- setUp() {
- this.__exportPaths()
- this.__exportNetwork()
- this.__exportVariables()
- }
-
- /**
- * Export all path variables for the core environment.
- * @return {void}
- */
- __exportPaths() {
- const allowedKeys = ['config', 'data']
-
- for (const [key, value] of Object.entries(this.variables)) {
- if (allowedKeys.includes(key)) {
- process.env[`ARK_PATH_${key.toUpperCase()}`] = path.resolve(
- expandHomeDir(value),
- )
- }
- }
- }
-
- /**
- * Export all network variables for the core environment.
- * @return {void}
- */
- __exportNetwork() {
- let config
-
- if (this.variables.token && this.variables.network) {
- config = NetworkManager.findByName(
- this.variables.network,
- this.variables.token,
- )
- } else {
- try {
- const networkPath = path.resolve(
- expandHomeDir(`${process.env.ARK_PATH_CONFIG}/network.json`),
- )
-
- config = require(networkPath)
- } catch (error) {
- config = false
- }
- }
-
- if (!config) {
- throw new Error(
- "An invalid network configuration was provided or is inaccessible due to it's security settings.",
- )
- process.exit(1) // eslint-disable-line no-unreachable
- }
-
- process.env.ARK_NETWORK = JSON.stringify(config)
- process.env.ARK_NETWORK_NAME = config.name
- }
-
- /**
- * Export all additional variables for the core environment.
- * @return {void}
- */
- __exportVariables() {
- // Don't pollute the test environment, which is more in line with how
- // travis runs the tests.
- if (process.env.NODE_ENV === 'test') {
- return
- }
-
- const envPath = expandHomeDir(`${process.env.ARK_PATH_DATA}/.env`)
-
- if (fs.existsSync(envPath)) {
- const env = require('envfile').parseFileSync(envPath)
-
- Object.keys(env).forEach(key => {
- process.env[key] = env[key]
- })
- }
- }
-}
diff --git a/packages/core-container/lib/index.js b/packages/core-container/lib/index.js
deleted file mode 100644
index 914fde2228..0000000000
--- a/packages/core-container/lib/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const Container = require('./container')
-
-/**
- * @type {Container}
- */
-module.exports = new Container()
diff --git a/packages/core-container/lib/registrars/plugin.js b/packages/core-container/lib/registrars/plugin.js
deleted file mode 100644
index 48035f49a9..0000000000
--- a/packages/core-container/lib/registrars/plugin.js
+++ /dev/null
@@ -1,255 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const path = require('path')
-const fs = require('fs')
-const semver = require('semver')
-const isString = require('lodash/isString')
-const expandHomeDir = require('expand-home-dir')
-const Hoek = require('hoek')
-const { asValue } = require('awilix')
-
-module.exports = class PluginRegistrars {
- /**
- * Create a new plugin manager instance.
- * @param {Container} container
- * @param {Object} options
- */
- constructor(container, options = {}) {
- this.container = container
- this.plugins = this.__loadPlugins()
- this.resolvedPlugins = []
- this.options = this.__castOptions(options)
- this.deregister = []
- }
-
- /**
- * Set up all available plugins.
- * @return {void}
- */
- resolveOptions(name) {
- if (!this.resolvedPlugins.length) {
- this.resolvedPlugins = Object.keys(this.plugins).map(
- item => require(item).plugin,
- )
- }
-
- const plugin = Object.values(this.resolvedPlugins).find(
- item => item.alias === name || item.pkg.name === name,
- )
-
- return this.__applyToDefaults(
- plugin.pkg.name,
- plugin.defaults,
- this.plugins[plugin.pkg.name],
- )
- }
-
- /**
- * Set up all available plugins.
- * @return {void}
- */
- async setUp() {
- for (const [name, options] of Object.entries(this.plugins)) {
- await this.register(name, options)
-
- if (
- (this.options.exit && this.options.exit === name) ||
- this.container.shuttingDown
- ) {
- break
- }
- }
- }
-
- /**
- * Deregister all plugins.
- * @return {void}
- */
- async tearDown() {
- const plugins = this.deregister.reverse()
-
- for (let i = 0; i < plugins.length; i++) {
- await plugins[i].plugin.deregister(this.container, plugins[i].options)
- }
- }
-
- /**
- * Register a plugin.
- * @param {String} name
- * @param {Object} options
- * @return {void}
- */
- async register(name, options = {}) {
- if (!this.__shouldBeRegistered(name)) {
- return
- }
-
- if (this.plugins[name]) {
- options = Hoek.applyToDefaults(this.plugins[name], options)
- }
-
- return this.__registerWithContainer(name, options)
- }
-
- /**
- * Register a plugin.
- * @param {Object} plugin
- * @param {Object} options
- * @return {void}
- */
- async __registerWithContainer(plugin, options = {}) {
- const item = this.__resolve(plugin)
-
- if (!item.plugin.register) {
- return
- }
-
- if (item.plugin.extends) {
- await this.__registerWithContainer(item.plugin.extends)
- }
-
- const name = item.plugin.name || item.plugin.pkg.name
- const version = item.plugin.version || item.plugin.pkg.version
- const defaults = item.plugin.defaults || item.plugin.pkg.defaults
- const alias = item.plugin.alias || item.plugin.pkg.alias
-
- if (!semver.valid(version)) {
- throw new Error(
- `The plugin "${name}" provided an invalid version "${version}". Please check https://semver.org/ and make sure you follow the spec.`,
- )
- }
-
- options = this.__applyToDefaults(name, defaults, options)
-
- plugin = await item.plugin.register(this.container, options || {})
- this.container.register(
- alias || name,
- asValue({
- name,
- version,
- plugin,
- options,
- }),
- )
-
- if (item.plugin.deregister) {
- this.deregister.push({ plugin: item.plugin, options })
- }
- }
-
- /**
- * Apply the given options to the defaults of the given plugin.
- *
- * @param {String} name
- * @param {Object} defaults
- * @param {Object} options
- * @return {Object}
- */
- __applyToDefaults(name, defaults, options) {
- if (defaults) {
- options = Hoek.applyToDefaults(defaults, options)
- }
-
- if (this.options.options && this.options.options[name]) {
- options = Hoek.applyToDefaults(options, this.options.options[name])
- }
-
- return this.__castOptions(options)
- }
-
- /**
- * When the env is used to overwrite options, we get strings even if we
- * expect a number. This is in most cases not desired and leads to side-
- * effects. Here is assumed all numeric strings except blacklisted ones
- * should be treated as numbers.
- * @param {Object} options
- * @return {Object} options
- */
- __castOptions(options) {
- const blacklist = []
- const regex = new RegExp(/^\d+$/)
- Object.keys(options).forEach(key => {
- const value = options[key]
- if (isString(value) && !blacklist.includes(key) && regex.test(value)) {
- options[key] = +value
- }
- })
-
- return options
- }
-
- /**
- * Resolve a plugin instance.
- * @param {(String|Object)} plugin - plugin name or path, or object
- * @return {Object}
- */
- __resolve(plugin) {
- let item = {}
-
- if (isString(plugin)) {
- if (plugin.startsWith('.')) {
- plugin = path.resolve(
- `${path.dirname(this.pluginsConfigPath)}/${plugin}`,
- )
- } else if (!plugin.startsWith('@')) {
- plugin = path.resolve(plugin)
- }
-
- try {
- item = require(plugin)
- } catch (error) {
- console.error(error)
- }
-
- if (!item.plugin) {
- item = { plugin: item }
- }
- }
-
- return item
- }
-
- /**
- * Determine if the given plugin should be registered.
- * @param {String} name
- * @return {Boolean}
- */
- __shouldBeRegistered(name) {
- let register = true
-
- if (this.options.include) {
- register = this.options.include.includes(name)
- }
-
- if (this.options.exclude) {
- register = !this.options.exclude.includes(name)
- }
-
- return register
- }
-
- /**
- * Load plugins from any of the available files (plugins.js or plugins.json).
- * @return {[Object|void]}
- */
- __loadPlugins() {
- const files = ['plugins.js', 'plugins.json']
-
- for (const file of files) {
- const configPath = path.resolve(
- expandHomeDir(`${process.env.ARK_PATH_CONFIG}/${file}`),
- )
-
- if (fs.existsSync(configPath)) {
- this.pluginsConfigPath = configPath
-
- return require(configPath)
- }
- }
-
- throw new Error(
- "An invalid configuration was provided or is inaccessible due to it's security settings.",
- )
- process.exit(1) // eslint-disable-line no-unreachable
- }
-}
diff --git a/packages/core-container/lib/remote-loader.js b/packages/core-container/lib/remote-loader.js
deleted file mode 100644
index f4e5896b75..0000000000
--- a/packages/core-container/lib/remote-loader.js
+++ /dev/null
@@ -1,112 +0,0 @@
-const axios = require('axios')
-const expandHomeDir = require('expand-home-dir')
-const fs = require('fs-extra')
-const path = require('path')
-const {
- models: { Block },
-} = require('@arkecosystem/crypto')
-const { spawnSync } = require('child_process')
-
-module.exports = class RemoteLoader {
- constructor(variables) {
- this.remote = variables.remote
- this.config = expandHomeDir(variables.config)
- this.data = expandHomeDir(variables.data)
-
- fs.ensureDirSync(this.config)
- }
-
- async setUp() {
- const network = await this.__configureNetwork()
-
- await this.__configureGenesisBlock()
-
- await this.__configurePeers()
-
- await this.__configureDelegates()
-
- this.__configurePlugins(network)
-
- this.__configureDatabase(network)
- }
-
- async __configureNetwork() {
- const network = await this.__getConfig('network')
-
- this.__writeConfig('network', network)
-
- return network
- }
-
- async __configureGenesisBlock() {
- const genesisBlock = await this.__getConfig('genesis-block')
- const genesisBlockModel = new Block(genesisBlock)
-
- if (!genesisBlockModel.verification.verified) {
- console.error(
- 'Failed to verify the genesis block. Try another remote host.',
- )
- process.exit(1)
- }
-
- this.__writeConfig('genesisBlock', genesisBlock)
- }
-
- async __configurePeers() {
- const peers = await this.__getConfig('peers')
-
- this.__writeConfig('peers', peers)
- }
-
- async __configureDelegates() {
- const delegates = await this.__getConfig('delegates')
-
- this.__writeConfig('delegates', delegates)
- }
-
- __configurePlugins(network) {
- const plugins = path.resolve(
- __dirname,
- `../../core/lib/config/${network.name}/plugins.js`,
- )
-
- fs.copySync(plugins, `${this.config}/plugins.js`)
- }
-
- __configureDatabase(network) {
- const command = spawnSync('createdb', [`ark_${network.name}`])
-
- if (command.stderr.length > 0) {
- console.error(command.stderr.toString())
- process.exit(1)
- }
-
- console.info(command.stdout.toString())
- }
-
- async __getConfig(type) {
- try {
- const { data } = await axios.get(`http://${this.remote}/config/${type}`, {
- headers: { 'Content-Type': 'application/json' },
- })
-
- return data.data
- } catch (error) {
- if (!this.__exists(type)) {
- console.error(error.message)
- process.exit(1)
- }
- }
- }
-
- __writeConfig(file, data) {
- fs.writeFileSync(
- `${this.config}/${file}.json`,
- JSON.stringify(data, null, 4),
- )
- }
-
- __exists(file) {
- return fs.existsSync(`${this.config}/${file}.json`)
- }
-}
diff --git a/packages/core-container/package.json b/packages/core-container/package.json
index b6c34cfe71..f0e4c9b12c 100644
--- a/packages/core-container/package.json
+++ b/packages/core-container/package.json
@@ -1,40 +1,68 @@
{
- "name": "@arkecosystem/core-container",
- "description": "Container for Ark Core",
- "version": "0.2.0",
- "contributors": [
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/crypto": "~0.2",
- "awilix": "^4.0.1",
- "axios": "^0.18.0",
- "delay": "^4.1.0",
- "envfile": "^2.3.0",
- "expand-home-dir": "^0.0.3",
- "fs-extra": "^7.0.1",
- "hoek": "^6.1.1",
- "lodash.isstring": "^4.0.1",
- "semver": "^5.6.0"
- },
- "devDependencies": {
- "axios-mock-adapter": "^1.15.0",
- "jest-mock-process": "^1.1.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-container",
+ "description": "Container for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index",
+ "types": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "yarn lint && yarn build",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/fs-extra": "^5.0.4",
+ "@types/hoek": "^4.1.3",
+ "@types/joi": "^14.0.1",
+ "@types/lodash.get": "^4.4.4",
+ "@types/lodash.isstring": "^4.0.4",
+ "@types/lodash.set": "^4.3.4",
+ "@types/semver": "^5.5.0",
+ "awilix": "^4.0.1",
+ "axios": "^0.18.0",
+ "delay": "^4.1.0",
+ "env-paths": "^2.0.0",
+ "envfile": "^2.3.0",
+ "expand-home-dir": "^0.0.3",
+ "fs-extra": "^7.0.1",
+ "hoek": "^6.1.2",
+ "joi": "^14.3.0",
+ "lodash.get": "^4.4.2",
+ "lodash.isstring": "^4.0.1",
+ "lodash.set": "^4.3.2",
+ "semver": "^5.6.0"
+ },
+ "devDependencies": {
+ "@types/env-paths": "^1.0.2",
+ "axios-mock-adapter": "^1.15.0",
+ "jest-mock-process": "^1.1.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-container/src/config/index.ts b/packages/core-container/src/config/index.ts
new file mode 100644
index 0000000000..2c370cb9ad
--- /dev/null
+++ b/packages/core-container/src/config/index.ts
@@ -0,0 +1,66 @@
+import { configManager as crypto, HashAlgorithms } from "@arkecosystem/crypto";
+import get from "lodash/get";
+import set from "lodash/set";
+import { fileLoader, RemoteLoader } from "./loaders";
+import { Network } from "./network";
+
+class Config {
+ private config: Record;
+
+ public async setUp(opts) {
+ if (opts.remote) {
+ const remoteLoader = new RemoteLoader(opts);
+ await remoteLoader.setUp();
+ }
+
+ const network = Network.setUp(opts);
+
+ const { files } = await fileLoader.setUp(network);
+
+ this.config = files;
+
+ this.configureCrypto(network);
+
+ return this;
+ }
+
+ public all(): any {
+ return this.config;
+ }
+
+ public get(key: string, defaultValue: any = null): any {
+ return get(this.config, key, defaultValue);
+ }
+
+ public set(key: string, value: any): void {
+ set(this.config, key, value);
+ }
+
+ /**
+ * Get constants for the specified height.
+ */
+ public getMilestone(height: number): any {
+ return crypto.getMilestone(height);
+ }
+
+ /**
+ * Configure the @arkecosystem/crypto package.
+ * @return {void}
+ */
+ private configureCrypto(value: any): void {
+ crypto.setConfig(value);
+
+ this.config.network = crypto.all();
+ this.config.exceptions = crypto.get("exceptions");
+ this.config.milestones = crypto.get("milestones");
+ this.config.genesisBlock = crypto.get("genesisBlock");
+
+ // Calculate milestone hash
+ const milestonesBuffer = Buffer.from(JSON.stringify(this.config.milestones));
+ this.config.milestoneHash = HashAlgorithms.sha256(milestonesBuffer)
+ .slice(0, 8)
+ .toString("hex");
+ }
+}
+
+export const configManager = new Config();
diff --git a/packages/core-container/src/config/loaders/file-loader.ts b/packages/core-container/src/config/loaders/file-loader.ts
new file mode 100644
index 0000000000..8120c53b44
--- /dev/null
+++ b/packages/core-container/src/config/loaders/file-loader.ts
@@ -0,0 +1,101 @@
+import { configManager } from "@arkecosystem/crypto";
+import axios from "axios";
+import { existsSync, readdirSync, writeFileSync } from "fs-extra";
+import Joi from "joi";
+import { basename, extname, resolve } from "path";
+import { schemaConfig } from "../schema";
+
+class FileLoader {
+ /**
+ * Make the config instance.
+ * @param {Object} opts
+ * @return {Loader}
+ */
+ public async setUp(opts) {
+ if (!opts) {
+ throw new Error("Invalid network configuration provided.");
+ }
+
+ const files = await this.createFromDirectory();
+
+ const { value, error } = Joi.validate(files, schemaConfig);
+
+ if (error) {
+ throw error;
+ }
+
+ return { config: value, files };
+ }
+
+ /**
+ * Load and bind the config.
+ * @return {void}
+ */
+ private async createFromDirectory() {
+ const files: Record = this.getFiles();
+
+ for (const [key, value] of Object.entries(files)) {
+ files[key] = require(value);
+ }
+
+ await this.buildPeers(files.peers);
+
+ return files;
+ }
+
+ /**
+ * Get all config files.
+ * @return {Object}
+ */
+ private getFiles(): Record {
+ const basePath = resolve(process.env.CORE_PATH_CONFIG);
+
+ if (!existsSync(basePath)) {
+ throw new Error("An invalid configuration was provided or is inaccessible due to it's security settings.");
+ }
+
+ const configTree = {};
+ for (const file of readdirSync(basePath)) {
+ if ([".js", ".json"].includes(extname(file))) {
+ configTree[basename(file, extname(file))] = resolve(basePath, file);
+ }
+ }
+
+ return configTree;
+ }
+
+ /**
+ * Build the peer list either from a local file, remote file or object.
+ * @param {String} configFile
+ * @return {void}
+ */
+ private async buildPeers(configFile: any): Promise {
+ if (configFile.sources) {
+ for (const source of configFile.sources) {
+ // Local File...
+ if (source.startsWith("/")) {
+ configFile.list = require(source);
+
+ writeFileSync(configFile, JSON.stringify(configFile, null, 2));
+
+ break;
+ }
+
+ // URL...
+ try {
+ const response = await axios.get(source);
+
+ configFile.list = response.data;
+
+ writeFileSync(configFile, JSON.stringify(configFile, null, 2));
+
+ break;
+ } catch (error) {
+ //
+ }
+ }
+ }
+ }
+}
+
+export const fileLoader = new FileLoader();
diff --git a/packages/core-container/src/config/loaders/index.ts b/packages/core-container/src/config/loaders/index.ts
new file mode 100644
index 0000000000..a59ad04e10
--- /dev/null
+++ b/packages/core-container/src/config/loaders/index.ts
@@ -0,0 +1,2 @@
+export * from "./file-loader";
+export * from "./remote-loader";
diff --git a/packages/core-container/src/config/loaders/remote-loader.ts b/packages/core-container/src/config/loaders/remote-loader.ts
new file mode 100644
index 0000000000..1481d0df2a
--- /dev/null
+++ b/packages/core-container/src/config/loaders/remote-loader.ts
@@ -0,0 +1,130 @@
+import { models } from "@arkecosystem/crypto";
+import axios from "axios";
+import { spawnSync } from "child_process";
+import expandHomeDir from "expand-home-dir";
+import { copySync, ensureDirSync, existsSync, writeFileSync } from "fs-extra";
+import { resolve } from "path";
+
+export class RemoteLoader {
+ private remote: any;
+ private config: any;
+
+ constructor(variables) {
+ this.remote = variables.remote;
+ this.config = expandHomeDir(variables.config);
+
+ ensureDirSync(this.config);
+ }
+
+ public async setUp() {
+ const network = await this.configureNetwork();
+
+ await this.configureExceptions();
+
+ await this.configureMilestones();
+
+ await this.configureGenesisBlock();
+
+ await this.configurePeers();
+
+ await this.configureDelegates();
+
+ this.configurePlugins(network);
+
+ this.configureDatabase(network);
+ }
+
+ private async configureNetwork() {
+ const network = await this.getConfig("network");
+
+ this.writeConfig("network", network);
+
+ return network;
+ }
+
+ private async configureExceptions() {
+ const exceptions = await this.getConfig("exceptions");
+
+ this.writeConfig("exceptions", exceptions);
+
+ return exceptions;
+ }
+
+ private async configureMilestones() {
+ const milestones = await this.getConfig("milestones");
+
+ this.writeConfig("milestones", milestones);
+
+ return milestones;
+ }
+
+ private async configureGenesisBlock() {
+ const { Block } = models;
+
+ const genesisBlock = await this.getConfig("genesis-block");
+ const genesisBlockModel = new Block(genesisBlock);
+
+ if (!genesisBlockModel.verification.verified) {
+ // tslint:disable-next-line:no-console
+ console.error("Failed to verify the genesis block. Try another remote host.");
+ process.exit(1);
+ }
+
+ this.writeConfig("genesisBlock", genesisBlock);
+ }
+
+ private async configurePeers() {
+ const peers = await this.getConfig("peers");
+
+ this.writeConfig("peers", peers);
+ }
+
+ private async configureDelegates() {
+ const delegates = await this.getConfig("delegates");
+
+ this.writeConfig("delegates", delegates);
+ }
+
+ private configurePlugins(network) {
+ const plugins = resolve(__dirname, `../../core/src/config/${network.name}/plugins.js`);
+
+ copySync(plugins, `${this.config}/plugins.js`);
+ }
+
+ private configureDatabase(network) {
+ const command = spawnSync("createdb", [`core_${network.name}`]);
+
+ if (command.stderr.length > 0) {
+ // tslint:disable-next-line:no-console
+ console.error(command.stderr.toString());
+ process.exit(1);
+ }
+
+ // tslint:disable-next-line:no-console
+ console.info(command.stdout.toString());
+ }
+
+ private async getConfig(type) {
+ try {
+ const { data } = await axios.get(`http://${this.remote}/config/${type}`, {
+ headers: { "Content-Type": "application/json" },
+ });
+
+ return data.data;
+ } catch (error) {
+ if (!this.exists(type)) {
+ // tslint:disable-next-line:no-console
+ console.error(error.message);
+ process.exit(1);
+ }
+ }
+ }
+
+ private writeConfig(file, data) {
+ writeFileSync(`${this.config}/${file}.json`, JSON.stringify(data, null, 4));
+ }
+
+ private exists(file) {
+ return existsSync(`${this.config}/${file}.json`);
+ }
+}
diff --git a/packages/core-container/src/config/network.ts b/packages/core-container/src/config/network.ts
new file mode 100644
index 0000000000..3259247662
--- /dev/null
+++ b/packages/core-container/src/config/network.ts
@@ -0,0 +1,48 @@
+import { NetworkManager } from "@arkecosystem/crypto";
+import expandHomeDir from "expand-home-dir";
+import Joi from "joi";
+import { resolve } from "path";
+import { schemaNetwork } from "./schema";
+
+export class Network {
+ /**
+ * Expose information about the for the operating network to the environment.
+ * @return {void}
+ */
+ public static setUp(opts: any) {
+ let config;
+
+ // Default configuration...
+ if (opts.network) {
+ config = NetworkManager.findByName(opts.network);
+ } else {
+ try {
+ const networkPath = resolve(expandHomeDir(process.env.CORE_PATH_CONFIG));
+
+ config = {
+ exceptions: require(`${networkPath}/exceptions`),
+ milestones: require(`${networkPath}/milestones`),
+ genesisBlock: require(`${networkPath}/genesisBlock`),
+ network: require(`${networkPath}/network`),
+ };
+ } catch (error) {
+ config = false;
+ }
+ }
+
+ // Validate the configuration...
+ const { error } = Joi.validate(config, schemaNetwork);
+
+ if (error) {
+ throw new Error(
+ `An invalid network configuration was provided or is inaccessible due to it's security settings. ${
+ error.message
+ }.`,
+ );
+ }
+
+ process.env.CORE_NETWORK_NAME = config.network.name;
+
+ return config;
+ }
+}
diff --git a/packages/core-container/src/config/schema.ts b/packages/core-container/src/config/schema.ts
new file mode 100644
index 0000000000..e9df8f6e30
--- /dev/null
+++ b/packages/core-container/src/config/schema.ts
@@ -0,0 +1,50 @@
+import Joi from "joi";
+
+export const schemaNetwork = Joi.object({
+ milestones: Joi.array()
+ .items(Joi.object())
+ .required(),
+ exceptions: Joi.object({
+ blocks: Joi.array().items(Joi.string()),
+ transactions: Joi.array().items(Joi.string()),
+ outlookTable: Joi.object(),
+ transactionIdFixTable: Joi.object(),
+ }).default({ exceptions: {} }),
+ genesisBlock: Joi.object().required(),
+ network: Joi.object({
+ name: Joi.string().required(),
+ messagePrefix: Joi.string().required(),
+ bip32: Joi.object({
+ public: Joi.number()
+ .positive()
+ .required(),
+ private: Joi.number()
+ .positive()
+ .required(),
+ }),
+ pubKeyHash: Joi.number()
+ .positive()
+ .required(),
+ nethash: Joi.string()
+ .hex()
+ .required(),
+ wif: Joi.number()
+ .positive()
+ .required(),
+ aip20: Joi.number().required(),
+ client: Joi.object({
+ token: Joi.string().required(),
+ symbol: Joi.string().required(),
+ explorer: Joi.string().required(),
+ }),
+ }).required(),
+});
+
+export const schemaConfig = Joi.object({
+ delegates: Joi.object({
+ secrets: Joi.array().items(Joi.string()),
+ bip38: Joi.string(),
+ }),
+ peers: Joi.object().required(),
+ plugins: Joi.object().required(),
+}).unknown();
diff --git a/packages/core-container/src/container.ts b/packages/core-container/src/container.ts
new file mode 100644
index 0000000000..0856862dda
--- /dev/null
+++ b/packages/core-container/src/container.ts
@@ -0,0 +1,280 @@
+import { Container as container, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import { createContainer, Resolver } from "awilix";
+import { execSync } from "child_process";
+import delay from "delay";
+import semver from "semver";
+import { configManager } from "./config";
+import { Environment } from "./environment";
+import { PluginRegistrar } from "./registrars/plugin";
+
+export class Container implements container.IContainer {
+ public options: any;
+ public exitEvents: any;
+ /**
+ * May be used by CLI programs to suppress the shutdown messages.
+ */
+ public silentShutdown = false;
+ public hashid: string;
+ public plugins: any;
+ public shuttingDown: boolean;
+ public version: string;
+ public isReady: boolean = false;
+ public variables: any;
+ public config: any;
+ private container = createContainer();
+
+ /**
+ * Create a new container instance.
+ * @constructor
+ */
+ constructor() {
+ /**
+ * The git commit hash of the repository. Used during development to
+ * easily idenfity nodes based on their commit hash and version.
+ */
+ try {
+ this.hashid = execSync("git rev-parse --short=8 HEAD")
+ .toString()
+ .trim();
+ } catch (e) {
+ this.hashid = "unknown";
+ }
+
+ /**
+ * Register any exit signal handling.
+ */
+ this.registerExitHandler(["SIGINT", "exit"]);
+ }
+
+ /**
+ * Set up the app.
+ * @param {String} version
+ * @param {Object} variables
+ * @param {Object} options
+ * @return {void}
+ */
+ public async setUp(version: string, variables: any, options: any = {}) {
+ this.options = options;
+ this.variables = variables;
+
+ this.setVersion(version);
+
+ // Register the environment variables
+ new Environment(variables).setUp();
+
+ // Mainly used for testing environments!
+ if (options.skipPlugins) {
+ this.isReady = true;
+ return;
+ }
+
+ // Setup the configuration
+ this.config = await configManager.setUp(variables);
+
+ // TODO: Move this out eventually - not really the responsibility of the container
+ this.plugins = new PluginRegistrar(this, options);
+ await this.plugins.setUp();
+
+ this.isReady = true;
+ }
+
+ public getConfig() {
+ return this.config;
+ }
+
+ /**
+ * Tear down the app.
+ * @return {Promise}
+ */
+ public async tearDown() {
+ if (!this.options.skipPlugins) {
+ await this.plugins.tearDown();
+ }
+
+ this.isReady = false;
+ }
+
+ /**
+ * Add a new registration.
+ */
+ public register(name, resolver: Resolver) {
+ try {
+ this.container.register(name, resolver);
+ return this;
+ } catch (err) {
+ throw new Error(err.message);
+ }
+ }
+
+ /**
+ * Resolve a registration.
+ * @param {string} key
+ * @return {Object}
+ * @throws {Error}
+ */
+ public resolve(key): T {
+
+ try {
+ return this.container.resolve(key);
+ } catch (err) {
+ throw new Error(err.message);
+ }
+ }
+
+ /**
+ * Resolve a plugin.
+ * @param {string} key
+ * @return {Object}
+ * @throws {Error}
+ */
+ public resolvePlugin(key): T {
+ try {
+ return this.container.resolve>(key).plugin;
+ } catch (err) {
+ return null;
+ }
+ }
+
+ /**
+ * Resolve the options of a plugin. Available before a plugin mounts.
+ * @param {string} key
+ * @return {Object}
+ * @throws {Error}
+ */
+ public resolveOptions(key) {
+ try {
+ return this.container.resolve>(key).options;
+ } catch (err) {
+ throw err;
+ }
+ }
+
+ /**
+ * Determine if the given registration exists.
+ * @param {String} key
+ * @return {Boolean}
+ */
+ public has(key) {
+ try {
+ this.container.resolve(key);
+
+ return true;
+ } catch (err) {
+ return false;
+ }
+ }
+
+ /**
+ * Force the container to exit and print the given message and associated error.
+ * @param {String} message
+ * @param {Error} error
+ * @return {void}
+ */
+ public forceExit(message, error = null) {
+ this.exit(1, message, error);
+ }
+
+ /**
+ * Exit the container with the given exitCode, message and associated error.
+ * @param {Number} exitCode
+ * @param {String} message
+ * @param {Error} error
+ * @return {void}
+ */
+ public exit(exitCode, message, error = null) {
+ this.shuttingDown = true;
+
+ const logger = this.resolvePlugin("logger");
+ logger.error(":boom: Container force shutdown :boom:");
+ logger.error(message);
+
+ if (error) {
+ logger.error(error.stack);
+ }
+
+ process.exit(exitCode);
+ }
+
+ /**
+ * Get the application git commit hash.
+ * @throws {String}
+ */
+ public getHashid() {
+ return this.hashid;
+ }
+
+ /**
+ * Get the application version.
+ * @throws {String}
+ */
+ public getVersion() {
+ return this.version;
+ }
+
+ /**
+ * Set the application version.
+ * @param {String} version
+ * @return {void}
+ */
+ public setVersion(version) {
+ if (!semver.valid(version)) {
+ this.forceExit(
+ // tslint:disable-next-line:max-line-length
+ `The provided version ("${version}") is invalid. Please check https://semver.org/ and make sure you follow the spec.`,
+ );
+ }
+
+ this.version = version;
+ }
+
+ /**
+ * Handle any exit signals.
+ * @return {void}
+ */
+ private registerExitHandler(exitEvents: string[]) {
+ const handleExit = async () => {
+ if (this.shuttingDown) {
+ return;
+ }
+
+ this.shuttingDown = true;
+
+ const logger = this.resolvePlugin("logger");
+ if (logger) {
+ logger.suppressConsoleOutput(this.silentShutdown);
+ logger.info("Core is trying to gracefully shut down to avoid data corruption :pizza:");
+ }
+
+ try {
+ /* TODO: core-database-postgres has a dep on core-container. Yet we have code in core-container fetching a reference to core-database-postgres.
+ If we try to import core-database-postgres types, we create a circular dependency: core-container -> core-database-postgres -> core-container.
+ The only thing we're doing here is trying to save the wallets upon shutdown. The code can and should be moved into core-database-postgres instead
+ and leverage either the plugins `tearDown` method or the event-emitter's 'shutdown' event
+ */
+ const database = this.resolvePlugin("database");
+ if (database) {
+ const emitter = this.resolvePlugin("event-emitter");
+
+ // Notify plugins about shutdown
+ emitter.emit("shutdown");
+
+ // Wait for event to be emitted and give time to finish
+ await delay(1000);
+
+ // Save dirty wallets
+ await database.saveWallets(false);
+ }
+ } catch (error) {
+ // tslint:disable-next-line:no-console
+ console.error(error.stack);
+ }
+
+ await this.plugins.tearDown();
+
+ process.exit();
+ };
+
+ // Handle exit events
+ exitEvents.forEach(eventType => process.on(eventType as any, handleExit));
+ }
+}
diff --git a/packages/core-container/src/environment.ts b/packages/core-container/src/environment.ts
new file mode 100644
index 0000000000..15bd410c8b
--- /dev/null
+++ b/packages/core-container/src/environment.ts
@@ -0,0 +1,73 @@
+import envPaths from "env-paths";
+import expandHomeDir from "expand-home-dir";
+import { ensureDirSync, existsSync } from "fs-extra";
+import { resolve } from "path";
+
+export class Environment {
+ /**
+ * Create a new environment instance.
+ * @param {Object} variables
+ * @return {void}
+ */
+ constructor(readonly variables: any) {}
+
+ /**
+ * Set up the environment variables.
+ */
+ public setUp() {
+ this.exportPaths();
+ this.exportVariables();
+ }
+
+ /**
+ * Export all path variables for the core environment.
+ * @return {void}
+ */
+ private exportPaths() {
+ const allowedKeys = ["data", "config", "cache", "log", "temp"];
+
+ const createPathVariables = (values, namespace?) =>
+ allowedKeys.forEach(key => {
+ if (values[key]) {
+ const name = `CORE_PATH_${key.toUpperCase()}`;
+ let path = resolve(expandHomeDir(values[key]));
+
+ if (namespace) {
+ path += `/${this.variables.network}`;
+ }
+
+ process.env[name] = path;
+ ensureDirSync(path);
+ }
+ });
+
+ createPathVariables(envPaths(this.variables.token, { suffix: "core" }), this.variables.network);
+
+ if (this.variables.data || this.variables.config) {
+ createPathVariables(this.variables);
+ }
+ }
+
+ /**
+ * Export all additional variables for the core environment.
+ * @return {void}
+ */
+ private exportVariables() {
+ process.env.CORE_TOKEN = this.variables.token;
+
+ // Don't pollute the test environment!
+ if (process.env.NODE_ENV === "test") {
+ return;
+ }
+
+ const envPath = expandHomeDir(`${process.env.CORE_PATH_CONFIG}/.env`);
+
+ if (existsSync(envPath)) {
+ const env = require("envfile").parseFileSync(envPath);
+
+ Object.keys(env).forEach(key => {
+ process.env[key] = env[key];
+ });
+ }
+ }
+}
diff --git a/packages/core-container/src/index.ts b/packages/core-container/src/index.ts
new file mode 100644
index 0000000000..6e9d336d6a
--- /dev/null
+++ b/packages/core-container/src/index.ts
@@ -0,0 +1,5 @@
+import { Container as container } from "@arkecosystem/core-interfaces";
+import { Container } from "./container";
+
+const app: container.IContainer = new Container();
+export { app };
diff --git a/packages/core-container/src/registrars/plugin.ts b/packages/core-container/src/registrars/plugin.ts
new file mode 100644
index 0000000000..a7b6b0e09c
--- /dev/null
+++ b/packages/core-container/src/registrars/plugin.ts
@@ -0,0 +1,229 @@
+import { asValue } from "awilix";
+import expandHomeDir from "expand-home-dir";
+import { existsSync } from "fs";
+import Hoek from "hoek";
+import isString from "lodash/isString";
+import { dirname, resolve } from "path";
+import semver from "semver";
+
+export class PluginRegistrar {
+ private container: any;
+ private plugins: any;
+ private resolvedPlugins: any;
+ private options: any;
+ private deregister: any;
+ private pluginsConfigPath: any;
+
+ /**
+ * Create a new plugin manager instance.
+ * @param {IContainer} container
+ * @param {Object} options
+ */
+ constructor(container, options: any = {}) {
+ this.container = container;
+ this.plugins = this.__loadPlugins();
+ this.resolvedPlugins = [];
+ this.options = this.__castOptions(options);
+ this.deregister = [];
+ }
+
+ /**
+ * Set up all available plugins.
+ * @return {void}
+ */
+ public async setUp() {
+ for (const [name, options] of Object.entries(this.plugins)) {
+ await this.register(name, options);
+
+ if ((this.options.exit && this.options.exit === name) || this.container.shuttingDown) {
+ break;
+ }
+ }
+ }
+
+ /**
+ * Deregister all plugins.
+ * @return {void}
+ */
+ public async tearDown() {
+ for (const plugin of this.deregister.reverse()) {
+ await plugin.plugin.deregister(this.container, plugin.options);
+ }
+ }
+
+ /**
+ * Register a plugin.
+ * @param {String} name
+ * @param {Object} options
+ * @return {void}
+ */
+ public async register(name, options = {}) {
+ if (!this.__shouldBeRegistered(name)) {
+ return;
+ }
+
+ if (this.plugins[name]) {
+ options = Hoek.applyToDefaults(this.plugins[name], options);
+ }
+
+ return this.__registerWithContainer(name, options);
+ }
+
+ /**
+ * Register a plugin.
+ * @param {Object} plugin
+ * @param {Object} options
+ * @return {void}
+ */
+ public async __registerWithContainer(plugin, options = {}) {
+ const item: any = this.__resolve(plugin);
+
+ if (!item.plugin.register) {
+ return;
+ }
+
+ if (item.plugin.extends) {
+ await this.__registerWithContainer(item.plugin.extends);
+ }
+
+ const name = item.plugin.name || item.plugin.pkg.name;
+ const version = item.plugin.version || item.plugin.pkg.version;
+ const defaults = item.plugin.defaults || item.plugin.pkg.defaults;
+ const alias = item.plugin.alias || item.plugin.pkg.alias;
+
+ if (!semver.valid(version)) {
+ throw new Error(
+ // tslint:disable-next-line:max-line-length
+ `The plugin "${name}" provided an invalid version "${version}". Please check https://semver.org/ and make sure you follow the spec.`,
+ );
+ }
+
+ options = this.__applyToDefaults(name, defaults, options);
+
+ plugin = await item.plugin.register(this.container, options || {});
+ this.container.register(
+ alias || name,
+ asValue({
+ name,
+ version,
+ plugin,
+ options,
+ }),
+ );
+
+ if (item.plugin.deregister) {
+ this.deregister.push({ plugin: item.plugin, options });
+ }
+ }
+
+ /**
+ * Apply the given options to the defaults of the given plugin.
+ *
+ * @param {String} name
+ * @param {Object} defaults
+ * @param {Object} options
+ * @return {Object}
+ */
+ public __applyToDefaults(name, defaults, options) {
+ if (defaults) {
+ options = Hoek.applyToDefaults(defaults, options);
+ }
+
+ if (this.options.options && this.options.options[name]) {
+ options = Hoek.applyToDefaults(options, this.options.options[name]);
+ }
+
+ return this.__castOptions(options);
+ }
+
+ /**
+ * When the env is used to overwrite options, we get strings even if we
+ * expect a number. This is in most cases not desired and leads to side-
+ * effects. Here is assumed all numeric strings except blacklisted ones
+ * should be treated as numbers.
+ * @param {Object} options
+ * @return {Object} options
+ */
+ public __castOptions(options) {
+ const blacklist: any = [];
+ const regex = new RegExp(/^\d+$/);
+
+ Object.keys(options).forEach(key => {
+ const value = options[key];
+ if (isString(value) && !blacklist.includes(key) && regex.test(value)) {
+ options[key] = +value;
+ }
+ });
+
+ return options;
+ }
+
+ /**
+ * Resolve a plugin instance.
+ * @param {(String|Object)} plugin - plugin name or path, or object
+ * @return {Object}
+ */
+ public __resolve(plugin) {
+ let item: any = {};
+
+ if (isString(plugin)) {
+ if (plugin.startsWith(".")) {
+ plugin = resolve(`${dirname(this.pluginsConfigPath)}/${plugin}`);
+ } else if (!plugin.startsWith("@")) {
+ plugin = resolve(plugin);
+ }
+
+ try {
+ item = require(plugin);
+ } catch (error) {
+ // tslint:disable-next-line:no-console
+ console.error(error);
+ }
+
+ if (!item.plugin) {
+ item = { plugin: item };
+ }
+ }
+
+ return item;
+ }
+
+ /**
+ * Determine if the given plugin should be registered.
+ * @param {String} name
+ * @return {Boolean}
+ */
+ public __shouldBeRegistered(name) {
+ let register = true;
+
+ if (this.options.include) {
+ register = this.options.include.includes(name);
+ }
+
+ if (this.options.exclude) {
+ register = !this.options.exclude.includes(name);
+ }
+
+ return register;
+ }
+
+ /**
+ * Load plugins from any of the available files (plugins.js or plugins.json).
+ * @return {[Object|void]}
+ */
+ public __loadPlugins() {
+ const files = ["plugins.js", "plugins.json"];
+
+ for (const file of files) {
+ const configPath = resolve(expandHomeDir(`${process.env.CORE_PATH_CONFIG}/${file}`));
+
+ if (existsSync(configPath)) {
+ this.pluginsConfigPath = configPath;
+
+ return require(configPath);
+ }
+ }
+
+ throw new Error("An invalid configuration was provided or is inaccessible due to it's security settings.");
+ }
+}
diff --git a/packages/core-container/tsconfig.json b/packages/core-container/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-container/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-database-postgres/CHANGELOG.md b/packages/core-database-postgres/CHANGELOG.md
deleted file mode 100644
index 13db840f6b..0000000000
--- a/packages/core-database-postgres/CHANGELOG.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.11 - 2018-12-05
-
-### Added
-
-- Store executed migrations in the database
-
-## 0.2.1 - 2018-12-05
-
-### Added
-
-- `sender_public_key`, `recipient_id` and `timestamp` indices on the `transactions` table
-- `generator_public_key` index on the `blocks` table
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- Database rollback
-
-### Changed
-
-- Build delegate list in-memory to reduce database load
-- Perform vote balance calculations in-memory to reduce database load
-- Handle numbers as `BigNumber` instances
-- Reduced complexity and duplicated logic
-- Improved performance of various SQL queries
-- Improved performance of wallet saving
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-### Removed
-
-- All `redis` integrations and dependencies
-
-### Fixed
-
-- Wrong documentation
-- Bad method calls for `sync/async` methods
-- Cast rounds to integers
-- Only commit data when `saveBlockCommit` is called
-- Various bad method calls for expected query results
-- Sorting of votes during SPV
-- Added a missing index for the `block_id` column in the `transactions` table
-- Moved the wallets integrity check after the wallet rebuild process to avoid false positive blockchain rebuilds
-- Insert bignumber objects as strings to avoid rounding issues caused by `Number.MAX_SAFE_INTEGER`
-
-## 0.1.0 - 2018-09-11
-
-### Added
-
-- initial release
diff --git a/packages/core-database-postgres/LICENSE b/packages/core-database-postgres/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-database-postgres/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-database-postgres/README.md b/packages/core-database-postgres/README.md
index 3ef5117887..9a38b12edf 100644
--- a/packages/core-database-postgres/README.md
+++ b/packages/core-database-postgres/README.md
@@ -14,8 +14,9 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [Joshua Noack](https://github.com/supaiku0)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-database-postgres/__tests__/__support__/setup.ts b/packages/core-database-postgres/__tests__/__support__/setup.ts
new file mode 100644
index 0000000000..3300e02f17
--- /dev/null
+++ b/packages/core-database-postgres/__tests__/__support__/setup.ts
@@ -0,0 +1,36 @@
+import { app } from "@arkecosystem/core-container";
+import { registerWithContainer, setUpContainer } from "../../../core-test-utils/src/helpers/container";
+
+jest.setTimeout(60000);
+
+const options = {
+ connection: {
+ host: "localhost",
+ port: 5432,
+ database: "core_development",
+ user: "core",
+ password: "password",
+ },
+};
+
+export const setUp = async () => {
+ await setUpContainer({
+ exit: "@arkecosystem/core-database-postgres",
+ exclude: ["@arkecosystem/core-database-postgres"],
+ });
+
+ // register first core-database because core-database-postgres extends it
+ // (we might improve registerWithContainer to take care of extends)
+ const { plugin: pluginDatabase } = require("@arkecosystem/core-database");
+ await registerWithContainer(pluginDatabase, options);
+
+ const { plugin } = require("../../src/plugin");
+ await registerWithContainer(plugin, options);
+};
+
+export const tearDown = async () => {
+ await app.tearDown();
+
+ const { plugin } = require("../../src/plugin");
+ await plugin.deregister(app, options);
+};
diff --git a/packages/core-database-postgres/__tests__/connection.test.ts b/packages/core-database-postgres/__tests__/connection.test.ts
new file mode 100644
index 0000000000..034f28c3dd
--- /dev/null
+++ b/packages/core-database-postgres/__tests__/connection.test.ts
@@ -0,0 +1,40 @@
+import { app } from "@arkecosystem/core-container";
+import { Database } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+import genesisBlock from "../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { setUp, tearDown } from "./__support__/setup";
+
+const { Block } = models;
+
+let databaseService: Database.IDatabaseService;
+
+beforeAll(async () => {
+ await setUp();
+
+ databaseService = app.resolvePlugin("database");
+
+ await databaseService.saveBlock(new Block(genesisBlock));
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+describe("Connection", () => {
+ describe("verifyBlockchain", () => {
+ it("should be valid - no errors - when verifying blockchain", async () => {
+ expect(await databaseService.verifyBlockchain()).toEqual({
+ valid: true,
+ errors: [],
+ });
+ });
+ });
+
+ describe("getLastBlock", () => {
+ it("should get the genesis block as last block", async () => {
+ const lastBlock = await databaseService.getLastBlock();
+
+ expect(lastBlock).toEqual(new Block(genesisBlock as any));
+ });
+ });
+});
diff --git a/packages/core-database-postgres/jest.config.js b/packages/core-database-postgres/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-database-postgres/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-database-postgres/lib/connection.js b/packages/core-database-postgres/lib/connection.js
deleted file mode 100644
index f79ac58548..0000000000
--- a/packages/core-database-postgres/lib/connection.js
+++ /dev/null
@@ -1,733 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-/* eslint no-use-before-define: "warn" */
-/* eslint max-len: "off" */
-
-const pgPromise = require('pg-promise')
-const crypto = require('crypto')
-const chunk = require('lodash/chunk')
-const pluralize = require('pluralize')
-const fs = require('fs')
-const path = require('path')
-
-const { ConnectionInterface } = require('@arkecosystem/core-database')
-
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const logger = app.resolvePlugin('logger')
-const emitter = app.resolvePlugin('event-emitter')
-
-const { roundCalculator } = require('@arkecosystem/core-utils')
-
-const {
- Bignum,
- models: { Block, Transaction },
-} = require('@arkecosystem/crypto')
-
-const SPV = require('./spv')
-
-const migrations = require('./migrations')
-const QueryExecutor = require('./sql/query-executor')
-const repositories = require('./repositories')
-const { camelizeColumns } = require('./utils')
-
-module.exports = class PostgresConnection extends ConnectionInterface {
- /**
- * Make the database connection instance.
- * @return {PostgresConnection}
- */
- async make() {
- if (this.db) {
- throw new Error('Database connection already initialised')
- }
-
- logger.debug('Connecting to database')
-
- this.queuedQueries = null
- this.cache = new Map()
-
- try {
- await this.connect()
- await this.__registerQueryExecutor()
- await this.__runMigrations()
- await this.__registerModels()
- await super._registerRepositories()
- await super._registerWalletManager()
-
- this.blocksInCurrentRound = await this.__getBlocksForRound()
-
- return this
- } catch (error) {
- app.forceExit('Unable to connect to the database!', error)
- }
- }
-
- /**
- * Connect to the database.
- * @return {void}
- */
- async connect() {
- const initialization = {
- receive(data, result, e) {
- camelizeColumns(pgp, data)
- },
- extend(object) {
- for (const repository of Object.keys(repositories)) {
- object[repository] = new repositories[repository](object, pgp)
- }
- },
- }
-
- const pgp = pgPromise({ ...this.config.initialization, ...initialization })
-
- this.pgp = pgp
- this.db = this.pgp(this.config.connection)
- }
-
- /**
- * Disconnects from the database and closes the cache.
- * @return {Promise} The successfulness of closing the Sequelize connection
- */
- async disconnect() {
- try {
- await this.commitQueuedQueries()
- this.cache.clear()
- } catch (error) {
- logger.warn('Issue in commiting blocks, database might be corrupted')
- logger.warn(error.message)
- }
-
- logger.debug('Disconnecting from database')
-
- return this.pgp.end()
- }
-
- /**
- * Verify the blockchain stored on db is not corrupted making simple assertions:
- * - Last block is available
- * - Last block height equals the number of stored blocks
- * - Number of stored transactions equals the sum of block.numberOfTransactions in the database
- * - Sum of all tx fees equals the sum of block.totalFee
- * - Sum of all tx amount equals the sum of block.totalAmount
- * @return {Object} An object { valid, errors } with the result of the verification and the errors
- */
- async verifyBlockchain() {
- const errors = []
-
- const lastBlock = await this.getLastBlock()
-
- // Last block is available
- if (!lastBlock) {
- errors.push('Last block is not available')
- } else {
- const { count: numberOfBlocks } = await this.db.blocks.count()
-
- // Last block height equals the number of stored blocks
- if (lastBlock.data.height !== +numberOfBlocks) {
- errors.push(
- `Last block height: ${lastBlock.data.height.toLocaleString()}, number of stored blocks: ${numberOfBlocks}`,
- )
- }
- }
-
- const blockStats = await this.db.blocks.statistics()
- const transactionStats = await this.db.transactions.statistics()
-
- // Number of stored transactions equals the sum of block.numberOfTransactions in the database
- if (blockStats.numberOfTransactions !== transactionStats.count) {
- errors.push(
- `Number of transactions: ${
- transactionStats.count
- }, number of transactions included in blocks: ${
- blockStats.numberOfTransactions
- }`,
- )
- }
-
- // Sum of all tx fees equals the sum of block.totalFee
- if (blockStats.totalFee !== transactionStats.totalFee) {
- errors.push(
- `Total transaction fees: ${
- transactionStats.totalFee
- }, total of block.totalFee : ${blockStats.totalFee}`,
- )
- }
-
- // Sum of all tx amount equals the sum of block.totalAmount
- if (blockStats.totalAmount !== transactionStats.totalAmount) {
- errors.push(
- `Total transaction amounts: ${
- transactionStats.totalAmount
- }, total of block.totalAmount : ${blockStats.totalAmount}`,
- )
- }
-
- return {
- valid: !errors.length,
- errors,
- }
- }
-
- /**
- * Get the top 51 delegates.
- * @param {Number} height
- * @param {Array} delegates
- * @return {Array}
- */
- async getActiveDelegates(height, delegates) {
- const maxDelegates = config.getConstants(height).activeDelegates
- const round = Math.floor((height - 1) / maxDelegates) + 1
-
- if (
- this.forgingDelegates &&
- this.forgingDelegates.length &&
- this.forgingDelegates[0].round === round
- ) {
- return this.forgingDelegates
- }
-
- // When called during applyRound we already know the delegates, so we don't have to query the database.
- if (!delegates || delegates.length === 0) {
- delegates = await this.db.rounds.findById(round)
- }
-
- const seedSource = round.toString()
- let currentSeed = crypto
- .createHash('sha256')
- .update(seedSource, 'utf8')
- .digest()
-
- for (let i = 0, delCount = delegates.length; i < delCount; i++) {
- for (let x = 0; x < 4 && i < delCount; i++, x++) {
- const newIndex = currentSeed[x] % delCount
- const b = delegates[newIndex]
- delegates[newIndex] = delegates[i]
- delegates[i] = b
- }
- currentSeed = crypto
- .createHash('sha256')
- .update(currentSeed)
- .digest()
- }
-
- this.forgingDelegates = delegates.map(delegate => {
- delegate.round = +delegate.round
- return delegate
- })
-
- return this.forgingDelegates
- }
-
- /**
- * Store the given round.
- * @param {Array} delegates
- * @return {Array}
- */
- async saveRound(delegates) {
- logger.info(`Saving round ${delegates[0].round.toLocaleString()}`)
-
- await this.db.rounds.create(delegates)
-
- emitter.emit('round.created', delegates)
- }
-
- /**
- * Delete the given round.
- * @param {Number} round
- * @return {Promise}
- */
- async deleteRound(round) {
- return this.db.rounds.delete(round)
- }
-
- /**
- * Load a list of wallets into memory.
- * @param {Number} height
- * @return {Boolean} success
- */
- async buildWallets(height) {
- this.walletManager.reset()
-
- const spvPath = `${process.env.ARK_PATH_DATA}/spv.json`
-
- if (fs.existsSync(spvPath)) {
- fs.removeSync(spvPath)
-
- logger.info(
- 'Ark Core ended unexpectedly - resuming from where we left off :runner:',
- )
-
- return true
- }
-
- try {
- const spv = new SPV(this)
- const success = await spv.build(height)
-
- this._spvFinished = true
-
- await this.__registerListeners()
-
- return success
- } catch (error) {
- logger.error(error.stack)
- }
- }
-
- /**
- * Load all wallets from database.
- * @return {Array}
- */
- async loadWallets() {
- const wallets = await this.db.wallets.all()
-
- this.walletManager.index(wallets)
-
- return this.walletManager.all()
- }
-
- /**
- * Commit wallets from the memory.
- * @param {Boolean} force
- * @return {void}
- */
- async saveWallets(force) {
- const wallets = this.walletManager
- .allByPublicKey()
- .filter(wallet => wallet.publicKey && (force || wallet.dirty))
-
- // Remove dirty flags first to not save all dirty wallets in the exit handler
- // when called during a force insert right after SPV.
- this.walletManager.clear()
-
- if (force) {
- // all wallets to be updated, performance is better without upsert
- await this.db.wallets.truncate()
-
- try {
- const chunks = chunk(wallets, 5000).map(c => this.db.wallets.create(c))
- await this.db.tx(t => t.batch(chunks))
- } catch (error) {
- logger.error(error.stack)
- }
- } else {
- // NOTE: The list of delegates is calculated in-memory against the WalletManager,
- // so it is safe to perform the costly UPSERT non-blocking during round change only:
- // 'await saveWallets(false)' -> 'saveWallets(false)'
- try {
- const queries = wallets.map(wallet =>
- this.db.wallets.updateOrCreate(wallet),
- )
- await this.db.tx(t => t.batch(queries))
- } catch (error) {
- logger.error(error.stack)
- }
- }
-
- logger.info(
- `${wallets.length} modified ${pluralize(
- 'wallet',
- wallets.length,
- )} committed to database`,
- )
-
- emitter.emit('wallet.saved', wallets.length)
-
- // NOTE: commented out as more use cases to be taken care of
- // this.walletManager.purgeEmptyNonDelegates()
- }
-
- /**
- * Commit the given block.
- * NOTE: to be used when node is in sync and committing newly received blocks
- * @param {Block} block
- * @return {void}
- */
- async saveBlock(block) {
- try {
- const queries = [this.db.blocks.create(block.data)]
-
- if (block.transactions.length > 0) {
- queries.push(this.db.transactions.create(block.transactions))
- }
-
- await this.db.tx(t => t.batch(queries))
- } catch (err) {
- logger.error(err.message)
- }
- }
-
- /**
- * Delete the given block.
- * @param {Block} block
- * @return {void}
- */
- async deleteBlock(block) {
- try {
- const queries = [
- this.db.transactions.deleteByBlock(block.data.id),
- this.db.blocks.delete(block.data.id),
- ]
-
- await this.db.tx(t => t.batch(queries))
- } catch (error) {
- logger.error(error.stack)
-
- throw error
- }
- }
-
- /**
- * Stores the block in memory. Generated insert statements are stored in this.queuedQueries, to be later saved to the database by calling commit.
- * NOTE: to use when rebuilding to decrease the number of database tx, and commit blocks (save only every 1000s for instance) by calling commit.
- * @param {Block} block
- * @return {void}
- */
- enqueueSaveBlock(block) {
- const queries = [this.db.blocks.create(block.data)]
-
- if (block.transactions.length > 0) {
- queries.push(this.db.transactions.create(block.transactions))
- }
-
- this.enqueueQueries(queries)
- }
-
- /**
- * Generated delete statements are stored in this.queuedQueries to be later executed by calling this.commitQueuedQueries.
- * See also enqueueSaveBlock.
- * @param {Block} block
- * @return {void}
- */
- enqueueDeleteBlock(block) {
- const queries = [
- this.db.transactions.deleteByBlock(block.data.id),
- this.db.blocks.delete(block.data.id),
- ]
-
- this.enqueueQueries(queries)
- }
-
- /**
- * Generated delete statements are stored in this.queuedQueries to be later executed by calling this.commitQueuedQueries.
- * @param {Number} round
- * @return {void}
- */
- enqueueDeleteRound(height) {
- const { round, nextRound, maxDelegates } = roundCalculator.calculateRound(
- height,
- )
-
- if (nextRound === round + 1 && height >= maxDelegates) {
- this.enqueueQueries([this.db.rounds.delete(nextRound)])
- }
- }
-
- /**
- * Add queries to the queue to be executed when calling commit.
- * @param {Array} queries
- */
- enqueueQueries(queries) {
- if (!this.queuedQueries) {
- this.queuedQueries = []
- }
-
- this.queuedQueries.push(...queries)
- }
-
- /**
- * Commit all queued queries.
- * NOTE: to be used in combination with enqueueSaveBlock and enqueueDeleteBlock.
- * @return {void}
- */
- async commitQueuedQueries() {
- if (!this.queuedQueries || this.queuedQueries.length === 0) {
- return
- }
-
- logger.debug('Committing database transactions.')
-
- try {
- await this.db.tx(t => t.batch(this.queuedQueries))
- } catch (error) {
- logger.error(error)
-
- throw error
- } finally {
- this.queuedQueries = null
- }
- }
-
- /**
- * Get a block.
- * @param {Number} id
- * @return {Block}
- */
- async getBlock(id) {
- // TODO: caching the last 1000 blocks, in combination with `saveBlock` could help to optimise
- const block = await this.db.blocks.findById(id)
-
- if (!block) {
- return null
- }
-
- const transactions = await this.db.transactions.findByBlock(block.id)
-
- block.transactions = transactions.map(({ serialized }) =>
- Transaction.deserialize(serialized.toString('hex')),
- )
-
- return new Block(block)
- }
-
- /**
- * Get the last block.
- * @return {(Block|null)}
- */
- async getLastBlock() {
- const block = await this.db.blocks.latest()
-
- if (!block) {
- return null
- }
-
- const transactions = await this.db.transactions.latestByBlock(block.id)
-
- block.transactions = transactions.map(({ serialized }) =>
- Transaction.deserialize(serialized.toString('hex')),
- )
-
- return new Block(block)
- }
-
- /**
- * Get a transaction.
- * @param {Number} id
- * @return {Promise}
- */
- async getTransaction(id) {
- return this.db.transactions.findById(id)
- }
-
- /**
- * Get common blocks for the given IDs.
- * @param {Array} ids
- * @return {Array}
- */
- async getCommonBlocks(ids) {
- const state = app.resolve('state')
- let commonBlocks = state.getCommonBlocks(ids)
- if (commonBlocks.length < ids.length) {
- commonBlocks = await this.db.blocks.common(ids)
- }
-
- return commonBlocks
- }
-
- /**
- * Get transactions for the given IDs.
- * @param {Array} ids
- * @return {Array}
- */
- async getTransactionsFromIds(ids) {
- return this.db.transactions.findManyById(ids)
- }
-
- /**
- * Get forged transactions for the given IDs.
- * @param {Array} ids
- * @return {Array}
- */
- async getForgedTransactionsIds(ids) {
- if (!ids.length) {
- return []
- }
-
- const transactions = await this.db.transactions.forged(ids)
-
- return transactions.map(transaction => transaction.id)
- }
-
- /**
- * Get blocks for the given offset and limit.
- * @param {Number} offset
- * @param {Number} limit
- * @return {Array}
- */
- async getBlocks(offset, limit) {
- let blocks = []
-
- if (app.has('state')) {
- blocks = app
- .resolve('state')
- .getLastBlocksByHeight(offset, offset + limit)
- }
-
- if (blocks.length !== limit) {
- blocks = await this.db.blocks.heightRange(offset, offset + limit)
-
- await this.loadTransactionsForBlocks(blocks)
- }
-
- return blocks
- }
-
- /**
- * Get top count blocks ordered by height DESC.
- * NOTE: Only used when trying to restore database integrity. The returned blocks may be unchained.
- * @param {Number} count
- * @return {Array}
- */
- async getTopBlocks(count) {
- const blocks = await this.db.blocks.top(count)
-
- await this.loadTransactionsForBlocks(blocks)
-
- return blocks
- }
-
- /**
- * Load all transactions for the given blocks
- * @param {Array} blocks
- * @return {void}
- */
- async loadTransactionsForBlocks(blocks) {
- if (!blocks.length) {
- return
- }
-
- const ids = blocks.map(block => block.id)
-
- let transactions = await this.db.transactions.latestByBlocks(ids)
- transactions = transactions.map(tx => {
- const data = Transaction.deserialize(tx.serialized.toString('hex'))
- data.blockId = tx.blockId
- return data
- })
-
- for (const block of blocks) {
- if (block.numberOfTransactions > 0) {
- block.transactions = transactions.filter(
- transaction => transaction.blockId === block.id,
- )
- }
- }
- }
-
- /**
- * Get the 10 recent block ids.
- * @return {[]String}
- */
- async getRecentBlockIds() {
- const state = app.resolve('state')
- let blocks = state
- .getLastBlockIds()
- .reverse()
- .slice(0, 10)
-
- if (blocks.length < 10) {
- blocks = await this.db.blocks.recent()
- blocks = blocks.map(block => block.id)
- }
-
- return blocks
- }
-
- /**
- * Get the headers of blocks for the given offset and limit.
- * @param {Number} offset
- * @param {Number} limit
- * @return {Array}
- */
- async getBlockHeaders(offset, limit) {
- const blocks = await this.db.blocks.headers(offset, offset + limit)
-
- return blocks.map(block => Block.serialize(block))
- }
-
- /**
- * Get the cache object
- * @return {Cache}
- */
- getCache() {
- return this.cache
- }
-
- /**
- * Run all migrations.
- * @return {void}
- */
- async __runMigrations() {
- for (const migration of migrations) {
- const { name } = path.parse(migration.file)
-
- if (name === '20180304100000-create-migrations-table') {
- await this.query.none(migration)
- } else {
- const row = await this.db.migrations.findByName(name)
-
- if (row === null) {
- logger.debug(`Migrating ${name}`)
-
- await this.query.none(migration)
-
- await this.db.migrations.create({ name })
- }
- }
- }
- }
-
- /**
- * Register all models.
- * @return {void}
- */
- async __registerModels() {
- this.models = {}
-
- for (const [key, Value] of Object.entries(require('./models'))) {
- this.models[key.toLowerCase()] = new Value(this.pgp)
- }
- }
-
- /**
- * Register the query builder.
- * @return {void}
- */
- __registerQueryExecutor() {
- this.query = new QueryExecutor(this)
- }
-
- /**
- * Register event listeners.
- * @return {void}
- */
- __registerListeners() {
- super.__registerListeners()
-
- emitter.on('wallet.created.cold', async coldWallet => {
- try {
- const wallet = await this.db.wallets.findByAddress(coldWallet.address)
-
- if (wallet) {
- Object.keys(wallet).forEach(key => {
- if (['balance'].indexOf(key) !== -1) {
- return
- }
-
- coldWallet[key] =
- key !== 'voteBalance' ? wallet[key] : new Bignum(wallet[key])
- })
- }
- } catch (err) {
- logger.error(err)
- }
- })
-
- emitter.once('shutdown', async () => {
- if (!this._spvFinished) {
- // Prevent dirty wallets to be saved when SPV didn't finish
- this.walletManager.clear()
- }
- })
- }
-}
diff --git a/packages/core-database-postgres/lib/defaults.js b/packages/core-database-postgres/lib/defaults.js
deleted file mode 100644
index b6e0e4b74e..0000000000
--- a/packages/core-database-postgres/lib/defaults.js
+++ /dev/null
@@ -1,15 +0,0 @@
-module.exports = {
- initialization: {
- capSQL: true,
- promiseLib: require('bluebird'),
- noLocking: process.env.NODE_ENV === 'test',
- },
- connection: {
- host: process.env.ARK_DB_HOST || 'localhost',
- port: process.env.ARK_DB_PORT || 5432,
- database:
- process.env.ARK_DB_DATABASE || `ark_${process.env.ARK_NETWORK_NAME}`,
- user: process.env.ARK_DB_USERNAME || 'ark',
- password: process.env.ARK_DB_PASSWORD || 'password',
- },
-}
diff --git a/packages/core-database-postgres/lib/index.js b/packages/core-database-postgres/lib/index.js
deleted file mode 100644
index 6b128c71e4..0000000000
--- a/packages/core-database-postgres/lib/index.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const PostgresConnection = require('./connection')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'database',
- extends: '@arkecosystem/core-database',
- async register(container, options) {
- container.resolvePlugin('logger').info('Establishing Database Connection')
-
- const postgres = new PostgresConnection(options)
-
- const databaseManager = container.resolvePlugin('databaseManager')
- await databaseManager.makeConnection(postgres)
-
- return databaseManager.connection()
- },
- async deregister(container, options) {
- container.resolvePlugin('logger').info('Closing Database Connection')
-
- return container.resolvePlugin('database').disconnect()
- },
-}
-
-/**
- * The files required to migrate the database.
- * @type {Array}
- */
-exports.migrations = require('./migrations')
diff --git a/packages/core-database-postgres/lib/migrations/index.js b/packages/core-database-postgres/lib/migrations/index.js
deleted file mode 100644
index 385f0b9502..0000000000
--- a/packages/core-database-postgres/lib/migrations/index.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const { loadQueryFile } = require('../utils')
-
-module.exports = [
- loadQueryFile(__dirname, './20180304100000-create-migrations-table.sql'),
- loadQueryFile(__dirname, './20180305100000-create-wallets-table.sql'),
- loadQueryFile(__dirname, './20180305200000-create-rounds-table.sql'),
- loadQueryFile(__dirname, './20180305300000-create-blocks-table.sql'),
- loadQueryFile(__dirname, './20180305400000-create-transactions-table.sql'),
- loadQueryFile(
- __dirname,
- './20181129400000-add-block_id-index-to-transactions-table.sql',
- ),
- loadQueryFile(
- __dirname,
- './20181204100000-add-generator_public_key-index-to-blocks-table.sql',
- ),
- loadQueryFile(
- __dirname,
- './20181204200000-add-timestamp-index-to-blocks-table.sql',
- ),
- loadQueryFile(
- __dirname,
- './20181204300000-add-sender_public_key-index-to-transactions-table.sql',
- ),
- loadQueryFile(
- __dirname,
- './20181204400000-add-recipient_id-index-to-transactions-table.sql',
- ),
-]
diff --git a/packages/core-database-postgres/lib/models/block.js b/packages/core-database-postgres/lib/models/block.js
deleted file mode 100644
index 979f5ea1ad..0000000000
--- a/packages/core-database-postgres/lib/models/block.js
+++ /dev/null
@@ -1,72 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-const Model = require('./model')
-
-module.exports = class Block extends Model {
- /**
- * The table associated with the model.
- * @return {String}
- */
- getTable() {
- return 'blocks'
- }
-
- /**
- * The read-only structure with query-formatting columns.
- * @return {Object}
- */
- getColumnSet() {
- return this.createColumnSet([
- {
- name: 'id',
- },
- {
- name: 'version',
- },
- {
- name: 'timestamp',
- },
- {
- name: 'previous_block',
- prop: 'previousBlock',
- def: null,
- },
- {
- name: 'height',
- },
- {
- name: 'number_of_transactions',
- prop: 'numberOfTransactions',
- },
- {
- name: 'total_amount',
- prop: 'totalAmount',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'total_fee',
- prop: 'totalFee',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'reward',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'payload_length',
- prop: 'payloadLength',
- },
- {
- name: 'payload_hash',
- prop: 'payloadHash',
- },
- {
- name: 'generator_public_key',
- prop: 'generatorPublicKey',
- },
- {
- name: 'block_signature',
- prop: 'blockSignature',
- },
- ])
- }
-}
diff --git a/packages/core-database-postgres/lib/models/index.js b/packages/core-database-postgres/lib/models/index.js
deleted file mode 100644
index dfbbdc2b42..0000000000
--- a/packages/core-database-postgres/lib/models/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-module.exports = {
- Block: require('./block'),
- Migration: require('./migration'),
- Round: require('./round'),
- Transaction: require('./transaction'),
- Wallet: require('./wallet'),
-}
diff --git a/packages/core-database-postgres/lib/models/migration.js b/packages/core-database-postgres/lib/models/migration.js
deleted file mode 100644
index f5f80cd506..0000000000
--- a/packages/core-database-postgres/lib/models/migration.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const Model = require('./model')
-
-module.exports = class Round extends Model {
- /**
- * The table associated with the model.
- * @return {String}
- */
- getTable() {
- return 'migrations'
- }
-
- /**
- * The read-only structure with query-formatting columns.
- * @return {Object}
- */
- getColumnSet() {
- return this.createColumnSet([
- {
- name: 'name',
- },
- ])
- }
-}
diff --git a/packages/core-database-postgres/lib/models/model.js b/packages/core-database-postgres/lib/models/model.js
deleted file mode 100644
index 718ee30a9d..0000000000
--- a/packages/core-database-postgres/lib/models/model.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const sql = require('sql')
-
-module.exports = class Model {
- /**
- * Create a new model instance.
- * @param {Object} pgp
- */
- constructor(pgp) {
- this.pgp = pgp
- }
-
- /**
- * Return the model & table definition.
- * @return {Object}
- */
- query() {
- return sql.define({
- name: this.getTable(),
- columns: this.getColumnSet().columns.map(column => ({
- name: column.name,
- prop: column.prop || column.name,
- })),
- })
- }
-
- /**
- * Convert the "camelCase" keys to "snake_case".
- * @return {Object}
- */
- transform(model) {
- const mappings = Object.entries(this.getMappings())
-
- const transformed = {}
-
- for (const [original, mapping] of mappings) {
- transformed[mapping] = model[original]
- }
-
- return transformed
- }
-
- /**
- * Convert the "camelCase" keys to "snake_case".
- * @param {Array} v
- * @return {ColumnSet}
- */
- createColumnSet(columns) {
- return new this.pgp.helpers.ColumnSet(columns, {
- table: {
- table: this.getTable(),
- schema: 'public',
- },
- })
- }
-}
diff --git a/packages/core-database-postgres/lib/models/round.js b/packages/core-database-postgres/lib/models/round.js
deleted file mode 100644
index c572f4997f..0000000000
--- a/packages/core-database-postgres/lib/models/round.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-const Model = require('./model')
-
-module.exports = class Round extends Model {
- /**
- * The table associated with the model.
- * @return {String}
- */
- getTable() {
- return 'rounds'
- }
-
- /**
- * The read-only structure with query-formatting columns.
- * @return {Object}
- */
- getColumnSet() {
- return this.createColumnSet([
- {
- name: 'public_key',
- prop: 'publicKey',
- },
- {
- name: 'balance',
- prop: 'voteBalance',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'round',
- },
- ])
- }
-}
diff --git a/packages/core-database-postgres/lib/models/transaction.js b/packages/core-database-postgres/lib/models/transaction.js
deleted file mode 100644
index 98ab7f004a..0000000000
--- a/packages/core-database-postgres/lib/models/transaction.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-const Model = require('./model')
-
-module.exports = class Transaction extends Model {
- /**
- * The table associated with the model.
- * @return {String}
- */
- getTable() {
- return 'transactions'
- }
-
- /**
- * The read-only structure with query-formatting columns.
- * @return {Object}
- */
- getColumnSet() {
- return this.createColumnSet([
- {
- name: 'id',
- },
- {
- name: 'version',
- },
- {
- name: 'block_id',
- prop: 'blockId',
- },
- {
- name: 'sequence',
- },
- {
- name: 'timestamp',
- },
- {
- name: 'sender_public_key',
- prop: 'senderPublicKey',
- },
- {
- name: 'recipient_id',
- prop: 'recipientId',
- },
- {
- name: 'type',
- },
- {
- name: 'vendor_field_hex',
- prop: 'vendorFieldHex',
- },
- {
- name: 'amount',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'fee',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'serialized',
- init: col => Buffer.from(col.value, 'hex'),
- },
- ])
- }
-}
diff --git a/packages/core-database-postgres/lib/models/wallet.js b/packages/core-database-postgres/lib/models/wallet.js
deleted file mode 100644
index 72bd3faa51..0000000000
--- a/packages/core-database-postgres/lib/models/wallet.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const { bignumify } = require('@arkecosystem/core-utils')
-const Model = require('./model')
-
-module.exports = class WalletModel extends Model {
- /**
- * The table associated with the model.
- * @return {String}
- */
- getTable() {
- return 'wallets'
- }
-
- /**
- * The read-only structure with query-formatting columns.
- * @return {Object}
- */
- getColumnSet() {
- return this.createColumnSet([
- {
- name: 'address',
- },
- {
- name: 'public_key',
- prop: 'publicKey',
- },
- {
- name: 'second_public_key',
- prop: 'secondPublicKey',
- },
- {
- name: 'vote',
- },
- {
- name: 'username',
- },
- {
- name: 'balance',
- init: col => bignumify(col.value).toFixed(),
- },
- {
- name: 'vote_balance',
- prop: 'voteBalance',
- init: col => (col.value ? bignumify(col.value).toFixed() : null),
- },
- {
- name: 'produced_blocks',
- prop: 'producedBlocks',
- },
- {
- name: 'missed_blocks',
- prop: 'missedBlocks',
- },
- ])
- }
-}
diff --git a/packages/core-database-postgres/lib/queries/index.js b/packages/core-database-postgres/lib/queries/index.js
deleted file mode 100644
index 840e20000d..0000000000
--- a/packages/core-database-postgres/lib/queries/index.js
+++ /dev/null
@@ -1,76 +0,0 @@
-const { loadQueryFile } = require('../utils')
-
-module.exports = {
- blocks: {
- common: loadQueryFile(__dirname, './blocks/common.sql'),
- count: loadQueryFile(__dirname, './blocks/count.sql'),
- delete: loadQueryFile(__dirname, './blocks/delete.sql'),
- findById: loadQueryFile(__dirname, './blocks/find-by-id.sql'),
- headers: loadQueryFile(__dirname, './blocks/headers.sql'),
- heightRange: loadQueryFile(__dirname, './blocks/height-range.sql'),
- latest: loadQueryFile(__dirname, './blocks/latest.sql'),
- recent: loadQueryFile(__dirname, './blocks/recent.sql'),
- statistics: loadQueryFile(__dirname, './blocks/statistics.sql'),
- top: loadQueryFile(__dirname, './blocks/top.sql'),
- },
- migrations: {
- create: loadQueryFile(__dirname, './migrations/create.sql'),
- find: loadQueryFile(__dirname, './migrations/find.sql'),
- },
- rounds: {
- delete: loadQueryFile(__dirname, './rounds/delete.sql'),
- find: loadQueryFile(__dirname, './rounds/find.sql'),
- },
- spv: {
- blockRewards: loadQueryFile(__dirname, './spv/block-rewards.sql'),
- delegates: loadQueryFile(__dirname, './spv/delegates.sql'),
- delegatesForgedBlocks: loadQueryFile(
- __dirname,
- './spv/delegates-forged-blocks.sql',
- ),
- delegatesRanks: loadQueryFile(__dirname, './spv/delegates-ranks.sql'),
- lastForgedBlocks: loadQueryFile(__dirname, './spv/last-forged-blocks.sql'),
- multiSignatures: loadQueryFile(__dirname, './spv/multi-signatures.sql'),
- receivedTransactions: loadQueryFile(
- __dirname,
- './spv/received-transactions.sql',
- ),
- secondSignatures: loadQueryFile(__dirname, './spv/second-signatures.sql'),
- sentTransactions: loadQueryFile(__dirname, './spv/sent-transactions.sql'),
- votes: loadQueryFile(__dirname, './spv/votes.sql'),
- },
- transactions: {
- findByBlock: loadQueryFile(__dirname, './transactions/find-by-block.sql'),
- latestByBlock: loadQueryFile(
- __dirname,
- './transactions/latest-by-block.sql',
- ),
- latestByBlocks: loadQueryFile(
- __dirname,
- './transactions/latest-by-blocks.sql',
- ),
- statistics: loadQueryFile(__dirname, './transactions/statistics.sql'),
- forged: loadQueryFile(__dirname, './transactions/forged.sql'),
- findById: loadQueryFile(__dirname, './transactions/find-by-id.sql'),
- findManyById: loadQueryFile(
- __dirname,
- './transactions/find-many-by-id.sql',
- ),
- deleteByBlock: loadQueryFile(
- __dirname,
- './transactions/delete-by-block.sql',
- ),
- },
- wallets: {
- all: loadQueryFile(__dirname, './wallets/all.sql'),
- findByAddress: loadQueryFile(__dirname, './wallets/find-by-address.sql'),
- findNegativeBalances: loadQueryFile(
- __dirname,
- './wallets/find-negative-balances.sql',
- ),
- findNegativeVoteBalances: loadQueryFile(
- __dirname,
- './wallets/find-negative-vote-balances.sql',
- ),
- },
-}
diff --git a/packages/core-database-postgres/lib/queries/spv/last-forged-blocks.sql b/packages/core-database-postgres/lib/queries/spv/last-forged-blocks.sql
deleted file mode 100644
index b2fe36352b..0000000000
--- a/packages/core-database-postgres/lib/queries/spv/last-forged-blocks.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-SELECT id,
- generator_public_key,
- TIMESTAMP
-FROM blocks
-ORDER BY TIMESTAMP DESC LIMIT ${limit}
diff --git a/packages/core-database-postgres/lib/queries/transactions/find-many-by-id.sql b/packages/core-database-postgres/lib/queries/transactions/find-many-by-id.sql
deleted file mode 100644
index 5b20ec290a..0000000000
--- a/packages/core-database-postgres/lib/queries/transactions/find-many-by-id.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-SELECT serialized,
- block_id
-FROM transactions
-WHERE id IN (${ids:list})
diff --git a/packages/core-database-postgres/lib/repositories/blocks.js b/packages/core-database-postgres/lib/repositories/blocks.js
deleted file mode 100644
index 61569cfd73..0000000000
--- a/packages/core-database-postgres/lib/repositories/blocks.js
+++ /dev/null
@@ -1,100 +0,0 @@
-const Repository = require('./repository')
-const { Block } = require('../models')
-const { blocks: sql } = require('../queries')
-
-module.exports = class BlocksRepository extends Repository {
- /**
- * Find a block by its ID.
- * @param {Number} id
- * @return {Promise}
- */
- async findById(id) {
- return this.db.one(sql.findById, { id })
- }
-
- /**
- * Count the number of records in the database.
- * @return {Promise}
- */
- async count() {
- return this.db.one(sql.count)
- }
-
- /**
- * Get all of the common blocks from the database.
- * @param {Array} ids
- * @return {Promise}
- */
- async common(ids) {
- return this.db.manyOrNone(sql.common, { ids })
- }
-
- /**
- * Get all of the blocks within the given height range.
- * @param {Number} start
- * @param {Number} end
- * @return {Promise}
- */
- async headers(start, end) {
- return this.db.many(sql.headers, { start, end })
- }
-
- /**
- * Get all of the blocks within the given height range and order them by height.
- * @param {Number} start
- * @param {Number} end
- * @return {Promise}
- */
- async heightRange(start, end) {
- return this.db.manyOrNone(sql.heightRange, { start, end })
- }
-
- /**
- * Get the last created block from the database.
- * @return {Promise}
- */
- async latest() {
- return this.db.oneOrNone(sql.latest)
- }
-
- /**
- * Get the 10 most recently created blocks from the database.
- * @return {Promise}
- */
- async recent() {
- return this.db.many(sql.recent)
- }
-
- /**
- * Get statistics about all blocks from the database.
- * @return {Promise}
- */
- async statistics() {
- return this.db.one(sql.statistics)
- }
-
- /**
- * Get top count blocks
- * @return {Promise}
- */
- async top(count) {
- return this.db.many(sql.top, { top: count })
- }
-
- /**
- * Delete the block from the database.
- * @param {Number} id
- * @return {Promise}
- */
- async delete(id) {
- return this.db.none(sql.delete, { id })
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- return new Block(this.pgp)
- }
-}
diff --git a/packages/core-database-postgres/lib/repositories/index.js b/packages/core-database-postgres/lib/repositories/index.js
deleted file mode 100644
index 59947e709d..0000000000
--- a/packages/core-database-postgres/lib/repositories/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-module.exports = {
- blocks: require('./blocks'),
- migrations: require('./migrations'),
- rounds: require('./rounds'),
- transactions: require('./transactions'),
- wallets: require('./wallets'),
-}
diff --git a/packages/core-database-postgres/lib/repositories/migrations.js b/packages/core-database-postgres/lib/repositories/migrations.js
deleted file mode 100644
index a72275c6a7..0000000000
--- a/packages/core-database-postgres/lib/repositories/migrations.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const Repository = require('./repository')
-const { Migration } = require('../models')
-const { migrations: sql } = require('../queries')
-
-module.exports = class MigrationsRepository extends Repository {
- /**
- * Find a migration by its name.
- * @param {String} name
- * @return {Promise}
- */
- async findByName(name) {
- return this.db.oneOrNone(sql.find, { name })
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- return new Migration(this.pgp)
- }
-}
diff --git a/packages/core-database-postgres/lib/repositories/repository.js b/packages/core-database-postgres/lib/repositories/repository.js
deleted file mode 100644
index e34f7f7ea0..0000000000
--- a/packages/core-database-postgres/lib/repositories/repository.js
+++ /dev/null
@@ -1,74 +0,0 @@
-module.exports = class Repository {
- /**
- * Create a new repository instance.
- * @param {Object} db
- * @param {Object} pgp
- */
- constructor(db, pgp) {
- this.db = db
- this.pgp = pgp
- this.model = this.getModel()
- }
-
- /**
- * Estimate the number of records in the table.
- * @return {Promise}
- */
- async estimate() {
- return this.db.one(
- `SELECT count_estimate('SELECT * FROM ${this.model.getTable()})`,
- )
- }
-
- /**
- * Run a truncate statement on the table.
- * @return {Promise}
- */
- async truncate() {
- return this.db.none(`TRUNCATE ${this.model.getTable()} RESTART IDENTITY`)
- }
-
- /**
- * Create one or many instances of the related models.
- * @param {Array|Object} item
- * @return {Promise}
- */
- async create(item) {
- return this.db.none(this.__insertQuery(item))
- }
-
- /**
- * Update one or many instances of the related models.
- * @param {Array|Object} item
- * @return {Promise}
- */
- async update(item) {
- return this.db.none(this.__updateQuery(item))
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- throw new Error('Method [getModel] not implemented!')
- }
-
- /**
- * Generate an "INSERT" query for the given data.
- * @param {Array|Object} data
- * @return {String}
- */
- __insertQuery(data) {
- return this.pgp.helpers.insert(data, this.model.getColumnSet())
- }
-
- /**
- * Generate an "UPDATE" query for the given data.
- * @param {Array|Object} data
- * @return {String}
- */
- __updateQuery(data) {
- return this.pgp.helpers.update(data, this.model.getColumnSet())
- }
-}
diff --git a/packages/core-database-postgres/lib/repositories/rounds.js b/packages/core-database-postgres/lib/repositories/rounds.js
deleted file mode 100644
index ea55b11f76..0000000000
--- a/packages/core-database-postgres/lib/repositories/rounds.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const Repository = require('./repository')
-const { Round } = require('../models')
-const { rounds: sql } = require('../queries')
-
-module.exports = class RoundsRepository extends Repository {
- /**
- * Find a round by its ID.
- * @param {Number} round
- * @return {Promise}
- */
- async findById(round) {
- return this.db.manyOrNone(sql.find, { round })
- }
-
- /**
- * Delete the round from the database.
- * @param {Number} round
- * @return {Promise}
- */
- async delete(round) {
- return this.db.none(sql.delete, { round })
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- return new Round(this.pgp)
- }
-}
diff --git a/packages/core-database-postgres/lib/repositories/transactions.js b/packages/core-database-postgres/lib/repositories/transactions.js
deleted file mode 100644
index 66cf3158c5..0000000000
--- a/packages/core-database-postgres/lib/repositories/transactions.js
+++ /dev/null
@@ -1,84 +0,0 @@
-const Repository = require('./repository')
-const { Transaction } = require('../models')
-const { transactions: sql } = require('../queries')
-
-module.exports = class TransactionsRepository extends Repository {
- /**
- * Find a transactions by its ID.
- * @param {String} id
- * @return {Promise}
- */
- async findById(id) {
- return this.db.oneOrNone(sql.findById, { id })
- }
-
- /**
- * Find multiple transactionss by their IDs.
- * @param {Array} ids
- * @return {Promise}
- */
- async findManyById(ids) {
- return this.db.manyOrNone(sql.findManyById, { ids })
- }
-
- /**
- * Find multiple transactionss by their block ID.
- * @param {String} id
- * @return {Promise}
- */
- async findByBlock(id) {
- return this.db.manyOrNone(sql.findByBlock, { id })
- }
-
- /**
- * Find multiple transactionss by their block ID and order them by sequence.
- * @param {Number} id
- * @return {Promise}
- */
- async latestByBlock(id) {
- return this.db.manyOrNone(sql.latestByBlock, { id })
- }
-
- /**
- * Find multiple transactionss by their block IDs and order them by sequence.
- * @param {Array} ids
- * @return {Promise}
- */
- async latestByBlocks(ids) {
- return this.db.manyOrNone(sql.latestByBlocks, { ids })
- }
-
- /**
- * Get all of the forged transactions from the database.
- * @param {Array} ids
- * @return {Promise}
- */
- async forged(ids) {
- return this.db.manyOrNone(sql.forged, { ids })
- }
-
- /**
- * Get statistics about all transactions from the database.
- * @return {Promise}
- */
- async statistics() {
- return this.db.one(sql.statistics)
- }
-
- /**
- * Delete the transactions from the database.
- * @param {Number} id
- * @return {Promise}
- */
- async deleteByBlock(id) {
- return this.db.none(sql.deleteByBlock, { id })
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- return new Transaction(this.pgp)
- }
-}
diff --git a/packages/core-database-postgres/lib/repositories/wallets.js b/packages/core-database-postgres/lib/repositories/wallets.js
deleted file mode 100644
index 95a01041eb..0000000000
--- a/packages/core-database-postgres/lib/repositories/wallets.js
+++ /dev/null
@@ -1,62 +0,0 @@
-const Repository = require('./repository')
-const { Wallet } = require('../models')
-const { wallets: sql } = require('../queries')
-
-module.exports = class WalletsRepository extends Repository {
- /**
- * Get all of the wallets from the database.
- * @return {Promise}
- */
- async all() {
- return this.db.manyOrNone(sql.all)
- }
-
- /**
- * Find a wallet by its address.
- * @param {String} address
- * @return {Promise}
- */
- async findByAddress(address) {
- return this.db.oneOrNone(sql.findByAddress, { address })
- }
-
- /**
- * Get the count of wallets that have a negative balance.
- * @return {Promise}
- */
- async findNegativeBalances() {
- return this.db.oneOrNone(sql.findNegativeBalances)
- }
-
- /**
- * Get the count of wallets that have a negative vote balance.
- * @return {Promise}
- */
- async findNegativeVoteBalances() {
- return this.db.oneOrNone(sql.findNegativeVoteBalances)
- }
-
- /**
- * Create or update a record matching the attributes, and fill it with values.
- * @param {Object} wallet
- * @return {Promise}
- */
- async updateOrCreate(wallet) {
- const query = `${this.__insertQuery(
- wallet,
- )} ON CONFLICT(address) DO UPDATE SET ${this.pgp.helpers.sets(
- wallet,
- this.model.getColumnSet(),
- )}`
-
- return this.db.none(query)
- }
-
- /**
- * Get the model related to this repository.
- * @return {Object}
- */
- getModel() {
- return new Wallet(this.pgp)
- }
-}
diff --git a/packages/core-database-postgres/lib/spv.js b/packages/core-database-postgres/lib/spv.js
deleted file mode 100644
index 3266252c45..0000000000
--- a/packages/core-database-postgres/lib/spv.js
+++ /dev/null
@@ -1,325 +0,0 @@
-const {
- Bignum,
- models: { Transaction },
-} = require('@arkecosystem/crypto')
-const app = require('@arkecosystem/core-container')
-
-const logger = app.resolvePlugin('logger')
-const config = app.resolvePlugin('config')
-const queries = require('./queries')
-
-const genesisWallets = config.genesisBlock.transactions.map(tx => tx.senderId)
-
-module.exports = class SPV {
- /**
- * Create a new wallet builder instance.
- * @param {SequelizeConnection} database
- * @return {void}
- */
- constructor(database) {
- this.connection = database.connection
- this.models = database.models
- this.walletManager = database.walletManager
- this.query = database.query
- }
-
- /**
- * Perform the SPV (Simple Payment Verification).
- * @param {Number} height
- * @return {void}
- */
- async build(height) {
- this.activeDelegates = config.getConstants(height).activeDelegates
-
- logger.printTracker('SPV', 1, 8, 'Received Transactions')
- await this.__buildReceivedTransactions()
-
- logger.printTracker('SPV', 2, 8, 'Block Rewards')
- await this.__buildBlockRewards()
-
- logger.printTracker('SPV', 3, 8, 'Last Forged Blocks')
- await this.__buildLastForgedBlocks()
-
- logger.printTracker('SPV', 4, 8, 'Sent Transactions')
- await this.__buildSentTransactions()
-
- logger.printTracker('SPV', 5, 8, 'Second Signatures')
- await this.__buildSecondSignatures()
-
- logger.printTracker('SPV', 6, 8, 'Votes')
- await this.__buildVotes()
-
- logger.printTracker('SPV', 7, 8, 'Delegates')
- await this.__buildDelegates()
-
- logger.printTracker('SPV', 8, 8, 'MultiSignatures')
- await this.__buildMultisignatures()
-
- logger.stopTracker('SPV', 8, 8)
- logger.info(
- `SPV rebuild finished, wallets in memory: ${
- Object.keys(this.walletManager.byAddress).length
- }`,
- )
- logger.info(
- `Number of registered delegates: ${
- Object.keys(this.walletManager.byUsername).length
- }`,
- )
-
- return this.__verifyWalletsConsistency()
- }
-
- /**
- * Load and apply received transactions to wallets.
- * @return {void}
- */
- async __buildReceivedTransactions() {
- const transactions = await this.query.many(queries.spv.receivedTransactions)
-
- for (const transaction of transactions) {
- const wallet = this.walletManager.findByAddress(transaction.recipientId)
-
- wallet
- ? (wallet.balance = new Bignum(transaction.amount))
- : logger.warn(
- `Lost cold wallet: ${transaction.recipientId} ${
- transaction.amount
- }`,
- )
- }
- }
-
- /**
- * Load and apply block rewards to wallets.
- * @return {void}
- */
- async __buildBlockRewards() {
- const blocks = await this.query.many(queries.spv.blockRewards)
-
- for (const block of blocks) {
- const wallet = this.walletManager.findByPublicKey(
- block.generatorPublicKey,
- )
- wallet.balance = wallet.balance.plus(block.reward)
- }
- }
-
- /**
- * Load and apply last forged blocks to wallets.
- * @return {void}
- */
- async __buildLastForgedBlocks() {
- const blocks = await this.query.many(queries.spv.lastForgedBlocks, {
- limit: this.activeDelegates,
- })
-
- for (const block of blocks) {
- const wallet = this.walletManager.findByPublicKey(
- block.generatorPublicKey,
- )
- wallet.lastBlock = block
- }
- }
-
- /**
- * Load and apply sent transactions to wallets.
- * @return {void}
- */
- async __buildSentTransactions() {
- const transactions = await this.query.many(queries.spv.sentTransactions)
-
- for (const transaction of transactions) {
- const wallet = this.walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
- wallet.balance = wallet.balance
- .minus(transaction.amount)
- .minus(transaction.fee)
-
- if (wallet.balance.isLessThan(0) && !this.isGenesis(wallet)) {
- logger.warn(`Negative balance: ${wallet}`)
- }
- }
- }
-
- /**
- * Used to determine if a wallet is a Genesis wallet.
- * @return {Boolean}
- */
- isGenesis(wallet) {
- return genesisWallets.includes(wallet.address)
- }
-
- /**
- * Load and apply second signature transactions to wallets.
- * @return {void}
- */
- async __buildSecondSignatures() {
- const transactions = await this.query.manyOrNone(
- queries.spv.secondSignatures,
- )
-
- for (const transaction of transactions) {
- const wallet = this.walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
- wallet.secondPublicKey = Transaction.deserialize(
- transaction.serialized.toString('hex'),
- ).asset.signature.publicKey
- }
- }
-
- /**
- * Load and apply votes to wallets.
- * @return {void}
- */
- async __buildVotes() {
- const transactions = await this.query.manyOrNone(queries.spv.votes)
-
- for (const transaction of transactions) {
- const wallet = this.walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
-
- if (!wallet.voted) {
- const vote = Transaction.deserialize(
- transaction.serialized.toString('hex'),
- ).asset.votes[0]
-
- if (vote.startsWith('+')) {
- wallet.vote = vote.slice(1)
- }
-
- // NOTE: The "voted" property is only used within this loop to avoid an issue
- // that results in not properly applying "unvote" transactions as the "vote" property
- // would be empty in that case and return a false result.
- wallet.voted = true
- }
- }
-
- this.walletManager.buildVoteBalances()
- }
-
- /**
- * Load and apply delegate usernames to wallets.
- * @return {void}
- */
- async __buildDelegates() {
- // Register...
- const transactions = await this.query.manyOrNone(queries.spv.delegates)
-
- transactions.forEach(transaction => {
- const wallet = this.walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
- wallet.username = Transaction.deserialize(
- transaction.serialized.toString('hex'),
- ).asset.delegate.username
- this.walletManager.reindex(wallet)
- })
-
- // Forged Blocks...
- const forgedBlocks = await this.query.manyOrNone(
- queries.spv.delegatesForgedBlocks,
- )
- forgedBlocks.forEach(block => {
- const wallet = this.walletManager.findByPublicKey(
- block.generatorPublicKey,
- )
- wallet.forgedFees = wallet.forgedFees.plus(block.totalFees)
- wallet.forgedRewards = wallet.forgedRewards.plus(block.totalRewards)
- wallet.producedBlocks = +block.totalProduced
- })
-
- // NOTE: This is highly NOT reliable, however the number of missed blocks
- // is NOT used for the consensus
- const delegates = await this.query.manyOrNone(queries.spv.delegatesRanks)
- delegates.forEach((delegate, i) => {
- const wallet = this.walletManager.findByPublicKey(delegate.publicKey)
- wallet.missedBlocks = parseInt(delegate.missedBlocks)
- wallet.rate = i + 1
- this.walletManager.reindex(wallet)
- })
- }
-
- /**
- * Load and apply multisignatures to wallets.
- * @return {void}
- */
- async __buildMultisignatures() {
- const transactions = await this.query.manyOrNone(
- queries.spv.multiSignatures,
- )
-
- for (const transaction of transactions) {
- const wallet = this.walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
-
- if (!wallet.multisignature) {
- wallet.multisignature = Transaction.deserialize(
- transaction.serialized.toString('hex'),
- ).asset.multisignature
- }
- }
- }
-
- /**
- * Verify the consistency of the wallets table by comparing all records against
- * the in memory wallets.
- * NOTE: This is faster than rebuilding the entire table from scratch each time.
- * @returns {Boolean}
- */
- async __verifyWalletsConsistency() {
- const dbWallets = await this.query.manyOrNone(queries.wallets.all)
- const inMemoryWallets = this.walletManager.allByPublicKey()
-
- let detectedInconsistency = false
- if (dbWallets.length !== inMemoryWallets.length) {
- detectedInconsistency = true
- } else {
- for (const dbWallet of dbWallets) {
- if (dbWallet.balance < 0 && !this.isGenesis(dbWallet)) {
- detectedInconsistency = true
- logger.warn(
- `Wallet '${dbWallet.address}' has a negative balance of '${
- dbWallet.balance
- }'`,
- )
- break
- }
-
- if (dbWallet.voteBalance < 0) {
- detectedInconsistency = true
- logger.warn(
- `Wallet ${dbWallet.address} has a negative vote balance of '${
- dbWallet.voteBalance
- }'`,
- )
- break
- }
-
- const inMemoryWallet = this.walletManager.findByPublicKey(
- dbWallet.publicKey,
- )
-
- if (
- !inMemoryWallet.balance.isEqualTo(dbWallet.balance) ||
- !inMemoryWallet.voteBalance.isEqualTo(dbWallet.voteBalance) ||
- dbWallet.username !== inMemoryWallet.username
- ) {
- detectedInconsistency = true
- break
- }
- }
- }
-
- // Remove dirty flags when no inconsistency has been found
- if (!detectedInconsistency) {
- this.walletManager.clear()
- }
-
- return !detectedInconsistency
- }
-}
diff --git a/packages/core-database-postgres/lib/sql/query-executor.js b/packages/core-database-postgres/lib/sql/query-executor.js
deleted file mode 100644
index 4a1d3b58a0..0000000000
--- a/packages/core-database-postgres/lib/sql/query-executor.js
+++ /dev/null
@@ -1,81 +0,0 @@
-module.exports = class QueryExecutor {
- /**
- * Create a new QueryExecutor instance.
- * @param {[type]} connection
- * @return {QueryBuilder}
- */
- constructor(connection) {
- this.connection = connection
- }
-
- /**
- * Execute the given query and expect no results.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async none(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'none')
- }
-
- /**
- * Execute the given query and expect one result.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async one(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'one')
- }
-
- /**
- * Execute the given query and expect one or no results.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async oneOrNone(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'oneOrNone')
- }
-
- /**
- * Execute the given query and expect many results.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async many(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'many')
- }
-
- /**
- * Execute the given query and expect many or no results.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async manyOrNone(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'manyOrNone')
- }
-
- /**
- * Execute the given query and expect any results.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @return {Promise}
- */
- async any(query, parameters) {
- return this.__executeQueryFile(query, parameters, 'any')
- }
-
- /**
- * Execute the given query using the given method and parameters.
- * @param {QueryFile} query
- * @param {Array} parameters
- * @param {String} method
- * @return {QueryBuilder}
- */
- async __executeQueryFile(query, parameters, method) {
- return this.connection.db[method](query, parameters)
- }
-}
diff --git a/packages/core-database-postgres/lib/utils/camelize-columns.js b/packages/core-database-postgres/lib/utils/camelize-columns.js
deleted file mode 100644
index d0721f31d5..0000000000
--- a/packages/core-database-postgres/lib/utils/camelize-columns.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/* eslint guard-for-in: "off" */
-
-module.exports = (pgp, data) => {
- const tmp = data[0]
-
- for (const prop in tmp) {
- const camel = pgp.utils.camelize(prop)
-
- if (!(camel in tmp)) {
- for (let i = 0; i < data.length; i++) {
- const d = data[i]
- d[camel] = d[prop]
- delete d[prop]
- }
- }
- }
-}
diff --git a/packages/core-database-postgres/lib/utils/index.js b/packages/core-database-postgres/lib/utils/index.js
deleted file mode 100644
index a80bd79d5b..0000000000
--- a/packages/core-database-postgres/lib/utils/index.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = {
- camelizeColumns: require('./camelize-columns'),
- loadQueryFile: require('./load-query-file'),
-}
diff --git a/packages/core-database-postgres/lib/utils/load-query-file.js b/packages/core-database-postgres/lib/utils/load-query-file.js
deleted file mode 100644
index 20302d61aa..0000000000
--- a/packages/core-database-postgres/lib/utils/load-query-file.js
+++ /dev/null
@@ -1,25 +0,0 @@
-const QueryFile = require('pg-promise').QueryFile
-const path = require('path')
-
-const app = require('@arkecosystem/core-container')
-
-const logger = app.resolvePlugin('logger')
-
-module.exports = (directory, file) => {
- const fullPath = path.join(directory, file)
-
- const options = {
- minify: true,
- params: {
- schema: 'public',
- },
- }
-
- const query = new QueryFile(fullPath, options)
-
- if (query.error) {
- logger.error(query.error)
- }
-
- return query
-}
diff --git a/packages/core-database-postgres/package.json b/packages/core-database-postgres/package.json
index 97642fcc10..e3da866f71 100644
--- a/packages/core-database-postgres/package.json
+++ b/packages/core-database-postgres/package.json
@@ -1,35 +1,56 @@
{
- "name": "@arkecosystem/core-database-postgres",
- "description": "PostgreSQL integration for Ark Core",
- "version": "0.2.1",
- "contributors": [
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/core-container": "~0.2",
- "@arkecosystem/core-database": "~0.2",
- "@arkecosystem/core-utils": "~0.2",
- "@arkecosystem/crypto": "~0.2",
- "bluebird": "^3.5.3",
- "lodash.chunk": "^4.2.0",
- "pg-promise": "^8.5.2",
- "pluralize": "^7.0.0",
- "sql": "^0.78.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-database-postgres",
+ "description": "PostgreSQL integration for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index",
+ "types": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "bash ../../scripts/pre-test.sh",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn copy && yarn compile",
+ "build:watch": "yarn clean && yarn copy && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "copy": "cd src/ && cpy './**/*.sql' --parents ../dist/ && cd ../",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/core-container": "^2.1.0",
+ "@arkecosystem/core-database": "^2.1.0",
+ "@arkecosystem/core-utils": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/bluebird": "^3.5.25",
+ "@types/lodash.chunk": "^4.2.4",
+ "@types/pluralize": "^0.0.29",
+ "bluebird": "^3.5.3",
+ "cpy-cli": "^2.0.0",
+ "lodash.chunk": "^4.2.0",
+ "pg-promise": "^8.5.4",
+ "pluralize": "^7.0.0",
+ "sql": "^0.78.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-database-postgres/src/defaults.ts b/packages/core-database-postgres/src/defaults.ts
new file mode 100644
index 0000000000..566937c84c
--- /dev/null
+++ b/packages/core-database-postgres/src/defaults.ts
@@ -0,0 +1,14 @@
+export const defaults = {
+ initialization: {
+ capSQL: true,
+ promiseLib: require("bluebird"),
+ noLocking: process.env.NODE_ENV === "test",
+ },
+ connection: {
+ host: process.env.CORE_DB_HOST || "localhost",
+ port: process.env.CORE_DB_PORT || 5432,
+ database: process.env.CORE_DB_DATABASE || `${process.env.CORE_TOKEN}_${process.env.CORE_NETWORK_NAME}`,
+ user: process.env.CORE_DB_USERNAME || process.env.CORE_TOKEN,
+ password: process.env.CORE_DB_PASSWORD || "password",
+ },
+};
diff --git a/packages/core-database-postgres/src/index.ts b/packages/core-database-postgres/src/index.ts
new file mode 100644
index 0000000000..b4d0f3e18c
--- /dev/null
+++ b/packages/core-database-postgres/src/index.ts
@@ -0,0 +1,6 @@
+export * from "./postgres-connection";
+export * from "./migrations";
+export * from "./spv";
+export * from "./models";
+export * from "./repositories";
+export * from "./plugin";
diff --git a/packages/core-database-postgres/lib/migrations/20180304100000-create-migrations-table.sql b/packages/core-database-postgres/src/migrations/20180304100000-create-migrations-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20180304100000-create-migrations-table.sql
rename to packages/core-database-postgres/src/migrations/20180304100000-create-migrations-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20180305100000-create-wallets-table.sql b/packages/core-database-postgres/src/migrations/20180305100000-create-wallets-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20180305100000-create-wallets-table.sql
rename to packages/core-database-postgres/src/migrations/20180305100000-create-wallets-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20180305200000-create-rounds-table.sql b/packages/core-database-postgres/src/migrations/20180305200000-create-rounds-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20180305200000-create-rounds-table.sql
rename to packages/core-database-postgres/src/migrations/20180305200000-create-rounds-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20180305300000-create-blocks-table.sql b/packages/core-database-postgres/src/migrations/20180305300000-create-blocks-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20180305300000-create-blocks-table.sql
rename to packages/core-database-postgres/src/migrations/20180305300000-create-blocks-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20180305400000-create-transactions-table.sql b/packages/core-database-postgres/src/migrations/20180305400000-create-transactions-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20180305400000-create-transactions-table.sql
rename to packages/core-database-postgres/src/migrations/20180305400000-create-transactions-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20181129400000-add-block_id-index-to-transactions-table.sql b/packages/core-database-postgres/src/migrations/20181129400000-add-block_id-index-to-transactions-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20181129400000-add-block_id-index-to-transactions-table.sql
rename to packages/core-database-postgres/src/migrations/20181129400000-add-block_id-index-to-transactions-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20181204100000-add-generator_public_key-index-to-blocks-table.sql b/packages/core-database-postgres/src/migrations/20181204100000-add-generator_public_key-index-to-blocks-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20181204100000-add-generator_public_key-index-to-blocks-table.sql
rename to packages/core-database-postgres/src/migrations/20181204100000-add-generator_public_key-index-to-blocks-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20181204200000-add-timestamp-index-to-blocks-table.sql b/packages/core-database-postgres/src/migrations/20181204200000-add-timestamp-index-to-blocks-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20181204200000-add-timestamp-index-to-blocks-table.sql
rename to packages/core-database-postgres/src/migrations/20181204200000-add-timestamp-index-to-blocks-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20181204300000-add-sender_public_key-index-to-transactions-table.sql b/packages/core-database-postgres/src/migrations/20181204300000-add-sender_public_key-index-to-transactions-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20181204300000-add-sender_public_key-index-to-transactions-table.sql
rename to packages/core-database-postgres/src/migrations/20181204300000-add-sender_public_key-index-to-transactions-table.sql
diff --git a/packages/core-database-postgres/lib/migrations/20181204400000-add-recipient_id-index-to-transactions-table.sql b/packages/core-database-postgres/src/migrations/20181204400000-add-recipient_id-index-to-transactions-table.sql
similarity index 100%
rename from packages/core-database-postgres/lib/migrations/20181204400000-add-recipient_id-index-to-transactions-table.sql
rename to packages/core-database-postgres/src/migrations/20181204400000-add-recipient_id-index-to-transactions-table.sql
diff --git a/packages/core-database-postgres/src/migrations/index.ts b/packages/core-database-postgres/src/migrations/index.ts
new file mode 100644
index 0000000000..d0e51a118e
--- /dev/null
+++ b/packages/core-database-postgres/src/migrations/index.ts
@@ -0,0 +1,14 @@
+import { loadQueryFile } from "../utils";
+
+export const migrations = [
+ loadQueryFile(__dirname, "./20180304100000-create-migrations-table.sql"),
+ loadQueryFile(__dirname, "./20180305100000-create-wallets-table.sql"),
+ loadQueryFile(__dirname, "./20180305200000-create-rounds-table.sql"),
+ loadQueryFile(__dirname, "./20180305300000-create-blocks-table.sql"),
+ loadQueryFile(__dirname, "./20180305400000-create-transactions-table.sql"),
+ loadQueryFile(__dirname, "./20181129400000-add-block_id-index-to-transactions-table.sql"),
+ loadQueryFile(__dirname, "./20181204100000-add-generator_public_key-index-to-blocks-table.sql"),
+ loadQueryFile(__dirname, "./20181204200000-add-timestamp-index-to-blocks-table.sql"),
+ loadQueryFile(__dirname, "./20181204300000-add-sender_public_key-index-to-transactions-table.sql"),
+ loadQueryFile(__dirname, "./20181204400000-add-recipient_id-index-to-transactions-table.sql"),
+];
diff --git a/packages/core-database-postgres/src/models/block.ts b/packages/core-database-postgres/src/models/block.ts
new file mode 100644
index 0000000000..f537ab8b4e
--- /dev/null
+++ b/packages/core-database-postgres/src/models/block.ts
@@ -0,0 +1,72 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import { Model } from "./model";
+
+export class Block extends Model {
+ /**
+ * The table associated with the model.
+ * @return {String}
+ */
+ public getTable() {
+ return "blocks";
+ }
+
+ /**
+ * The read-only structure with query-formatting columns.
+ * @return {Object}
+ */
+ public getColumnSet() {
+ return this.createColumnSet([
+ {
+ name: "id",
+ },
+ {
+ name: "version",
+ },
+ {
+ name: "timestamp",
+ },
+ {
+ name: "previous_block",
+ prop: "previousBlock",
+ def: null,
+ },
+ {
+ name: "height",
+ },
+ {
+ name: "number_of_transactions",
+ prop: "numberOfTransactions",
+ },
+ {
+ name: "total_amount",
+ prop: "totalAmount",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "total_fee",
+ prop: "totalFee",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "reward",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "payload_length",
+ prop: "payloadLength",
+ },
+ {
+ name: "payload_hash",
+ prop: "payloadHash",
+ },
+ {
+ name: "generator_public_key",
+ prop: "generatorPublicKey",
+ },
+ {
+ name: "block_signature",
+ prop: "blockSignature",
+ },
+ ]);
+ }
+}
diff --git a/packages/core-database-postgres/src/models/index.ts b/packages/core-database-postgres/src/models/index.ts
new file mode 100644
index 0000000000..5fdb934052
--- /dev/null
+++ b/packages/core-database-postgres/src/models/index.ts
@@ -0,0 +1,6 @@
+export * from "./model";
+export * from "./block";
+export * from "./migration";
+export * from "./round";
+export * from "./transaction";
+export * from "./wallet";
diff --git a/packages/core-database-postgres/src/models/migration.ts b/packages/core-database-postgres/src/models/migration.ts
new file mode 100644
index 0000000000..cf56eb259c
--- /dev/null
+++ b/packages/core-database-postgres/src/models/migration.ts
@@ -0,0 +1,23 @@
+import { Model } from "./model";
+
+export class Migration extends Model {
+ /**
+ * The table associated with the model.
+ * @return {String}
+ */
+ public getTable() {
+ return "migrations";
+ }
+
+ /**
+ * The read-only structure with query-formatting columns.
+ * @return {Object}
+ */
+ public getColumnSet() {
+ return this.createColumnSet([
+ {
+ name: "name",
+ },
+ ]);
+ }
+}
diff --git a/packages/core-database-postgres/src/models/model.ts b/packages/core-database-postgres/src/models/model.ts
new file mode 100644
index 0000000000..6d401f9dca
--- /dev/null
+++ b/packages/core-database-postgres/src/models/model.ts
@@ -0,0 +1,51 @@
+import sql from "sql";
+
+export abstract class Model {
+ /**
+ * Create a new model instance.
+ * @param {Object} pgp
+ */
+ constructor(public pgp) {}
+
+ /**
+ * Get table name for model.
+ * @return {String}
+ */
+ public abstract getTable(): string;
+
+ /**
+ * Get table column names for model.
+ * @return {String[]}
+ */
+ public abstract getColumnSet(): any;
+
+ /**
+ * Return the model & table definition.
+ * @return {Object}
+ */
+ public query(): any {
+ const { schema, columns } = this.getColumnSet();
+ return sql.define({
+ name: this.getTable(),
+ schema,
+ columns: columns.map(column => ({
+ name: column.name,
+ prop: column.prop || column.name,
+ })),
+ });
+ }
+
+ /**
+ * Convert the "camelCase" keys to "snake_case".
+ * @return {ColumnSet}
+ * @param columns
+ */
+ public createColumnSet(columns) {
+ return new this.pgp.helpers.ColumnSet(columns, {
+ table: {
+ table: this.getTable(),
+ schema: "public",
+ },
+ });
+ }
+}
diff --git a/packages/core-database-postgres/src/models/round.ts b/packages/core-database-postgres/src/models/round.ts
new file mode 100644
index 0000000000..2ccd7a87bd
--- /dev/null
+++ b/packages/core-database-postgres/src/models/round.ts
@@ -0,0 +1,33 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import { Model } from "./model";
+
+export class Round extends Model {
+ /**
+ * The table associated with the model.
+ * @return {String}
+ */
+ public getTable() {
+ return "rounds";
+ }
+
+ /**
+ * The read-only structure with query-formatting columns.
+ * @return {Object}
+ */
+ public getColumnSet() {
+ return this.createColumnSet([
+ {
+ name: "public_key",
+ prop: "publicKey",
+ },
+ {
+ name: "balance",
+ prop: "voteBalance",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "round",
+ },
+ ]);
+ }
+}
diff --git a/packages/core-database-postgres/src/models/transaction.ts b/packages/core-database-postgres/src/models/transaction.ts
new file mode 100644
index 0000000000..e79870cfd4
--- /dev/null
+++ b/packages/core-database-postgres/src/models/transaction.ts
@@ -0,0 +1,64 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import { Model } from "./model";
+
+export class Transaction extends Model {
+ /**
+ * The table associated with the model.
+ * @return {String}
+ */
+ public getTable() {
+ return "transactions";
+ }
+
+ /**
+ * The read-only structure with query-formatting columns.
+ * @return {Object}
+ */
+ public getColumnSet() {
+ return this.createColumnSet([
+ {
+ name: "id",
+ },
+ {
+ name: "version",
+ },
+ {
+ name: "block_id",
+ prop: "blockId",
+ },
+ {
+ name: "sequence",
+ },
+ {
+ name: "timestamp",
+ },
+ {
+ name: "sender_public_key",
+ prop: "senderPublicKey",
+ },
+ {
+ name: "recipient_id",
+ prop: "recipientId",
+ },
+ {
+ name: "type",
+ },
+ {
+ name: "vendor_field_hex",
+ prop: "vendorFieldHex",
+ },
+ {
+ name: "amount",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "fee",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "serialized",
+ init: col => Buffer.from(col.value, "hex"),
+ },
+ ]);
+ }
+}
diff --git a/packages/core-database-postgres/src/models/wallet.ts b/packages/core-database-postgres/src/models/wallet.ts
new file mode 100644
index 0000000000..52c9c043dd
--- /dev/null
+++ b/packages/core-database-postgres/src/models/wallet.ts
@@ -0,0 +1,55 @@
+import { bignumify } from "@arkecosystem/core-utils";
+import { Model } from "./model";
+
+export class Wallet extends Model {
+ /**
+ * The table associated with the model.
+ * @return {String}
+ */
+ public getTable() {
+ return "wallets";
+ }
+
+ /**
+ * The read-only structure with query-formatting columns.
+ * @return {Object}
+ */
+ public getColumnSet() {
+ return this.createColumnSet([
+ {
+ name: "address",
+ },
+ {
+ name: "public_key",
+ prop: "publicKey",
+ },
+ {
+ name: "second_public_key",
+ prop: "secondPublicKey",
+ },
+ {
+ name: "vote",
+ },
+ {
+ name: "username",
+ },
+ {
+ name: "balance",
+ init: col => bignumify(col.value).toFixed(),
+ },
+ {
+ name: "vote_balance",
+ prop: "voteBalance",
+ init: col => (col.value ? bignumify(col.value).toFixed() : null),
+ },
+ {
+ name: "produced_blocks",
+ prop: "producedBlocks",
+ },
+ {
+ name: "missed_blocks",
+ prop: "missedBlocks",
+ },
+ ]);
+ }
+}
diff --git a/packages/core-database-postgres/src/plugin.ts b/packages/core-database-postgres/src/plugin.ts
new file mode 100644
index 0000000000..2e1c39fd57
--- /dev/null
+++ b/packages/core-database-postgres/src/plugin.ts
@@ -0,0 +1,29 @@
+import {
+ DatabaseManager, databaseServiceFactory, WalletManager} from "@arkecosystem/core-database";
+import { Container, Database, Logger } from "@arkecosystem/core-interfaces";
+import { defaults } from "./defaults";
+import { PostgresConnection } from "./postgres-connection";
+
+export const plugin: Container.PluginDescriptor = {
+ pkg: require("../package.json"),
+ defaults,
+ alias: "database",
+ extends: "@arkecosystem/core-database",
+ async register(container: Container.IContainer, options) {
+ container.resolvePlugin("logger").info("Establishing Database Connection");
+
+ const walletManager = new WalletManager();
+
+ const databaseManager = container.resolvePlugin("databaseManager");
+
+ const connection = await databaseManager.makeConnection(new PostgresConnection(options, walletManager));
+
+ return await databaseServiceFactory(options, walletManager, connection);
+ },
+ async deregister(container: Container.IContainer, options) {
+ container.resolvePlugin("logger").info("Closing Database Connection");
+
+ const databaseService = container.resolvePlugin("database");
+ await databaseService.connection.disconnect();
+ },
+};
diff --git a/packages/core-database-postgres/src/postgres-connection.ts b/packages/core-database-postgres/src/postgres-connection.ts
new file mode 100644
index 0000000000..442a19eb98
--- /dev/null
+++ b/packages/core-database-postgres/src/postgres-connection.ts
@@ -0,0 +1,275 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import { roundCalculator } from "@arkecosystem/core-utils";
+import { models } from "@arkecosystem/crypto";
+import fs from "fs";
+import chunk from "lodash/chunk";
+import path from "path";
+import pgPromise from "pg-promise";
+import { migrations } from "./migrations";
+import { Model } from "./models";
+import { repositories } from "./repositories";
+import { MigrationsRepository } from "./repositories/migrations";
+import { SPV } from "./spv";
+import { QueryExecutor } from "./sql/query-executor";
+import { camelizeColumns } from "./utils";
+
+export class PostgresConnection implements Database.IDatabaseConnection {
+
+ public logger = app.resolvePlugin("logger");
+ public models: { [key: string]: Model } = {};
+ public query: QueryExecutor;
+ public db: any;
+ public blocksRepository: Database.IBlocksRepository;
+ public roundsRepository: Database.IRoundsRepository;
+ public transactionsRepository: Database.ITransactionsRepository;
+ public walletsRepository: Database.IWalletsRepository;
+ public pgp: any;
+ private emitter = app.resolvePlugin("event-emitter");
+ private migrationsRepository : MigrationsRepository;
+ private cache: Map;
+ private queuedQueries: any[];
+
+
+ public constructor(readonly options: any, private walletManager: Database.IWalletManager) {
+
+ }
+
+
+ public async buildWallets(height: number) {
+ const spvPath = `${process.env.CORE_PATH_CACHE}/spv.json`;
+
+ if (fs.existsSync(spvPath)) {
+ (fs as any).removeSync(spvPath);
+
+ this.logger.info("Ark Core ended unexpectedly - resuming from where we left off :runner:");
+
+ return true;
+ }
+
+ try {
+ const spv = new SPV(this.query, this.walletManager);
+ return await spv.build(height);
+ } catch (error) {
+ this.logger.error(error.stack);
+ }
+
+ return false;
+ }
+
+ public async commitQueuedQueries() {
+ if (!this.queuedQueries || this.queuedQueries.length === 0) {
+ return;
+ }
+
+ this.logger.debug("Committing database transactions.");
+
+ try {
+ await this.db.tx(t => t.batch(this.queuedQueries));
+ } catch (error) {
+ this.logger.error(error);
+
+ throw error;
+ } finally {
+ this.queuedQueries = null;
+ }
+ }
+
+ public async connect() {
+
+ this.emitter.emit(Database.DatabaseEvents.PRE_CONNECT);
+ const initialization = {
+ receive(data, result, e) {
+ camelizeColumns(pgp, data);
+ },
+ extend(object) {
+ for (const repository of Object.keys(repositories)) {
+ object[repository] = new repositories[repository](object, pgp);
+ }
+ },
+ };
+
+ const pgp = pgPromise({ ...this.options.initialization, ...initialization });
+
+ this.pgp = pgp;
+ this.db = this.pgp(this.options.connection);
+ }
+
+ public async deleteBlock(block: models.Block) {
+ try {
+ const queries = [this.transactionsRepository.deleteByBlockId(block.data.id), this.blocksRepository.delete(block.data.id)];
+
+ await this.db.tx(t => t.batch(queries));
+ } catch (error) {
+ this.logger.error(error.stack);
+
+ throw error;
+ }
+ }
+
+ public async disconnect() {
+ this.logger.debug("Disconnecting from database");
+ this.emitter.emit(Database.DatabaseEvents.PRE_DISCONNECT);
+
+ try {
+ await this.commitQueuedQueries();
+ this.cache.clear();
+ } catch (error) {
+ this.logger.warn("Issue in commiting blocks, database might be corrupted");
+ this.logger.warn(error.message);
+ }
+
+ await this.pgp.end();
+ this.emitter.emit(Database.DatabaseEvents.POST_DISCONNECT);
+ this.logger.debug("Disconnected from database");
+ }
+
+ public enqueueDeleteBlock(block: models.Block): any {
+ const queries = [this.transactionsRepository.deleteByBlockId(block.data.id), this.blocksRepository.delete(block.data.id)];
+
+ this.enqueueQueries(queries);
+ }
+
+ public enqueueDeleteRound(height: number): any {
+ const { round, nextRound, maxDelegates } = roundCalculator.calculateRound(height);
+
+ if (nextRound === round + 1 && height >= maxDelegates) {
+ this.enqueueQueries([this.roundsRepository.delete(nextRound)]);
+ }
+ }
+
+ public enqueueSaveBlock(block: models.Block): any {
+ const queries = [this.blocksRepository.insert(block.data)];
+
+ if (block.transactions.length > 0) {
+ queries.push(this.transactionsRepository.insert(block.transactions));
+ }
+
+ this.enqueueQueries(queries);
+ }
+
+ public async make(): Promise {
+ if (this.db) {
+ throw new Error("Database connection already initialised");
+ }
+
+ this.logger.debug("Connecting to database");
+
+ this.queuedQueries = null;
+ this.cache = new Map();
+
+ try {
+ await this.connect();
+ this.exposeRepositories();
+ await this.registerQueryExecutor();
+ await this.runMigrations();
+ await this.registerModels();
+ this.logger.debug("Connected to database.");
+ this.emitter.emit(Database.DatabaseEvents.POST_CONNECT);
+
+ return this;
+ } catch (error) {
+ app.forceExit("Unable to connect to the database!", error);
+ }
+
+ return null;
+ }
+
+ public async saveBlock(block: models.Block) {
+ try {
+ const queries = [this.blocksRepository.insert(block.data)];
+
+ if (block.transactions.length > 0) {
+ queries.push(this.transactionsRepository.insert(block.transactions));
+ }
+
+ await this.db.tx(t => t.batch(queries));
+ } catch (err) {
+ this.logger.error(err.message);
+ }
+ }
+
+ public async saveWallets(wallets: any[], force?: boolean) {
+ if (force) {
+ // all wallets to be updated, performance is better without upsert
+ await this.walletsRepository.truncate();
+
+ try {
+ const chunks = chunk(wallets, 5000).map(c => this.walletsRepository.insert(c)); // this 5000 figure should be configurable...
+ await this.db.tx(t => t.batch(chunks));
+ } catch (error) {
+ this.logger.error(error.stack);
+ }
+ } else {
+ // NOTE: The list of delegates is calculated in-memory against the WalletManager,
+ // so it is safe to perform the costly UPSERT non-blocking during round change only:
+ // 'await saveWallets(false)' -> 'saveWallets(false)'
+ try {
+ const queries = wallets.map(wallet => this.walletsRepository.updateOrCreate(wallet));
+ await this.db.tx(t => t.batch(queries));
+ } catch (error) {
+ this.logger.error(error.stack);
+ }
+ }
+ }
+
+ /**
+ * Run all migrations.
+ * @return {void}
+ */
+
+ private async runMigrations() {
+ for (const migration of migrations) {
+ const { name } = path.parse(migration.file);
+
+ if (name === "20180304100000-create-migrations-table") {
+ await this.query.none(migration);
+ } else {
+ const row = await this.migrationsRepository.findByName(name);
+
+ if (row === null) {
+ this.logger.debug(`Migrating ${name}`);
+
+ await this.query.none(migration);
+
+ await this.migrationsRepository.insert({ name });
+ }
+ }
+ }
+ }
+
+
+ /**
+ * Register all models.
+ * @return {void}
+ */
+ private async registerModels() {
+ for (const [key, Value] of Object.entries(require("./models"))) {
+ this.models[key.toLowerCase()] = new (Value as any)(this.pgp);
+ }
+ }
+
+ /**
+ * Register the query builder.
+ * @return {void}
+ */
+ private registerQueryExecutor() {
+ this.query = new QueryExecutor(this);
+ }
+
+ private enqueueQueries(queries) {
+ if (!this.queuedQueries) {
+ this.queuedQueries = [];
+ }
+
+ (this.queuedQueries as any).push(...queries);
+ }
+
+ private exposeRepositories() {
+ this.blocksRepository = this.db.blocks;
+ this.transactionsRepository = this.db.transactions;
+ this.roundsRepository = this.db.rounds;
+ this.walletsRepository = this.db.wallets;
+ this.migrationsRepository = this.db.migrations;
+ }
+}
diff --git a/packages/core-database-postgres/lib/queries/blocks/common.sql b/packages/core-database-postgres/src/queries/blocks/common.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/common.sql
rename to packages/core-database-postgres/src/queries/blocks/common.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/count.sql b/packages/core-database-postgres/src/queries/blocks/count.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/count.sql
rename to packages/core-database-postgres/src/queries/blocks/count.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/delete.sql b/packages/core-database-postgres/src/queries/blocks/delete.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/delete.sql
rename to packages/core-database-postgres/src/queries/blocks/delete.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/find-by-id.sql b/packages/core-database-postgres/src/queries/blocks/find-by-id.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/find-by-id.sql
rename to packages/core-database-postgres/src/queries/blocks/find-by-id.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/headers.sql b/packages/core-database-postgres/src/queries/blocks/headers.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/headers.sql
rename to packages/core-database-postgres/src/queries/blocks/headers.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/height-range.sql b/packages/core-database-postgres/src/queries/blocks/height-range.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/height-range.sql
rename to packages/core-database-postgres/src/queries/blocks/height-range.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/latest.sql b/packages/core-database-postgres/src/queries/blocks/latest.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/latest.sql
rename to packages/core-database-postgres/src/queries/blocks/latest.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/recent.sql b/packages/core-database-postgres/src/queries/blocks/recent.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/recent.sql
rename to packages/core-database-postgres/src/queries/blocks/recent.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/statistics.sql b/packages/core-database-postgres/src/queries/blocks/statistics.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/statistics.sql
rename to packages/core-database-postgres/src/queries/blocks/statistics.sql
diff --git a/packages/core-database-postgres/lib/queries/blocks/top.sql b/packages/core-database-postgres/src/queries/blocks/top.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/blocks/top.sql
rename to packages/core-database-postgres/src/queries/blocks/top.sql
diff --git a/packages/core-database-postgres/src/queries/index.ts b/packages/core-database-postgres/src/queries/index.ts
new file mode 100644
index 0000000000..9490800c32
--- /dev/null
+++ b/packages/core-database-postgres/src/queries/index.ts
@@ -0,0 +1,51 @@
+import { loadQueryFile } from "../utils";
+
+export const queries = {
+ blocks: {
+ common: loadQueryFile(__dirname, "./blocks/common.sql"),
+ count: loadQueryFile(__dirname, "./blocks/count.sql"),
+ delete: loadQueryFile(__dirname, "./blocks/delete.sql"),
+ findById: loadQueryFile(__dirname, "./blocks/find-by-id.sql"),
+ headers: loadQueryFile(__dirname, "./blocks/headers.sql"),
+ heightRange: loadQueryFile(__dirname, "./blocks/height-range.sql"),
+ latest: loadQueryFile(__dirname, "./blocks/latest.sql"),
+ recent: loadQueryFile(__dirname, "./blocks/recent.sql"),
+ statistics: loadQueryFile(__dirname, "./blocks/statistics.sql"),
+ top: loadQueryFile(__dirname, "./blocks/top.sql"),
+ },
+ migrations: {
+ create: loadQueryFile(__dirname, "./migrations/create.sql"),
+ find: loadQueryFile(__dirname, "./migrations/find.sql"),
+ },
+ rounds: {
+ delete: loadQueryFile(__dirname, "./rounds/delete.sql"),
+ find: loadQueryFile(__dirname, "./rounds/find.sql"),
+ },
+ spv: {
+ blockRewards: loadQueryFile(__dirname, "./spv/block-rewards.sql"),
+ delegates: loadQueryFile(__dirname, "./spv/delegates.sql"),
+ delegatesForgedBlocks: loadQueryFile(__dirname, "./spv/delegates-forged-blocks.sql"),
+ delegatesRanks: loadQueryFile(__dirname, "./spv/delegates-ranks.sql"),
+ lastForgedBlocks: loadQueryFile(__dirname, "./spv/last-forged-blocks.sql"),
+ multiSignatures: loadQueryFile(__dirname, "./spv/multi-signatures.sql"),
+ receivedTransactions: loadQueryFile(__dirname, "./spv/received-transactions.sql"),
+ secondSignatures: loadQueryFile(__dirname, "./spv/second-signatures.sql"),
+ sentTransactions: loadQueryFile(__dirname, "./spv/sent-transactions.sql"),
+ votes: loadQueryFile(__dirname, "./spv/votes.sql"),
+ },
+ transactions: {
+ findByBlock: loadQueryFile(__dirname, "./transactions/find-by-block.sql"),
+ latestByBlock: loadQueryFile(__dirname, "./transactions/latest-by-block.sql"),
+ latestByBlocks: loadQueryFile(__dirname, "./transactions/latest-by-blocks.sql"),
+ statistics: loadQueryFile(__dirname, "./transactions/statistics.sql"),
+ forged: loadQueryFile(__dirname, "./transactions/forged.sql"),
+ findById: loadQueryFile(__dirname, "./transactions/find-by-id.sql"),
+ deleteByBlock: loadQueryFile(__dirname, "./transactions/delete-by-block.sql"),
+ },
+ wallets: {
+ all: loadQueryFile(__dirname, "./wallets/all.sql"),
+ findByAddress: loadQueryFile(__dirname, "./wallets/find-by-address.sql"),
+ findNegativeBalances: loadQueryFile(__dirname, "./wallets/find-negative-balances.sql"),
+ findNegativeVoteBalances: loadQueryFile(__dirname, "./wallets/find-negative-vote-balances.sql"),
+ },
+};
diff --git a/packages/core-database-postgres/lib/queries/migrations/create.sql b/packages/core-database-postgres/src/queries/migrations/create.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/migrations/create.sql
rename to packages/core-database-postgres/src/queries/migrations/create.sql
diff --git a/packages/core-database-postgres/lib/queries/migrations/find.sql b/packages/core-database-postgres/src/queries/migrations/find.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/migrations/find.sql
rename to packages/core-database-postgres/src/queries/migrations/find.sql
diff --git a/packages/core-database-postgres/lib/queries/rounds/delete.sql b/packages/core-database-postgres/src/queries/rounds/delete.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/rounds/delete.sql
rename to packages/core-database-postgres/src/queries/rounds/delete.sql
diff --git a/packages/core-database-postgres/lib/queries/rounds/find.sql b/packages/core-database-postgres/src/queries/rounds/find.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/rounds/find.sql
rename to packages/core-database-postgres/src/queries/rounds/find.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/block-rewards.sql b/packages/core-database-postgres/src/queries/spv/block-rewards.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/block-rewards.sql
rename to packages/core-database-postgres/src/queries/spv/block-rewards.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/delegates-forged-blocks.sql b/packages/core-database-postgres/src/queries/spv/delegates-forged-blocks.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/delegates-forged-blocks.sql
rename to packages/core-database-postgres/src/queries/spv/delegates-forged-blocks.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/delegates-ranks.sql b/packages/core-database-postgres/src/queries/spv/delegates-ranks.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/delegates-ranks.sql
rename to packages/core-database-postgres/src/queries/spv/delegates-ranks.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/delegates.sql b/packages/core-database-postgres/src/queries/spv/delegates.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/delegates.sql
rename to packages/core-database-postgres/src/queries/spv/delegates.sql
diff --git a/packages/core-database-postgres/src/queries/spv/last-forged-blocks.sql b/packages/core-database-postgres/src/queries/spv/last-forged-blocks.sql
new file mode 100644
index 0000000000..c05ec7452c
--- /dev/null
+++ b/packages/core-database-postgres/src/queries/spv/last-forged-blocks.sql
@@ -0,0 +1,11 @@
+SELECT id,
+ height,
+ generator_public_key,
+ TIMESTAMP
+FROM blocks
+WHERE height IN (
+ SELECT MAX(height) AS last_block_height
+ FROM blocks
+ GROUP BY generator_public_key
+)
+ORDER BY TIMESTAMP DESC
diff --git a/packages/core-database-postgres/lib/queries/spv/multi-signatures.sql b/packages/core-database-postgres/src/queries/spv/multi-signatures.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/multi-signatures.sql
rename to packages/core-database-postgres/src/queries/spv/multi-signatures.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/received-transactions.sql b/packages/core-database-postgres/src/queries/spv/received-transactions.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/received-transactions.sql
rename to packages/core-database-postgres/src/queries/spv/received-transactions.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/second-signatures.sql b/packages/core-database-postgres/src/queries/spv/second-signatures.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/second-signatures.sql
rename to packages/core-database-postgres/src/queries/spv/second-signatures.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/sent-transactions.sql b/packages/core-database-postgres/src/queries/spv/sent-transactions.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/sent-transactions.sql
rename to packages/core-database-postgres/src/queries/spv/sent-transactions.sql
diff --git a/packages/core-database-postgres/lib/queries/spv/votes.sql b/packages/core-database-postgres/src/queries/spv/votes.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/spv/votes.sql
rename to packages/core-database-postgres/src/queries/spv/votes.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/delete-by-block.sql b/packages/core-database-postgres/src/queries/transactions/delete-by-block.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/delete-by-block.sql
rename to packages/core-database-postgres/src/queries/transactions/delete-by-block.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/find-by-block.sql b/packages/core-database-postgres/src/queries/transactions/find-by-block.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/find-by-block.sql
rename to packages/core-database-postgres/src/queries/transactions/find-by-block.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/find-by-id.sql b/packages/core-database-postgres/src/queries/transactions/find-by-id.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/find-by-id.sql
rename to packages/core-database-postgres/src/queries/transactions/find-by-id.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/forged.sql b/packages/core-database-postgres/src/queries/transactions/forged.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/forged.sql
rename to packages/core-database-postgres/src/queries/transactions/forged.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/latest-by-block.sql b/packages/core-database-postgres/src/queries/transactions/latest-by-block.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/latest-by-block.sql
rename to packages/core-database-postgres/src/queries/transactions/latest-by-block.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/latest-by-blocks.sql b/packages/core-database-postgres/src/queries/transactions/latest-by-blocks.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/latest-by-blocks.sql
rename to packages/core-database-postgres/src/queries/transactions/latest-by-blocks.sql
diff --git a/packages/core-database-postgres/lib/queries/transactions/statistics.sql b/packages/core-database-postgres/src/queries/transactions/statistics.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/transactions/statistics.sql
rename to packages/core-database-postgres/src/queries/transactions/statistics.sql
diff --git a/packages/core-database-postgres/lib/queries/wallets/all.sql b/packages/core-database-postgres/src/queries/wallets/all.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/wallets/all.sql
rename to packages/core-database-postgres/src/queries/wallets/all.sql
diff --git a/packages/core-database-postgres/lib/queries/wallets/find-by-address.sql b/packages/core-database-postgres/src/queries/wallets/find-by-address.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/wallets/find-by-address.sql
rename to packages/core-database-postgres/src/queries/wallets/find-by-address.sql
diff --git a/packages/core-database-postgres/lib/queries/wallets/find-negative-balances.sql b/packages/core-database-postgres/src/queries/wallets/find-negative-balances.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/wallets/find-negative-balances.sql
rename to packages/core-database-postgres/src/queries/wallets/find-negative-balances.sql
diff --git a/packages/core-database-postgres/lib/queries/wallets/find-negative-vote-balances.sql b/packages/core-database-postgres/src/queries/wallets/find-negative-vote-balances.sql
similarity index 100%
rename from packages/core-database-postgres/lib/queries/wallets/find-negative-vote-balances.sql
rename to packages/core-database-postgres/src/queries/wallets/find-negative-vote-balances.sql
diff --git a/packages/core-database-postgres/src/repositories/blocks.ts b/packages/core-database-postgres/src/repositories/blocks.ts
new file mode 100644
index 0000000000..a2e77a3c98
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/blocks.ts
@@ -0,0 +1,104 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Block } from "../models";
+import { queries } from "../queries";
+import { Repository } from "./repository";
+
+const { blocks: sql } = queries;
+
+export class BlocksRepository extends Repository implements Database.IBlocksRepository {
+ /**
+ * Find a block by its ID.
+ * @param {Number} id
+ * @return {Promise}
+ */
+ public async findById(id) {
+ return this.db.oneOrNone(sql.findById, { id });
+ }
+
+ /**
+ * Count the number of records in the database.
+ * @return {Promise}
+ */
+ public async count() {
+ const { count } = await this.db.one(sql.count);
+ return count;
+ }
+
+ /**
+ * Get all of the common blocks from the database.
+ * @param {Array} ids
+ * @return {Promise}
+ */
+ public async common(ids) {
+ return this.db.manyOrNone(sql.common, { ids });
+ }
+
+ /**
+ * Get all of the blocks within the given height range.
+ * @param {Number} start
+ * @param {Number} end
+ * @return {Promise}
+ */
+ public async headers(start, end) {
+ return this.db.many(sql.headers, { start, end });
+ }
+
+ /**
+ * Get all of the blocks within the given height range and order them by height.
+ * @param {Number} start
+ * @param {Number} end
+ * @return {Promise}
+ */
+ public async heightRange(start, end) {
+ return this.db.manyOrNone(sql.heightRange, { start, end });
+ }
+
+ /**
+ * Get the last created block from the database.
+ * @return {Promise}
+ */
+ public async latest() {
+ return this.db.oneOrNone(sql.latest);
+ }
+
+ /**
+ * Get the 10 most recently created blocks from the database.
+ * @return {Promise}
+ */
+ public async recent() {
+ return this.db.many(sql.recent);
+ }
+
+ /**
+ * Get statistics about all blocks from the database.
+ * @return {Promise}
+ */
+ public async statistics() {
+ return this.db.one(sql.statistics);
+ }
+
+ /**
+ * Get top count blocks
+ * @return {Promise}
+ */
+ public async top(count) {
+ return this.db.many(sql.top, { top: count });
+ }
+
+ /**
+ * Delete the block from the database.
+ * @param {Number} id
+ * @return {Promise}
+ */
+ public async delete(id) {
+ return this.db.none(sql.delete, { id });
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Block}
+ */
+ public getModel() {
+ return new Block(this.pgp);
+ }
+}
diff --git a/packages/core-database-postgres/src/repositories/index.ts b/packages/core-database-postgres/src/repositories/index.ts
new file mode 100644
index 0000000000..5dae791f43
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/index.ts
@@ -0,0 +1,13 @@
+import { BlocksRepository } from "./blocks";
+import { MigrationsRepository } from "./migrations";
+import { RoundsRepository } from "./rounds";
+import { TransactionsRepository } from "./transactions";
+import { WalletsRepository } from "./wallets";
+
+export const repositories = {
+ blocks: BlocksRepository,
+ migrations: MigrationsRepository,
+ rounds: RoundsRepository,
+ transactions: TransactionsRepository,
+ wallets: WalletsRepository,
+};
diff --git a/packages/core-database-postgres/src/repositories/migrations.ts b/packages/core-database-postgres/src/repositories/migrations.ts
new file mode 100644
index 0000000000..e50de6418e
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/migrations.ts
@@ -0,0 +1,24 @@
+import { Migration } from "../models";
+import { queries } from "../queries";
+import { Repository } from "./repository";
+
+const { migrations: sql } = queries;
+
+export class MigrationsRepository extends Repository {
+ /**
+ * Find a migration by its name.
+ * @param {String} name
+ * @return {Promise}
+ */
+ public async findByName(name) {
+ return this.db.oneOrNone(sql.find, { name });
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Migration}
+ */
+ public getModel() {
+ return new Migration(this.pgp);
+ }
+}
diff --git a/packages/core-database-postgres/src/repositories/repository.ts b/packages/core-database-postgres/src/repositories/repository.ts
new file mode 100644
index 0000000000..63de711e58
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/repository.ts
@@ -0,0 +1,73 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Model } from "../models";
+
+export abstract class Repository implements Database.IRepository {
+ protected model: Model;
+
+ /**
+ * Create a new repository instance.
+ * @param {Object} db
+ * @param {Object} pgp
+ */
+ constructor(public db, public pgp) {
+ this.model = this.getModel();
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Model}
+ */
+ public abstract getModel(): Model;
+
+ /**
+ * Estimate the number of records in the table.
+ * @return {Promise}
+ */
+ public async estimate() {
+ return this.db.one(`SELECT count_estimate('SELECT * FROM ${this.model.getTable()}')`);
+ }
+
+ /**
+ * Run a truncate statement on the table.
+ * @return {Promise}
+ */
+ public async truncate() {
+ return this.db.none(`TRUNCATE ${this.model.getTable()} RESTART IDENTITY`);
+ }
+
+ /**
+ * Create one or many instances of the related models.
+ * @param {Array|Object} items
+ * @return {Promise}
+ */
+ public async insert(items) {
+ return this.db.none(this.__insertQuery(items));
+ }
+
+ /**
+ * Update one or many instances of the related models.
+ * @param {Array|Object} items
+ * @return {Promise}
+ */
+ public async update(items) {
+ return this.db.none(this.__updateQuery(items));
+ }
+
+ /**
+ * Generate an "INSERT" query for the given data.
+ * @param {Array|Object} data
+ * @return {String}
+ */
+ public __insertQuery(data) {
+ return this.pgp.helpers.insert(data, this.model.getColumnSet());
+ }
+
+ /**
+ * Generate an "UPDATE" query for the given data.
+ * @param {Array|Object} data
+ * @return {String}
+ */
+ public __updateQuery(data) {
+ return this.pgp.helpers.update(data, this.model.getColumnSet());
+ }
+}
diff --git a/packages/core-database-postgres/src/repositories/rounds.ts b/packages/core-database-postgres/src/repositories/rounds.ts
new file mode 100644
index 0000000000..06602185de
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/rounds.ts
@@ -0,0 +1,34 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Round } from "../models";
+import { queries } from "../queries";
+import { Repository } from "./repository";
+
+const { rounds: sql } = queries;
+
+export class RoundsRepository extends Repository implements Database.IRoundsRepository {
+ /**
+ * Find a round by its ID.
+ * @param {Number} round
+ * @return {Promise}
+ */
+ public async findById(round) {
+ return this.db.manyOrNone(sql.find, { round });
+ }
+
+ /**
+ * Delete the round from the database.
+ * @param {Number} round
+ * @return {Promise}
+ */
+ public async delete(round) {
+ return this.db.none(sql.delete, { round });
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Round}
+ */
+ public getModel() {
+ return new Round(this.pgp);
+ }
+}
diff --git a/packages/core-database-postgres/src/repositories/transactions.ts b/packages/core-database-postgres/src/repositories/transactions.ts
new file mode 100644
index 0000000000..843f71b241
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/transactions.ts
@@ -0,0 +1,78 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Transaction } from "../models";
+import { queries } from "../queries";
+import { Repository } from "./repository";
+
+const { transactions: sql } = queries;
+
+export class TransactionsRepository extends Repository implements Database.ITransactionsRepository {
+ /**
+ * Find a transaction by its ID.
+ * @param {String} id
+ * @return {Promise}
+ */
+ public async findById(id) {
+ return this.db.oneOrNone(sql.findById, { id });
+ }
+
+ /**
+ * Find multiple transactions by their block ID.
+ * @param {String} id
+ * @return {Promise}
+ */
+ public async findByBlockId(id) {
+ return this.db.manyOrNone(sql.findByBlock, { id });
+ }
+
+ /**
+ * Find multiple transactions by their block ID and order them by sequence.
+ * @param {Number} id
+ * @return {Promise}
+ */
+ public async latestByBlock(id) {
+ return this.db.manyOrNone(sql.latestByBlock, { id });
+ }
+
+ /**
+ * Find multiple transactions by their block IDs and order them by sequence.
+ * @param {Array} ids
+ * @return {Promise}
+ */
+ public async latestByBlocks(ids) {
+ return this.db.manyOrNone(sql.latestByBlocks, { ids });
+ }
+
+ /**
+ * Get all of the forged transactions from the database.
+ * @param {Array} ids
+ * @return {Promise}
+ */
+ public async forged(ids) {
+ return this.db.manyOrNone(sql.forged, { ids });
+ }
+
+ /**
+ * Get statistics about all transactions from the database.
+ * @return {Promise}
+ */
+ public async statistics() {
+ return this.db.one(sql.statistics);
+ }
+
+ /**
+ * Delete the transactions from the database.
+ * @param {Number} id
+ * @return {Promise}
+ */
+ public async deleteByBlockId(id) {
+ return this.db.none(sql.deleteByBlock, { id });
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Transaction}
+ */
+ public getModel() {
+ return new Transaction(this.pgp);
+ }
+}
diff --git a/packages/core-database-postgres/src/repositories/wallets.ts b/packages/core-database-postgres/src/repositories/wallets.ts
new file mode 100644
index 0000000000..a95d4eba19
--- /dev/null
+++ b/packages/core-database-postgres/src/repositories/wallets.ts
@@ -0,0 +1,63 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Wallet } from "../models";
+import { queries } from "../queries";
+import { Repository } from "./repository";
+
+const { wallets: sql } = queries;
+
+export class WalletsRepository extends Repository implements Database.IWalletsRepository {
+ /**
+ * Get all of the wallets from the database.
+ * @return {Promise}
+ */
+ public async all() {
+ return this.db.manyOrNone(sql.all);
+ }
+
+ /**
+ * Find a wallet by its address.
+ * @param {String} address
+ * @return {Promise}
+ */
+ public async findByAddress(address) {
+ return this.db.oneOrNone(sql.findByAddress, { address });
+ }
+
+ /**
+ * Get the count of wallets that have a negative balance.
+ * @return {Promise}
+ */
+ public async tallyWithNegativeBalance() {
+ return this.db.oneOrNone(sql.findNegativeBalances);
+ }
+
+ /**
+ * Get the count of wallets that have a negative vote balance.
+ * @return {Promise}
+ */
+ public async tallyWithNegativeVoteBalance() {
+ return this.db.oneOrNone(sql.findNegativeVoteBalances);
+ }
+
+ /**
+ * Create or update a record matching the attributes, and fill it with values.
+ * @param {Object} wallet
+ * @return {Promise}
+ */
+ public async updateOrCreate(wallet) {
+ const query = `${this.__insertQuery(wallet)} ON CONFLICT(address) DO UPDATE SET ${this.pgp.helpers.sets(
+ wallet,
+ this.model.getColumnSet(),
+ )}`;
+
+ return this.db.none(query);
+ }
+
+ /**
+ * Get the model related to this repository.
+ * @return {Object}
+ */
+ public getModel() {
+ return new Wallet(this.pgp);
+ }
+}
diff --git a/packages/core-database-postgres/src/spv.ts b/packages/core-database-postgres/src/spv.ts
new file mode 100644
index 0000000000..fdb1998618
--- /dev/null
+++ b/packages/core-database-postgres/src/spv.ts
@@ -0,0 +1,263 @@
+import { Bignum, models } from "@arkecosystem/crypto";
+const { Transaction } = models;
+
+import { app } from "@arkecosystem/core-container";
+import { Database, Logger } from "@arkecosystem/core-interfaces";
+import { queries } from "./queries";
+import { QueryExecutor } from "./sql/query-executor";
+
+const logger = app.resolvePlugin("logger");
+const config = app.getConfig();
+
+const genesisWallets = config.get("genesisBlock.transactions").map(tx => tx.senderId);
+
+export class SPV {
+ constructor(private query: QueryExecutor, private walletManager: Database.IWalletManager) {}
+
+ /**
+ * Perform the SPV (Simple Payment Verification).
+ * @param {Number} height
+ * @return {void}
+ */
+ public async build(height) {
+
+ logger.info("SPV Step 1 of 8: Received Transactions");
+ await this.__buildReceivedTransactions();
+
+ logger.info("SPV Step 2 of 8: Block Rewards");
+ await this.__buildBlockRewards();
+
+ logger.info("SPV Step 3 of 8: Last Forged Blocks");
+ await this.__buildLastForgedBlocks();
+
+ logger.info("SPV Step 4 of 8: Sent Transactions");
+ await this.__buildSentTransactions();
+
+ logger.info("SPV Step 5 of 8: Second Signatures");
+ await this.__buildSecondSignatures();
+
+ logger.info("SPV Step 6 of 8: Votes");
+ await this.__buildVotes();
+
+ logger.info("SPV Step 7 of 8: Delegates");
+ await this.__buildDelegates();
+
+ logger.info("SPV Step 8 of 8: MultiSignatures");
+ await this.__buildMultisignatures();
+
+ logger.info(`SPV rebuild finished, wallets in memory: ${Object.keys(this.walletManager.allByAddress()).length}`);
+ logger.info(`Number of registered delegates: ${Object.keys(this.walletManager.allByUsername()).length}`);
+
+ return this.__verifyWalletsConsistency();
+ }
+
+ /**
+ * Load and apply received transactions to wallets.
+ * @return {void}
+ */
+ public async __buildReceivedTransactions() {
+ const transactions = await this.query.many(queries.spv.receivedTransactions);
+
+ for (const transaction of transactions) {
+ const wallet = this.walletManager.findByAddress(transaction.recipientId);
+
+ wallet
+ ? (wallet.balance = new Bignum(transaction.amount))
+ : logger.warn(`Lost cold wallet: ${transaction.recipientId} ${transaction.amount}`);
+ }
+ }
+
+ /**
+ * Load and apply block rewards to wallets.
+ * @return {void}
+ */
+ public async __buildBlockRewards() {
+ const blocks = await this.query.many(queries.spv.blockRewards);
+
+ for (const block of blocks) {
+ const wallet = this.walletManager.findByPublicKey(block.generatorPublicKey);
+ wallet.balance = wallet.balance.plus(block.reward);
+ }
+ }
+
+ /**
+ * Load and apply last forged blocks to wallets.
+ * @return {void}
+ */
+ public async __buildLastForgedBlocks() {
+ const blocks = await this.query.many(queries.spv.lastForgedBlocks);
+
+ for (const block of blocks) {
+ const wallet = this.walletManager.findByPublicKey(block.generatorPublicKey);
+ wallet.lastBlock = block;
+ }
+ }
+
+ /**
+ * Load and apply sent transactions to wallets.
+ * @return {void}
+ */
+ public async __buildSentTransactions() {
+ const transactions = await this.query.many(queries.spv.sentTransactions);
+
+ for (const transaction of transactions) {
+ const wallet = this.walletManager.findByPublicKey(transaction.senderPublicKey);
+ wallet.balance = wallet.balance.minus(transaction.amount).minus(transaction.fee);
+
+ if (wallet.balance.isLessThan(0) && !this.isGenesis(wallet)) {
+ logger.warn(`Negative balance: ${wallet}`);
+ }
+ }
+ }
+
+ /**
+ * Used to determine if a wallet is a Genesis wallet.
+ * @return {Boolean}
+ */
+ public isGenesis(wallet) {
+ return genesisWallets.includes(wallet.address);
+ }
+
+ /**
+ * Load and apply second signature transactions to wallets.
+ * @return {void}
+ */
+ public async __buildSecondSignatures() {
+ const transactions = await this.query.manyOrNone(queries.spv.secondSignatures);
+
+ for (const transaction of transactions) {
+ const wallet = this.walletManager.findByPublicKey(transaction.senderPublicKey);
+ wallet.secondPublicKey = Transaction.deserialize(
+ transaction.serialized.toString("hex"),
+ ).asset.signature.publicKey;
+ }
+ }
+
+ /**
+ * Load and apply votes to wallets.
+ * @return {void}
+ */
+ public async __buildVotes() {
+ const transactions = await this.query.manyOrNone(queries.spv.votes);
+
+ for (const transaction of transactions) {
+ const wallet = this.walletManager.findByPublicKey(transaction.senderPublicKey);
+
+ if (!wallet.voted) {
+ const vote = Transaction.deserialize(transaction.serialized.toString("hex")).asset.votes[0];
+
+ if (vote.startsWith("+")) {
+ wallet.vote = vote.slice(1);
+ }
+
+ // NOTE: The "voted" property is only used within this loop to avoid an issue
+ // that results in not properly applying "unvote" transactions as the "vote" property
+ // would be empty in that case and return a false result.
+ wallet.voted = true;
+ }
+ }
+
+ this.walletManager.buildVoteBalances();
+ }
+
+ /**
+ * Load and apply delegate usernames to wallets.
+ * @return {void}
+ */
+ public async __buildDelegates() {
+ // Register...
+ const transactions = await this.query.manyOrNone(queries.spv.delegates);
+
+ transactions.forEach(transaction => {
+ const wallet = this.walletManager.findByPublicKey(transaction.senderPublicKey);
+ wallet.username = Transaction.deserialize(transaction.serialized.toString("hex")).asset.delegate.username;
+ this.walletManager.reindex(wallet);
+ });
+
+ // Forged Blocks...
+ const forgedBlocks = await this.query.manyOrNone(queries.spv.delegatesForgedBlocks);
+ forgedBlocks.forEach(block => {
+ const wallet = this.walletManager.findByPublicKey(block.generatorPublicKey);
+ wallet.forgedFees = wallet.forgedFees.plus(block.totalFees);
+ wallet.forgedRewards = wallet.forgedRewards.plus(block.totalRewards);
+ wallet.producedBlocks = +block.totalProduced;
+ });
+
+ // NOTE: This is highly NOT reliable, however the number of missed blocks
+ // is NOT used for the consensus
+ const delegates = await this.query.manyOrNone(queries.spv.delegatesRanks);
+ delegates.forEach((delegate, i) => {
+ const wallet = this.walletManager.findByPublicKey(delegate.publicKey);
+ wallet.missedBlocks = +delegate.missedBlocks;
+ // TODO: unknown property 'rate' being access on Wallet class
+ (wallet as any).rate = i + 1;
+ this.walletManager.reindex(wallet);
+ });
+ }
+
+ /**
+ * Load and apply multisignatures to wallets.
+ * @return {void}
+ */
+ public async __buildMultisignatures() {
+ const transactions = await this.query.manyOrNone(queries.spv.multiSignatures);
+
+ for (const transaction of transactions) {
+ const wallet = this.walletManager.findByPublicKey(transaction.senderPublicKey);
+
+ if (!wallet.multisignature) {
+ wallet.multisignature = Transaction.deserialize(
+ transaction.serialized.toString("hex"),
+ ).asset.multisignature;
+ }
+ }
+ }
+
+ /**
+ * Verify the consistency of the wallets table by comparing all records against
+ * the in memory wallets.
+ * NOTE: This is faster than rebuilding the entire table from scratch each time.
+ * @returns {Boolean}
+ */
+ public async __verifyWalletsConsistency() {
+ const dbWallets = await this.query.manyOrNone(queries.wallets.all);
+ const inMemoryWallets = this.walletManager.allByPublicKey();
+
+ let detectedInconsistency = false;
+ if (dbWallets.length !== inMemoryWallets.length) {
+ detectedInconsistency = true;
+ } else {
+ for (const dbWallet of dbWallets) {
+ if (dbWallet.balance < 0 && !this.isGenesis(dbWallet)) {
+ detectedInconsistency = true;
+ logger.warn(`Wallet '${dbWallet.address}' has a negative balance of '${dbWallet.balance}'`);
+ break;
+ }
+
+ if (dbWallet.voteBalance < 0) {
+ detectedInconsistency = true;
+ logger.warn(`Wallet ${dbWallet.address} has a negative vote balance of '${dbWallet.voteBalance}'`);
+ break;
+ }
+
+ const inMemoryWallet = this.walletManager.findByPublicKey(dbWallet.publicKey);
+
+ if (
+ !inMemoryWallet.balance.isEqualTo(dbWallet.balance) ||
+ !inMemoryWallet.voteBalance.isEqualTo(dbWallet.voteBalance) ||
+ dbWallet.username !== inMemoryWallet.username
+ ) {
+ detectedInconsistency = true;
+ break;
+ }
+ }
+ }
+
+ // Remove dirty flags when no inconsistency has been found
+ if (!detectedInconsistency) {
+ this.walletManager.clear();
+ }
+
+ return !detectedInconsistency;
+ }
+}
diff --git a/packages/core-database-postgres/src/sql/query-executor.ts b/packages/core-database-postgres/src/sql/query-executor.ts
new file mode 100644
index 0000000000..123be4115e
--- /dev/null
+++ b/packages/core-database-postgres/src/sql/query-executor.ts
@@ -0,0 +1,81 @@
+import { PostgresConnection } from "../postgres-connection";
+
+export class QueryExecutor {
+ /**
+ * Create a new QueryExecutor instance.
+     * @param {PostgresConnection} connection
+     * @return {QueryExecutor}
+ */
+ constructor(public connection: PostgresConnection) {}
+
+ /**
+ * Execute the given query and expect no results.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async none(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "none");
+ }
+
+ /**
+ * Execute the given query and expect one result.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async one(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "one");
+ }
+
+ /**
+ * Execute the given query and expect one or no results.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async oneOrNone(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "oneOrNone");
+ }
+
+ /**
+ * Execute the given query and expect many results.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async many(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "many");
+ }
+
+ /**
+ * Execute the given query and expect many or no results.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async manyOrNone(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "manyOrNone");
+ }
+
+ /**
+ * Execute the given query and expect any results.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @return {Promise}
+ */
+ public async any(query, parameters = null) {
+ return this.__executeQueryFile(query, parameters, "any");
+ }
+
+ /**
+ * Execute the given query using the given method and parameters.
+ * @param {QueryFile} query
+ * @param {Array} parameters
+ * @param {String} method
+     * @return {Promise}
+ */
+ public async __executeQueryFile(query, parameters, method) {
+ return this.connection.db[method](query, parameters);
+ }
+}
diff --git a/packages/core-database-postgres/src/utils/camelize-columns.ts b/packages/core-database-postgres/src/utils/camelize-columns.ts
new file mode 100644
index 0000000000..09ecf415e0
--- /dev/null
+++ b/packages/core-database-postgres/src/utils/camelize-columns.ts
@@ -0,0 +1,17 @@
+/* tslint:disable:forin prefer-for-of*/
+
+export function camelizeColumns(pgp, data) {
+ const tmp = data[0];
+
+ for (const prop in tmp) {
+ const camel = pgp.utils.camelize(prop);
+
+ if (!(camel in tmp)) {
+ for (let i = 0; i < data.length; i++) {
+ const d = data[i];
+ d[camel] = d[prop];
+ delete d[prop];
+ }
+ }
+ }
+}
diff --git a/packages/core-database-postgres/src/utils/index.ts b/packages/core-database-postgres/src/utils/index.ts
new file mode 100644
index 0000000000..89d7824488
--- /dev/null
+++ b/packages/core-database-postgres/src/utils/index.ts
@@ -0,0 +1,4 @@
+import { camelizeColumns } from "./camelize-columns";
+import { loadQueryFile } from "./load-query-file";
+
+export { camelizeColumns, loadQueryFile };
diff --git a/packages/core-database-postgres/src/utils/load-query-file.ts b/packages/core-database-postgres/src/utils/load-query-file.ts
new file mode 100644
index 0000000000..04fef821f7
--- /dev/null
+++ b/packages/core-database-postgres/src/utils/load-query-file.ts
@@ -0,0 +1,25 @@
+import { app } from "@arkecosystem/core-container";
+import { Logger } from "@arkecosystem/core-interfaces";
+import path from "path";
+import { QueryFile } from "pg-promise";
+
+const logger = app.resolvePlugin("logger");
+
+export function loadQueryFile(directory, file) {
+ const fullPath = path.join(directory, file);
+
+ const options = {
+ minify: true,
+ params: {
+ schema: "public",
+ },
+ };
+
+ const query = new QueryFile(fullPath, options);
+
+ if (query.error) {
+ logger.error(query.error.toString());
+ }
+
+ return query;
+}
diff --git a/packages/core-database-postgres/tsconfig.json b/packages/core-database-postgres/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-database-postgres/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-database/CHANGELOG.md b/packages/core-database/CHANGELOG.md
deleted file mode 100644
index a413403460..0000000000
--- a/packages/core-database/CHANGELOG.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- Database rollback
-- Block exceptions
-- Common blocks
-- More graceful handling of shutdown
-
-### Changed
-
-- Build delegate list in-memory to reduce database load
-- Perform vote balance calculations in-memory to reduce database load
-- Handle numbers as `BigNumber` instances
-- Reduced complexity and duplicated logic
-- Improved method names to more clearly show their intent
-- Calculate previous rounds in-memory rather then hitting the database
-- non-blocking wallet saving
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-### Fixed
-
-- Wrong documentation
-- Bad method calls for `sync/async` methods
-- Only commit data when `saveBlockCommit` is called
-- Properly log the transaction audit
-- Properly update delegate ranks
-- Only save dirty wallets
-- Various memory leaks
-- Forger order on mainnet
-- Delegate registration handling
-
-## 0.1.1 - 2018-06-14
-
-### Added
-
-- initial release
diff --git a/packages/core-database/LICENSE b/packages/core-database/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-database/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-database/README.md b/packages/core-database/README.md
index 9e7bec4119..92f27b92dc 100644
--- a/packages/core-database/README.md
+++ b/packages/core-database/README.md
@@ -14,10 +14,12 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [François-Xavier Thoorens](https://github.com/fix)
-- [Kristjan Košič](https://github.com/kristjank)
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [Erwann Gentric](https://github.com/air1one)
+- [François-Xavier Thoorens](https://github.com/fix)
+- [Joshua Noack](https://github.com/supaiku0)
+- [Kristjan Košič](https://github.com/kristjank)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-database/__tests__/__fixtures__/database-connection-stub.ts b/packages/core-database/__tests__/__fixtures__/database-connection-stub.ts
new file mode 100644
index 0000000000..34459b572c
--- /dev/null
+++ b/packages/core-database/__tests__/__fixtures__/database-connection-stub.ts
@@ -0,0 +1,53 @@
+// tslint:disable:no-empty
+
+import { Database } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+
+export class DatabaseConnectionStub implements Database.IDatabaseConnection {
+ public blocksRepository: Database.IBlocksRepository;
+ public roundsRepository: Database.IRoundsRepository;
+ public transactionsRepository: Database.ITransactionsRepository;
+ public walletsRepository: Database.IWalletsRepository;
+ public options: any;
+
+    public buildWallets(height: number): Promise<any> {
+ return undefined;
+ }
+
+ public commitQueuedQueries(): any {
+ }
+
+    public connect(): Promise<any> {
+ return undefined;
+ }
+
+    public deleteBlock(block: models.Block): Promise<any> {
+ return undefined;
+ }
+
+    public disconnect(): Promise<any> {
+ return undefined;
+ }
+
+ public enqueueDeleteBlock(block: models.Block): any {
+ }
+
+ public enqueueDeleteRound(height: number): any {
+ }
+
+ public enqueueSaveBlock(block: models.Block): any {
+ return null;
+ }
+
+    public async make(): Promise<any> {
+ return this;
+ }
+
+    public saveBlock(block: models.Block): Promise<any> {
+ return undefined;
+ }
+
+    public saveWallets(wallets: any[], force?: boolean): Promise<any> {
+ return undefined;
+ }
+}
diff --git a/packages/core-database/__tests__/__fixtures__/state-storage-stub.ts b/packages/core-database/__tests__/__fixtures__/state-storage-stub.ts
new file mode 100644
index 0000000000..947a552faf
--- /dev/null
+++ b/packages/core-database/__tests__/__fixtures__/state-storage-stub.ts
@@ -0,0 +1,56 @@
+/* tslint:disable:no-empty */
+import { Blockchain } from "@arkecosystem/core-interfaces";
+import { models } from "@arkecosystem/crypto";
+
+export class StateStorageStub implements Blockchain.IStateStorage {
+ public cacheTransactions(transactions: models.ITransactionData[]): { added: models.ITransactionData[]; notAdded: models.ITransactionData[] } {
+ return undefined;
+ }
+
+ public clear(): void {
+ }
+
+ public clearWakeUpTimeout(): void {
+ }
+
+ public getCachedTransactionIds(): string[] {
+ return [];
+ }
+
+ public getCommonBlocks(ids: string[]): models.IBlockData[] {
+ return [];
+ }
+
+ public getLastBlock(): models.Block | null {
+ return undefined;
+ }
+
+ public getLastBlockIds(): string[] {
+ return [];
+ }
+
+ public getLastBlocks(): models.Block[] {
+ return [];
+ }
+
+ public getLastBlocksByHeight(start: number, end?: number): models.IBlockData[] {
+ return [];
+ }
+
+ public pingBlock(incomingBlock: models.IBlockData): boolean {
+ return false;
+ }
+
+ public pushPingBlock(block: models.IBlockData): void {
+ }
+
+ public removeCachedTransactionIds(transactionIds: string[]): void {
+ }
+
+ public reset(): void {
+ }
+
+ public setLastBlock(block: models.Block): void {
+ }
+
+}
diff --git a/packages/core-database/__tests__/__fixtures__/wallets.json b/packages/core-database/__tests__/__fixtures__/wallets.json
index 61d49f59e8..4424bea579 100644
--- a/packages/core-database/__tests__/__fixtures__/wallets.json
+++ b/packages/core-database/__tests__/__fixtures__/wallets.json
@@ -1,14 +1,14 @@
[
- {
- "address": "APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn",
- "publicKey": "035b63b4668ee261c16ca91443f3371e2fe349e131cb7bf5f8a3e93a3ddfdfc788"
- },
- {
- "address": "Aa4M1zL3a74L51f1AvEsLmBTsKLKrkRScU",
- "publicKey": "0308c0d019cd9c0c59618e3b86afc584078b54a85a025c9f30a8bdc82cdc8e1252"
- },
- {
- "address": "fake-address",
- "publicKey": "fake-publicKey"
- }
+ {
+ "address": "APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn",
+ "publicKey": "035b63b4668ee261c16ca91443f3371e2fe349e131cb7bf5f8a3e93a3ddfdfc788"
+ },
+ {
+ "address": "Aa4M1zL3a74L51f1AvEsLmBTsKLKrkRScU",
+ "publicKey": "0308c0d019cd9c0c59618e3b86afc584078b54a85a025c9f30a8bdc82cdc8e1252"
+ },
+ {
+ "address": "fake-address",
+ "publicKey": "fake-publicKey"
+ }
]
diff --git a/packages/core-database/__tests__/__support__/setup.js b/packages/core-database/__tests__/__support__/setup.js
deleted file mode 100644
index a2358e432c..0000000000
--- a/packages/core-database/__tests__/__support__/setup.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const app = require('@arkecosystem/core-container')
-const appHelper = require('@arkecosystem/core-test-utils/lib/helpers/container')
-
-exports.setUp = async () => {
- jest.setTimeout(60000)
-
- process.env.ARK_SKIP_BLOCKCHAIN = true
-
- await appHelper.setUp({
- exit: '@arkecosystem/core-blockchain',
- exclude: [
- '@arkecosystem/core-p2p',
- '@arkecosystem/core-transaction-pool-mem',
- ],
- })
-}
-
-exports.tearDown = async () => {
- await app.tearDown()
-}
diff --git a/packages/core-database/__tests__/__support__/setup.ts b/packages/core-database/__tests__/__support__/setup.ts
new file mode 100644
index 0000000000..faba7ee97d
--- /dev/null
+++ b/packages/core-database/__tests__/__support__/setup.ts
@@ -0,0 +1,22 @@
+import { app } from "@arkecosystem/core-container";
+import "@arkecosystem/core-test-utils";
+import { setUpContainer } from "@arkecosystem/core-test-utils/src/helpers/container";
+
+export const setUp = async () => {
+ jest.setTimeout(60000);
+
+ process.env.CORE_SKIP_BLOCKCHAIN = "true";
+
+ return await setUpContainer({
+ exit: "@arkecosystem/core-blockchain",
+ exclude: [
+ "@arkecosystem/core-p2p",
+ "@arkecosystem/core-transaction-pool",
+ "@arkecosystem/core-database-postgres",
+ ],
+ });
+};
+
+export const tearDown = async () => {
+ await app.tearDown();
+};
diff --git a/packages/core-database/__tests__/database-service.test.ts b/packages/core-database/__tests__/database-service.test.ts
new file mode 100644
index 0000000000..399397e000
--- /dev/null
+++ b/packages/core-database/__tests__/database-service.test.ts
@@ -0,0 +1,221 @@
+import { Container, Database, EventEmitter } from "@arkecosystem/core-interfaces";
+import { Bignum, constants, models, transactionBuilder } from "@arkecosystem/crypto";
+import "jest-extended";
+import { WalletManager } from "../src";
+import { DatabaseService } from "../src/database-service";
+import { DatabaseConnectionStub } from "./__fixtures__/database-connection-stub";
+import { StateStorageStub } from "./__fixtures__/state-storage-stub";
+import { setUp, tearDown } from "./__support__/setup";
+
+const { Block, Transaction, Wallet } = models;
+
+const { ARKTOSHI, TransactionTypes } = constants;
+
+let connection : Database.IDatabaseConnection;
+let databaseService : DatabaseService;
+let walletManager : Database.IWalletManager;
+let genesisBlock : models.Block;
+let container: Container.IContainer;
+let emitter : EventEmitter.EventEmitter;
+
+
+beforeAll(async () => {
+ container = await setUp();
+ emitter = container.resolvePlugin("event-emitter");
+ genesisBlock = new Block(require("@arkecosystem/core-test-utils/src/config/testnet/genesisBlock.json"));
+ connection = new DatabaseConnectionStub();
+ walletManager = new WalletManager();
+});
+
+afterAll(async () => {
+ await tearDown();
+});
+
+beforeEach(()=> {
+ jest.restoreAllMocks()
+});
+
+function createService() {
+ return new DatabaseService({}, connection, walletManager, null, null);
+}
+
+describe('Database Service', () => {
+ it('should listen for emitter events during constructor', () => {
+ jest.spyOn(emitter, 'on');
+ jest.spyOn(emitter, 'once');
+
+ databaseService = createService();
+
+
+ expect(emitter.on).toHaveBeenCalledWith('state:started', expect.toBeFunction());
+ expect(emitter.on).toHaveBeenCalledWith('wallet.created.cold', expect.toBeFunction());
+ expect(emitter.once).toHaveBeenCalledWith('shutdown', expect.toBeFunction());
+ });
+
+ describe('applyBlock', () => {
+ it('should applyBlock', async () => {
+ jest.spyOn(walletManager, 'applyBlock').mockImplementation( (block) => block );
+ jest.spyOn(emitter, 'emit');
+
+
+ databaseService = createService();
+ jest.spyOn(databaseService, 'applyRound').mockImplementation(() => null); // test applyRound logic separately
+
+ await databaseService.applyBlock(genesisBlock);
+
+
+ expect(walletManager.applyBlock).toHaveBeenCalledWith(genesisBlock);
+ expect(emitter.emit).toHaveBeenCalledWith('block.applied', genesisBlock.data);
+ genesisBlock.transactions.forEach(tx => expect(emitter.emit).toHaveBeenCalledWith('transaction.applied', tx.data));
+ })
+ });
+
+ describe('getBlocksForRound', () => {
+ it('should fetch blocks using lastBlock in state-storage', async() => {
+ const stateStorageStub = new StateStorageStub();
+ jest.spyOn(stateStorageStub, 'getLastBlock').mockReturnValue(null);
+ jest.spyOn(container, 'has').mockReturnValue(true);
+ jest.spyOn(container, 'resolve').mockReturnValue(stateStorageStub);
+
+ databaseService = createService();
+ jest.spyOn(databaseService, 'getLastBlock').mockReturnValue(null);
+
+
+ const blocks = await databaseService.getBlocksForRound();
+
+
+ expect(blocks).toBeEmpty();
+ expect(stateStorageStub.getLastBlock).toHaveBeenCalled();
+ expect(databaseService.getLastBlock).not.toHaveBeenCalled();
+
+ });
+
+ it('should fetch blocks using lastBlock in database', async () => {
+ jest.spyOn(container, 'has').mockReturnValue(false);
+
+ databaseService = createService();
+ jest.spyOn(databaseService, 'getLastBlock').mockReturnValue(null);
+
+
+ const blocks = await databaseService.getBlocksForRound();
+
+
+ expect(blocks).toBeEmpty();
+ expect(databaseService.getLastBlock).toHaveBeenCalled();
+ });
+
+ it('should fetch blocks from lastBlock height', async () => {
+ databaseService = createService();
+
+ jest.spyOn(databaseService, 'getLastBlock').mockReturnValue(genesisBlock);
+ jest.spyOn(databaseService, 'getBlocks').mockReturnValue([]);
+ jest.spyOn(container, 'has').mockReturnValue(false);
+
+
+ const blocks = await databaseService.getBlocksForRound();
+
+
+ expect(blocks).toBeEmpty();
+ expect(databaseService.getBlocks).toHaveBeenCalledWith(1, container.getConfig().getMilestone(genesisBlock.data.height).activeDelegates);
+ })
+ });
+
+ /* TODO: Testing a method that's private. This needs a replacement by testing a public method instead */
+
+ describe("calcPreviousActiveDelegates", () => {
+ it("should calculate the previous delegate list", async () => {
+ walletManager = new WalletManager();
+ const initialHeight = 52;
+
+ // Create delegates
+ for (const transaction of genesisBlock.transactions) {
+ if (transaction.type === TransactionTypes.DelegateRegistration) {
+ const wallet = walletManager.findByPublicKey(transaction.senderPublicKey);
+ wallet.username = Transaction.deserialize(
+                        transaction.serialized.toString("hex"),
+ ).asset.delegate.username;
+ walletManager.reindex(wallet);
+ }
+ }
+
+ const keys = {
+ passphrase: "this is a secret passphrase",
+ publicKey: "02c71ab1a1b5b7c278145382eb0b535249483b3c4715a4fe6169d40388bbb09fa7",
+ privateKey: "dcf4ead2355090279aefba91540f32e93b15c541ecb48ca73071f161b4f3e2e3",
+ address: "D64cbDctaiADEH7NREnvRQGV27bnb1v2kE",
+ };
+
+ // Beginning of round 2 with all delegates 0 vote balance.
+ const delegatesRound2 = walletManager.loadActiveDelegateList(51, initialHeight);
+
+ // Prepare sender wallet
+ const sender = new Wallet(keys.address);
+ sender.publicKey = keys.publicKey;
+ sender.canApply = jest.fn(() => true);
+ walletManager.reindex(sender);
+
+ // Apply 51 blocks, where each increases the vote balance of a delegate to
+ // reverse the current delegate order.
+ const blocksInRound = [];
+ for (let i = 0; i < 51; i++) {
+ const transfer = transactionBuilder
+ .transfer()
+ .amount(i * ARKTOSHI)
+ .recipientId(delegatesRound2[i].address)
+ .sign(keys.passphrase)
+ .build();
+
+ // Vote for itself
+ walletManager.findByPublicKey(delegatesRound2[i].publicKey).vote = delegatesRound2[i].publicKey;
+ // walletManager.byPublicKey[delegatesRound2[i].publicKey].vote = delegatesRound2[i].publicKey;
+
+ const block = Block.create(
+ {
+ version: 0,
+ timestamp: 0,
+ height: initialHeight + i,
+ numberOfTransactions: 1,
+ totalAmount: transfer.amount,
+ totalFee: new Bignum(0.1),
+ reward: new Bignum(2),
+ payloadLength: 0,
+ payloadHash: "a".repeat(64),
+ transactions: [transfer],
+ },
+ keys,
+ );
+
+ block.data.generatorPublicKey = keys.publicKey;
+ walletManager.applyBlock(block);
+
+ blocksInRound.push(block);
+ }
+
+ // The delegates from round 2 are now reversed in rank in round 3.
+ const delegatesRound3 = walletManager.loadActiveDelegateList(51, initialHeight + 51);
+ for (let i = 0; i < delegatesRound3.length; i++) {
+ expect(delegatesRound3[i].rate).toBe(i + 1);
+ expect(delegatesRound3[i].publicKey).toBe(delegatesRound2[delegatesRound3.length - i - 1].publicKey);
+ }
+
+
+ jest.spyOn(databaseService, 'getBlocksForRound').mockReturnValue(blocksInRound);
+ databaseService.walletManager = walletManager;
+
+ // Necessary for revertRound to not blow up.
+ walletManager.allByUsername = jest.fn(() => {
+ const usernames = Object.values((walletManager as any).byUsername);
+ usernames.push(sender);
+ return usernames;
+ });
+
+ // Finally recalculate the round 2 list and compare against the original list
+ const restoredDelegatesRound2 = await (databaseService as any).calcPreviousActiveDelegates(2);
+
+ for (let i = 0; i < restoredDelegatesRound2.length; i++) {
+ expect(restoredDelegatesRound2[i].rate).toBe(i + 1);
+ expect(restoredDelegatesRound2[i].publicKey).toBe(delegatesRound2[i].publicKey);
+ }
+ });
+ });
+});
diff --git a/packages/core-database/__tests__/interface.test.js b/packages/core-database/__tests__/interface.test.js
deleted file mode 100644
index 499cd679fc..0000000000
--- a/packages/core-database/__tests__/interface.test.js
+++ /dev/null
@@ -1,453 +0,0 @@
-const { Block, Transaction, Wallet } = require('@arkecosystem/crypto').models
-const { Bignum, transactionBuilder } = require('@arkecosystem/crypto')
-const {
- ARKTOSHI,
- TRANSACTION_TYPES,
-} = require('@arkecosystem/crypto').constants
-const app = require('./__support__/setup')
-
-let ConnectionInterface
-let connectionInterface
-let genesisBlock // eslint-disable-line no-unused-vars
-
-beforeAll(async done => {
- await app.setUp()
-
- ConnectionInterface = require('../lib/interface')
- connectionInterface = new ConnectionInterface()
- genesisBlock = new Block(
- require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json'),
- )
-
- done()
-})
-
-afterAll(async done => {
- await app.tearDown()
-
- done()
-})
-
-describe('Connection Interface', () => {
- it('should be an object', () => {
- expect(connectionInterface).toBeInstanceOf(ConnectionInterface)
- })
-
- describe('getConnection', () => {
- it('should be a function', () => {
- expect(connectionInterface.getConnection).toBeFunction()
- })
-
- it('should return the set connection', () => {
- connectionInterface.connection = 'fake-connection'
-
- expect(connectionInterface.getConnection()).toBe('fake-connection')
- })
- })
-
- describe('connect', () => {
- it('should be a function', () => {
- expect(connectionInterface.connect).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.connect()).rejects.toThrowError(
- 'Method [connect] not implemented!',
- )
- })
- })
-
- describe('disconnect', () => {
- it('should be a function', () => {
- expect(connectionInterface.disconnect).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.disconnect()).rejects.toThrowError(
- 'Method [disconnect] not implemented!',
- )
- })
- })
-
- describe('getActiveDelegates', () => {
- it('should be a function', () => {
- expect(connectionInterface.getActiveDelegates).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(
- connectionInterface.getActiveDelegates(),
- ).rejects.toThrowError('Method [getActiveDelegates] not implemented!')
- })
- })
-
- describe('buildWallets', () => {
- it('should be a function', () => {
- expect(connectionInterface.buildWallets).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.buildWallets()).rejects.toThrowError(
- 'Method [buildWallets] not implemented!',
- )
- })
- })
-
- describe('saveWallets', () => {
- it('should be a function', () => {
- expect(connectionInterface.saveWallets).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.saveWallets()).rejects.toThrowError(
- 'Method [saveWallets] not implemented!',
- )
- })
- })
-
- describe('saveBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.saveBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.saveBlock()).rejects.toThrowError(
- 'Method [saveBlock] not implemented!',
- )
- })
- })
-
- describe('enqueueSaveBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.enqueueSaveBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- expect(connectionInterface.enqueueSaveBlock).toThrow(
- 'Method [enqueueSaveBlock] not implemented!',
- )
- })
- })
-
- describe('enqueueDeleteBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.enqueueDeleteBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- expect(connectionInterface.enqueueDeleteBlock).toThrow(
- 'Method [enqueueDeleteBlock] not implemented!',
- )
- })
- })
-
- describe('enqueueDeleteRound', () => {
- it('should be a function', () => {
- expect(connectionInterface.enqueueDeleteRound).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- expect(connectionInterface.enqueueDeleteRound).toThrow(
- 'Method [enqueueDeleteRound] not implemented!',
- )
- })
- })
-
- describe('commitQueuedQueries', () => {
- it('should be a function', () => {
- expect(connectionInterface.commitQueuedQueries).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(
- connectionInterface.commitQueuedQueries(),
- ).rejects.toThrowError('Method [commitQueuedQueries] not implemented!')
- })
- })
-
- describe('deleteBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.deleteBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.deleteBlock()).rejects.toThrowError(
- 'Method [deleteBlock] not implemented!',
- )
- })
- })
-
- describe('getBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.getBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.getBlock()).rejects.toThrowError(
- 'Method [getBlock] not implemented!',
- )
- })
- })
-
- describe('getLastBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.getLastBlock).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.getLastBlock()).rejects.toThrowError(
- 'Method [getLastBlock] not implemented!',
- )
- })
- })
-
- describe('getBlocks', () => {
- it('should be a function', () => {
- expect(connectionInterface.getBlocks).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.getBlocks()).rejects.toThrowError(
- 'Method [getBlocks] not implemented!',
- )
- })
- })
-
- describe('getRecentBlockIds', () => {
- it('should be a function', () => {
- expect(connectionInterface.getRecentBlockIds).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(
- connectionInterface.getRecentBlockIds(),
- ).rejects.toThrowError('Method [getRecentBlockIds] not implemented!')
- })
- })
-
- describe('saveRound', () => {
- it('should be a function', () => {
- expect(connectionInterface.saveRound).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.saveRound()).rejects.toThrowError(
- 'Method [saveRound] not implemented!',
- )
- })
- })
-
- describe('deleteRound', () => {
- it('should be a function', () => {
- expect(connectionInterface.deleteRound).toBeFunction()
- })
-
- it('should throw an exception', async () => {
- await expect(connectionInterface.deleteRound()).rejects.toThrowError(
- 'Method [deleteRound] not implemented!',
- )
- })
- })
-
- describe('updateDelegateStats', () => {
- it('should be a function', () => {
- expect(connectionInterface.updateDelegateStats).toBeFunction()
- })
- })
-
- describe.skip('applyRound', () => {
- it('should be a function', () => {
- expect(connectionInterface.applyRound).toBeFunction()
- })
- })
-
- describe.skip('revertRound', () => {
- it('should be a function', () => {
- expect(connectionInterface.revertRound).toBeFunction()
- })
- })
-
- describe.skip('validateDelegate', () => {
- it('should be a function', () => {
- expect(connectionInterface.validateDelegate).toBeFunction()
- })
- })
-
- describe.skip('validateForkedBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.validateForkedBlock).toBeFunction()
- })
- })
-
- describe.skip('applyBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.applyBlock).toBeFunction()
- })
- })
-
- describe.skip('revertBlock', () => {
- it('should be a function', () => {
- expect(connectionInterface.revertBlock).toBeFunction()
- })
- })
-
- describe.skip('verifyTransaction', () => {
- it('should be a function', () => {
- expect(connectionInterface.verifyTransaction).toBeFunction()
- })
- })
-
- describe('__calcPreviousActiveDelegates', () => {
- it('should be a function', () => {
- expect(connectionInterface.__calcPreviousActiveDelegates).toBeFunction()
- })
-
- it('should calculate the previous delegate list', async () => {
- const walletManager = new (require('../lib/wallet-manager'))()
- const initialHeight = 52
-
- // Create delegates
- for (const transaction of genesisBlock.transactions) {
- if (transaction.type === TRANSACTION_TYPES.DELEGATE_REGISTRATION) {
- const wallet = walletManager.findByPublicKey(
- transaction.senderPublicKey,
- )
- wallet.username = Transaction.deserialize(
- transaction.serialized.toString('hex'),
- ).asset.delegate.username
- walletManager.reindex(wallet)
- }
- }
-
- const keys = {
- passphrase: 'this is a secret passphrase',
- publicKey:
- '02c71ab1a1b5b7c278145382eb0b535249483b3c4715a4fe6169d40388bbb09fa7',
- privateKey:
- 'dcf4ead2355090279aefba91540f32e93b15c541ecb48ca73071f161b4f3e2e3',
- address: 'D64cbDctaiADEH7NREnvRQGV27bnb1v2kE',
- }
-
- // Beginning of round 2 with all delegates 0 vote balance.
- const delegatesRound2 = walletManager.loadActiveDelegateList(
- 51,
- initialHeight,
- )
-
- // Prepare sender wallet
- const sender = new Wallet(keys.address)
- sender.publicKey = keys.publicKey
- sender.canApply = jest.fn(() => true)
- walletManager.reindex(sender)
-
- // Apply 51 blocks, where each increases the vote balance of a delegate to
- // reverse the current delegate order.
- const blocksInRound = []
- for (let i = 0; i < 51; i++) {
- const transfer = transactionBuilder
- .transfer()
- .amount(i * ARKTOSHI)
- .recipientId(delegatesRound2[i].address)
- .sign(keys.passphrase)
- .build()
-
- // Vote for itself
- walletManager.byPublicKey[delegatesRound2[i].publicKey].vote =
- delegatesRound2[i].publicKey
-
- const block = Block.create(
- {
- version: 0,
- timestamp: 0,
- height: initialHeight + i,
- numberOfTransactions: 0,
- totalAmount: transfer.amount,
- totalFee: new Bignum(0.1),
- reward: new Bignum(2),
- payloadLength: 32 * 0,
- payloadHash: '',
- transactions: [transfer],
- },
- keys,
- )
-
- block.data.generatorPublicKey = keys.publicKey
- walletManager.applyBlock(block)
-
- blocksInRound.push(block)
- }
-
- // The delegates from round 2 are now reversed in rank in round 3.
- const delegatesRound3 = walletManager.loadActiveDelegateList(
- 51,
- initialHeight + 51,
- )
- for (let i = 0; i < delegatesRound3.length; i++) {
- expect(delegatesRound3[i].rate).toBe(i + 1)
- expect(delegatesRound3[i].publicKey).toBe(
- delegatesRound2[delegatesRound3.length - i - 1].publicKey,
- )
- }
-
- const connection = new ConnectionInterface()
- connection.__getBlocksForRound = jest.fn(async () => blocksInRound)
- connection.walletManager = walletManager
-
- // Necessary for revertRound to not blow up.
- walletManager.allByUsername = jest.fn(() => {
- const usernames = Object.values(walletManager.byUsername)
- usernames.push(sender)
- return usernames
- })
-
- // Finally recalculate the round 2 list and compare against the original list
- const restoredDelegatesRound2 = await connection.__calcPreviousActiveDelegates(
- 2,
- )
-
- for (let i = 0; i < restoredDelegatesRound2.length; i++) {
- expect(restoredDelegatesRound2[i].rate).toBe(i + 1)
- expect(restoredDelegatesRound2[i].publicKey).toBe(
- delegatesRound2[i].publicKey,
- )
- }
- })
- })
-
- describe('_registerWalletManager', () => {
- it('should be a function', () => {
- expect(connectionInterface._registerWalletManager).toBeFunction()
- })
-
- it('should register the wallet manager', () => {
- expect(connectionInterface).not.toHaveProperty('walletManager')
-
- connectionInterface._registerWalletManager()
-
- expect(connectionInterface).toHaveProperty('walletManager')
- })
- })
-
- describe('_registerRepositories', () => {
- it('should be a function', () => {
- expect(connectionInterface._registerRepositories).toBeFunction()
- })
-
- it('should register the repositories', async () => {
- await expect(connectionInterface).not.toHaveProperty('wallets')
- await expect(connectionInterface).not.toHaveProperty('delegates')
-
- connectionInterface._registerRepositories()
-
- await expect(connectionInterface).toHaveProperty('wallets')
- await expect(connectionInterface.wallets).toBeInstanceOf(
- require('../lib/repositories/wallets'),
- )
-
- await expect(connectionInterface).toHaveProperty('delegates')
- await expect(connectionInterface.delegates).toBeInstanceOf(
- require('../lib/repositories/delegates'),
- )
- })
- })
-})
diff --git a/packages/core-database/__tests__/repositories/delegates.test.js b/packages/core-database/__tests__/repositories/delegates.test.js
deleted file mode 100644
index a6511dff96..0000000000
--- a/packages/core-database/__tests__/repositories/delegates.test.js
+++ /dev/null
@@ -1,348 +0,0 @@
-const {
- Bignum,
- crypto,
- constants: { ARKTOSHI },
-} = require('@arkecosystem/crypto')
-const { Block } = require('@arkecosystem/crypto').models
-const { delegateCalculator } = require('@arkecosystem/core-utils')
-const app = require('../__support__/setup')
-
-let genesisBlock
-let repository
-let walletManager
-
-beforeAll(async done => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = new Block(
- require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json'),
- )
-
- done()
-})
-
-afterAll(async done => {
- await app.tearDown()
-
- done()
-})
-
-beforeEach(async done => {
- walletManager = new (require('../../lib/wallet-manager'))()
- repository = new (require('../../lib/repositories/delegates'))({
- walletManager,
- })
-
- done()
-})
-
-function generateWallets() {
- return genesisBlock.transactions.map(transaction => {
- const address = crypto.getAddress(transaction.senderPublicKey)
-
- return {
- address,
- publicKey: `publicKey-${address}`,
- secondPublicKey: `secondPublicKey-${address}`,
- vote: `vote-${address}`,
- username: `username-${address}`,
- balance: new Bignum(100),
- voteBalance: new Bignum(200),
- }
- })
-}
-
-describe('Delegate Repository', () => {
- it('should be an object', () => {
- expect(repository).toBeObject()
- })
-
- describe('getLocalDelegates', () => {
- const delegates = [
- { username: 'delegate-0' },
- { username: 'delegate-1' },
- { username: 'delegate-2' },
- ]
- const wallets = [
- delegates[0],
- {},
- delegates[1],
- { username: '' },
- delegates[2],
- {},
- ]
-
- it('should be a function', () => {
- expect(repository.getLocalDelegates).toBeFunction()
- })
-
- it('should return the local wallets of the connection that are delegates', () => {
- repository.connection.walletManager.all = jest.fn(() => wallets)
-
- expect(repository.getLocalDelegates()).toEqual(
- expect.arrayContaining(delegates),
- )
- expect(repository.connection.walletManager.all).toHaveBeenCalled()
- })
- })
-
- describe('findAll', () => {
- it('should be a function', () => {
- expect(repository.findAll).toBeFunction()
- })
-
- it('should be ok without params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll()
- expect(count).toBe(52)
- expect(rows).toHaveLength(52)
- })
-
- it('should be ok with params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 10, limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (no offset)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (offset = 0)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 0, limit: 12 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(12)
- })
-
- it('should be ok with params (no limit)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(42)
- })
- })
-
- describe('paginate', () => {
- it('should be a function', () => {
- expect(repository.paginate).toBeFunction()
- })
-
- it('should be ok without params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.paginate()
- expect(count).toBe(52)
- expect(rows).toHaveLength(52)
- })
-
- it('should be ok with params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.paginate({ offset: 10, limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (no offset)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.paginate({ limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (offset = 0)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.paginate({ offset: 0, limit: 12 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(12)
- })
-
- it('should be ok with params (no limit)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.paginate({ offset: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(42)
- })
- })
-
- describe('search', () => {
- it('should be a function', () => {
- expect(repository.search).toBeFunction()
- })
-
- it('should search by exact username match', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({
- username: 'username-APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn',
- })
-
- expect(count).toBe(1)
- expect(rows).toHaveLength(1)
- })
-
- it('should search that username contains the string', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({ username: 'username' })
-
- expect(count).toBe(52)
- expect(rows).toHaveLength(52)
- })
-
- describe('when no results', () => {
- it('should be ok', () => {
- const { count, rows } = repository.search({
- username: 'unknown-dummy-username',
- })
-
- expect(count).toBe(0)
- expect(rows).toHaveLength(0)
- })
- })
-
- it('should be ok with params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({
- username: 'username',
- offset: 10,
- limit: 10,
- })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (no offset)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({
- username: 'username',
- limit: 10,
- })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (offset = 0)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({
- username: 'username',
- offset: 0,
- limit: 12,
- })
- expect(count).toBe(52)
- expect(rows).toHaveLength(12)
- })
-
- it('should be ok with params (no limit)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.search({
- username: 'username',
- offset: 10,
- })
- expect(count).toBe(52)
- expect(rows).toHaveLength(42)
- })
- })
-
- describe('findById', () => {
- const expectWallet = key => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const wallet = repository.findById(wallets[0][key])
- expect(wallet).toBeObject()
- expect(wallet.address).toBe(wallets[0].address)
- expect(wallet.publicKey).toBe(wallets[0].publicKey)
- expect(wallet.username).toBe(wallets[0].username)
- }
-
- it('should be a function', () => {
- expect(repository.findById).toBeFunction()
- })
-
- it('should be ok with an address', () => {
- expectWallet('address')
- })
-
- it('should be ok with a publicKey', () => {
- expectWallet('publicKey')
- })
-
- it('should be ok with a username', () => {
- expectWallet('username')
- })
- })
-
- describe('getActiveAtHeight', () => {
- it('should be a function', () => {
- expect(repository.getActiveAtHeight).toBeFunction()
- })
-
- it('should be ok', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const delegate = {
- username: 'test',
- publicKey: 'test',
- voteBalance: new Bignum(10000 * ARKTOSHI),
- producedBlocks: 1000,
- missedBlocks: 500,
- }
- const height = 1
-
- repository.connection.getActiveDelegates = jest.fn(() => [delegate])
- repository.connection.wallets = {
- findById: jest.fn(() => delegate),
- }
-
- const results = repository.getActiveAtHeight(height)
-
- expect(results).toBeArray()
- expect(results[0].username).toBeString()
- expect(results[0].approval).toBeNumber()
- expect(results[0].productivity).toBeNumber()
- expect(results[0].approval).toBe(
- delegateCalculator.calculateApproval(delegate, height),
- )
- expect(results[0].productivity).toBe(
- delegateCalculator.calculateProductivity(delegate),
- )
- })
- })
-})
diff --git a/packages/core-database/__tests__/repositories/delegates.test.ts b/packages/core-database/__tests__/repositories/delegates.test.ts
new file mode 100644
index 0000000000..1470ae85ce
--- /dev/null
+++ b/packages/core-database/__tests__/repositories/delegates.test.ts
@@ -0,0 +1,287 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { delegateCalculator } from "@arkecosystem/core-utils";
+import { Bignum, constants, crypto, models } from "@arkecosystem/crypto";
+import genesisBlockTestnet from "../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { DelegatesRepository, WalletsRepository } from "../../src";
+import { DatabaseService } from "../../src/database-service";
+import { setUp, tearDown } from "../__support__/setup";
+
+const { ARKTOSHI } = constants;
+const { Block } = models;
+
+let genesisBlock;
+let repository;
+
+let walletsRepository : Database.IWalletsBusinessRepository;
+let walletManager: Database.IWalletManager;
+let databaseService: Database.IDatabaseService;
+
+beforeAll(async done => {
+ await setUp();
+
+ // Create the genesis block after the setup has finished or else it uses a potentially
+ // wrong network config.
+ genesisBlock = new Block(genesisBlockTestnet);
+
+ done();
+});
+
+afterAll(async done => {
+ await tearDown();
+
+ done();
+});
+
+beforeEach(async done => {
+ const { WalletManager } = require("../../src/wallet-manager");
+ walletManager = new WalletManager();
+
+ repository = new DelegatesRepository(() => databaseService);
+ walletsRepository = new WalletsRepository(() => databaseService);
+ databaseService = new DatabaseService(null, null, walletManager, walletsRepository, repository);
+
+ done();
+});
+
+function generateWallets() {
+ return genesisBlock.transactions.map((transaction, index) => {
+ const address = crypto.getAddress(transaction.senderPublicKey);
+
+ return {
+ address,
+ publicKey: `publicKey-${address}`,
+ secondPublicKey: `secondPublicKey-${address}`,
+ vote: `vote-${address}`,
+ username: `username-${address}`,
+ balance: new Bignum(100),
+ voteBalance: new Bignum(200),
+ rate: index + 1,
+ };
+ });
+}
+
+describe("Delegate Repository", () => {
+ describe("getLocalDelegates", () => {
+ const delegates = [{ username: "delegate-0" }, { username: "delegate-1" }, { username: "delegate-2" }];
+ const wallets = [delegates[0], {}, delegates[1], { username: "" }, delegates[2], {}];
+
+ it("should return the local wallets of the connection that are delegates", () => {
+ jest.spyOn(walletManager, 'allByAddress').mockReturnValue(wallets);
+
+ const actualDelegates = repository.getLocalDelegates();
+
+ expect(actualDelegates).toEqual(expect.arrayContaining(delegates));
+ expect(walletManager.allByAddress).toHaveBeenCalled();
+ });
+ });
+
+ describe("findAll", () => {
+ it("should be ok without params", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll();
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(52);
+ expect(rows.sort((a, b) => a.rate < b.rate)).toEqual(rows);
+ });
+
+ it("should be ok with params", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 10, limit: 10, orderBy: "rate:desc" });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ expect(rows.sort((a, b) => a.rate > b.rate)).toEqual(rows);
+ });
+
+ it("should be ok with params (no offset)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ limit: 10 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (offset = 0)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 0, limit: 12 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(12);
+ });
+
+ it("should be ok with params (no limit)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 10 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(42);
+ });
+ });
+
+ describe("search", () => {
+ beforeEach(() => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+ });
+
+ describe("by `username`", () => {
+ it("should search by exact match", () => {
+ const username = "username-APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn";
+ const { count, rows } = repository.search({ username });
+
+ expect(count).toBe(1);
+ expect(rows).toHaveLength(1);
+ expect(rows[0].username).toEqual(username);
+ });
+
+ it("should search that username contains the string", () => {
+ const { count, rows } = repository.search({ username: "username" });
+
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(52);
+ });
+
+ describe('when a username is "undefined"', () => {
+ it("should return it", () => {
+ // Index a wallet with username "undefined"
+ walletManager.allByAddress()[0].username = 'undefined';
+
+ const username = "undefined";
+ const { count, rows } = repository.search({ username });
+
+ expect(count).toBe(1);
+ expect(rows).toHaveLength(1);
+ expect(rows[0].username).toEqual(username);
+ });
+ });
+
+ describe("when the username does not exist", () => {
+ it("should return no results", () => {
+ const { count, rows } = repository.search({
+ username: "unknown-dummy-username",
+ });
+
+ expect(count).toBe(0);
+ expect(rows).toHaveLength(0);
+ });
+ });
+
+ it("should be ok with params", () => {
+ const { count, rows } = repository.search({
+ username: "username",
+ offset: 10,
+ limit: 10,
+ });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (no offset)", () => {
+ const { count, rows } = repository.search({
+ username: "username",
+ limit: 10,
+ });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (offset = 0)", () => {
+ const { count, rows } = repository.search({
+ username: "username",
+ offset: 0,
+ limit: 12,
+ });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(12);
+ });
+
+ it("should be ok with params (no limit)", () => {
+ const { count, rows } = repository.search({
+ username: "username",
+ offset: 10,
+ });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(42);
+ });
+ });
+
+ describe("when searching without params", () => {
+ it("should return all results", () => {
+ const { count, rows } = repository.search({});
+
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(52);
+ });
+
+ describe('when a username is "undefined"', () => {
+ it("should return all results", () => {
+ // Index a wallet with username "undefined"
+ walletManager.allByAddress()[0].username = "undefined";
+
+ const { count, rows } = repository.search({});
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(52);
+ });
+ });
+ });
+ });
+
+ describe("findById", () => {
+ const expectWallet = key => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const wallet = repository.findById(wallets[0][key]);
+ expect(wallet).toBeObject();
+ expect(wallet.address).toBe(wallets[0].address);
+ expect(wallet.publicKey).toBe(wallets[0].publicKey);
+ expect(wallet.username).toBe(wallets[0].username);
+ };
+
+ it("should be ok with an address", () => {
+ expectWallet("address");
+ });
+
+ it("should be ok with a publicKey", () => {
+ expectWallet("publicKey");
+ });
+
+ it("should be ok with a username", () => {
+ expectWallet("username");
+ });
+ });
+
+ describe("getActiveAtHeight", () => {
+ it("should be ok", async () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const delegate = {
+ username: "test",
+ publicKey: "test",
+ voteBalance: new Bignum(10000 * ARKTOSHI),
+ producedBlocks: 1000,
+ missedBlocks: 500,
+ };
+ const height = 1;
+
+ jest.spyOn(databaseService, 'getActiveDelegates').mockReturnValue([delegate]);
+ jest.spyOn(walletsRepository, 'findById').mockReturnValue(delegate);
+
+ const results = await repository.getActiveAtHeight(height);
+
+ expect(results).toBeArray();
+ expect(results[0].username).toBeString();
+ expect(results[0].approval).toBeNumber();
+ expect(results[0].productivity).toBeNumber();
+ expect(results[0].approval).toBe(delegateCalculator.calculateApproval(delegate, height));
+ expect(results[0].productivity).toBe(delegateCalculator.calculateProductivity(delegate));
+ });
+ });
+});
diff --git a/packages/core-database/__tests__/repositories/utils/filter-rows.test.js b/packages/core-database/__tests__/repositories/utils/filter-rows.test.js
deleted file mode 100644
index ad92e0f46a..0000000000
--- a/packages/core-database/__tests__/repositories/utils/filter-rows.test.js
+++ /dev/null
@@ -1,119 +0,0 @@
-const app = require('../../__support__/setup')
-
-let filterRows
-
-beforeAll(async done => {
- await app.setUp()
-
- filterRows = require('../../../lib/repositories/utils/filter-rows')
-
- done()
-})
-
-afterAll(async done => {
- await app.tearDown()
-
- done()
-})
-
-describe('Filter Rows', () => {
- const rows = [
- { a: 1, b: 2, c: [] },
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- {
- a: 2, b: 4, c: ['dummy-2'], d: 'dummy-0',
- },
- { a: 3, b: 4, c: ['DUMMY-1'] },
- ]
-
- describe('exact', () => {
- it('match objects with the same value than the parameter', () => {
- expect(filterRows(rows, { a: 1 }, { exact: ['a'] })).toEqual([
- { a: 1, b: 2, c: [] },
- ])
- expect(filterRows(rows, { a: 3 }, { exact: ['a'] })).toEqual([
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- { a: 3, b: 4, c: ['DUMMY-1'] },
- ])
- expect(filterRows(rows, { a: 3, b: 3 }, { exact: ['a'] })).toEqual([
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- { a: 3, b: 4, c: ['DUMMY-1'] },
- ])
- expect(filterRows(rows, { a: 3, b: 3 }, { exact: ['a', 'b'] })).toEqual([
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- ])
- })
- })
-
- describe('between', () => {
- it('match objects that include a value beween two parameters (included)', () => {
- expect(filterRows(rows, { a: { from: 3 } }, { between: ['a'] })).toEqual([
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- { a: 3, b: 4, c: ['DUMMY-1'] },
- ])
- expect(
- filterRows(rows, { a: { from: 2, to: 2 } }, { between: ['a'] }),
- ).toEqual([
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- {
- a: 2, b: 4, c: ['dummy-2'], d: 'dummy-0',
- },
- ])
- expect(filterRows(rows, { a: { to: 2 } }, { between: ['a'] })).toEqual([
- { a: 1, b: 2, c: [] },
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- {
- a: 2, b: 4, c: ['dummy-2'], d: 'dummy-0',
- },
- ])
- expect(
- filterRows(rows, { b: { from: 3, to: 4 } }, { between: ['b'] }),
- ).toEqual([
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- {
- a: 2, b: 4, c: ['dummy-2'], d: 'dummy-0',
- },
- { a: 3, b: 4, c: ['DUMMY-1'] },
- ])
- })
- })
-
- // This filter is not used yet
- describe('any', () => {
- it('match objects that include some values of the parameters', () => {
- expect(filterRows(rows, { c: ['dummy-1'] }, { any: ['c'] })).toEqual([
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- ])
- expect(
- filterRows(rows, { c: ['dummy-1'], d: ['dummy-0'] }, { any: ['c'] }),
- ).toEqual([
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- ])
- expect(
- filterRows(
- rows,
- { c: ['dummy-1'], d: ['dummy-0'] },
- { any: ['c', 'd'] },
- ),
- ).toEqual([
- {
- a: 2, b: 2, c: ['dummy-1'], d: ['dummy-0'],
- },
- { a: 3, b: 3, c: ['dummy-3', 'dummy-1', 'dummy-4'] },
- ])
- })
- })
-})
diff --git a/packages/core-database/__tests__/repositories/utils/filter-rows.test.ts b/packages/core-database/__tests__/repositories/utils/filter-rows.test.ts
new file mode 100644
index 0000000000..42a70699ca
--- /dev/null
+++ b/packages/core-database/__tests__/repositories/utils/filter-rows.test.ts
@@ -0,0 +1,87 @@
+import "jest-extended";
+
+import { setUp, tearDown } from "../../__support__/setup";
+
+let filterRows;
+
+beforeAll(async done => {
+ await setUp();
+
+ filterRows = require("../../../src/repositories/utils/filter-rows");
+
+ done();
+});
+
+afterAll(async done => {
+ await tearDown();
+
+ done();
+});
+
+describe("Filter Rows", () => {
+ const rows = [
+ { a: 1, b: 2, c: [] },
+ {
+ a: 2,
+ b: 2,
+ c: ["dummy-1"],
+ d: ["dummy-0"],
+ e: "value-e-1",
+ },
+ { a: 3, b: 3, c: ["dummy-3", "dummy-1", "dummy-4"] },
+ {
+ a: 2,
+ b: 4,
+ c: ["dummy-2"],
+ d: "dummy-0",
+ e: "value-e-2",
+ },
+ { a: 3, b: 4, c: ["DUMMY-1"] },
+ {
+ c: ["nop"],
+ d: "nop",
+ e: "value-e-3",
+ },
+ ];
+
+ describe("exact", () => {
+ it("match objects with the same value than the parameter", () => {
+ expect(filterRows(rows, { a: 1 }, { exact: ["a"] })).toEqual([rows[0]]);
+ expect(filterRows(rows, { a: 3 }, { exact: ["a"] })).toEqual([rows[2], rows[4]]);
+ expect(filterRows(rows, { a: 3, b: 3 }, { exact: ["a"] })).toEqual([rows[2], rows[4]]);
+ expect(filterRows(rows, { a: 3, b: 3 }, { exact: ["a", "b"] })).toEqual([rows[2]]);
+ });
+ });
+
+ describe("between", () => {
+ it("match objects that include a value beween two parameters (included)", () => {
+ expect(filterRows(rows, { a: { from: 3 } }, { between: ["a"] })).toEqual([rows[2], rows[4]]);
+ expect(filterRows(rows, { a: { from: 2, to: 2 } }, { between: ["a"] })).toEqual([rows[1], rows[3]]);
+ expect(filterRows(rows, { a: { to: 2 } }, { between: ["a"] })).toEqual([rows[0], rows[1], rows[3]]);
+ expect(filterRows(rows, { b: { from: 3, to: 4 } }, { between: ["b"] })).toEqual([
+ rows[2],
+ rows[3],
+ rows[4],
+ ]);
+ });
+ });
+
+ describe("in", () => {
+ it("match objects that include some values of the parameters", () => {
+ expect(filterRows(rows, { e: ["value-e-99"] }, { in: ["e"] })).toEqual([]);
+ expect(filterRows(rows, { e: ["value-e-1", "value-e-3"] }, { in: ["e"] })).toEqual([rows[1], rows[5]]);
+ });
+ });
+
+ // This filter is not used yet
+ describe("any", () => {
+ it("match objects that include some values of the parameters", () => {
+ expect(filterRows(rows, { c: ["dummy-1"] }, { any: ["c"] })).toEqual([rows[1], rows[2]]);
+ expect(filterRows(rows, { c: ["dummy-1"], d: ["dummy-0"] }, { any: ["c"] })).toEqual([rows[1], rows[2]]);
+ expect(filterRows(rows, { c: ["dummy-1"], d: ["dummy-0"] }, { any: ["c", "d"] })).toEqual([
+ rows[1],
+ rows[2],
+ ]);
+ });
+ });
+});
diff --git a/packages/core-database/__tests__/repositories/wallets.test.js b/packages/core-database/__tests__/repositories/wallets.test.js
deleted file mode 100644
index fef65f64aa..0000000000
--- a/packages/core-database/__tests__/repositories/wallets.test.js
+++ /dev/null
@@ -1,404 +0,0 @@
-const uniq = require('lodash/uniq')
-const compact = require('lodash/compact')
-const { Bignum, crypto } = require('@arkecosystem/crypto')
-const { Block } = require('@arkecosystem/crypto').models
-const app = require('../__support__/setup')
-
-let genesisBlock
-let genesisSenders
-let repository
-let walletManager
-
-beforeAll(async done => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = new Block(
- require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json'),
- )
- genesisSenders = uniq(
- compact(genesisBlock.transactions.map(tx => tx.senderPublicKey)),
- )
-
- done()
-})
-
-afterAll(async done => {
- await app.tearDown()
-
- done()
-})
-
-beforeEach(async done => {
- walletManager = new (require('../../lib/wallet-manager'))()
- repository = new (require('../../lib/repositories/wallets'))({
- walletManager,
- })
-
- done()
-})
-
-function generateWallets() {
- return genesisSenders.map(senderPublicKey => ({
- address: crypto.getAddress(senderPublicKey),
- }))
-}
-
-function generateVotes() {
- return genesisSenders.map(senderPublicKey => ({
- address: crypto.getAddress(senderPublicKey),
- vote: genesisBlock.transactions[0].senderPublicKey,
- }))
-}
-
-function generateFullWallets() {
- return genesisSenders.map(senderPublicKey => {
- const address = crypto.getAddress(senderPublicKey)
-
- return {
- address,
- publicKey: `publicKey-${address}`,
- secondPublicKey: `secondPublicKey-${address}`,
- vote: `vote-${address}`,
- username: `username-${address}`,
- balance: 100,
- voteBalance: 200,
- }
- })
-}
-
-describe('Wallet Repository', () => {
- it('should be an object', () => {
- expect(repository).toBeObject()
- })
-
- describe('all', () => {
- it('should be a function', () => {
- expect(repository.all).toBeFunction()
- })
-
- it('should return the local wallets of the connection', () => {
- repository.connection.walletManager.all = jest.fn()
- repository.all()
- expect(repository.connection.walletManager.all).toHaveBeenCalled()
- })
- })
-
- describe('findAll', () => {
- it('should be a function', () => {
- expect(repository.findAll).toBeFunction()
- })
-
- it('should be ok without params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll()
- expect(count).toBe(52)
- expect(rows).toHaveLength(52)
- })
-
- it('should be ok with params', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 10, limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (no offset)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ limit: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (offset = 0)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 0, limit: 12 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(12)
- })
-
- it('should be ok with params (no limit)', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- const { count, rows } = repository.findAll({ offset: 10 })
- expect(count).toBe(52)
- expect(rows).toHaveLength(42)
- })
- })
-
- describe('findAllByVote', () => {
- const vote = 'dummy-sender-public-key'
-
- beforeEach(() => {
- const wallets = generateVotes()
- wallets.forEach((wallet, i) => {
- if (i < 17) {
- wallet.vote = vote
- }
- })
- walletManager.index(wallets)
- })
-
- it('should be a function', () => {
- expect(repository.findAllByVote).toBeFunction()
- })
-
- it('should be ok without params', () => {
- const { count, rows } = repository.findAllByVote(vote)
- expect(count).toBe(17)
- expect(rows).toHaveLength(17)
- })
-
- it('should be ok with params', () => {
- const { count, rows } = repository.findAllByVote(vote, {
- offset: 10,
- limit: 10,
- })
- expect(count).toBe(17)
- expect(rows).toHaveLength(7)
- })
-
- it('should be ok with params (no offset)', () => {
- const { count, rows } = repository.findAllByVote(vote, { limit: 10 })
- expect(count).toBe(17)
- expect(rows).toHaveLength(10)
- })
-
- it('should be ok with params (offset = 0)', () => {
- const { count, rows } = repository.findAllByVote(vote, {
- offset: 0,
- limit: 1,
- })
- expect(count).toBe(17)
- expect(rows).toHaveLength(1)
- })
-
- it('should be ok with params (no limit)', () => {
- const { count, rows } = repository.findAllByVote(vote, { offset: 30 })
- expect(count).toBe(17)
- expect(rows).toHaveLength(0)
- })
- })
-
- describe('findById', () => {
- const expectWallet = key => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- const wallet = repository.findById(wallets[0][key])
- expect(wallet).toBeObject()
- expect(wallet.address).toBe(wallets[0].address)
- expect(wallet.publicKey).toBe(wallets[0].publicKey)
- expect(wallet.username).toBe(wallets[0].username)
- }
-
- it('should be a function', () => {
- expect(repository.findById).toBeFunction()
- })
-
- it('should be ok with an address', () => {
- expectWallet('address')
- })
-
- it('should be ok with a publicKey', () => {
- expectWallet('publicKey')
- })
-
- it('should be ok with a username', () => {
- expectWallet('username')
- })
- })
-
- describe('count', () => {
- it('should be a function', () => {
- expect(repository.count).toBeFunction()
- })
-
- it('should be ok', () => {
- const wallets = generateWallets()
- walletManager.index(wallets)
-
- expect(repository.count()).toBe(52)
- })
- })
-
- describe('top', () => {
- beforeEach(() => {
- walletManager.reindex({ address: 'dummy-1', balance: new Bignum(1000) })
- walletManager.reindex({ address: 'dummy-2', balance: new Bignum(2000) })
- walletManager.reindex({ address: 'dummy-3', balance: new Bignum(3000) })
- })
-
- it('should be a function', () => {
- expect(repository.top).toBeFunction()
- })
-
- it('should be ok without params', () => {
- const { count, rows } = repository.top()
-
- expect(count).toBe(3)
- expect(rows.length).toBe(3)
- expect(rows[0].balance).toEqual(new Bignum(3000))
- expect(rows[1].balance).toEqual(new Bignum(2000))
- expect(rows[2].balance).toEqual(new Bignum(1000))
- })
-
- it('should be ok with params', () => {
- const { count, rows } = repository.top({ offset: 1, limit: 2 })
-
- expect(count).toBe(3)
- expect(rows.length).toBe(2)
- expect(rows[0].balance).toEqual(new Bignum(2000))
- expect(rows[1].balance).toEqual(new Bignum(1000))
- })
-
- it('should be ok with params (offset = 0)', () => {
- const { count, rows } = repository.top({ offset: 0, limit: 2 })
-
- expect(count).toBe(3)
- expect(rows.length).toBe(2)
- expect(rows[0].balance).toEqual(new Bignum(3000))
- expect(rows[1].balance).toEqual(new Bignum(2000))
- })
-
- it('should be ok with params (no offset)', () => {
- const { count, rows } = repository.top({ limit: 2 })
-
- expect(count).toBe(3)
- expect(rows.length).toBe(2)
- expect(rows[0].balance).toEqual(new Bignum(3000))
- expect(rows[1].balance).toEqual(new Bignum(2000))
- })
-
- it('should be ok with params (no limit)', () => {
- const { count, rows } = repository.top({ offset: 1 })
-
- expect(count).toBe(3)
- expect(rows.length).toBe(2)
- expect(rows[0].balance).toEqual(new Bignum(2000))
- expect(rows[1].balance).toEqual(new Bignum(1000))
- })
-
- it('should be ok with legacy', () => {
- const { count, rows } = repository.top({}, true)
-
- expect(count).toBe(3)
- expect(rows.length).toBe(3)
- expect(rows[0].balance).toEqual(new Bignum(3000))
- expect(rows[1].balance).toEqual(new Bignum(2000))
- expect(rows[2].balance).toEqual(new Bignum(1000))
- })
- })
-
- describe('search', () => {
- const expectSearch = (params, rows = 1, count = 1) => {
- const wallets = repository.search(params)
- expect(wallets).toBeObject()
-
- expect(wallets).toHaveProperty('count')
- expect(wallets.count).toBeNumber()
- expect(wallets.count).toBe(count)
-
- expect(wallets).toHaveProperty('rows')
- expect(wallets.rows).toBeArray()
- expect(wallets.rows).not.toBeEmpty()
-
- expect(wallets.count).toBe(rows)
- }
-
- it('should be a function', () => {
- expect(repository.search).toBeFunction()
- })
-
- it('should search wallets by the specified address', () => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- expectSearch({ address: wallets[0].address })
- })
-
- it('should search wallets by the specified publicKey', () => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- expectSearch({ publicKey: wallets[0].publicKey })
- })
-
- it('should search wallets by the specified secondPublicKey', () => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- expectSearch({ secondPublicKey: wallets[0].secondPublicKey })
- })
-
- it('should search wallets by the specified vote', () => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- expectSearch({ vote: wallets[0].vote })
- })
-
- it('should search wallets by the specified username', () => {
- const wallets = generateFullWallets()
- walletManager.index(wallets)
-
- expectSearch({ username: wallets[0].username })
- })
-
- it('should search wallets by the specified closed inverval (included) of balance', () => {
- const wallets = generateFullWallets()
- wallets.forEach((wallet, i) => {
- if (i < 13) {
- wallet.balance = 53
- } else if (i < 36) {
- wallet.balance = 99
- }
- })
- walletManager.index(wallets)
-
- expectSearch(
- {
- balance: {
- from: 53,
- to: 99,
- },
- },
- 36,
- 36,
- )
- })
-
- it('should search wallets by the specified closed interval (included) of voteBalance', () => {
- const wallets = generateFullWallets()
- wallets.forEach((wallet, i) => {
- if (i < 17) {
- wallet.voteBalance = 12
- } else if (i < 29) {
- wallet.voteBalance = 17
- }
- })
- walletManager.index(wallets)
-
- expectSearch(
- {
- voteBalance: {
- from: 11,
- to: 18,
- },
- },
- 29,
- 29,
- )
- })
- })
-})
diff --git a/packages/core-database/__tests__/repositories/wallets.test.ts b/packages/core-database/__tests__/repositories/wallets.test.ts
new file mode 100644
index 0000000000..2ddea64513
--- /dev/null
+++ b/packages/core-database/__tests__/repositories/wallets.test.ts
@@ -0,0 +1,405 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { Bignum, crypto, models } from "@arkecosystem/crypto";
+import compact from "lodash/compact";
+import uniq from "lodash/uniq";
+import genesisBlockTestnet from "../../../core-test-utils/src/config/testnet/genesisBlock.json";
+import { setUp, tearDown } from "../__support__/setup";
+
+import { WalletsRepository } from "../../src";
+import { DatabaseService } from "../../src/database-service";
+
+const { Block, Wallet } = models;
+
+let genesisBlock;
+let genesisSenders;
+let repository;
+let walletManager: Database.IWalletManager;
+let databaseService: Database.IDatabaseService;
+
+beforeAll(async done => {
+ await setUp();
+
+ // Create the genesis block after the setup has finished or else it uses a potentially
+ // wrong network config.
+ genesisBlock = new Block(genesisBlockTestnet);
+ genesisSenders = uniq(compact(genesisBlock.transactions.map(tx => tx.senderPublicKey)));
+
+ done();
+});
+
+afterAll(async done => {
+ await tearDown();
+
+ done();
+});
+
+beforeEach(async done => {
+ const { WalletManager } = require("../../src/wallet-manager");
+ walletManager = new WalletManager();
+
+ repository = new WalletsRepository(() => databaseService);
+
+ databaseService = new DatabaseService(null, null, walletManager, repository, null);
+
+ done();
+});
+
+function generateWallets() {
+ return genesisSenders.map(senderPublicKey => ({
+ address: crypto.getAddress(senderPublicKey),
+ }));
+}
+
+function generateVotes() {
+ return genesisSenders.map(senderPublicKey => ({
+ address: crypto.getAddress(senderPublicKey),
+ vote: genesisBlock.transactions[0].senderPublicKey,
+ }));
+}
+
+function generateFullWallets() {
+ return genesisSenders.map(senderPublicKey => {
+ const address = crypto.getAddress(senderPublicKey);
+
+ return {
+ address,
+ publicKey: `publicKey-${address}`,
+ secondPublicKey: `secondPublicKey-${address}`,
+ vote: `vote-${address}`,
+ username: `username-${address}`,
+ balance: 100,
+ voteBalance: 200,
+ };
+ });
+}
+
+describe("Wallet Repository", () => {
+ describe("all", () => {
+ it("should return the local wallets of the connection", () => {
+ jest.spyOn(walletManager, 'allByAddress').mockReturnValue(null);
+
+ repository.all();
+
+ expect(walletManager.allByAddress).toHaveBeenCalled();
+ });
+ });
+
+ describe("findAll", () => {
+ it("should be ok without params", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll();
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(52);
+ });
+
+ it("should be ok with params", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 10, limit: 10 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (no offset)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ limit: 10 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (offset = 0)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 0, limit: 12 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(12);
+ });
+
+ it("should be ok with params (no limit)", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ const { count, rows } = repository.findAll({ offset: 10 });
+ expect(count).toBe(52);
+ expect(rows).toHaveLength(42);
+ });
+ });
+
+ describe("findAllByVote", () => {
+ const vote = "dummy-sender-public-key";
+
+ beforeEach(() => {
+ const wallets = generateVotes();
+ wallets.forEach((wallet, i) => {
+ if (i < 17) {
+ wallet.vote = vote;
+ }
+ });
+ walletManager.index(wallets);
+ });
+
+ it("should be ok without params", () => {
+ const { count, rows } = repository.findAllByVote(vote);
+ expect(count).toBe(17);
+ expect(rows).toHaveLength(17);
+ });
+
+ it("should be ok with params", () => {
+ const { count, rows } = repository.findAllByVote(vote, {
+ offset: 10,
+ limit: 10,
+ });
+ expect(count).toBe(17);
+ expect(rows).toHaveLength(7);
+ });
+
+ it("should be ok with params (no offset)", () => {
+ const { count, rows } = repository.findAllByVote(vote, { limit: 10 });
+ expect(count).toBe(17);
+ expect(rows).toHaveLength(10);
+ });
+
+ it("should be ok with params (offset = 0)", () => {
+ const { count, rows } = repository.findAllByVote(vote, {
+ offset: 0,
+ limit: 1,
+ });
+ expect(count).toBe(17);
+ expect(rows).toHaveLength(1);
+ });
+
+ it("should be ok with params (no limit)", () => {
+ const { count, rows } = repository.findAllByVote(vote, { offset: 30 });
+ expect(count).toBe(17);
+ expect(rows).toHaveLength(0);
+ });
+ });
+
+ describe("findById", () => {
+ const expectWallet = key => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ const wallet = repository.findById(wallets[0][key]);
+ expect(wallet).toBeObject();
+ expect(wallet.address).toBe(wallets[0].address);
+ expect(wallet.publicKey).toBe(wallets[0].publicKey);
+ expect(wallet.username).toBe(wallets[0].username);
+ };
+
+ it("should be ok with an address", () => {
+ expectWallet("address");
+ });
+
+ it("should be ok with a publicKey", () => {
+ expectWallet("publicKey");
+ });
+
+ it("should be ok with a username", () => {
+ expectWallet("username");
+ });
+ });
+
+ describe("count", () => {
+ it("should be ok", () => {
+ const wallets = generateWallets();
+ walletManager.index(wallets);
+
+ expect(repository.count()).toBe(52);
+ });
+ });
+
+ describe("top", () => {
+
+ beforeEach(() => {
+ [
+ { address: 'dummy-1', balance: new Bignum(1000) },
+ { address: 'dummy-2', balance: new Bignum(2000) },
+ { address: 'dummy-3', balance: new Bignum(3000) },
+ ].forEach(o => {
+ const wallet = new Wallet(o.address);
+ wallet.balance = o.balance;
+ walletManager.reindex(wallet);
+ });
+ });
+
+ it("should be ok without params", () => {
+ const { count, rows } = repository.top();
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(3);
+ expect(rows[0].balance).toEqual(new Bignum(3000));
+ expect(rows[1].balance).toEqual(new Bignum(2000));
+ expect(rows[2].balance).toEqual(new Bignum(1000));
+ });
+
+ it("should be ok with params", () => {
+ const { count, rows } = repository.top({ offset: 1, limit: 2 });
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(2);
+ expect(rows[0].balance).toEqual(new Bignum(2000));
+ expect(rows[1].balance).toEqual(new Bignum(1000));
+ });
+
+ it("should be ok with params (offset = 0)", () => {
+ const { count, rows } = repository.top({ offset: 0, limit: 2 });
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(2);
+ expect(rows[0].balance).toEqual(new Bignum(3000));
+ expect(rows[1].balance).toEqual(new Bignum(2000));
+ });
+
+ it("should be ok with params (no offset)", () => {
+ const { count, rows } = repository.top({ limit: 2 });
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(2);
+ expect(rows[0].balance).toEqual(new Bignum(3000));
+ expect(rows[1].balance).toEqual(new Bignum(2000));
+ });
+
+ it("should be ok with params (no limit)", () => {
+ const { count, rows } = repository.top({ offset: 1 });
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(2);
+ expect(rows[0].balance).toEqual(new Bignum(2000));
+ expect(rows[1].balance).toEqual(new Bignum(1000));
+ });
+
+ it("should be ok with legacy", () => {
+ const { count, rows } = repository.top({}, true);
+
+ expect(count).toBe(3);
+ expect(rows.length).toBe(3);
+ expect(rows[0].balance).toEqual(new Bignum(3000));
+ expect(rows[1].balance).toEqual(new Bignum(2000));
+ expect(rows[2].balance).toEqual(new Bignum(1000));
+ });
+ });
+
+ describe("search", () => {
+ const expectSearch = (params, rows = 1, count = 1) => {
+ const wallets = repository.search(params);
+ expect(wallets).toBeObject();
+
+ expect(wallets).toHaveProperty("count");
+ expect(wallets.count).toBeNumber();
+ expect(wallets.count).toBe(count);
+
+ expect(wallets).toHaveProperty("rows");
+ expect(wallets.rows).toBeArray();
+ expect(wallets.rows).not.toBeEmpty();
+
+ expect(wallets.count).toBe(rows);
+ };
+
+ it("should search wallets by the specified address", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ expectSearch({ address: wallets[0].address });
+ });
+
+ it("should search wallets by several addresses", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ const addresses = [wallets[1].address, wallets[3].address, wallets[9].address];
+ expectSearch({ addresses }, 3, 3);
+ });
+
+ describe("when searching by `address` and `addresses`", () => {
+ it("should search wallets only by `address`", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ const { address } = wallets[0];
+ const addresses = [wallets[1].address, wallets[3].address, wallets[9].address];
+ expectSearch({ address, addresses }, 1, 1);
+ });
+ });
+
+ it("should search wallets by the specified publicKey", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ expectSearch({ publicKey: wallets[0].publicKey });
+ });
+
+ it("should search wallets by the specified secondPublicKey", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ expectSearch({ secondPublicKey: wallets[0].secondPublicKey });
+ });
+
+ it("should search wallets by the specified vote", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ expectSearch({ vote: wallets[0].vote });
+ });
+
+ it("should search wallets by the specified username", () => {
+ const wallets = generateFullWallets();
+ walletManager.index(wallets);
+
+ expectSearch({ username: wallets[0].username });
+ });
+
+ it("should search wallets by the specified closed inverval (included) of balance", () => {
+ const wallets = generateFullWallets();
+ wallets.forEach((wallet, i) => {
+ if (i < 13) {
+ wallet.balance = 53;
+ } else if (i < 36) {
+ wallet.balance = 99;
+ }
+ });
+ walletManager.index(wallets);
+
+ expectSearch(
+ {
+ balance: {
+ from: 53,
+ to: 99,
+ },
+ },
+ 36,
+ 36,
+ );
+ });
+
+ it("should search wallets by the specified closed interval (included) of voteBalance", () => {
+ const wallets = generateFullWallets();
+ wallets.forEach((wallet, i) => {
+ if (i < 17) {
+ wallet.voteBalance = 12;
+ } else if (i < 29) {
+ wallet.voteBalance = 17;
+ }
+ });
+ walletManager.index(wallets);
+
+ expectSearch(
+ {
+ voteBalance: {
+ from: 11,
+ to: 18,
+ },
+ },
+ 29,
+ 29,
+ );
+ });
+ });
+});
diff --git a/packages/core-database/__tests__/wallet-manager.test.js b/packages/core-database/__tests__/wallet-manager.test.js
deleted file mode 100644
index b033e223ed..0000000000
--- a/packages/core-database/__tests__/wallet-manager.test.js
+++ /dev/null
@@ -1,551 +0,0 @@
-/* eslint max-len: "off" */
-
-const { Block, Transaction, Wallet } = require('@arkecosystem/crypto').models
-const { Bignum, crypto, transactionBuilder } = require('@arkecosystem/crypto')
-const {
- ARKTOSHI,
- TRANSACTION_TYPES,
-} = require('@arkecosystem/crypto').constants
-
-const genTransfer = require('@arkecosystem/core-test-utils/lib/generators/transactions/transfer')
-const genDelegateReg = require('@arkecosystem/core-test-utils/lib/generators/transactions/delegate')
-const gen2ndSignature = require('@arkecosystem/core-test-utils/lib/generators/transactions/signature')
-const genvote = require('@arkecosystem/core-test-utils/lib/generators/transactions/vote')
-const block3 = require('@arkecosystem/core-test-utils/fixtures/testnet/blocks.2-100')[1]
-const app = require('./__support__/setup')
-
-const block = new Block(block3)
-const walletData1 = require('./__fixtures__/wallets.json')[0]
-const walletData2 = require('./__fixtures__/wallets.json')[1]
-
-let genesisBlock // eslint-disable-line no-unused-vars
-let walletManager
-
-beforeAll(async done => {
- await app.setUp()
-
- // Create the genesis block after the setup has finished or else it uses a potentially
- // wrong network config.
- genesisBlock = new Block(
- require('@arkecosystem/core-test-utils/config/testnet/genesisBlock.json'),
- )
-
- walletManager = new (require('../lib/wallet-manager'))()
-
- done()
-})
-
-beforeEach(() => {
- walletManager = new (require('../lib/wallet-manager'))()
-})
-
-afterAll(async done => {
- await app.tearDown()
-
- done()
-})
-
-describe('Wallet Manager', () => {
- it('should be an object', () => {
- expect(walletManager).toBeObject()
- })
-
- describe('reset', () => {
- it('should be a function', () => {
- expect(walletManager.reset).toBeFunction()
- })
-
- it('should reset the index', () => {
- const wallet = new Wallet(walletData1.address)
-
- walletManager.reindex(wallet)
- expect(walletManager.all()).toEqual([wallet])
-
- walletManager.reset()
- expect(walletManager.all()).toEqual([])
- })
- })
-
- describe('reindex', () => {
- it('should be a function', () => {
- expect(walletManager.reindex).toBeFunction()
- })
-
- it('should index the wallets', () => {
- const wallet = new Wallet(walletData1.address)
-
- expect(walletManager.all()).toEqual([])
-
- walletManager.reindex(wallet)
- expect(walletManager.all()).toEqual([wallet])
- })
- })
-
- describe('applyBlock', () => {
- let delegateMock
- let block2
-
- const delegatePublicKey = block3.generatorPublicKey // '0299deebff24ebf2bb53ad78f3ea3ada5b3c8819132e191b02c263ee4aa4af3d9b'
-
- const txs = []
- for (let i = 0; i < 3; i++) {
- txs[i] = transactionBuilder
- .vote()
- .sign(Math.random().toString(36))
- .votesAsset([`+${delegatePublicKey}`])
- .build()
- }
-
- beforeEach(() => {
- delegateMock = { applyBlock: jest.fn(), publicKey: delegatePublicKey }
- walletManager.findByPublicKey = jest.fn(() => delegateMock)
- walletManager.applyTransaction = jest.fn()
- walletManager.revertTransaction = jest.fn()
-
- const { data } = block
- data.transactions = []
- data.transactions.push(txs[0])
- data.transactions.push(txs[1])
- data.transactions.push(txs[2])
- block2 = new Block(data)
-
- walletManager.reindex(delegateMock)
- })
-
- it('should be a function', () => {
- expect(walletManager.applyBlock).toBeFunction()
- })
-
- it('should apply sequentially the transactions of the block', async () => {
- await walletManager.applyBlock(block2)
-
- block2.transactions.forEach((transaction, i) => {
- expect(walletManager.applyTransaction.mock.calls[i][0]).toBe(
- block2.transactions[i],
- )
- })
- })
-
- it('should apply the block data to the delegate', async () => {
- await walletManager.applyBlock(block)
-
- expect(delegateMock.applyBlock).toHaveBeenCalledWith(block.data)
- })
-
- describe('when 1 transaction fails while applying it', () => {
- it('should revert sequentially (from last to first) all the transactions of the block', async () => {
- walletManager.applyTransaction = jest.fn(transaction => {
- if (transaction === block2.transactions[2]) {
- throw new Error('Fake error')
- }
- })
-
- expect(block2.transactions.length).toBe(3)
-
- try {
- await walletManager.applyBlock(block2)
-
- expect(null).toBe('this should fail if no error is thrown')
- } catch (_error) {
- expect(walletManager.revertTransaction).toHaveBeenCalledTimes(2)
- block2.transactions.slice(0, 1).forEach((transaction, i) => {
- expect(
- walletManager.revertTransaction.mock.calls[1 - i][0],
- ).toEqual(block2.transactions[i])
- })
- }
- })
-
- it('throws the Error', async () => {
- walletManager.applyTransaction = jest.fn(transaction => {
- throw new Error('Fake error')
- })
-
- try {
- await walletManager.applyBlock(block2)
-
- expect(null).toBe('this should fail if no error is thrown')
- } catch (error) {
- expect(error).toBeInstanceOf(Error)
- expect(error.message).toBe('Fake error')
- }
- })
- })
-
- describe.skip('the delegate of the block is not indexed', () => {
- describe('not genesis block', () => {
- it('throw an Error', () => {})
- })
-
- describe('genesis block', () => {
- it('generates a new wallet', () => {})
- })
- })
- })
-
- describe.skip('revertBlock', () => {
- it('should be a function', () => {
- expect(walletManager.revertBlock).toBeFunction()
- })
-
- it('should revert all transactions of the block', () => {})
-
- it('should revert the block of the delegate', () => {})
- })
-
- describe('applyTransaction', () => {
- it('should be a function', () => {
- expect(walletManager.applyTransaction).toBeFunction()
- })
-
- describe('when the recipient is a cold wallet', () => {})
-
- const transfer = genTransfer(
- 'testnet',
- Math.random().toString(36),
- walletData2.address,
- 96579,
- 1,
- )[0]
- const delegateReg = genDelegateReg(
- 'testnet',
- Math.random().toString(36),
- 1,
- )[0]
- const secondSign = gen2ndSignature(
- 'testnet',
- Math.random().toString(36),
- 1,
- )[0]
- const vote = genvote(
- 'testnet',
- Math.random().toString(36),
- walletData2.publicKey,
- 1,
- )[0]
- describe.each`
- type | transaction | amount | balanceSuccess | balanceFail
- ${'transfer'} | ${transfer} | ${new Bignum(96579)} | ${new Bignum(1 * ARKTOSHI)} | ${Bignum.ONE}
- ${'delegate'} | ${delegateReg} | ${Bignum.ZERO} | ${new Bignum(30 * ARKTOSHI)} | ${Bignum.ONE}
- ${'2nd sign'} | ${secondSign} | ${Bignum.ZERO} | ${new Bignum(10 * ARKTOSHI)} | ${Bignum.ONE}
- ${'vote'} | ${vote} | ${Bignum.ZERO} | ${new Bignum(5 * ARKTOSHI)} | ${Bignum.ONE}
- `(
- 'when the transaction is a $type',
- ({ type, transaction, amount, balanceSuccess, balanceFail }) => {
- let sender
- let recipient
-
- beforeEach(() => {
- sender = new Wallet(walletData1.address)
- recipient = new Wallet(walletData2.address)
- recipient.publicKey = walletData2.publicKey
-
- sender.publicKey = transaction.senderPublicKey
-
- walletManager.reindex(sender)
- walletManager.reindex(recipient)
-
- walletManager.__isDelegate = jest.fn(() => true) // for vote transaction
- })
-
- it('should apply the transaction to the sender & recipient', async () => {
- sender.balance = balanceSuccess
-
- expect(+sender.balance.toFixed()).toBe(+balanceSuccess)
- expect(+recipient.balance.toFixed()).toBe(0)
-
- await walletManager.applyTransaction(transaction)
-
- expect(sender.balance).toEqual(
- balanceSuccess.minus(amount).minus(transaction.fee),
- )
-
- if (type === 'transfer') {
- expect(recipient.balance).toEqual(amount)
- }
- })
-
- it('should fail if the transaction cannot be applied', async () => {
- sender.balance = balanceFail
-
- expect(+sender.balance.toFixed()).toBe(+balanceFail)
- expect(+recipient.balance.toFixed()).toBe(0)
-
- try {
- expect(async () => {
- await walletManager.applyTransaction(transaction)
- }).toThrow(/apply transaction/)
-
- expect(null).toBe('this should fail if no error is thrown')
- } catch (error) {
- expect(+sender.balance.toFixed()).toBe(+balanceFail)
- expect(+recipient.balance.toFixed()).toBe(0)
- }
- })
- },
- )
- })
-
- describe('revertTransaction', () => {
- it('should be a function', () => {
- expect(walletManager.revertTransaction).toBeFunction()
- })
-
- it('should revert the transaction from the sender & recipient', async () => {
- const transaction = new Transaction({
- type: TRANSACTION_TYPES.TRANSFER,
- amount: 245098000000000,
- fee: 0,
- recipientId: 'AHXtmB84sTZ9Zd35h9Y1vfFvPE2Xzqj8ri',
- timestamp: 0,
- asset: {},
- senderPublicKey:
- '035b63b4668ee261c16ca91443f3371e2fe349e131cb7bf5f8a3e93a3ddfdfc788',
- signature:
- '304402205fcb0677e06bde7aac3dc776665615f4b93ef8c3ed0fddecef9900e74fcb00f302206958a0c9868ea1b1f3d151bdfa92da1ce24de0b1fcd91933e64fb7971e92f48d',
- id: 'db1aa687737858cc9199bfa336f9b1c035915c30aaee60b1e0f8afadfdb946bd',
- senderId: 'APnhwwyTbMiykJwYbGhYjNgtHiVJDSEhSn',
- })
-
- const sender = walletManager.findByPublicKey(
- transaction.data.senderPublicKey,
- )
- const recipient = walletManager.findByAddress(
- transaction.data.recipientId,
- )
- recipient.balance = transaction.data.amount
-
- expect(sender.balance).toEqual(Bignum.ZERO)
- expect(recipient.balance).toEqual(transaction.data.amount)
-
- await walletManager.revertTransaction(transaction)
-
- expect(sender.balance).toEqual(transaction.data.amount)
- expect(recipient.balance).toEqual(Bignum.ZERO)
- })
- })
-
- describe('findByAddress', () => {
- it('should be a function', () => {
- expect(walletManager.findByAddress).toBeFunction()
- })
-
- it('should index it by address', () => {
- const wallet = new Wallet(walletData1.address)
-
- walletManager.reindex(wallet)
- expect(walletManager.byAddress[wallet.address]).toBe(wallet)
- })
-
- it('should return it by address', () => {
- const wallet = new Wallet(walletData1.address)
-
- walletManager.reindex(wallet)
- expect(walletManager.findByAddress(wallet.address).address).toBe(
- wallet.address,
- )
- })
- })
-
- describe('findByPublicKey', () => {
- it('should be a function', () => {
- expect(walletManager.findByPublicKey).toBeFunction()
- })
-
- it('should index it by publicKey', () => {
- const wallet = new Wallet(walletData1.address)
- wallet.publicKey = walletData1.publicKey
-
- walletManager.reindex(wallet)
- expect(walletManager.byPublicKey[wallet.publicKey]).toBe(wallet)
- })
-
- it('should return it by publicKey', () => {
- const wallet = new Wallet(walletData1.address)
- wallet.publicKey = 'dummy-public-key'
-
- walletManager.reindex(wallet)
- expect(walletManager.findByPublicKey(wallet.publicKey).publicKey).toBe(
- wallet.publicKey,
- )
- })
- })
-
- describe('findByUsername', () => {
- it('should be a function', () => {
- expect(walletManager.findByUsername).toBeFunction()
- })
-
- it('should index it by username', () => {
- const wallet = new Wallet(walletData1.address)
- wallet.username = 'dummy-username'
-
- walletManager.reindex(wallet)
- expect(walletManager.byUsername[wallet.username]).toBe(wallet)
- })
-
- it('should return it by username', () => {
- const wallet = new Wallet(walletData1.address)
- wallet.username = 'dummy-username'
-
- walletManager.reindex(wallet)
- expect(walletManager.findByUsername(wallet.username).username).toBe(
- wallet.username,
- )
- })
- })
-
- describe('all', () => {
- it('should be a function', () => {
- expect(walletManager.all).toBeFunction()
- })
-
- it('should return indexed', () => {
- const wallet1 = new Wallet(walletData1.address)
- walletManager.reindex(wallet1)
-
- const wallet2 = new Wallet(walletData2.address)
- walletManager.reindex(wallet2)
-
- expect(walletManager.all()).toEqual([wallet1, wallet2])
- })
- })
-
- describe('__canBePurged', () => {
- it('should be removed if all criteria are satisfied', async () => {
- const wallet = new Wallet(walletData1.address)
-
- expect(walletManager.__canBePurged(wallet)).toBeTrue()
- })
-
- it('should not be removed if wallet.secondPublicKey is set', async () => {
- const wallet = new Wallet(walletData1.address)
- wallet.secondPublicKey = 'secondPublicKey'
-
- expect(wallet.secondPublicKey).toBe('secondPublicKey')
- expect(walletManager.__canBePurged(wallet)).toBeFalse()
- })
-
- it('should not be removed if wallet.multisignature is set', async () => {
- const wallet = new Wallet(walletData1.address)
- wallet.multisignature = 'multisignature'
-
- expect(wallet.multisignature).toBe('multisignature')
- expect(walletManager.__canBePurged(wallet)).toBeFalse()
- })
-
- it('should not be removed if wallet.username is set', async () => {
- const wallet = new Wallet(walletData1.address)
- wallet.username = 'username'
-
- expect(wallet.username).toBe('username')
- expect(walletManager.__canBePurged(wallet)).toBeFalse()
- })
- })
-
- describe('purgeEmptyNonDelegates', () => {
- it('should be a function', () => {
- expect(walletManager.purgeEmptyNonDelegates).toBeFunction()
- })
-
- it('should be purged if all criteria are satisfied', async () => {
- const wallet1 = new Wallet(walletData1.address)
- wallet1.publicKey = 'dummy-1-publicKey'
- walletManager.reindex(wallet1)
-
- const wallet2 = new Wallet(walletData2.address)
- wallet2.username = 'username'
-
- walletManager.reindex(wallet2)
-
- walletManager.purgeEmptyNonDelegates()
-
- expect(walletManager.all()).toEqual([wallet2])
- })
-
- it('should not be purged if wallet.secondPublicKey is set', async () => {
- const wallet1 = new Wallet(walletData1.address)
- wallet1.publicKey = 'dummy-1-publicKey'
- wallet1.secondPublicKey = 'dummy-1-secondPublicKey'
- walletManager.reindex(wallet1)
-
- const wallet2 = new Wallet(walletData2.address)
- wallet2.username = 'username'
-
- walletManager.reindex(wallet2)
-
- walletManager.purgeEmptyNonDelegates()
-
- expect(walletManager.all()).toEqual([wallet1, wallet2])
- })
-
- it('should not be purged if wallet.multisignature is set', async () => {
- const wallet1 = new Wallet(walletData1.address)
- wallet1.publicKey = 'dummy-1-publicKey'
- wallet1.multisignature = 'dummy-1-multisignature'
- walletManager.reindex(wallet1)
-
- const wallet2 = new Wallet(walletData2.address)
- wallet2.username = 'username'
-
- walletManager.reindex(wallet2)
-
- walletManager.purgeEmptyNonDelegates()
-
- expect(walletManager.all()).toEqual([wallet1, wallet2])
- })
-
- it('should not be purged if wallet.username is set', async () => {
- const wallet1 = new Wallet(walletData1.address)
- wallet1.publicKey = 'dummy-1-publicKey'
- wallet1.username = 'dummy-1-username'
- walletManager.reindex(wallet1)
-
- const wallet2 = new Wallet(walletData2.address)
- wallet2.username = 'username'
-
- walletManager.reindex(wallet2)
-
- walletManager.purgeEmptyNonDelegates()
-
- expect(walletManager.all()).toEqual([wallet1, wallet2])
- })
- })
-
- describe('buildVoteBalances', () => {
- it('should be a function', () => {
- expect(walletManager.buildVoteBalances).toBeFunction()
- })
-
- it('should update vote balance of delegates', async () => {
- for (let i = 0; i < 5; i++) {
- const delegateKey = i.toString().repeat(66)
- const delegate = {
- address: crypto.getAddress(delegateKey),
- publicKey: delegateKey,
- username: `delegate${i}`,
- voteBalance: Bignum.ZERO,
- }
-
- const voter = {
- address: crypto.getAddress((i + 5).toString().repeat(66)),
- balance: new Bignum((i + 1) * 1000 * ARKTOSHI),
- publicKey: `v${delegateKey}`,
- vote: delegateKey,
- }
-
- walletManager.index([delegate, voter])
- }
-
- walletManager.buildVoteBalances()
-
- const delegates = walletManager.allByUsername()
- for (let i = 0; i < 5; i++) {
- const delegate = delegates[4 - i]
- expect(delegate.voteBalance).toEqual(
- new Bignum((5 - i) * 1000 * ARKTOSHI),
- )
- }
- })
- })
-})
diff --git a/packages/core-database/__tests__/wallet-manager.test.ts b/packages/core-database/__tests__/wallet-manager.test.ts
new file mode 100644
index 0000000000..b28f908cc9
--- /dev/null
+++ b/packages/core-database/__tests__/wallet-manager.test.ts
@@ -0,0 +1,430 @@
+/* tslint:disable:max-line-length no-empty */
+import { Database } from "@arkecosystem/core-interfaces";
+import { fixtures, generators } from "@arkecosystem/core-test-utils";
+import { Bignum, constants, crypto, models, transactionBuilder } from "@arkecosystem/crypto";
+import { IMultiSignatureAsset } from "@arkecosystem/crypto/dist/models";
+import genesisBlockTestnet from "../../core-test-utils/src/config/testnet/genesisBlock.json";
+import wallets from "./__fixtures__/wallets.json";
+import { setUp, tearDown } from "./__support__/setup";
+
+const { Block, Transaction, Wallet } = models;
+const { ARKTOSHI, TransactionTypes } = constants;
+
+const { generateDelegateRegistration, generateSecondSignature, generateTransfers, generateVote } = generators;
+
+const block3 = fixtures.blocks2to100[1];
+const block = new Block(block3);
+
+const walletData1 = wallets[0];
+const walletData2 = wallets[1];
+
+let genesisBlock;
+let walletManager : Database.IWalletManager;
+
+beforeAll(async done => {
+ await setUp();
+
+ // Create the genesis block after the setup has finished or else it uses a potentially
+ // wrong network config.
+ genesisBlock = new Block(genesisBlockTestnet);
+
+ const { WalletManager } = require("../src/wallet-manager");
+ walletManager = new WalletManager();
+
+ done();
+});
+
+beforeEach(() => {
+ const { WalletManager } = require("../src/wallet-manager");
+ walletManager = new WalletManager();
+});
+
+afterAll(async done => {
+ await tearDown();
+
+ done();
+});
+
+describe("Wallet Manager", () => {
+ describe("reset", () => {
+ it("should reset the index", () => {
+ const wallet = new Wallet(walletData1.address);
+
+ walletManager.reindex(wallet);
+ expect(walletManager.allByAddress()).toEqual([wallet]);
+
+ walletManager.reset();
+ expect(walletManager.allByAddress()).toEqual([]);
+ });
+ });
+
+ describe("reindex", () => {
+ it("should index the wallets", () => {
+ const wallet = new Wallet(walletData1.address);
+
+ expect(walletManager.allByAddress()).toEqual([]);
+
+ walletManager.reindex(wallet);
+ expect(walletManager.allByAddress()).toEqual([wallet]);
+ });
+ });
+
+ describe("applyBlock", () => {
+ let delegateMock;
+ let block2;
+
+ const delegatePublicKey = block3.generatorPublicKey; // '0299deebff24ebf2bb53ad78f3ea3ada5b3c8819132e191b02c263ee4aa4af3d9b'
+
+ const txs = [];
+ for (let i = 0; i < 3; i++) {
+ txs[i] = transactionBuilder
+ .vote()
+ .sign(Math.random().toString(36))
+ .votesAsset([`+${delegatePublicKey}`])
+ .build();
+ }
+
+ beforeEach(() => {
+ delegateMock = { applyBlock: jest.fn(), publicKey: delegatePublicKey };
+ jest.spyOn(walletManager, 'findByPublicKey').mockReturnValue(delegateMock);
+ jest.spyOn(walletManager, 'applyTransaction').mockImplementation();
+ jest.spyOn(walletManager, 'revertTransaction').mockImplementation();
+
+ const { data } = block;
+ data.transactions = [];
+ data.transactions.push(txs[0]);
+ data.transactions.push(txs[1]);
+ data.transactions.push(txs[2]);
+ block2 = new Block(data);
+
+ walletManager.reindex(delegateMock);
+ });
+
+ it("should apply sequentially the transactions of the block", async () => {
+ await walletManager.applyBlock(block2);
+
+ block2.transactions.forEach((transaction, i) => {
+ expect(walletManager.applyTransaction).toHaveBeenNthCalledWith(i+1, block2.transactions[i])
+ });
+ });
+
+ it("should apply the block data to the delegate", async () => {
+ await walletManager.applyBlock(block);
+
+ expect(delegateMock.applyBlock).toHaveBeenCalledWith(block.data);
+ });
+
+ describe("when 1 transaction fails while applying it", () => {
+ it("should revert sequentially (from last to first) all the transactions of the block", async () => {
+ jest.spyOn(walletManager, 'applyTransaction').mockImplementation( (tx) => {
+ if (tx === block2.transactions[2]) {
+ throw new Error("Fake error");
+ }
+ });
+
+ expect(block2.transactions.length).toBe(3);
+
+ try {
+ await walletManager.applyBlock(block2);
+
+ expect(null).toBe("this should fail if no error is thrown");
+ } catch (error) {
+ expect(walletManager.revertTransaction).toHaveBeenCalledTimes(2);
+ block2.transactions.slice(0, 1).forEach((transaction, i, total) => {
+ expect(walletManager.revertTransaction).toHaveBeenNthCalledWith(total.length+1 - i, block2.transactions[i]);
+ });
+ }
+ });
+
+ it("throws the Error", async () => {
+ walletManager.applyTransaction = jest.fn(tx => {
+ throw new Error("Fake error");
+ });
+
+ try {
+ await walletManager.applyBlock(block2);
+
+ expect(null).toBe("this should fail if no error is thrown");
+ } catch (error) {
+ expect(error).toBeInstanceOf(Error);
+ expect(error.message).toBe("Fake error");
+ }
+ });
+ });
+
+ describe.skip("the delegate of the block is not indexed", () => {
+ describe("not genesis block", () => {
+ it("throw an Error", () => {});
+ });
+
+ describe("genesis block", () => {
+ it("generates a new wallet", () => {});
+ });
+ });
+ });
+
+ describe.skip("revertBlock", () => {
+ it("should revert all transactions of the block", () => {});
+
+ it("should revert the block of the delegate", () => {});
+ });
+
+ describe("applyTransaction", () => {
+ describe("when the recipient is a cold wallet", () => {});
+
+ const transfer = generateTransfers("testnet", Math.random().toString(36), walletData2.address, 96579, 1)[0];
+ const delegateReg = generateDelegateRegistration("testnet", Math.random().toString(36), 1)[0];
+ const secondSign = generateSecondSignature("testnet", Math.random().toString(36), 1)[0];
+ const vote = generateVote("testnet", Math.random().toString(36), walletData2.publicKey, 1)[0];
+ describe.each`
+ type | transaction | amount | balanceSuccess | balanceFail
+ ${"transfer"} | ${transfer} | ${new Bignum(96579)} | ${new Bignum(ARKTOSHI)} | ${Bignum.ONE}
+ ${"delegate"} | ${delegateReg} | ${Bignum.ZERO} | ${new Bignum(30 * ARKTOSHI)} | ${Bignum.ONE}
+ ${"2nd sign"} | ${secondSign} | ${Bignum.ZERO} | ${new Bignum(10 * ARKTOSHI)} | ${Bignum.ONE}
+ ${"vote"} | ${vote} | ${Bignum.ZERO} | ${new Bignum(5 * ARKTOSHI)} | ${Bignum.ONE}
+ `("when the transaction is a $type", ({ type, transaction, amount, balanceSuccess, balanceFail }) => {
+ let sender;
+ let recipient;
+
+ beforeEach(() => {
+ sender = new Wallet(walletData1.address);
+ recipient = new Wallet(walletData2.address);
+ recipient.publicKey = walletData2.publicKey;
+
+ sender.publicKey = transaction.senderPublicKey;
+
+ walletManager.reindex(sender);
+ walletManager.reindex(recipient);
+
+ jest.spyOn(walletManager, 'isDelegate').mockReturnValue(true);
+ });
+
+ it("should apply the transaction to the sender & recipient", async () => {
+ sender.balance = balanceSuccess;
+
+ expect(+sender.balance.toFixed()).toBe(+balanceSuccess);
+ expect(+recipient.balance.toFixed()).toBe(0);
+
+ await walletManager.applyTransaction(transaction);
+
+ expect(sender.balance).toEqual(balanceSuccess.minus(amount).minus(transaction.fee));
+
+ if (type === "transfer") {
+ expect(recipient.balance).toEqual(amount);
+ }
+ });
+
+ it("should fail if the transaction cannot be applied", async () => {
+ sender.balance = balanceFail;
+
+ expect(+sender.balance.toFixed()).toBe(+balanceFail);
+ expect(+recipient.balance.toFixed()).toBe(0);
+
+ try {
+ expect(async () => {
+ await walletManager.applyTransaction(transaction);
+ }).toThrow(/apply transaction/);
+
+ expect(null).toBe("this should fail if no error is thrown");
+ } catch (error) {
+ expect(+sender.balance.toFixed()).toBe(+balanceFail);
+ expect(+recipient.balance.toFixed()).toBe(0);
+ }
+ });
+ });
+ });
+
+ describe("revertTransaction", () => {
+ it("should revert the transaction from the sender & recipient", async () => {
+ const transaction = new Transaction({
+ type: TransactionTypes.Transfer,
+ amount: new Bignum(245098000000000),
+ fee: 0,
+ recipientId: "AHXtmB84sTZ9Zd35h9Y1vfFvPE2Xzqj8ri",
+ timestamp: 0,
+ asset: {},
+ senderPublicKey: "035b63b4668ee261c16ca91443f3371e2fe349e131cb7bf5f8a3e93a3ddfdfc788",
+ signature:
+ "304402205fcb0677e06bde7aac3dc776665615f4b93ef8c3ed0fddecef9900e74fcb00f302206958a0c9868ea1b1f3d151bdfa92da1ce24de0b1fcd91933e64fb7971e92f48d",
+ id: "db1aa687737858cc9199bfa336f9b1c035915c30aaee60b1e0f8afadfdb946bd",
+ });
+
+ const sender = walletManager.findByPublicKey(transaction.data.senderPublicKey);
+ const recipient = walletManager.findByAddress(transaction.data.recipientId);
+ recipient.balance = new Bignum(transaction.data.amount);
+
+ expect(sender.balance).toEqual(Bignum.ZERO);
+ expect(recipient.balance).toEqual(transaction.data.amount);
+
+ await walletManager.revertTransaction(transaction);
+
+ expect(sender.balance).toEqual(transaction.data.amount);
+ expect(recipient.balance).toEqual(Bignum.ZERO);
+ });
+ });
+
+ describe("findByAddress", () => {
+ it("should return it by address", () => {
+ const wallet = new Wallet(walletData1.address);
+
+ walletManager.reindex(wallet);
+ expect(walletManager.findByAddress(wallet.address).address).toBe(wallet.address);
+ });
+ });
+
+ describe("findByPublicKey", () => {
+ it("should return it by publicKey", () => {
+ const wallet = new Wallet(walletData1.address);
+ wallet.publicKey = "dummy-public-key";
+
+ walletManager.reindex(wallet);
+ expect(walletManager.findByPublicKey(wallet.publicKey).publicKey).toBe(wallet.publicKey);
+ });
+ });
+
+ describe("findByUsername", () => {
+ it("should return it by username", () => {
+ const wallet = new Wallet(walletData1.address);
+ wallet.username = "dummy-username";
+
+ walletManager.reindex(wallet);
+ expect(walletManager.findByUsername(wallet.username).username).toBe(wallet.username);
+ });
+ });
+
+ describe("all", () => {
+ it("should return indexed", () => {
+ const wallet1 = new Wallet(walletData1.address);
+ walletManager.reindex(wallet1);
+
+ const wallet2 = new Wallet(walletData2.address);
+ walletManager.reindex(wallet2);
+
+ expect(walletManager.allByAddress()).toEqual([wallet1, wallet2]);
+ });
+ });
+
+ describe("canBePurged", () => {
+ it("should be removed if all criteria are satisfied", async () => {
+ const wallet = new Wallet(walletData1.address);
+
+ expect(walletManager.canBePurged(wallet)).toBeTrue();
+ });
+
+ it("should not be removed if wallet.secondPublicKey is set", async () => {
+ const wallet = new Wallet(walletData1.address);
+ wallet.secondPublicKey = "secondPublicKey";
+
+ expect(wallet.secondPublicKey).toBe("secondPublicKey");
+ expect(walletManager.canBePurged(wallet)).toBeFalse();
+ });
+
+ it("should not be removed if wallet.multisignature is set", async () => {
+ const wallet = new Wallet(walletData1.address);
+ wallet.multisignature = {} as IMultiSignatureAsset;
+
+ expect(wallet.multisignature).toEqual({});
+ expect(walletManager.canBePurged(wallet)).toBeFalse();
+ });
+
+ it("should not be removed if wallet.username is set", async () => {
+ const wallet = new Wallet(walletData1.address);
+ wallet.username = "username";
+
+ expect(wallet.username).toBe("username");
+ expect(walletManager.canBePurged(wallet)).toBeFalse();
+ });
+ });
+
+ describe("purgeEmptyNonDelegates", () => {
+ it("should be purged if all criteria are satisfied", async () => {
+ const wallet1 = new Wallet(walletData1.address);
+ wallet1.publicKey = "dummy-1-publicKey";
+ walletManager.reindex(wallet1);
+
+ const wallet2 = new Wallet(walletData2.address);
+ wallet2.username = "username";
+
+ walletManager.reindex(wallet2);
+
+ walletManager.purgeEmptyNonDelegates();
+
+ expect(walletManager.allByAddress()).toEqual([wallet2]);
+ });
+
+ it("should not be purged if wallet.secondPublicKey is set", async () => {
+ const wallet1 = new Wallet(walletData1.address);
+ wallet1.publicKey = "dummy-1-publicKey";
+ wallet1.secondPublicKey = "dummy-1-secondPublicKey";
+ walletManager.reindex(wallet1);
+
+ const wallet2 = new Wallet(walletData2.address);
+ wallet2.username = "username";
+
+ walletManager.reindex(wallet2);
+
+ walletManager.purgeEmptyNonDelegates();
+
+ expect(walletManager.allByAddress()).toEqual([wallet1, wallet2]);
+ });
+
+ it("should not be purged if wallet.multisignature is set", async () => {
+ const wallet1 = new Wallet(walletData1.address);
+ wallet1.publicKey = "dummy-1-publicKey";
+ wallet1.multisignature = {} as IMultiSignatureAsset;
+ walletManager.reindex(wallet1);
+
+ const wallet2 = new Wallet(walletData2.address);
+ wallet2.username = "username";
+
+ walletManager.reindex(wallet2);
+
+ walletManager.purgeEmptyNonDelegates();
+
+ expect(walletManager.allByAddress()).toEqual([wallet1, wallet2]);
+ });
+
+ it("should not be purged if wallet.username is set", async () => {
+ const wallet1 = new Wallet(walletData1.address);
+ wallet1.publicKey = "dummy-1-publicKey";
+ wallet1.username = "dummy-1-username";
+ walletManager.reindex(wallet1);
+
+ const wallet2 = new Wallet(walletData2.address);
+ wallet2.username = "username";
+
+ walletManager.reindex(wallet2);
+
+ walletManager.purgeEmptyNonDelegates();
+
+ expect(walletManager.allByAddress()).toEqual([wallet1, wallet2]);
+ });
+ });
+
+ describe("buildVoteBalances", () => {
+ it("should update vote balance of delegates", async () => {
+ for (let i = 0; i < 5; i++) {
+ const delegateKey = i.toString().repeat(66);
+ const delegate = new Wallet(crypto.getAddress(delegateKey));
+ delegate.publicKey = delegateKey;
+ delegate.username = `delegate${i}`;
+ delegate.voteBalance = Bignum.ZERO;
+
+ const voter = new Wallet(crypto.getAddress((i + 5).toString().repeat(66)));
+ voter.balance = new Bignum((i + 1) * 1000 * ARKTOSHI);
+ voter.publicKey = `v${delegateKey}`;
+ voter.vote = delegateKey;
+
+ walletManager.index([delegate, voter]);
+ }
+
+ walletManager.buildVoteBalances();
+
+ const delegates = walletManager.allByUsername();
+ for (let i = 0; i < 5; i++) {
+ const delegate = delegates[4 - i];
+ expect(delegate.voteBalance).toEqual(new Bignum((5 - i) * 1000 * ARKTOSHI));
+ }
+ });
+ });
+});
diff --git a/packages/core-database/jest.config.js b/packages/core-database/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-database/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-database/lib/defaults.js b/packages/core-database/lib/defaults.js
deleted file mode 100644
index 09837607a6..0000000000
--- a/packages/core-database/lib/defaults.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = {
- snapshots: `${process.env.ARK_PATH_DATA}/snapshots/${
- process.env.ARK_NETWORK_NAME
- }`,
-}
diff --git a/packages/core-database/lib/index.js b/packages/core-database/lib/index.js
deleted file mode 100644
index e2bb02bb2c..0000000000
--- a/packages/core-database/lib/index.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const databaseManager = require('./manager')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'databaseManager',
- async register(container, options) {
- container.resolvePlugin('logger').info('Starting Database Manager')
-
- return databaseManager
- },
-}
-
-/**
- * The interface used by concrete implementations.
- * @type {ConnectionInterface}
- */
-exports.ConnectionInterface = require('./interface')
-
-/**
- * The Wallet Manager.
- * @type {WalletManager}
- */
-exports.WalletManager = require('./wallet-manager')
diff --git a/packages/core-database/lib/interface.js b/packages/core-database/lib/interface.js
deleted file mode 100644
index b4980fe3d6..0000000000
--- a/packages/core-database/lib/interface.js
+++ /dev/null
@@ -1,596 +0,0 @@
-const { crypto, slots } = require('@arkecosystem/crypto')
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const logger = app.resolvePlugin('logger')
-const emitter = app.resolvePlugin('event-emitter')
-const { Block } = require('@arkecosystem/crypto').models
-const { TRANSACTION_TYPES } = require('@arkecosystem/crypto').constants
-const { roundCalculator } = require('@arkecosystem/core-utils')
-const cloneDeep = require('lodash/cloneDeep')
-const assert = require('assert')
-const WalletManager = require('./wallet-manager')
-
-module.exports = class ConnectionInterface {
- /**
- * @constructor
- * @param {Object} options
- */
- constructor(options) {
- this.config = options
- this.connection = null
- this.blocksInCurrentRound = null
- this.stateStarted = false
-
- this.__registerListeners()
- }
-
- /**
- * Get the current connection.
- * @return {ConnectionInterface}
- */
- getConnection() {
- return this.connection
- }
-
- /**
- * Connect to a database.
- * @return {void}
- * @throws Error
- */
- async connect() {
- throw new Error('Method [connect] not implemented!')
- }
-
- /**
- * Disconnect from a database.
- * @return {void}
- * @throws Error
- */
- async disconnect() {
- throw new Error('Method [disconnect] not implemented!')
- }
-
- /**
- * Verify the blockchain stored on db is not corrupted making simple assertions:
- * - Last block is available
- * - Last block height equals the number of stored blocks
- * - Number of stored transactions equals the sum of block.numberOfTransactions in the database
- * - Sum of all tx fees equals the sum of block.totalFee
- * - Sum of all tx amount equals the sum of block.totalAmount
- * @return {Object} An object { valid, errors } with the result of the verification and the errors
- */
- async verifyBlockchain() {
- throw new Error('Method [verifyBlockchain] not implemented!')
- }
-
- /**
- * Get the top 51 delegates.
- * @param {Number} height
- * @param {Array} delegates
- * @return {void}
- * @throws Error
- */
- async getActiveDelegates(height, delegates) {
- throw new Error('Method [getActiveDelegates] not implemented!')
- }
-
- /**
- * Load a list of wallets into memory.
- * @param {Number} height
- * @return {Boolean} success
- * @throws Error
- */
- async buildWallets(height) {
- throw new Error('Method [buildWallets] not implemented!')
- }
-
- /**
- * Commit wallets from the memory.
- * @param {Boolean} force
- * @return {void}
- * @throws Error
- */
- async saveWallets(force) {
- throw new Error('Method [saveWallets] not implemented!')
- }
-
- /**
- * Commit the given block.
- * NOTE: to be used when node is in sync and committing newly received blocks
- * @param {Block} block
- * @return {void}
- * @throws Error
- */
- async saveBlock(block) {
- throw new Error('Method [saveBlock] not implemented!')
- }
-
- /**
- * Queue a query to save the given block.
- * NOTE: Must call commitQueuedQueries() to save to database.
- * NOTE: to use when rebuilding to decrease the number of database transactions,
- * and commit blocks (save only every 1000s for instance) by calling commit
- * @param {Block} block
- * @return {void}
- * @throws Error
- */
- enqueueSaveBlock(block) {
- throw new Error('Method [enqueueSaveBlock] not implemented!')
- }
-
- /**
- * Queue a query to delete the given block.
- * See also enqueueSaveBlock
- * @param {Block} block
- * @return {void}
- * @throws Error
- */
- enqueueDeleteBlock(block) {
- throw new Error('Method [enqueueDeleteBlock] not implemented!')
- }
-
- /**
- * Queue a query to delete the round at given height.
- * See also enqueueSaveBlock and enqueueDeleteBlock
- * @param {Number} height
- * @return {void}
- * @throws Error
- */
- enqueueDeleteRound(height) {
- throw new Error('Method [enqueueDeleteRound] not implemented!')
- }
-
- /**
- * Commit all queued queries to the database.
- * NOTE: to be used in combination with other enqueue-functions.
- * @return {void}
- * @throws Error
- */
- async commitQueuedQueries() {
- throw new Error('Method [commitQueuedQueries] not implemented!')
- }
-
- /**
- * Delete the given block.
- * @param {Block} block
- * @return {void}
- * @throws Error
- */
- async deleteBlock(block) {
- throw new Error('Method [deleteBlock] not implemented!')
- }
-
- /**
- * Get a block.
- * @param {Block} id
- * @return {void}
- * @throws Error
- */
- async getBlock(id) {
- throw new Error('Method [getBlock] not implemented!')
- }
-
- /**
- * Get last block.
- * @return {void}
- * @throws Error
- */
- async getLastBlock() {
- throw new Error('Method [getLastBlock] not implemented!')
- }
-
- /**
- * Get blocks for the given offset and limit.
- * @param {Number} offset
- * @param {Number} limit
- * @return {void}
- * @throws Error
- */
- async getBlocks(offset, limit) {
- throw new Error('Method [getBlocks] not implemented!')
- }
-
- /**
- * Get top count blocks ordered by height DESC.
- * NOTE: Only used when trying to restore database integrity.
- * The returned blocks may be unchained.
- * @param {Number} count
- * @return {void}
- * @throws Error
- */
- async getTopBlocks(count) {
- throw new Error('Method [getTopBlocks] not implemented!')
- }
-
- /**
- * Get recent block ids.
- * @return {[]String}
- */
- async getRecentBlockIds() {
- throw new Error('Method [getRecentBlockIds] not implemented!')
- }
-
- /**
- * Store the given round.
- * @param {Array} activeDelegates
- * @return {void}
- * @throws Error
- */
- async saveRound(activeDelegates) {
- throw new Error('Method [saveRound] not implemented!')
- }
-
- /**
- * Delete the given round.
- * @param {Number} round
- * @return {void}
- * @throws Error
- */
- async deleteRound(round) {
- throw new Error('Method [deleteRound] not implemented!')
- }
-
- /**
- * Update delegate statistics in memory.
- * NOTE: must be called before saving new round of delegates
- * @param {Block} block
- * @param {Array} delegates
- * @return {void}
- */
- updateDelegateStats(height, delegates) {
- if (!delegates || !this.blocksInCurrentRound) {
- return
- }
-
- logger.debug('Updating delegate statistics')
-
- try {
- delegates.forEach(delegate => {
- const producedBlocks = this.blocksInCurrentRound.filter(
- blockGenerator =>
- blockGenerator.data.generatorPublicKey === delegate.publicKey,
- )
- const wallet = this.walletManager.findByPublicKey(delegate.publicKey)
-
- if (producedBlocks.length === 0) {
- wallet.missedBlocks++
- logger.debug(
- `Delegate ${wallet.username} (${
- wallet.publicKey
- }) just missed a block. Total: ${wallet.missedBlocks}`,
- )
- wallet.dirty = true
- emitter.emit('forger.missing', {
- delegate: wallet,
- })
- }
- })
- } catch (error) {
- logger.error(error.stack)
- }
- }
-
- /**
- * Apply the round.
- * Note that the round is applied and the end of the round (so checking height + 1)
- * so the next block to apply starting the new round will be ready to be validated
- * @param {Number} height
- * @return {void}
- */
- async applyRound(height) {
- const nextHeight = height === 1 ? 1 : height + 1
- const maxDelegates = config.getConstants(nextHeight).activeDelegates
-
- if (nextHeight % maxDelegates === 1) {
- const round = Math.floor((nextHeight - 1) / maxDelegates) + 1
-
- if (
- !this.forgingDelegates ||
- this.forgingDelegates.length === 0 ||
- (this.forgingDelegates.length &&
- this.forgingDelegates[0].round !== round)
- ) {
- logger.info(`Starting Round ${round.toLocaleString()} :dove_of_peace:`)
-
- try {
- this.updateDelegateStats(height, this.forgingDelegates)
- this.saveWallets(false) // save only modified wallets during the last round
- const delegates = this.walletManager.loadActiveDelegateList(
- maxDelegates,
- nextHeight,
- ) // get active delegate list from in-memory wallet manager
- this.saveRound(delegates) // save next round delegate list non-blocking
- this.forgingDelegates = await this.getActiveDelegates(
- nextHeight,
- delegates,
- ) // generate the new active delegates list
- this.blocksInCurrentRound.length = 0
- } catch (error) {
- // trying to leave database state has it was
- await this.deleteRound(round)
- throw error
- }
- } else {
- logger.warn(
- `Round ${round.toLocaleString()} has already been applied. This should happen only if you are a forger. :warning:`,
- )
- }
- }
- }
-
- /**
- * Remove the round.
- * @param {Number} height
- * @return {void}
- */
- async revertRound(height) {
- const { round, nextRound, maxDelegates } = roundCalculator.calculateRound(
- height,
- )
-
- if (nextRound === round + 1 && height >= maxDelegates) {
- logger.info(`Back to previous round: ${round.toLocaleString()} :back:`)
-
- const delegates = await this.__calcPreviousActiveDelegates(round)
- this.forgingDelegates = await this.getActiveDelegates(height, delegates)
-
- await this.deleteRound(nextRound)
- }
- }
-
- /**
- * Calculate the active delegates of the previous round. In order to do
- * so we need to go back to the start of that round. Therefore we create
- * a temporary wallet manager with all delegates and revert all blocks
- * and transactions of that round to get the initial vote balances
- * which are then used to restore the original order.
- * @param {Number} round
- */
- async __calcPreviousActiveDelegates(round) {
- // TODO: cache the blocks of the last X rounds
- this.blocksInCurrentRound = await this.__getBlocksForRound(round)
-
- // Create temp wallet manager from all delegates
- const tempWalletManager = new WalletManager()
- tempWalletManager.index(cloneDeep(this.walletManager.allByUsername()))
-
- // Revert all blocks in reverse order
- let height = 0
- for (let i = this.blocksInCurrentRound.length - 1; i >= 0; i--) {
- tempWalletManager.revertBlock(this.blocksInCurrentRound[i])
- height = this.blocksInCurrentRound[i].data.height
- }
-
- // The first round has no active delegates
- if (height === 1) {
- return []
- }
-
- // Assert that the height is the beginning of a round.
- const { maxDelegates } = roundCalculator.calculateRound(height)
- assert(height > 1 && height % maxDelegates === 1)
-
- // Now retrieve the active delegate list from the temporary wallet manager.
- return tempWalletManager.loadActiveDelegateList(maxDelegates, height)
- }
-
- /**
- * Validate a delegate.
- * @param {Block} block
- * @return {void}
- */
- async validateDelegate(block) {
- if (this.__isException(block.data)) {
- return
- }
-
- const delegates = await this.getActiveDelegates(block.data.height)
- const slot = slots.getSlotNumber(block.data.timestamp)
- const forgingDelegate = delegates[slot % delegates.length]
-
- const generatorUsername = this.walletManager.findByPublicKey(
- block.data.generatorPublicKey,
- ).username
-
- if (!forgingDelegate) {
- logger.debug(
- `Could not decide if delegate ${generatorUsername} (${
- block.data.generatorPublicKey
- }) is allowed to forge block ${block.data.height.toLocaleString()} :grey_question:`,
- )
- } else if (forgingDelegate.publicKey !== block.data.generatorPublicKey) {
- const forgingUsername = this.walletManager.findByPublicKey(
- forgingDelegate.publicKey,
- ).username
-
- if (delegates.some(delegate => delegate.publicKey === block.data.generatorPublicKey)) {
- throw new Error(
- `Delegate ${generatorUsername} (${
- block.data.generatorPublicKey
- }) not allowed to forge, should be ${forgingUsername} (${
- forgingDelegate.publicKey
- }) :-1:`,
- )
- } else {
- throw new Error("inactive generator");
- }
- } else {
- logger.debug(
- `Delegate ${generatorUsername} (${
- block.data.generatorPublicKey
- }) allowed to forge block ${block.data.height.toLocaleString()} :+1:`,
- )
- }
- }
-
- /**
- * Validate a forked block.
- * @param {Block} block
- * @return {Boolean}
- */
- async validateForkedBlock(block) {
- try {
- await this.validateDelegate(block)
- } catch (error) {
- logger.debug(error.stack)
- return false
- }
-
- return true
- }
-
- /**
- * Apply the given block.
- * @param {Block} block
- * @return {void}
- */
- async applyBlock(block) {
- await this.validateDelegate(block)
- this.walletManager.applyBlock(block)
-
- if (this.blocksInCurrentRound) {
- this.blocksInCurrentRound.push(block)
- }
-
- await this.applyRound(block.data.height)
- block.transactions.forEach(tx => this.__emitTransactionEvents(tx))
- emitter.emit('block.applied', block.data)
- }
-
- /**
- * Emit events for the specified transaction.
- * @param {Object} transaction
- * @return {void}
- */
- __emitTransactionEvents(transaction) {
- emitter.emit('transaction.applied', transaction.data)
-
- if (transaction.type === TRANSACTION_TYPES.DELEGATE_REGISTRATION) {
- emitter.emit('delegate.registered', transaction.data)
- }
-
- if (transaction.type === TRANSACTION_TYPES.DELEGATE_RESIGNATION) {
- emitter.emit('delegate.resigned', transaction.data)
- }
-
- if (transaction.type === TRANSACTION_TYPES.VOTE) {
- const vote = transaction.asset.votes[0]
-
- emitter.emit(vote.startsWith('+') ? 'wallet.vote' : 'wallet.unvote', {
- delegate: vote,
- transaction: transaction.data,
- })
- }
- }
-
- /**
- * Remove the given block.
- * @param {Block} block
- * @return {void}
- */
- async revertBlock(block) {
- await this.revertRound(block.data.height)
- await this.walletManager.revertBlock(block)
-
- assert(this.blocksInCurrentRound.pop().data.id === block.data.id)
-
- emitter.emit('block.reverted', block.data)
- }
-
- /**
- * Verify a transaction.
- * @param {Transaction} transaction
- * @return {Boolean}
- */
- async verifyTransaction(transaction) {
- const senderId = crypto.getAddress(
- transaction.data.senderPublicKey,
- config.network.pubKeyHash,
- )
-
- const sender = this.walletManager.findByAddress(senderId) // should exist
-
- if (!sender.publicKey) {
- sender.publicKey = transaction.data.senderPublicKey
- this.walletManager.reindex(sender)
- }
-
- const dbTransaction = await this.getTransaction(transaction.data.id)
-
- return sender.canApply(transaction.data, []) && !dbTransaction
- }
-
- /**
- * Get blocks for round.
- * @param {number} round
- * @return {[]Block}
- */
- async __getBlocksForRound(round) {
- let lastBlock
- if (app.has('state')) {
- lastBlock = app.resolve('state').getLastBlock()
- } else {
- lastBlock = await this.getLastBlock()
- }
-
- if (!lastBlock) {
- return []
- }
-
- let height = +lastBlock.data.height
- if (!round) {
- round = roundCalculator.calculateRound(height).round
- }
-
- const maxDelegates = config.getConstants(height).activeDelegates
- height = round * maxDelegates + 1
-
- const blocks = await this.getBlocks(height - maxDelegates, maxDelegates - 1)
- return blocks.map(b => new Block(b))
- }
-
- /**
- * Register event listeners.
- * @return {void}
- */
- __registerListeners() {
- emitter.on('state:started', () => {
- this.stateStarted = true
- })
- }
-
- /**
- * Register the wallet app.
- * @return {void}
- */
- async _registerWalletManager() {
- this.walletManager = new WalletManager()
- }
-
- /**
- * Register the wallet and delegate repositories.
- * @return {void}
- */
- async _registerRepositories() {
- this.wallets = new (require('./repositories/wallets'))(this)
- this.delegates = new (require('./repositories/delegates'))(this)
- }
-
- /**
- * Determine if the given block is an exception.
- * @param {Object} block
- * @return {Boolean}
- */
- __isException(block) {
- if (!config) {
- return false
- }
-
- if (!Array.isArray(config.network.exceptions.blocks)) {
- return false
- }
-
- return config.network.exceptions.blocks.includes(block.id)
- }
-}
diff --git a/packages/core-database/lib/manager.js b/packages/core-database/lib/manager.js
deleted file mode 100644
index 00b00833ab..0000000000
--- a/packages/core-database/lib/manager.js
+++ /dev/null
@@ -1,30 +0,0 @@
-class DatabaseManager {
- /**
- * Create a new database manager instance.
- * @constructor
- */
- constructor() {
- this.connections = {}
- }
-
- /**
- * Get a database connection instance.
- * @param {String} name
- * @return {ConnectionInterface}
- */
- connection(name = 'default') {
- return this.connections[name]
- }
-
- /**
- * Make the database connection instance.
- * @param {ConnectionInterface} connection
- * @param {String} name
- * @return {void}
- */
- async makeConnection(connection, name = 'default') {
- this.connections[name] = await connection.make()
- }
-}
-
-module.exports = new DatabaseManager()
diff --git a/packages/core-database/lib/repositories/delegates.js b/packages/core-database/lib/repositories/delegates.js
deleted file mode 100644
index f11b258c63..0000000000
--- a/packages/core-database/lib/repositories/delegates.js
+++ /dev/null
@@ -1,113 +0,0 @@
-const { delegateCalculator } = require('@arkecosystem/core-utils')
-const orderBy = require('lodash/orderBy')
-const limitRows = require('./utils/limit-rows')
-
-module.exports = class DelegatesRepository {
- /**
- * Create a new delegate repository instance.
- * @param {ConnectionInterface} connection
- */
- constructor(connection) {
- this.connection = connection
- }
-
- /**
- * Get all local delegates.
- * @return {Array}
- */
- getLocalDelegates() {
- return this.connection.walletManager
- .all()
- .filter(wallet => !!wallet.username)
- }
-
- /**
- * Find all delegates.
- * @param {Object} params
- * @return {Object}
- */
- findAll(params = {}) {
- const rows = this.getLocalDelegates()
-
- const order = params.orderBy ? params.orderBy.split(':') : ['rate', 'asc']
-
- return {
- rows: limitRows(orderBy(rows, order), params),
- count: rows.length,
- }
- }
-
- /**
- * Paginate all delegates.
- * @param {Object} params
- * @return {Object}
- */
- paginate(params) {
- return this.findAll(params)
- }
-
- /**
- * Search all delegates.
- * TODO Currently it searches by username only
- * @param {Object} [params]
- * @param {String} [params.username] - Search by username
- * @return {Object}
- */
- search(params) {
- let delegates = this.getLocalDelegates().filter(
- delegate => delegate.username.indexOf(params.username) > -1,
- )
-
- if (params.orderBy) {
- const orderByField = params.orderBy.split(':')[0]
- const orderByDirection = params.orderBy.split(':')[1] || 'desc'
-
- delegates = delegates.sort((a, b) => {
- if (orderByDirection === 'desc' && a[orderByField] < b[orderByField]) {
- return -1
- }
-
- if (orderByDirection === 'asc' && a[orderByField] > b[orderByField]) {
- return 1
- }
-
- return 0
- })
- }
-
- return {
- rows: limitRows(delegates, params),
- count: delegates.length,
- }
- }
-
- /**
- * Find a delegate.
- * @param {String} id
- * @return {Object}
- */
- findById(id) {
- return this.getLocalDelegates().find(
- a => a.address === id || a.publicKey === id || a.username === id,
- )
- }
-
- /**
- * Find all active delegates at height.
- * @param {Number} height
- * @return {Array}
- */
- getActiveAtHeight(height) {
- const delegates = this.connection.getActiveDelegates(height)
-
- return delegates.map(delegate => {
- const wallet = this.connection.wallets.findById(delegate.publicKey)
-
- return {
- username: wallet.username,
- approval: delegateCalculator.calculateApproval(delegate, height),
- productivity: delegateCalculator.calculateProductivity(wallet),
- }
- })
- }
-}
diff --git a/packages/core-database/lib/repositories/utils/filter-rows.js b/packages/core-database/lib/repositories/utils/filter-rows.js
deleted file mode 100644
index 197ab448c5..0000000000
--- a/packages/core-database/lib/repositories/utils/filter-rows.js
+++ /dev/null
@@ -1,71 +0,0 @@
-/* eslint no-prototype-builtins: "off" */
-
-/**
- * Filter an Array of Objects based on the given parameters.
- * @param {Array} rows
- * @param {Object} params
- * @param {Object} filters
- * @return {Array}
- */
-module.exports = (rows, params, filters) =>
- rows.filter(item => {
- if (filters.hasOwnProperty('exact')) {
- for (const elem of filters.exact) {
- if (params[elem] && item[elem] !== params[elem]) {
- return false
- }
- }
- }
-
- if (filters.hasOwnProperty('between')) {
- for (const elem of filters.between) {
- if (!params[elem]) {
- continue
- }
-
- if (
- !params[elem].hasOwnProperty('from') &&
- !params[elem].hasOwnProperty('to') &&
- item[elem] !== params[elem]
- ) {
- return false
- }
-
- if (
- params[elem].hasOwnProperty('from') ||
- params[elem].hasOwnProperty('to')
- ) {
- let isMoreThan = true
- let isLessThan = true
-
- if (params[elem].hasOwnProperty('from')) {
- isMoreThan = item[elem] >= params[elem].from
- }
-
- if (params[elem].hasOwnProperty('to')) {
- isLessThan = item[elem] <= params[elem].to
- }
-
- return isMoreThan && isLessThan
- }
- }
- }
-
- // NOTE: it was used to filter by `votes`, but that field was rejected and
- // replaced by `vote`. This filter is kept here just in case
- if (filters.hasOwnProperty('any')) {
- for (const elem of filters.any) {
- if (params[elem] && item[elem]) {
- if (Array.isArray(params[elem])) {
- if (item[elem].every(a => params[elem].indexOf(a) === -1)) {
- return false
- }
- } else {
- throw new Error('Fitering by "any" requires an Array')
- }
- }
- }
- }
-
- return true
- })
diff --git a/packages/core-database/lib/repositories/utils/limit-rows.js b/packages/core-database/lib/repositories/utils/limit-rows.js
deleted file mode 100644
index 28a7d5ddd9..0000000000
--- a/packages/core-database/lib/repositories/utils/limit-rows.js
+++ /dev/null
@@ -1,16 +0,0 @@
-/**
- * Return some rows by an offset and a limit.
- * @param {Array} rows
- * @param {Object} params
- * @return {Array}
- */
-module.exports = (rows, params) => {
- if (params.offset || params.limit) {
- const offset = params.offset || 0
- const limit = params.limit ? offset + params.limit : rows.length
-
- return rows.slice(offset, limit)
- }
-
- return rows
-}
diff --git a/packages/core-database/lib/repositories/wallets.js b/packages/core-database/lib/repositories/wallets.js
deleted file mode 100644
index 1c6e53b522..0000000000
--- a/packages/core-database/lib/repositories/wallets.js
+++ /dev/null
@@ -1,122 +0,0 @@
-const orderBy = require('lodash/orderBy')
-const filterRows = require('./utils/filter-rows')
-const limitRows = require('./utils/limit-rows')
-
-module.exports = class WalletsRepository {
- /**
- * Create a new wallet repository instance.
- * @param {ConnectionInterface} connection
- */
- constructor(connection) {
- this.connection = connection
- }
-
- /**
- * Get all local wallets.
- * @return {Array}
- */
- all() {
- return this.connection.walletManager.all()
- }
-
- /**
- * Find all wallets.
- * @param {Object} params
- * @return {Object}
- */
- findAll(params = {}) {
- const wallets = this.all()
-
- const [iteratee, order] = params.orderBy
- ? params.orderBy.split(':')
- : ['rate', 'asc']
-
- return {
- rows: limitRows(orderBy(wallets, iteratee, order), params),
- count: wallets.length,
- }
- }
-
- /**
- * Find all wallets for the given vote.
- * @param {String} publicKey
- * @param {Object} params
- * @return {Object}
- */
- findAllByVote(publicKey, params = {}) {
- const wallets = this.all().filter(wallet => wallet.vote === publicKey)
-
- return {
- rows: limitRows(wallets, params),
- count: wallets.length,
- }
- }
-
- /**
- * Find a wallet by address, public key or username.
- * @param {Number} id
- * @return {Object}
- */
- findById(id) {
- return this.all().find(
- wallet => wallet.address === id
- || wallet.publicKey === id
- || wallet.username === id,
- )
- }
-
- /**
- * Count all wallets.
- * @return {Number}
- */
- count() {
- return this.all().length
- }
-
- /**
- * Find all wallets sorted by balance.
- * @param {Object} params
- * @return {Object}
- */
- top(params = {}) {
- const wallets = Object.values(this.all()).sort(
- (a, b) => +b.balance.minus(a.balance).toFixed(),
- )
-
- return {
- rows: limitRows(wallets, params),
- count: wallets.length,
- }
- }
-
- /**
- * Search all wallets.
- * @param {Object} [params]
- * @param {Number} [params.limit] - Limit the number of results
- * @param {Number} [params.offset] - Skip some results
- * @param {Array} [params.orderBy] - Order of the results
- * @param {String} [params.address] - Search by address
- * @param {String} [params.publicKey] - Search by publicKey
- * @param {String} [params.secondPublicKey] - Search by secondPublicKey
- * @param {String} [params.username] - Search by username
- * @param {String} [params.vote] - Search by vote
- * @param {Object} [params.balance] - Search by balance
- * @param {Number} [params.balance.from] - Search by balance (minimum)
- * @param {Number} [params.balance.to] - Search by balance (maximum)
- * @param {Object} [params.voteBalance] - Search by voteBalance
- * @param {Number} [params.voteBalance.from] - Search by voteBalance (minimum)
- * @param {Number} [params.voteBalance.to] - Search by voteBalance (maximum)
- * @return {Object}
- */
- search(params) {
- const wallets = filterRows(this.all(), params, {
- exact: ['address', 'publicKey', 'secondPublicKey', 'username', 'vote'],
- between: ['balance', 'voteBalance'],
- })
-
- return {
- rows: limitRows(wallets, params),
- count: wallets.length,
- }
- }
-}
diff --git a/packages/core-database/lib/wallet-manager.js b/packages/core-database/lib/wallet-manager.js
deleted file mode 100644
index bceb8f037d..0000000000
--- a/packages/core-database/lib/wallet-manager.js
+++ /dev/null
@@ -1,609 +0,0 @@
-const { crypto, formatArktoshi } = require('@arkecosystem/crypto')
-const { Wallet } = require('@arkecosystem/crypto').models
-const { TRANSACTION_TYPES } = require('@arkecosystem/crypto').constants
-const { roundCalculator } = require('@arkecosystem/core-utils')
-const app = require('@arkecosystem/core-container')
-
-const config = app.resolvePlugin('config')
-const logger = app.resolvePlugin('logger')
-
-const pluralize = require('pluralize')
-
-module.exports = class WalletManager {
- /**
- * Create a new wallet manager instance.
- * @constructor
- */
- constructor() {
- this.networkId = config ? config.network.pubKeyHash : 0x17
- this.reset()
- }
-
- /**
- * Reset the wallets index.
- * @return {void}
- */
- reset() {
- this.byAddress = {}
- this.byPublicKey = {}
- this.byUsername = {}
- }
-
- /**
- * Get all wallets by address.
- * @return {Array}
- */
- all() {
- return Object.values(this.byAddress)
- }
-
- /**
- * Get all wallets by publicKey.
- * @return {Array}
- */
- allByPublicKey() {
- return Object.values(this.byPublicKey)
- }
-
- /**
- * Get all wallets by username.
- * @return {Array}
- */
- allByUsername() {
- return Object.values(this.byUsername)
- }
-
- /**
- * Find a wallet by the given address.
- * @param {String} address
- * @return {Wallet}
- */
- findByAddress(address) {
- if (!this.byAddress[address]) {
- this.byAddress[address] = new Wallet(address)
- }
-
- return this.byAddress[address]
- }
-
- /**
- * Find a wallet by the given public key.
- * @param {String} publicKey
- * @return {Wallet}
- */
- findByPublicKey(publicKey) {
- if (!this.byPublicKey[publicKey]) {
- const address = crypto.getAddress(publicKey, config.network.pubKeyHash)
-
- const wallet = this.findByAddress(address)
- wallet.publicKey = publicKey
- this.byPublicKey[publicKey] = wallet
- }
-
- return this.byPublicKey[publicKey]
- }
-
- /**
- * Find a wallet by the given username.
- * @param {String} username
- * @return {Wallet}
- */
- findByUsername(username) {
- return this.byUsername[username]
- }
-
- /**
- * Set wallet by address.
- * @param {String} address
- * @param {Wallet} wallet
- * @param {void}
- */
- setByAddress(address, wallet) {
- this.byAddress[address] = wallet
- }
-
- /**
- * Set wallet by publicKey.
- * @param {String} publicKey
- * @param {Wallet} wallet
- * @param {void}
- */
- setByPublicKey(publicKey, wallet) {
- this.byPublicKey[publicKey] = wallet
- }
-
- /**
- * Set wallet by username.
- * @param {String} username
- * @param {Wallet} wallet
- * @param {void}
- */
- setByUsername(username, wallet) {
- this.byUsername[username] = wallet
- }
-
- /**
- * Remove wallet by address.
- * @param {String} address
- * @param {void}
- */
- forgetByAddress(address) {
- delete this.byAddress[address]
- }
-
- /**
- * Remove wallet by publicKey.
- * @param {String} publicKey
- * @param {void}
- */
- forgetByPublicKey(publicKey) {
- delete this.byPublicKey[publicKey]
- }
-
- /**
- * Remove wallet by username.
- * @param {String} username
- * @param {void}
- */
- forgetByUsername(username) {
- delete this.byUsername[username]
- }
-
- /**
- * Index the given wallets.
- * @param {Array} wallets
- * @return {void}
- */
- index(wallets) {
- for (const wallet of wallets) {
- this.reindex(wallet)
- }
- }
-
- /**
- * Reindex the given wallet.
- * @param {Wallet} wallet
- * @return {void}
- */
- reindex(wallet) {
- if (wallet.address) {
- this.byAddress[wallet.address] = wallet
- }
-
- if (wallet.publicKey) {
- this.byPublicKey[wallet.publicKey] = wallet
- }
-
- if (wallet.username) {
- this.byUsername[wallet.username] = wallet
- }
- }
-
- clear() {
- Object.values(this.byAddress).forEach(wallet => {
- wallet.dirty = false
- })
- }
-
- /**
- * Load a list of all active delegates.
- * @param {Number} maxDelegates
- * @return {Array}
- */
- loadActiveDelegateList(maxDelegates, height) {
- if (height > 1 && height % maxDelegates !== 1) {
- throw new Error('Trying to build delegates outside of round change')
- }
-
- const { round } = roundCalculator.calculateRound(height, maxDelegates)
- let delegates = this.allByUsername()
-
- if (delegates.length < maxDelegates) {
- throw new Error(
- `Expected to find ${maxDelegates} delegates but only found ${
- delegates.length
- }. This indicates an issue with the genesis block & delegates.`,
- )
- }
-
- const equalVotesMap = new Map()
-
- delegates = delegates
- .sort((a, b) => {
- const diff = b.voteBalance.comparedTo(a.voteBalance)
-
- if (diff === 0) {
- if (!equalVotesMap.has(a.voteBalance.toFixed())) {
- equalVotesMap.set(a.voteBalance.toFixed(), new Set())
- }
-
- const set = equalVotesMap.get(a.voteBalance.toFixed())
- set.add(a)
- set.add(b)
-
- if (a.publicKey === b.publicKey) {
- throw new Error(
- `The balance and public key of both delegates are identical! Delegate "${
- a.username
- }" appears twice in the list.`,
- )
- }
-
- return a.publicKey.localeCompare(b.publicKey, 'en')
- }
-
- return diff
- })
- .map((delegate, i) => {
- const rate = i + 1
- this.byUsername[delegate.username].rate = rate
- return { ...{ round }, ...delegate, rate }
- })
- .slice(0, maxDelegates)
-
- for (const [voteBalance, set] of equalVotesMap.entries()) {
- const values = Array.from(set.values())
- if (delegates.includes(values[0])) {
- const mapped = values.map(v => `${v.username} (${v.publicKey})`)
- logger.warn(
- `Delegates ${JSON.stringify(
- mapped,
- null,
- 4,
- )} have a matching vote balance of ${formatArktoshi(voteBalance)}`,
- )
- }
- }
-
- logger.debug(
- `Loaded ${delegates.length} active ${pluralize(
- 'delegate',
- delegates.length,
- )}`,
- )
-
- return delegates
- }
-
- /**
- * Build vote balances of all delegates.
- * NOTE: Only called during SPV.
- * @return {void}
- */
- buildVoteBalances() {
- Object.values(this.byPublicKey).forEach(voter => {
- if (voter.vote) {
- const delegate = this.byPublicKey[voter.vote]
- delegate.voteBalance = delegate.voteBalance.plus(voter.balance)
- }
- })
- }
-
- /**
- * Remove non-delegate wallets that have zero (0) balance from memory.
- * @return {void}
- */
- purgeEmptyNonDelegates() {
- Object.values(this.byPublicKey).forEach(wallet => {
- if (this.__canBePurged(wallet)) {
- delete this.byPublicKey[wallet.publicKey]
- delete this.byAddress[wallet.address]
- }
- })
- }
-
- /**
- * Apply the given block to a delegate.
- * @param {Block} block
- * @return {void}
- */
- applyBlock(block) {
- const generatorPublicKey = block.data.generatorPublicKey
-
- let delegate = this.byPublicKey[block.data.generatorPublicKey]
-
- if (!delegate) {
- const generator = crypto.getAddress(generatorPublicKey, this.networkId)
-
- if (block.data.height === 1) {
- delegate = new Wallet(generator)
- delegate.publicKey = generatorPublicKey
-
- this.reindex(delegate)
- } else {
- logger.debug(`Delegate by address: ${this.byAddress[generator]}`)
-
- if (this.byAddress[generator]) {
- logger.info('This look like a bug, please report :bug:')
- }
-
- throw new Error(
- `Could not find delegate with publicKey ${generatorPublicKey}`,
- )
- }
- }
-
- const appliedTransactions = []
-
- try {
- block.transactions.forEach(transaction => {
- this.applyTransaction(transaction)
- appliedTransactions.push(transaction)
- })
-
- const applied = delegate.applyBlock(block.data)
-
- // If the block has been applied to the delegate, the balance is increased
- // by reward + totalFee. In which case the vote balance of the
- // delegate's delegate has to be updated.
- if (applied && delegate.vote) {
- const increase = block.data.reward.plus(block.data.totalFee)
- const votedDelegate = this.byPublicKey[delegate.vote]
- votedDelegate.voteBalance = votedDelegate.voteBalance.plus(increase)
- }
- } catch (error) {
- logger.error(
- 'Failed to apply all transactions in block - reverting previous transactions',
- )
- // Revert the applied transactions from last to first
- for (let i = appliedTransactions.length - 1; i >= 0; i--) {
- this.revertTransaction(appliedTransactions[i])
- }
-
- // TODO: should revert the delegate applyBlock ?
- // TBC: whatever situation `delegate.applyBlock(block.data)` is never applied
-
- throw error
- }
- }
-
- /**
- * Remove the given block from a delegate.
- * @param {Block} block
- * @return {void}
- */
- async revertBlock(block) {
- const delegate = this.byPublicKey[block.data.generatorPublicKey]
-
- if (!delegate) {
- app.forceExit(
- `Failed to lookup generator '${
- block.data.generatorPublicKey
- }' of block '${block.data.id}'. :skull:`,
- )
- }
-
- const revertedTransactions = []
-
- try {
- // Revert the transactions from last to first
- for (let i = block.transactions.length - 1; i >= 0; i--) {
- const transaction = block.transactions[i]
- this.revertTransaction(transaction)
- revertedTransactions.push(transaction)
- }
-
- const reverted = delegate.revertBlock(block.data)
-
- // If the block has been reverted, the balance is decreased
- // by reward + totalFee. In which case the vote balance of the
- // delegate's delegate has to be updated.
- if (reverted && delegate.vote) {
- const decrease = block.data.reward.plus(block.data.totalFee)
- const votedDelegate = this.byPublicKey[delegate.vote]
- votedDelegate.voteBalance = votedDelegate.voteBalance.minus(decrease)
- }
- } catch (error) {
- logger.error(error.stack)
-
- revertedTransactions
- .reverse()
- .forEach(transaction => this.applyTransaction(transaction))
-
- throw error
- }
- }
-
- /**
- * Apply the given transaction to a delegate.
- * @param {Transaction} transaction
- * @return {Transaction}
- */
- applyTransaction(transaction) {
- /* eslint padded-blocks: "off" */
- const { data } = transaction
- const { type, asset, recipientId, senderPublicKey } = data
-
- const sender = this.findByPublicKey(senderPublicKey)
- const recipient = this.findByAddress(recipientId)
- const errors = []
-
- // specific verifications / adjustments depending on transaction type
- if (
- type === TRANSACTION_TYPES.DELEGATE_REGISTRATION &&
- this.byUsername[asset.delegate.username.toLowerCase()]
- ) {
- logger.error(
- `Can't apply transaction ${
- data.id
- }: delegate name '${asset.delegate.username.toLowerCase()}' already taken.`,
- )
- throw new Error(
- `Can't apply transaction ${data.id}: delegate name already taken.`,
- )
-
- // NOTE: We use the vote public key, because vote transactions
- // have the same sender and recipient
- } else if (
- type === TRANSACTION_TYPES.VOTE &&
- !this.__isDelegate(asset.votes[0].slice(1))
- ) {
- logger.error(
- `Can't apply vote transaction ${data.id}: delegate ${
- asset.votes[0]
- } does not exist.`,
- )
- throw new Error(
- `Can't apply transaction ${data.id}: delegate ${
- asset.votes[0]
- } does not exist.`,
- )
- } else if (type === TRANSACTION_TYPES.SECOND_SIGNATURE) {
- data.recipientId = ''
- }
-
- // handle exceptions / verify that we can apply the transaction to the sender
- if (this.__isException(data)) {
- logger.warn(
- `Transaction ${
- data.id
- } forcibly applied because it has been added as an exception.`,
- )
- } else if (!sender.canApply(data, errors)) {
- logger.error(
- `Can't apply transaction id:${data.id} from sender:${
- sender.address
- } due to ${JSON.stringify(errors)}`,
- )
- logger.debug(`Audit: ${JSON.stringify(sender.auditApply(data), null, 2)}`)
- throw new Error(`Can't apply transaction ${data.id}`)
- }
-
- sender.applyTransactionToSender(data)
-
- if (type === TRANSACTION_TYPES.DELEGATE_REGISTRATION) {
- this.reindex(sender)
- }
-
- if (recipient && type === TRANSACTION_TYPES.TRANSFER) {
- recipient.applyTransactionToRecipient(data)
- }
-
- this._updateVoteBalances(sender, recipient, data)
-
- return transaction
- }
-
- /**
- * Updates the vote balances of the respective delegates of sender and recipient.
- * If the transaction is not a vote...
- * 1. fee + amount is removed from the sender's delegate vote balance
- * 2. amount is added to the recipient's delegate vote balance
- *
- * in case of a vote...
- * 1. the full sender balance is added to the sender's delegate vote balance
- *
- * If revert is set to true, the operations are reversed (plus -> minus, minus -> plus).
- * @param {Wallet} sender
- * @param {Wallet} recipient
- * @param {Transaction} transaction
- * @param {Boolean} revert
- * @return {Transaction}
- */
- _updateVoteBalances(sender, recipient, transaction, revert = false) {
- // TODO: multipayment?
- if (transaction.type !== TRANSACTION_TYPES.VOTE) {
- // Update vote balance of the sender's delegate
- if (sender.vote) {
- const delegate = this.findByPublicKey(sender.vote)
- const total = transaction.amount.plus(transaction.fee)
- delegate.voteBalance = revert
- ? delegate.voteBalance.plus(total)
- : delegate.voteBalance.minus(total)
- }
-
- // Update vote balance of recipient's delegate
- if (recipient && recipient.vote) {
- const delegate = this.findByPublicKey(recipient.vote)
- delegate.voteBalance = revert
- ? delegate.voteBalance.minus(transaction.amount)
- : delegate.voteBalance.plus(transaction.amount)
- }
- } else {
- const vote = transaction.asset.votes[0]
- const delegate = this.findByPublicKey(vote.substr(1))
-
- if (vote.startsWith('+')) {
- delegate.voteBalance = revert
- ? delegate.voteBalance.minus(sender.balance)
- : delegate.voteBalance.plus(sender.balance)
- } else {
- delegate.voteBalance = revert
- ? delegate.voteBalance.plus(sender.balance.plus(transaction.fee))
- : delegate.voteBalance.minus(sender.balance.plus(transaction.fee))
- }
- }
- }
-
- /**
- * Remove the given transaction from a delegate.
- * @param {Transaction} transaction
- * @return {Transaction}
- */
- revertTransaction(transaction) {
- const { type, data } = transaction
- const sender = this.findByPublicKey(data.senderPublicKey) // Should exist
- const recipient = this.byAddress[data.recipientId]
-
- sender.revertTransactionForSender(data)
-
- // removing the wallet from the delegates index
- if (type === TRANSACTION_TYPES.DELEGATE_REGISTRATION) {
- delete this.byUsername[data.asset.delegate.username]
- }
-
- if (recipient && type === TRANSACTION_TYPES.TRANSFER) {
- recipient.revertTransactionForRecipient(data)
- }
-
- // Revert vote balance updates
- this._updateVoteBalances(sender, recipient, data, true)
-
- return data
- }
-
- /**
- * Checks if a given publicKey is a registered delegate
- * @param {String} publicKey
- */
- __isDelegate(publicKey) {
- const delegateWallet = this.byPublicKey[publicKey]
-
- if (delegateWallet && delegateWallet.username) {
- return !!this.byUsername[delegateWallet.username]
- }
-
- return false
- }
-
- /**
- * Determine if the wallet can be removed from memory.
- * @param {Object} wallet
- * @return {Boolean}
- */
- __canBePurged(wallet) {
- return (
- wallet.balance.isZero() &&
- !wallet.secondPublicKey &&
- !wallet.multisignature &&
- !wallet.username
- )
- }
-
- /**
- * Determine if the given transaction is an exception.
- * @param {Object} transaction
- * @return {Boolean}
- */
- __isException(transaction) {
- if (!config) {
- return false
- }
-
- if (!Array.isArray(config.network.exceptions.transactions)) {
- return false
- }
-
- return config.network.exceptions.transactions.includes(transaction.id)
- }
-}
diff --git a/packages/core-database/package.json b/packages/core-database/package.json
index 1559f49017..4f8acdcc61 100644
--- a/packages/core-database/package.json
+++ b/packages/core-database/package.json
@@ -1,38 +1,58 @@
{
- "name": "@arkecosystem/core-database",
- "description": "Database Interface for Ark Core",
- "version": "0.2.0",
- "contributors": [
- "François-Xavier Thoorens ",
- "Kristjan Košič ",
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/core-container": "~0.2",
- "@arkecosystem/core-utils": "~0.2",
- "@arkecosystem/crypto": "~0.2",
- "lodash.clonedeep": "^4.5.0",
- "lodash.compact": "^3.0.1",
- "lodash.uniq": "^4.5.0",
- "pluralize": "^7.0.0"
- },
- "devDependencies": {
- "@arkecosystem/core-test-utils": "~0.2"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-database",
+ "description": "Database Interface for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "François-Xavier Thoorens ",
+ "Kristjan Košič ",
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index",
+ "types": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "bash ../../scripts/pre-test.sh",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/core-container": "^2.1.0",
+ "@arkecosystem/core-utils": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/lodash.clonedeep": "^4.5.4",
+ "@types/lodash.compact": "^3.0.4",
+ "@types/lodash.uniq": "^4.5.4",
+ "lodash.clonedeep": "^4.5.0",
+ "lodash.compact": "^3.0.1",
+ "lodash.uniq": "^4.5.0",
+ "pluralize": "^7.0.0"
+ },
+ "devDependencies": {
+ "@arkecosystem/core-test-utils": "^2.1.0",
+ "@types/pluralize": "^0.0.29"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-database/src/database-service-factory.ts b/packages/core-database/src/database-service-factory.ts
new file mode 100644
index 0000000000..daeceb6d20
--- /dev/null
+++ b/packages/core-database/src/database-service-factory.ts
@@ -0,0 +1,13 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { DatabaseService } from "./database-service";
+import { DelegatesRepository } from "./repositories/delegates";
+import { WalletsRepository } from "./repositories/wallets";
+
+// Allow extenders of core-database to provide, optionally, a IWalletManager concrete in addition to a IDatabaseConnection, but keep the business repos common
+export const databaseServiceFactory = async (opts: any, walletManager: Database.IWalletManager, connection: Database.IDatabaseConnection): Promise => {
+ let databaseService: DatabaseService;
+ databaseService = new DatabaseService(opts, connection, walletManager, new WalletsRepository(() => databaseService), new DelegatesRepository(() => databaseService));
+ await databaseService.init();
+ return databaseService;
+};
+
diff --git a/packages/core-database/src/database-service.ts b/packages/core-database/src/database-service.ts
new file mode 100644
index 0000000000..cf95461a88
--- /dev/null
+++ b/packages/core-database/src/database-service.ts
@@ -0,0 +1,556 @@
+import { app } from "@arkecosystem/core-container";
+import { Blockchain, Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import { roundCalculator } from "@arkecosystem/core-utils";
+import { Bignum, constants, crypto as arkCrypto, models } from "@arkecosystem/crypto";
+import assert from "assert";
+import crypto from "crypto";
+import cloneDeep from "lodash/cloneDeep";
+import pluralize from "pluralize";
+import { WalletManager } from "./wallet-manager";
+
+const { Block, Transaction } = models;
+const { TransactionTypes } = constants;
+
+
+export class DatabaseService implements Database.IDatabaseService {
+
+ public connection: Database.IDatabaseConnection;
+ public walletManager: Database.IWalletManager;
+ public logger = app.resolvePlugin("logger");
+ public emitter = app.resolvePlugin("event-emitter");
+ public config = app.getConfig();
+ public options: any;
+ public wallets: Database.IWalletsBusinessRepository;
+ public delegates: Database.IDelegatesBusinessRepository;
+ public blocksInCurrentRound: any[] = null;
+ public stateStarted: boolean = false;
+ public restoredDatabaseIntegrity: boolean = false;
+ public forgingDelegates: any[] = null;
+ public cache: Map = new Map();
+ private spvFinished: boolean;
+
+ constructor(options: any,
+ connection: Database.IDatabaseConnection,
+ walletManager: Database.IWalletManager,
+ walletsBusinessRepository: Database.IWalletsBusinessRepository,
+ delegatesBusinessRepository: Database.IDelegatesBusinessRepository
+ ) {
+ this.connection = connection;
+ this.walletManager = walletManager;
+ this.options = options;
+ this.wallets = walletsBusinessRepository;
+ this.delegates = delegatesBusinessRepository;
+
+ this.registerListeners();
+ }
+
+ public async init() {
+ await this.loadBlocksFromCurrentRound();
+ }
+
+ public async applyBlock(block: models.Block) {
+ this.walletManager.applyBlock(block);
+
+ if (this.blocksInCurrentRound) {
+ this.blocksInCurrentRound.push(block);
+ }
+
+ await this.applyRound(block.data.height);
+ block.transactions.forEach(tx => this.emitTransactionEvents(tx));
+ this.emitter.emit("block.applied", block.data);
+ return true;
+ }
+
+ public async applyRound(height: number) {
+ const nextHeight = height === 1 ? 1 : height + 1;
+ const maxDelegates = this.config.getMilestone(nextHeight).activeDelegates;
+
+ if (nextHeight % maxDelegates === 1) {
+ const round = Math.floor((nextHeight - 1) / maxDelegates) + 1;
+
+ if (
+ !this.forgingDelegates ||
+ this.forgingDelegates.length === 0 ||
+ (this.forgingDelegates.length && this.forgingDelegates[0].round !== round)
+ ) {
+ this.logger.info(`Starting Round ${round.toLocaleString()} :dove_of_peace:`);
+
+ try {
+ this.updateDelegateStats(this.forgingDelegates);
+ await this.saveWallets(false); // save only modified wallets during the last round
+ const delegates = this.walletManager.loadActiveDelegateList(maxDelegates, nextHeight); // get active delegate list from in-memory wallet manager
+ await this.saveRound(delegates); // save next round delegate list non-blocking
+ this.forgingDelegates = await this.getActiveDelegates(nextHeight, delegates); // generate the new active delegates list
+ this.blocksInCurrentRound.length = 0;
+ } catch (error) {
+                // try to leave the database state as it was
+ await this.deleteRound(round);
+ throw error;
+ }
+ } else {
+ this.logger.warn(
+ // tslint:disable-next-line:max-line-length
+ `Round ${round.toLocaleString()} has already been applied. This should happen only if you are a forger. :warning:`,
+ );
+ }
+ }
+ }
+
+ public async buildWallets(height: number): Promise {
+ this.walletManager.reset();
+
+ try {
+ const success = await this.connection.buildWallets(height);
+ this.spvFinished = true;
+ return success;
+ } catch (e) {
+ this.logger.error(e.stack);
+ }
+ return false;
+ }
+
+ public async commitQueuedQueries() {
+ await this.connection.commitQueuedQueries();
+ }
+
+ public async deleteBlock(block: models.Block) {
+ await this.connection.deleteBlock(block);
+ }
+
+ public async deleteRound(round: number) {
+ await this.connection.roundsRepository.delete(round);
+ }
+
+ public enqueueDeleteBlock(block: models.Block) {
+ this.connection.enqueueDeleteBlock(block);
+ }
+
+ public enqueueDeleteRound(height: number) {
+ this.connection.enqueueDeleteRound(height);
+ }
+
+ public enqueueSaveBlock(block: models.Block) {
+ this.connection.enqueueSaveBlock(block);
+ }
+
+ public async getActiveDelegates(height: number, delegates?: any[]) {
+ const maxDelegates = this.config.getMilestone(height).activeDelegates;
+ const round = Math.floor((height - 1) / maxDelegates) + 1;
+
+ if (this.forgingDelegates && this.forgingDelegates.length && this.forgingDelegates[0].round === round) {
+ return this.forgingDelegates;
+ }
+
+ // When called during applyRound we already know the delegates, so we don't have to query the database.
+ if (!delegates || delegates.length === 0) {
+ delegates = await this.connection.roundsRepository.findById(round);
+ }
+
+ const seedSource = round.toString();
+ let currentSeed = crypto
+ .createHash("sha256")
+ .update(seedSource, "utf8")
+ .digest();
+
+ for (let i = 0, delCount = delegates.length; i < delCount; i++) {
+ for (let x = 0; x < 4 && i < delCount; i++, x++) {
+ const newIndex = currentSeed[x] % delCount;
+ const b = delegates[newIndex];
+ delegates[newIndex] = delegates[i];
+ delegates[i] = b;
+ }
+ currentSeed = crypto
+ .createHash("sha256")
+ .update(currentSeed)
+ .digest();
+ }
+
+ this.forgingDelegates = delegates.map(delegate => {
+ delegate.round = +delegate.round;
+ return delegate;
+ });
+
+ return this.forgingDelegates;
+ }
+
+ public async getBlock(id: string) {
+ // TODO: caching the last 1000 blocks, in combination with `saveBlock` could help to optimise
+ const block = await this.connection.blocksRepository.findById(id);
+
+ if (!block) {
+ return null;
+ }
+
+ const transactions = await this.connection.transactionsRepository.findByBlockId(block.id);
+
+ block.transactions = transactions.map(({ serialized }) => Transaction.deserialize(serialized.toString("hex")));
+
+ return new Block(block);
+ }
+
+ public async getBlocks(offset: number, limit: number) {
+ let blocks = [];
+
+ // The functions below return matches in the range [start, end], including both ends.
+ const start = offset;
+ const end = offset + limit - 1;
+
+ if (app.has("state")) {
+ blocks = app.resolve("state").getLastBlocksByHeight(start, end);
+ }
+
+ if (blocks.length !== limit) {
+ blocks = await this.connection.blocksRepository.heightRange(start, end);
+
+ await this.loadTransactionsForBlocks(blocks);
+ }
+
+ return blocks;
+ }
+
+ public async getBlocksForRound(round?: number) {
+ let lastBlock;
+ if (app.has("state")) {
+ lastBlock = app.resolve("state").getLastBlock();
+ } else {
+ lastBlock = await this.getLastBlock();
+ }
+
+ if (!lastBlock) {
+ return [];
+ }
+
+ let height = +lastBlock.data.height;
+ if (!round) {
+ round = roundCalculator.calculateRound(height).round;
+ }
+
+ const maxDelegates = this.config.getMilestone(height).activeDelegates;
+ height = round * maxDelegates + 1;
+
+ const blocks = await this.getBlocks(height - maxDelegates, maxDelegates);
+ return blocks.map(b => new Block(b));
+ }
+
+ public async getForgedTransactionsIds(ids: string[]) {
+ if (!ids.length) {
+ return [];
+ }
+
+ const txs = await this.connection.transactionsRepository.forged(ids);
+ return txs.map(tx => tx.id);
+ }
+
+ public async getLastBlock() {
+ const block = await this.connection.blocksRepository.latest();
+
+ if (!block) {
+ return null;
+ }
+
+ const transactions = await this.connection.transactionsRepository.latestByBlock(block.id);
+
+ block.transactions = transactions.map(({ serialized }) => Transaction.deserialize(serialized.toString("hex")));
+
+ return new Block(block);
+ }
+
+ public async getCommonBlocks(ids: string[]) {
+ const state = app.resolve("state");
+ let commonBlocks = state.getCommonBlocks(ids);
+ if (commonBlocks.length < ids.length) {
+ commonBlocks = await this.connection.blocksRepository.common(ids);
+ }
+
+ return commonBlocks;
+ }
+
+ public async getRecentBlockIds() {
+ const state = app.resolve("state");
+ let blocks = state
+ .getLastBlockIds()
+ .reverse()
+ .slice(0, 10);
+
+ if (blocks.length < 10) {
+ blocks = await this.connection.blocksRepository.recent(10);
+ blocks = blocks.map(block => block.id);
+ }
+
+ return blocks;
+ }
+
+ public async getTopBlocks(count: any) {
+ const blocks = await this.connection.blocksRepository.top(count);
+
+ await this.loadTransactionsForBlocks(blocks);
+
+ return blocks;
+ }
+
+ public async getTransaction(id: string) {
+ return this.connection.transactionsRepository.findById(id);
+ }
+
+ public async loadBlocksFromCurrentRound() {
+ this.blocksInCurrentRound = await this.getBlocksForRound();
+ }
+
+ public async loadTransactionsForBlocks(blocks) {
+ if (!blocks.length) {
+ return;
+ }
+
+ const ids = blocks.map(block => block.id);
+
+ let transactions = await this.connection.transactionsRepository.latestByBlocks(ids);
+ transactions = transactions.map(tx => {
+ const data = Transaction.deserialize(tx.serialized.toString("hex"));
+ data.blockId = tx.blockId;
+ return data;
+ });
+
+ for (const block of blocks) {
+ if (block.numberOfTransactions > 0) {
+ block.transactions = transactions.filter(transaction => transaction.blockId === block.id);
+ }
+ }
+ }
+
+ public async revertBlock(block: models.Block) {
+ await this.revertRound(block.data.height);
+ await this.walletManager.revertBlock(block);
+
+ assert(this.blocksInCurrentRound.pop().data.id === block.data.id);
+
+ this.emitter.emit("block.reverted", block.data);
+ }
+
+ public async revertRound(height: number) {
+ const { round, nextRound, maxDelegates } = roundCalculator.calculateRound(height);
+
+ if (nextRound === round + 1 && height >= maxDelegates) {
+ this.logger.info(`Back to previous round: ${round.toLocaleString()} :back:`);
+
+ const delegates = await this.calcPreviousActiveDelegates(round);
+ this.forgingDelegates = await this.getActiveDelegates(height, delegates);
+
+ await this.deleteRound(nextRound);
+ }
+ }
+
+ public async saveBlock(block: models.Block) {
+ await this.connection.saveBlock(block);
+ }
+
+ public async saveRound(activeDelegates: any[]) {
+ this.logger.info(`Saving round ${activeDelegates[0].round.toLocaleString()}`);
+
+ await this.connection.roundsRepository.insert(activeDelegates);
+
+ this.emitter.emit("round.created", activeDelegates);
+ }
+
+ public async saveWallets(force: boolean) {
+ const wallets = this.walletManager
+ .allByPublicKey()
+ .filter(wallet => wallet.publicKey && (force || wallet.dirty));
+
+ // Remove dirty flags first to not save all dirty wallets in the exit handler
+ // when called during a force insert right after SPV.
+ this.walletManager.clear();
+
+ await this.connection.saveWallets(wallets, force);
+
+ this.logger.info(`${wallets.length} modified ${pluralize("wallet", wallets.length)} committed to database`);
+
+ this.emitter.emit("wallet.saved", wallets.length);
+
+ // NOTE: commented out as more use cases to be taken care of
+ // this.walletManager.purgeEmptyNonDelegates()
+ }
+
+ public updateDelegateStats(delegates: any[]): void {
+ if (!delegates || !this.blocksInCurrentRound) {
+ return;
+ }
+
+ this.logger.debug("Updating delegate statistics");
+
+ try {
+ delegates.forEach(delegate => {
+ const producedBlocks = this.blocksInCurrentRound.filter(
+ blockGenerator => blockGenerator.data.generatorPublicKey === delegate.publicKey,
+ );
+ const wallet = this.walletManager.findByPublicKey(delegate.publicKey);
+
+ if (producedBlocks.length === 0) {
+ wallet.missedBlocks++;
+ this.logger.debug(
+ `Delegate ${wallet.username} (${wallet.publicKey}) just missed a block. Total: ${
+ wallet.missedBlocks
+ }`,
+ );
+ wallet.dirty = true;
+ this.emitter.emit("forger.missing", {
+ delegate: wallet,
+ });
+ }
+ });
+ } catch (error) {
+ this.logger.error(error.stack);
+ }
+ }
+
+ public async verifyBlockchain(): Promise<{ valid: boolean; errors: any[] }> {
+ const errors = [];
+
+ const lastBlock = await this.getLastBlock();
+
+ // Last block is available
+ if (!lastBlock) {
+ errors.push("Last block is not available");
+ } else {
+ const numberOfBlocks = await this.connection.blocksRepository.count();
+
+ // Last block height equals the number of stored blocks
+ if (lastBlock.data.height !== +numberOfBlocks) {
+ errors.push(
+ `Last block height: ${lastBlock.data.height.toLocaleString()}, number of stored blocks: ${numberOfBlocks}`,
+ );
+ }
+ }
+
+ const blockStats = await this.connection.blocksRepository.statistics();
+ const transactionStats = await this.connection.transactionsRepository.statistics();
+
+ // Number of stored transactions equals the sum of block.numberOfTransactions in the database
+ if (blockStats.numberOfTransactions !== transactionStats.count) {
+ errors.push(
+ `Number of transactions: ${transactionStats.count}, number of transactions included in blocks: ${
+ blockStats.numberOfTransactions
+ }`,
+ );
+ }
+
+ // Sum of all tx fees equals the sum of block.totalFee
+ if (blockStats.totalFee !== transactionStats.totalFee) {
+ errors.push(
+ `Total transaction fees: ${transactionStats.totalFee}, total of block.totalFee : ${
+ blockStats.totalFee
+ }`,
+ );
+ }
+
+ // Sum of all tx amount equals the sum of block.totalAmount
+ if (blockStats.totalAmount !== transactionStats.totalAmount) {
+ errors.push(
+ `Total transaction amounts: ${transactionStats.totalAmount}, total of block.totalAmount : ${
+ blockStats.totalAmount
+ }`,
+ );
+ }
+
+ return {
+ valid: !errors.length,
+ errors,
+ };
+ }
+
+ public async verifyTransaction(transaction: models.Transaction) {
+ const senderId = arkCrypto.getAddress(transaction.data.senderPublicKey, this.config.get("network.pubKeyHash"));
+
+ const sender = this.walletManager.findByAddress(senderId); // should exist
+
+ if (!sender.publicKey) {
+ sender.publicKey = transaction.data.senderPublicKey;
+ this.walletManager.reindex(sender);
+ }
+
+ const dbTransaction = await this.getTransaction(transaction.data.id);
+
+ return sender.canApply(transaction.data, []) && !dbTransaction;
+ }
+
+ private async calcPreviousActiveDelegates(round: number) {
+ // TODO: cache the blocks of the last X rounds
+ this.blocksInCurrentRound = await this.getBlocksForRound(round);
+
+ // Create temp wallet manager from all delegates
+ const tempWalletManager = new WalletManager();
+ tempWalletManager.index(cloneDeep(this.walletManager.allByUsername()));
+
+ // Revert all blocks in reverse order
+ let height = 0;
+ for (let i = this.blocksInCurrentRound.length - 1; i >= 0; i--) {
+ tempWalletManager.revertBlock(this.blocksInCurrentRound[i]);
+ height = this.blocksInCurrentRound[i].data.height;
+ }
+
+ // The first round has no active delegates
+ if (height === 1) {
+ return [];
+ }
+
+ // Assert that the height is the beginning of a round.
+ const { maxDelegates } = roundCalculator.calculateRound(height);
+ assert(height > 1 && height % maxDelegates === 1);
+
+ // Now retrieve the active delegate list from the temporary wallet manager.
+ return tempWalletManager.loadActiveDelegateList(maxDelegates, height);
+ }
+
+ private emitTransactionEvents(transaction) {
+ this.emitter.emit("transaction.applied", transaction.data);
+
+ if (transaction.type === TransactionTypes.DelegateRegistration) {
+ this.emitter.emit("delegate.registered", transaction.data);
+ }
+
+ if (transaction.type === TransactionTypes.DelegateResignation) {
+ this.emitter.emit("delegate.resigned", transaction.data);
+ }
+
+ if (transaction.type === TransactionTypes.Vote) {
+ const vote = transaction.asset.votes[0];
+
+ this.emitter.emit(vote.startsWith("+") ? "wallet.vote" : "wallet.unvote", {
+ delegate: vote,
+ transaction: transaction.data,
+ });
+ }
+ }
+
+ private registerListeners() {
+
+ this.emitter.on("state:started", () => {
+ this.stateStarted = true;
+ });
+
+ this.emitter.on("wallet.created.cold", async coldWallet => {
+ try {
+ const wallet = await this.connection.walletsRepository.findByAddress(coldWallet.address);
+
+ if (wallet) {
+ Object.keys(wallet).forEach(key => {
+ if (["balance"].indexOf(key) !== -1) {
+ return;
+ }
+
+ coldWallet[key] = key !== "voteBalance" ? wallet[key] : new Bignum(wallet[key]);
+ });
+ }
+ } catch (err) {
+ this.logger.error(err);
+ }
+ });
+
+ this.emitter.once("shutdown", async () => {
+ if (!this.spvFinished) {
+ // Prevent dirty wallets to be saved when SPV didn't finish
+ this.walletManager.clear();
+ }
+ });
+ }
+
+}
diff --git a/packages/core-database/src/index.ts b/packages/core-database/src/index.ts
new file mode 100644
index 0000000000..51340494aa
--- /dev/null
+++ b/packages/core-database/src/index.ts
@@ -0,0 +1,6 @@
+export * from "./manager";
+export * from "./database-service-factory";
+export * from "./wallet-manager";
+export * from "./repositories/delegates";
+export * from "./repositories/wallets";
+export * from "./plugin";
diff --git a/packages/core-database/src/manager.ts b/packages/core-database/src/manager.ts
new file mode 100644
index 0000000000..41721bcd62
--- /dev/null
+++ b/packages/core-database/src/manager.ts
@@ -0,0 +1,33 @@
+import { Database } from "@arkecosystem/core-interfaces";
+
+export class DatabaseManager {
+ public connections: { [key: string]: Database.IDatabaseConnection };
+
+ /**
+ * Create a new database manager instance.
+ * @constructor
+ */
+ constructor() {
+ this.connections = {};
+ }
+
+ /**
+ * Get a database connection instance.
+ * @param {String} name
+ * @return {DatabaseConnection}
+ */
+ public connection(name = "default"): Database.IDatabaseConnection {
+ return this.connections[name];
+ }
+
+ /**
+ * Make the database connection instance.
+ * @param {DatabaseConnection} connection
+ * @param {String} name
+ * @return {void}
+ */
+ public async makeConnection(connection: Database.IDatabaseConnection, name = "default"): Promise {
+ this.connections[name] = await connection.make();
+ return this.connection(name);
+ }
+}
diff --git a/packages/core-database/src/plugin.ts b/packages/core-database/src/plugin.ts
new file mode 100644
index 0000000000..b4083c984e
--- /dev/null
+++ b/packages/core-database/src/plugin.ts
@@ -0,0 +1,12 @@
+import { Container, Logger } from "@arkecosystem/core-interfaces";
+import { DatabaseManager } from "./manager";
+
+export const plugin: Container.PluginDescriptor = {
+ pkg: require("../package.json"),
+ alias: "databaseManager",
+ async register(container: Container.IContainer, options) {
+ container.resolvePlugin("logger").info("Starting Database Manager");
+
+ return new DatabaseManager();
+ },
+};
diff --git a/packages/core-database/src/repositories/delegates.ts b/packages/core-database/src/repositories/delegates.ts
new file mode 100644
index 0000000000..f273070024
--- /dev/null
+++ b/packages/core-database/src/repositories/delegates.ts
@@ -0,0 +1,126 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import { delegateCalculator } from "@arkecosystem/core-utils";
+import orderBy from "lodash/orderBy";
+import limitRows from "./utils/limit-rows";
+
+export class DelegatesRepository implements Database.IDelegatesBusinessRepository {
+
+ /**
+ * Create a new delegate repository instance.
+ * @param databaseServiceProvider
+ */
+ public constructor(private databaseServiceProvider : () => Database.IDatabaseService) {}
+
+ /**
+ * Get all local delegates.
+ */
+ public getLocalDelegates() {
+ // TODO: What's the diff between this and just calling 'allByUsername'
+ return this.databaseServiceProvider().walletManager.allByAddress().filter(wallet => !!wallet.username);
+ }
+
+ /**
+ * Find all delegates.
+ * @param {Object} params
+ * @return {Object}
+ */
+ public findAll(params: Database.IParameters = {}) {
+ const delegates = this.getLocalDelegates();
+
+ const [iteratee, order] = this.__orderBy(params);
+
+ return {
+ rows: limitRows(orderBy(delegates, iteratee, order as "desc" | "asc"), params),
+ count: delegates.length,
+ };
+ }
+
+ /**
+ * Search all delegates.
+ * TODO Currently it searches by username only
+ * @param {Object} [params]
+ * @param {String} [params.username] - Search by username
+ */
+ public search(params : Database.IParameters) {
+ let delegates = this.getLocalDelegates();
+ if (params.hasOwnProperty("username")) {
+ delegates = delegates.filter(delegate => delegate.username.indexOf(params.username as string) > -1);
+ }
+
+ if (params.orderBy) {
+ const orderByField = params.orderBy.split(":")[0];
+ const orderByDirection = params.orderBy.split(":")[1] || "desc";
+
+ delegates = delegates.sort((a, b) => {
+ if (orderByDirection === "desc" && a[orderByField] < b[orderByField]) {
+ return -1;
+ }
+
+ if (orderByDirection === "asc" && a[orderByField] > b[orderByField]) {
+ return 1;
+ }
+
+ return 0;
+ });
+ }
+
+ return {
+ rows: limitRows(delegates, params),
+ count: delegates.length,
+ };
+ }
+
+ /**
+ * Find a delegate.
+ * @param {String} id
+ * @return {Object}
+ */
+ public findById(id) {
+ return this.getLocalDelegates().find(a => a.address === id || a.publicKey === id || a.username === id);
+ }
+
+ /**
+ * Find all active delegates at height.
+ * @param {Number} height
+ * @return {Array}
+ */
+ public async getActiveAtHeight(height: number) {
+ const delegates = await this.databaseServiceProvider().getActiveDelegates(height);
+
+ return delegates.map(delegate => {
+ const wallet = this.databaseServiceProvider().wallets.findById(delegate.publicKey);
+
+ return {
+ username: wallet.username,
+ approval: delegateCalculator.calculateApproval(delegate, height),
+ productivity: delegateCalculator.calculateProductivity(wallet),
+ };
+ });
+ }
+
+ public __orderBy(params): string[] {
+ if (!params.orderBy) {
+ return ["rate", "asc"];
+ }
+
+ const orderByMapped = params.orderBy.split(":").map(p => p.toLowerCase());
+ if (orderByMapped.length !== 2 || ["desc", "asc"].includes(orderByMapped[1]) !== true) {
+ return ["rate", "asc"];
+ }
+
+ return [this.__manipulateIteratee(orderByMapped[0]), orderByMapped[1]];
+ }
+
+ public __manipulateIteratee(iteratee): any {
+ switch (iteratee) {
+ case "rank":
+ return "rate";
+ case "productivity":
+ return delegateCalculator.calculateProductivity;
+ case "approval":
+ return delegateCalculator.calculateApproval;
+ default:
+ return iteratee;
+ }
+ }
+}
diff --git a/packages/core-database/src/repositories/utils/filter-rows.ts b/packages/core-database/src/repositories/utils/filter-rows.ts
new file mode 100644
index 0000000000..0cf7962b95
--- /dev/null
+++ b/packages/core-database/src/repositories/utils/filter-rows.ts
@@ -0,0 +1,74 @@
+/**
+ * Filter an Array of Objects based on the given parameters.
+ * @param {Array} rows
+ * @param {Object} params
+ * @param {Object} filters
+ * @return {Array}
+ */
+export = (rows: T[], params, filters) =>
+ rows.filter(item => {
+ if (filters.hasOwnProperty("exact")) {
+ for (const elem of filters.exact) {
+ if (params[elem] && item[elem] !== params[elem]) {
+ return false;
+ }
+ }
+ }
+
+ if (filters.hasOwnProperty("between")) {
+ for (const elem of filters.between) {
+ if (!params[elem]) {
+ continue;
+ }
+
+ if (
+ !params[elem].hasOwnProperty("from") &&
+ !params[elem].hasOwnProperty("to") &&
+ item[elem] !== params[elem]
+ ) {
+ return false;
+ }
+
+ if (params[elem].hasOwnProperty("from") || params[elem].hasOwnProperty("to")) {
+ let isMoreThan = true;
+ let isLessThan = true;
+
+ if (params[elem].hasOwnProperty("from")) {
+ isMoreThan = item[elem] >= params[elem].from;
+ }
+
+ if (params[elem].hasOwnProperty("to")) {
+ isLessThan = item[elem] <= params[elem].to;
+ }
+
+ return isMoreThan && isLessThan;
+ }
+ }
+ }
+
+ if (filters.hasOwnProperty("in")) {
+ for (const elem of filters.in) {
+ if (params[elem] && Array.isArray(params[elem])) {
+ return params[elem].indexOf(item[elem]) > -1;
+ }
+ }
+ }
+
+ // NOTE: it was used to filter by `votes`, but that field was rejected and
+ // replaced by `vote`. This filter is kept here just in case
+ if (filters.hasOwnProperty("any")) {
+ for (const elem of filters.any) {
+ if (params[elem] && item[elem]) {
+ if (Array.isArray(params[elem])) {
+ if (item[elem].every(a => params[elem].indexOf(a) === -1)) {
+ return false;
+ }
+ } else {
+ throw new Error('Fitering by "any" requires an Array');
+ }
+ }
+ }
+ }
+
+ return true;
+ });
diff --git a/packages/core-database/src/repositories/utils/limit-rows.ts b/packages/core-database/src/repositories/utils/limit-rows.ts
new file mode 100644
index 0000000000..169521762c
--- /dev/null
+++ b/packages/core-database/src/repositories/utils/limit-rows.ts
@@ -0,0 +1,14 @@
+import { Database } from "@arkecosystem/core-interfaces";
+/**
+ * Return some rows by an offset and a limit.
+ */
+export = (rows: T[], params: Database.IParameters) => {
+ if (params.offset || params.limit) {
+ const offset = params.offset || 0;
+ const limit = params.limit ? offset + params.limit : rows.length;
+
+ return rows.slice(offset, limit);
+ }
+
+ return rows;
+};
diff --git a/packages/core-database/src/repositories/wallets.ts b/packages/core-database/src/repositories/wallets.ts
new file mode 100644
index 0000000000..1df7856c1c
--- /dev/null
+++ b/packages/core-database/src/repositories/wallets.ts
@@ -0,0 +1,121 @@
+import { Database } from "@arkecosystem/core-interfaces";
+import orderBy from "lodash/orderBy";
+import filterRows from "./utils/filter-rows";
+import limitRows from "./utils/limit-rows";
+
+export class WalletsRepository implements Database.IWalletsBusinessRepository {
+ /**
+ * Create a new wallet repository instance.
+     * @param databaseServiceProvider
+ */
+ public constructor(private databaseServiceProvider : () => Database.IDatabaseService) {}
+
+ /**
+ * Get all local wallets.
+ * @return {Array}
+ */
+ public all() {
+ return this.databaseServiceProvider().walletManager.allByAddress();
+ }
+
+ /**
+ * Find all wallets.
+ * @param {{ orderBy?: string }} params
+ * @return {Object}
+ */
+ public findAll(params: Database.IParameters = {}) {
+ const wallets = this.all();
+
+ const [iteratee, order] = params.orderBy ? params.orderBy.split(":") : ["rate", "asc"];
+
+ return {
+ rows: limitRows(orderBy(wallets, iteratee, order as "desc" | "asc"), params),
+ count: wallets.length,
+ };
+ }
+
+ /**
+ * Find all wallets for the given vote.
+ * @param {String} publicKey
+ * @param {Object} params
+ * @return {Object}
+ */
+ public findAllByVote(publicKey: string, params: Database.IParameters = {}) {
+ const wallets = this.all().filter(wallet => wallet.vote === publicKey);
+
+ return {
+ rows: limitRows(wallets, params),
+ count: wallets.length,
+ };
+ }
+
+ /**
+ * Find a wallet by address, public key or username.
+ */
+ public findById(id: string) {
+ return this.all().find(wallet => wallet.address === id || wallet.publicKey === id || wallet.username === id);
+ }
+
+ /**
+ * Count all wallets.
+ */
+ public count() {
+ return this.all().length;
+ }
+
+ /**
+ * Find all wallets sorted by balance.
+ */
+ public top(params: Database.IParameters = {}) {
+ const wallets = Object.values(this.all()).sort((a: any, b: any) => +b.balance.minus(a.balance).toFixed());
+
+ return {
+ rows: limitRows(wallets, params),
+ count: wallets.length,
+ };
+ }
+
+ /**
+ * Search all wallets.
+ * @param {Object} [params]
+ * @param {Number} [params.limit] - Limit the number of results
+ * @param {Number} [params.offset] - Skip some results
+ * @param {Array} [params.orderBy] - Order of the results
+ * @param {String} [params.address] - Search by address
+ * @param {Array} [params.addresses] - Search by several addresses
+ * @param {String} [params.publicKey] - Search by publicKey
+ * @param {String} [params.secondPublicKey] - Search by secondPublicKey
+ * @param {String} [params.username] - Search by username
+ * @param {String} [params.vote] - Search by vote
+ * @param {Object} [params.balance] - Search by balance
+ * @param {Number} [params.balance.from] - Search by balance (minimum)
+ * @param {Number} [params.balance.to] - Search by balance (maximum)
+ * @param {Object} [params.voteBalance] - Search by voteBalance
+ * @param {Number} [params.voteBalance.from] - Search by voteBalance (minimum)
+ * @param {Number} [params.voteBalance.to] - Search by voteBalance (maximum)
+ * @return {Object}
+ */
+ public search(params: T) {
+ const query: any = {
+ exact: ["address", "publicKey", "secondPublicKey", "username", "vote"],
+ between: ["balance", "voteBalance"],
+ };
+
+ if (params.addresses) {
+ // Use the `in` filter instead of `exact` for the `address` field
+ if (!params.address) {
+ params.address = params.addresses;
+ query.exact.shift();
+ query.in = ["address"];
+ }
+ delete params.addresses;
+ }
+
+ const wallets = filterRows(this.all(), params, query);
+
+ return {
+ rows: limitRows(wallets, params),
+ count: wallets.length,
+ };
+ }
+}
diff --git a/packages/core-database/src/wallet-manager.ts b/packages/core-database/src/wallet-manager.ts
new file mode 100644
index 0000000000..886d5f1a4d
--- /dev/null
+++ b/packages/core-database/src/wallet-manager.ts
@@ -0,0 +1,553 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, Logger } from "@arkecosystem/core-interfaces";
+import { roundCalculator } from "@arkecosystem/core-utils";
+import { Bignum, constants, crypto, formatArktoshi, isException, models } from "@arkecosystem/crypto";
+import pluralize from "pluralize";
+
+const { Wallet } = models;
+const { TransactionTypes } = constants;
+
+export class WalletManager implements Database.IWalletManager {
+ public logger = app.resolvePlugin("logger");
+ public config = app.getConfig();
+
+ public networkId: number;
+ public byAddress: { [key: string]: any };
+ public byPublicKey: { [key: string]: any };
+ public byUsername: { [key: string]: any };
+
+ /**
+ * Create a new wallet manager instance.
+ * @constructor
+ */
+ constructor() {
+ this.networkId = this.config ? this.config.get("network.pubKeyHash") : 0x17;
+ this.reset();
+ }
+
+ public allByAddress(): models.Wallet[] {
+ return Object.values(this.byAddress);
+ }
+
+ /**
+ * Get all wallets by publicKey.
+ */
+ public allByPublicKey(): models.Wallet[] {
+ return Object.values(this.byPublicKey);
+ }
+
+ /**
+ * Get all wallets by username.
+ * @return {Array}
+ */
+ public allByUsername(): models.Wallet[] {
+ return Object.values(this.byUsername);
+ }
+
+ /**
+ * Find a wallet by the given address.
+ */
+ public findByAddress(address: string): models.Wallet {
+ if (!this.byAddress[address]) {
+ this.byAddress[address] = new Wallet(address);
+ }
+
+ return this.byAddress[address];
+ }
+
+ /**
+     * Checks if wallet exists in wallet manager
+ * @param {String} key can be publicKey or address of wallet
+ */
+ public exists(key: string) {
+ if (this.byPublicKey[key]) {
+ return true;
+ }
+
+ return !!this.byAddress[key];
+ }
+
+ /**
+ * Find a wallet by the given public key.
+ * @param {String} publicKey
+ * @return {Wallet}
+ */
+ public findByPublicKey(publicKey: string): models.Wallet {
+ if (!this.byPublicKey[publicKey]) {
+ const address = crypto.getAddress(publicKey, this.networkId);
+
+ const wallet = this.findByAddress(address);
+ wallet.publicKey = publicKey;
+ this.byPublicKey[publicKey] = wallet;
+ }
+
+ return this.byPublicKey[publicKey];
+ }
+
+ /**
+ * Find a wallet by the given username.
+ * @param {String} username
+ * @return {Wallet}
+ */
+ public findByUsername(username: string): models.Wallet {
+ return this.byUsername[username];
+ }
+
+ /**
+ * Set wallet by address.
+ * @param {String} address
+ * @param {Wallet} wallet
+ */
+ public setByAddress(address, wallet) {
+ this.byAddress[address] = wallet;
+ }
+
+ /**
+ * Set wallet by publicKey.
+ * @param {String} publicKey
+ * @param {Wallet} wallet
+ */
+ public setByPublicKey(publicKey, wallet) {
+ this.byPublicKey[publicKey] = wallet;
+ }
+
+ /**
+ * Set wallet by username.
+ * @param {String} username
+ * @param {Wallet} wallet
+ */
+ public setByUsername(username, wallet) {
+ this.byUsername[username] = wallet;
+ }
+
+ /**
+ * Remove wallet by address.
+ * @param {String} address
+ */
+ public forgetByAddress(address) {
+ delete this.byAddress[address];
+ }
+
+ /**
+ * Remove wallet by publicKey.
+ * @param {String} publicKey
+ */
+ public forgetByPublicKey(publicKey) {
+ delete this.byPublicKey[publicKey];
+ }
+
+ /**
+ * Remove wallet by username.
+ * @param {String} username
+ */
+ public forgetByUsername(username) {
+ delete this.byUsername[username];
+ }
+
+ /**
+ * Index the given wallets.
+ * @param {Array} wallets
+ * @return {void}
+ */
+ public index(wallets) {
+ for (const wallet of wallets) {
+ this.reindex(wallet);
+ }
+ }
+
+ /**
+ * Reindex the given wallet.
+ * @param {Wallet} wallet
+ * @return {void}
+ */
+ public reindex(wallet: models.Wallet) {
+ if (wallet.address) {
+ this.byAddress[wallet.address] = wallet;
+ }
+
+ if (wallet.publicKey) {
+ this.byPublicKey[wallet.publicKey] = wallet;
+ }
+
+ if (wallet.username) {
+ this.byUsername[wallet.username] = wallet;
+ }
+ }
+
+ public clear() {
+ Object.values(this.byAddress).forEach(wallet => {
+ wallet.dirty = false;
+ });
+ }
+
+ /**
+ * Load a list of all active delegates.
+ * @param {Number} maxDelegates
+ * @param height
+ * @return {Array}
+ */
+ public loadActiveDelegateList(maxDelegates: number, height?: number): any[] {
+ if (height > 1 && height % maxDelegates !== 1) {
+ throw new Error("Trying to build delegates outside of round change");
+ }
+
+ const { round } = roundCalculator.calculateRound(height, maxDelegates);
+ const delegatesWallets = this.allByUsername();
+
+ if (delegatesWallets.length < maxDelegates) {
+ throw new Error(
+ `Expected to find ${maxDelegates} delegates but only found ${
+ delegatesWallets.length
+ }. This indicates an issue with the genesis block & delegates.`,
+ );
+ }
+
+ const equalVotesMap = new Map();
+
+ const delegates = delegatesWallets
+ .sort((a, b) => {
+ const diff = b.voteBalance.comparedTo(a.voteBalance);
+
+ if (diff === 0) {
+ if (!equalVotesMap.has(a.voteBalance.toFixed())) {
+ equalVotesMap.set(a.voteBalance.toFixed(), new Set());
+ }
+
+ const set = equalVotesMap.get(a.voteBalance.toFixed());
+ set.add(a);
+ set.add(b);
+
+ if (a.publicKey === b.publicKey) {
+ throw new Error(
+ `The balance and public key of both delegates are identical! Delegate "${
+ a.username
+ }" appears twice in the list.`,
+ );
+ }
+
+ return a.publicKey.localeCompare(b.publicKey, "en");
+ }
+
+ return diff;
+ })
+ .map((delegate, i) => {
+ const rate = i + 1;
+ this.byUsername[delegate.username].rate = rate;
+ return { ...{ round }, ...delegate, rate };
+ })
+ .slice(0, maxDelegates);
+
+ for (const [voteBalance, set] of equalVotesMap.entries()) {
+ const values: any[] = Array.from(set.values());
+ if (delegates.includes(values[0])) {
+ const mapped = values.map(v => `${v.username} (${v.publicKey})`);
+ this.logger.warn(
+ `Delegates ${JSON.stringify(mapped, null, 4)} have a matching vote balance of ${formatArktoshi(
+ voteBalance,
+ )}`,
+ );
+ }
+ }
+
+ this.logger.debug(`Loaded ${delegates.length} active ${pluralize("delegate", delegates.length)}`);
+
+ return delegates;
+ }
+
+ /**
+ * Build vote balances of all delegates.
+ * NOTE: Only called during SPV.
+ * @return {void}
+ */
+ public buildVoteBalances() {
+ Object.values(this.byPublicKey).forEach(voter => {
+ if (voter.vote) {
+ const delegate = this.byPublicKey[voter.vote];
+ delegate.voteBalance = delegate.voteBalance.plus(voter.balance);
+ }
+ });
+ }
+
+ /**
+ * Remove non-delegate wallets that have zero (0) balance from memory.
+ * @return {void}
+ */
+ public purgeEmptyNonDelegates() {
+ Object.values(this.byPublicKey).forEach(wallet => {
+ if (this.canBePurged(wallet)) {
+ delete this.byPublicKey[wallet.publicKey];
+ delete this.byAddress[wallet.address];
+ }
+ });
+ }
+
+ /**
+ * Apply the given block to a delegate.
+ * @param {Block} block
+ * @return {void}
+ */
+ public applyBlock(block: models.Block) {
+ const generatorPublicKey = block.data.generatorPublicKey;
+
+ let delegate = this.byPublicKey[block.data.generatorPublicKey];
+
+ if (!delegate) {
+ const generator = crypto.getAddress(generatorPublicKey, this.networkId);
+
+ if (block.data.height === 1) {
+ delegate = new Wallet(generator);
+ delegate.publicKey = generatorPublicKey;
+
+ this.reindex(delegate);
+ } else {
+ this.logger.debug(`Delegate by address: ${this.byAddress[generator]}`);
+
+ if (this.byAddress[generator]) {
+                this.logger.info("This looks like a bug, please report :bug:");
+ }
+
+ throw new Error(`Could not find delegate with publicKey ${generatorPublicKey}`);
+ }
+ }
+
+ const appliedTransactions = [];
+
+ try {
+ block.transactions.forEach(transaction => {
+ this.applyTransaction(transaction);
+ appliedTransactions.push(transaction);
+ });
+
+ const applied = delegate.applyBlock(block.data);
+
+ // If the block has been applied to the delegate, the balance is increased
+ // by reward + totalFee. In which case the vote balance of the
+ // delegate's delegate has to be updated.
+ if (applied && delegate.vote) {
+ const increase = (block.data.reward as Bignum).plus(block.data.totalFee);
+ const votedDelegate = this.byPublicKey[delegate.vote];
+ votedDelegate.voteBalance = votedDelegate.voteBalance.plus(increase);
+ }
+ } catch (error) {
+ this.logger.error("Failed to apply all transactions in block - reverting previous transactions");
+ // Revert the applied transactions from last to first
+ for (let i = appliedTransactions.length - 1; i >= 0; i--) {
+ this.revertTransaction(appliedTransactions[i]);
+ }
+
+ // TODO: should revert the delegate applyBlock ?
+ // TBC: whatever situation `delegate.applyBlock(block.data)` is never applied
+
+ throw error;
+ }
+ }
+
+ /**
+ * Remove the given block from a delegate.
+ * @param {Block} block
+ * @return {void}
+ */
+ public revertBlock(block: models.Block) {
+ const delegate = this.byPublicKey[block.data.generatorPublicKey];
+
+ if (!delegate) {
+ app.forceExit(
+ `Failed to lookup generator '${block.data.generatorPublicKey}' of block '${block.data.id}'. :skull:`,
+ );
+ }
+
+ const revertedTransactions = [];
+
+ try {
+ // Revert the transactions from last to first
+ for (let i = block.transactions.length - 1; i >= 0; i--) {
+ const transaction = block.transactions[i];
+ this.revertTransaction(transaction);
+ revertedTransactions.push(transaction);
+ }
+
+ const reverted = delegate.revertBlock(block.data);
+
+ // If the block has been reverted, the balance is decreased
+ // by reward + totalFee. In which case the vote balance of the
+ // delegate's delegate has to be updated.
+ if (reverted && delegate.vote) {
+ const decrease = (block.data.reward as Bignum).plus(block.data.totalFee);
+ const votedDelegate = this.byPublicKey[delegate.vote];
+ votedDelegate.voteBalance = votedDelegate.voteBalance.minus(decrease);
+ }
+ } catch (error) {
+ this.logger.error(error.stack);
+
+ revertedTransactions.reverse().forEach(transaction => this.applyTransaction(transaction));
+
+ throw error;
+ }
+ }
+
+ /**
+ * Apply the given transaction to a delegate.
+ * @param {Transaction} transaction
+ * @return {Transaction}
+ */
+ public applyTransaction(transaction: models.Transaction) {
+ const { data } = transaction;
+ const { type, asset, recipientId, senderPublicKey } = data;
+
+ const sender = this.findByPublicKey(senderPublicKey);
+ const recipient = this.findByAddress(recipientId);
+ const errors = [];
+
+ // specific verifications / adjustments depending on transaction type
+ if (type === TransactionTypes.DelegateRegistration && this.byUsername[asset.delegate.username.toLowerCase()]) {
+ this.logger.error(
+ `Can't apply transaction ${
+ data.id
+ }: delegate name '${asset.delegate.username.toLowerCase()}' already taken.`,
+ );
+ throw new Error(`Can't apply transaction ${data.id}: delegate name already taken.`);
+
+ // NOTE: We use the vote public key, because vote transactions
+ // have the same sender and recipient
+ } else if (type === TransactionTypes.Vote && !this.isDelegate(asset.votes[0].slice(1))) {
+ this.logger.error(`Can't apply vote transaction ${data.id}: delegate ${asset.votes[0]} does not exist.`);
+ throw new Error(`Can't apply transaction ${data.id}: delegate ${asset.votes[0]} does not exist.`);
+ } else if (type === TransactionTypes.SecondSignature) {
+ data.recipientId = "";
+ }
+
+ // handle exceptions / verify that we can apply the transaction to the sender
+ if (isException(data)) {
+ this.logger.warn(`Transaction ${data.id} forcibly applied because it has been added as an exception.`);
+ } else if (!sender.canApply(data, errors)) {
+ this.logger.error(
+ `Can't apply transaction id:${data.id} from sender:${sender.address} due to ${JSON.stringify(errors)}`,
+ );
+ this.logger.debug(`Audit: ${JSON.stringify(sender.auditApply(data), null, 2)}`);
+ throw new Error(`Can't apply transaction ${data.id}`);
+ }
+
+ sender.applyTransactionToSender(data);
+
+ if (type === TransactionTypes.DelegateRegistration) {
+ this.reindex(sender);
+ }
+
+ if (recipient && type === TransactionTypes.Transfer) {
+ recipient.applyTransactionToRecipient(data);
+ }
+
+ this._updateVoteBalances(sender, recipient, data);
+
+ return transaction;
+ }
+
+ /**
+ * Updates the vote balances of the respective delegates of sender and recipient.
+ * If the transaction is not a vote...
+ * 1. fee + amount is removed from the sender's delegate vote balance
+ * 2. amount is added to the recipient's delegate vote balance
+ *
+ * in case of a vote...
+ * 1. the full sender balance is added to the sender's delegate vote balance
+ *
+ * If revert is set to true, the operations are reversed (plus -> minus, minus -> plus).
+ * @param {Wallet} sender
+ * @param {Wallet} recipient
+ * @param {Transaction} transaction
+ * @param {Boolean} revert
+ * @return {Transaction}
+ */
+ public _updateVoteBalances(sender, recipient, transaction, revert = false) {
+ // TODO: multipayment?
+ if (transaction.type !== TransactionTypes.Vote) {
+ // Update vote balance of the sender's delegate
+ if (sender.vote) {
+ const delegate = this.findByPublicKey(sender.vote);
+ const total = transaction.amount.plus(transaction.fee);
+ delegate.voteBalance = revert ? delegate.voteBalance.plus(total) : delegate.voteBalance.minus(total);
+ }
+
+ // Update vote balance of recipient's delegate
+ if (recipient && recipient.vote) {
+ const delegate = this.findByPublicKey(recipient.vote);
+ delegate.voteBalance = revert
+ ? delegate.voteBalance.minus(transaction.amount)
+ : delegate.voteBalance.plus(transaction.amount);
+ }
+ } else {
+ const vote = transaction.asset.votes[0];
+ const delegate = this.findByPublicKey(vote.substr(1));
+
+ if (vote.startsWith("+")) {
+ delegate.voteBalance = revert
+ ? delegate.voteBalance.minus(sender.balance)
+ : delegate.voteBalance.plus(sender.balance);
+ } else {
+ delegate.voteBalance = revert
+ ? delegate.voteBalance.plus(sender.balance.plus(transaction.fee))
+ : delegate.voteBalance.minus(sender.balance.plus(transaction.fee));
+ }
+ }
+ }
+
+ /**
+ * Remove the given transaction from a delegate.
+ * @param {Transaction} transaction
+ * @return {Transaction}
+ */
+ public revertTransaction(transaction: models.Transaction) {
+ const { type, data } = transaction;
+ const sender = this.findByPublicKey(data.senderPublicKey); // Should exist
+ const recipient = this.byAddress[data.recipientId];
+
+ sender.revertTransactionForSender(data);
+
+ // removing the wallet from the delegates index
+ if (type === TransactionTypes.DelegateRegistration) {
+ delete this.byUsername[data.asset.delegate.username];
+ }
+
+ if (recipient && type === TransactionTypes.Transfer) {
+ recipient.revertTransactionForRecipient(data);
+ }
+
+ // Revert vote balance updates
+ this._updateVoteBalances(sender, recipient, data, true);
+
+ return data;
+ }
+
+ /**
+ * Checks if a given publicKey is a registered delegate
+ * @param {String} publicKey
+ */
+ public isDelegate(publicKey: string) {
+ const delegateWallet = this.byPublicKey[publicKey];
+
+ if (delegateWallet && delegateWallet.username) {
+ return !!this.byUsername[delegateWallet.username];
+ }
+
+ return false;
+ }
+
+ /**
+ * Determine if the wallet can be removed from memory.
+ * @param {Object} wallet
+ * @return {Boolean}
+ */
+ public canBePurged(wallet) {
+ return wallet.balance.isZero() && !wallet.secondPublicKey && !wallet.multisignature && !wallet.username;
+ }
+
+ /**
+ * Reset the wallets index.
+ * @return {void}
+ */
+ public reset() {
+ this.byAddress = {};
+ this.byPublicKey = {};
+ this.byUsername = {};
+ }
+}
diff --git a/packages/core-database/tsconfig.json b/packages/core-database/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-database/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-debugger-cli/CHANGELOG.md b/packages/core-debugger-cli/CHANGELOG.md
deleted file mode 100644
index 95a15b0628..0000000000
--- a/packages/core-debugger-cli/CHANGELOG.md
+++ /dev/null
@@ -1,26 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.0 - 2018-12-03
-
-### Added
-
-- Retrieve identities
-- Verify second signature
-
-### Changed
-
-- Change `transaction.serialized` from `Buffer` to hex
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-## 0.1.0 - 2018-10-02
-
-### Added
-
-- initial release
diff --git a/packages/core-debugger-cli/LICENSE b/packages/core-debugger-cli/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-debugger-cli/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-debugger-cli/README.md b/packages/core-debugger-cli/README.md
index 5560b6d780..bc6530e60b 100644
--- a/packages/core-debugger-cli/README.md
+++ b/packages/core-debugger-cli/README.md
@@ -14,8 +14,9 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [Joshua Noack](https://github.com/supaiku0)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-debugger-cli/__tests__/__fixtures__/block.json b/packages/core-debugger-cli/__tests__/__fixtures__/block.json
index 85acc88b6f..d1228aa285 100644
--- a/packages/core-debugger-cli/__tests__/__fixtures__/block.json
+++ b/packages/core-debugger-cli/__tests__/__fixtures__/block.json
@@ -1,140 +1,140 @@
{
- "data": {
- "id": "7176646138626297930",
- "version": 0,
- "height": 2243161,
- "timestamp": 24760440,
- "previousBlock": "3112633353705641986",
- "numberOfTransactions": 7,
- "totalAmount": "3890300",
- "totalFee": "70000000",
- "reward": "200000000",
- "payloadLength": 224,
- "payloadHash": "3784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282",
- "generatorPublicKey": "020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a325",
- "blockSignature": "3045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29",
- "transactions": [
- {
- "type": 0,
- "amount": 555760,
- "fee": 10000000,
- "recipientId": "DB4gFuDztmdGALMb8i1U4Z4R5SktxpNTAY",
- "timestamp": 24760418,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "304402204f12469157b19edd06ba25fcad3d4a5ef5b057c23f9e02de4641e6f8eef0553e022010121ab282f83efe1043de9c16bbf2c6845a03684229a0d7c965ffb9abdfb978",
- "signSignature": "30450221008327862f0b9178d6665f7d6674978c5caf749649558d814244b1c66cdf945c40022015918134ef01fed3fe2a2efde3327917731344332724522c75c2799a14f78717",
- "id": "170543154a3b79459cbaa529f9f62b6f1342682799eb549dbf09fcca2d1f9c11",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555750,
- "fee": 10000000,
- "recipientId": "DGExsNogZR7JFa2656ZFP9TMWJYJh5djzQ",
- "timestamp": 24760416,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "304402205f82feb8c5d1d79c565c2ff7badb93e4c9827b132d135dda11cb25427d4ef8ac02205ff136f970533c4ec4c7d0cd1ea7e02d7b62629b66c6c93265f608d7f2389727",
- "signSignature": "304402207e912031fcc700d8a55fbc415993302a0d8e6aea128397141b640b6dba52331702201fd1ad3984e42af44f548907add6cb7ad72ca0070c8cc1d8dc9bbda208c56bd9",
- "id": "1da153f37eceda233ff1b407ac18e47b3cae47c14cdcd5297d929618a916c4a7",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555770,
- "fee": 10000000,
- "recipientId": "DHGK5np6LuMMErfRfC5CmjpGu3ME85c25n",
- "timestamp": 24760420,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "304502210083216e6969e068770e6d2fe5c244881002309df84d20290ddf3f858967ed010202202a479b3da5080ea475d310ff13494654b42db75886a8808bd211b4bdb9146a7a",
- "signSignature": "3045022100e1dcab3406bbeb968146a4a391909ce41df9b71592a753b001e7c2ee1d382c5102202a74aeafd4a152ec61854636fbae829c41f1416c1e0637a0809408394973099f",
- "id": "1e255f07dc25ce22d900ea81663c8f00d05a7b7c061e6fc3c731b05d642fa0b9",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555750,
- "fee": 10000000,
- "recipientId": "D7pcLJNGe197ibmWEmT8mM9KKU1htrcDyW",
- "timestamp": 24760417,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "3045022100cd4fa9855227be11e17201419dacfbbd5d9946df8d6792a9488160025693821402207fb83969bad6a26959f437b5bb88e255b0a48eb04964d0c0d29f7ee94bd15e11",
- "signSignature": "304402205f50c2991a17743d17ffbb09159cadc35a3f848044261842879ccf5be9d81c5e022023bf21c32fb6e94494104f15f8d3a942ab120d0abd6fb4c93790b68e1b307a79",
- "id": "66336c61d6ec623f8a1d2fd156a0fac16a4fe93bb3fba337859355c2119923a8",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555760,
- "fee": 10000000,
- "recipientId": "DD4yhwzryQdNGqKtezmycToQv63g27Tqqq",
- "timestamp": 24760418,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "30450221009c792062e13399ac6756b2e9f137194d06e106360ac0f3e24e55c7249cee0b3602205dc1d9c76d0451d1cb5a2396783a13e6d2d790ccfd49291e3d0a78349f7ea0e8",
- "signSignature": "30440220083ba8a9af49b8be6e93794d71ec43ffc96a158375810e5d9f2478e71655315b0220278402ecaa1d224dab9f0f3b28295bbaea339c85c7400edafdc49df87439fc64",
- "id": "78db36f7d79f51c67d7210ee3819dfb8d0d47b16a7484ebf55c5a055b17209a3",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555760,
- "fee": 10000000,
- "recipientId": "D5LiYGXL5keycWuTF6AFFwSRc6Mt4uEHMu",
- "timestamp": 24760419,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "3044022063c65263e42be02bd9831b375c1d76a88332f00ed0557ecc1e7d2375ca40070902206797b5932c0bad68444beb5a38daa7cadf536ee2144e0d9777b812284d14374e",
- "signSignature": "3045022100b04da6692f75d43229ffd8486c1517e8952d38b4c03dfac38b6b360190a5c33e0220776622e5f09f92a1258b4a011f22181c977b622b8d1bbb2f83b42f4126d00739",
- "id": "83c80bb58777bb43f5037544b44ef69f191d3548fd1b2a00bed368f9f0d694c5",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- },
- {
- "type": 0,
- "amount": 555750,
- "fee": 10000000,
- "recipientId": "DPopNLwMvv4zSjdZnqUk8HFH13Mcb7NbEK",
- "timestamp": 24760416,
- "asset": {},
- "vendorField": "Goose Voter - True Block Weight",
- "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
- "signature": "3045022100d4513c3608c2072e38e7a0e3bb8daf2cd5f7cc6fec9a5570dccd1eda696c591902202ecbbf3c9d0757be7b23c8b1cc6481c51600d158756c47fcb6f4a7f4893e31c4",
- "signSignature": "304402201fed4858d0806dd32220960900a871dd2f60e1f623af75feef9b1034a9a0a46402205a29b27c63fcc3e1ee1e77ecbbf4dd6e7db09901e7a09b9fd490cd68d62392cb",
- "id": "d2faf992fdd5da96d6d15038b6ddb65230338fa2096e45e44da51daad5e2f3ca",
- "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
- "hop": 2,
- "broadcast": false,
- "blockId": "7176646138626297930"
- }
- ]
- },
- "serialized": "0000000078d07901593a22002b324b8b33a85802070000007c5c3b0000000000801d2c040000000000c2eb0b00000000e00000003784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a3253045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29",
- "serializedFull": "0000000078d07901593a22002b324b8b33a85802070000007c5c3b0000000000801d2c040000000000c2eb0b00000000e00000003784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a3253045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29ff000000fe00000000010000ff000000ff000000ff000000ff000000ff011e0062d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e40fad23d21da7a4fd4decb5c49726ea22f5e6bf6304402204f12469157b19edd06ba25fcad3d4a5ef5b057c23f9e02de4641e6f8eef0553e022010121ab282f83efe1043de9c16bbf2c6845a03684229a0d7c965ffb9abdfb97830450221008327862f0b9178d6665f7d6674978c5caf749649558d814244b1c66cdf945c40022015918134ef01fed3fe2a2efde3327917731344332724522c75c2799a14f78717ff011e0060d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001e79c579fb08f448879c22fe965906b4e3b88d02ed304402205f82feb8c5d1d79c565c2ff7badb93e4c9827b132d135dda11cb25427d4ef8ac02205ff136f970533c4ec4c7d0cd1ea7e02d7b62629b66c6c93265f608d7f2389727304402207e912031fcc700d8a55fbc415993302a0d8e6aea128397141b640b6dba52331702201fd1ad3984e42af44f548907add6cb7ad72ca0070c8cc1d8dc9bbda208c56bd9ff011e0064d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874fa7a080000000000000000001e84fee45dde2b11525afe192a2e991d014ff93a36304502210083216e6969e068770e6d2fe5c244881002309df84d20290ddf3f858967ed010202202a479b3da5080ea475d310ff13494654b42db75886a8808bd211b4bdb9146a7a3045022100e1dcab3406bbeb968146a4a391909ce41df9b71592a753b001e7c2ee1d382c5102202a74aeafd4a152ec61854636fbae829c41f1416c1e0637a0809408394973099fff011e0061d079010
265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001e1d69583ede5ee82d220e74bffb36bae2ce762dfb3045022100cd4fa9855227be11e17201419dacfbbd5d9946df8d6792a9488160025693821402207fb83969bad6a26959f437b5bb88e255b0a48eb04964d0c0d29f7ee94bd15e11304402205f50c2991a17743d17ffbb09159cadc35a3f848044261842879ccf5be9d81c5e022023bf21c32fb6e94494104f15f8d3a942ab120d0abd6fb4c93790b68e1b307a79ff011e0062d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e56f9a37a859f4f84e93ce7593e809b15a524db2930450221009c792062e13399ac6756b2e9f137194d06e106360ac0f3e24e55c7249cee0b3602205dc1d9c76d0451d1cb5a2396783a13e6d2d790ccfd49291e3d0a78349f7ea0e830440220083ba8a9af49b8be6e93794d71ec43ffc96a158375810e5d9f2478e71655315b0220278402ecaa1d224dab9f0f3b28295bbaea339c85c7400edafdc49df87439fc64ff011e0063d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e0232a083c16aba4362dddec1b3050ffdd6d43f2e3044022063c65263e42be02bd9831b375c1d76a88332f00ed0557ecc1e7d2375ca40070902206797b5932c0bad68444beb5a38daa7cadf536ee2144e0d9777b812284d14374e3045022100b04da6692f75d43229ffd8486c1517e8952d38b4c03dfac38b6b360190a5c33e0220776622e5f09f92a1258b4a011f22181c977b622b8d1bbb2f83b42f4126d00739ff011e0060d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001eccc4fce0dc95f9951ee40c09a7ae807746cf51403045022100d4513c3608c2072e38e7a0e3bb8daf2cd5f7cc6fec9a5570dccd1eda696c591902202ecbbf3c9d0757be7b23c8b1cc6481c51600d158756c47fcb6f4a7f4893e31c4304402201fed4858d0806dd32220960900a871dd2f60e1f623af75feef9b1034a9a0a46402205a29b27c63fcc3e1ee1e77ecbbf4dd6e7db09901e
7a09b9fd490cd68d62392cb"
+ "data": {
+ "id": "7176646138626297930",
+ "version": 0,
+ "height": 2243161,
+ "timestamp": 24760440,
+ "previousBlock": "3112633353705641986",
+ "numberOfTransactions": 7,
+ "totalAmount": "3890300",
+ "totalFee": "70000000",
+ "reward": "200000000",
+ "payloadLength": 224,
+ "payloadHash": "3784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282",
+ "generatorPublicKey": "020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a325",
+ "blockSignature": "3045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29",
+ "transactions": [
+ {
+ "type": 0,
+ "amount": 555760,
+ "fee": 10000000,
+ "recipientId": "DB4gFuDztmdGALMb8i1U4Z4R5SktxpNTAY",
+ "timestamp": 24760418,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "304402204f12469157b19edd06ba25fcad3d4a5ef5b057c23f9e02de4641e6f8eef0553e022010121ab282f83efe1043de9c16bbf2c6845a03684229a0d7c965ffb9abdfb978",
+ "signSignature": "30450221008327862f0b9178d6665f7d6674978c5caf749649558d814244b1c66cdf945c40022015918134ef01fed3fe2a2efde3327917731344332724522c75c2799a14f78717",
+ "id": "170543154a3b79459cbaa529f9f62b6f1342682799eb549dbf09fcca2d1f9c11",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555750,
+ "fee": 10000000,
+ "recipientId": "DGExsNogZR7JFa2656ZFP9TMWJYJh5djzQ",
+ "timestamp": 24760416,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "304402205f82feb8c5d1d79c565c2ff7badb93e4c9827b132d135dda11cb25427d4ef8ac02205ff136f970533c4ec4c7d0cd1ea7e02d7b62629b66c6c93265f608d7f2389727",
+ "signSignature": "304402207e912031fcc700d8a55fbc415993302a0d8e6aea128397141b640b6dba52331702201fd1ad3984e42af44f548907add6cb7ad72ca0070c8cc1d8dc9bbda208c56bd9",
+ "id": "1da153f37eceda233ff1b407ac18e47b3cae47c14cdcd5297d929618a916c4a7",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555770,
+ "fee": 10000000,
+ "recipientId": "DHGK5np6LuMMErfRfC5CmjpGu3ME85c25n",
+ "timestamp": 24760420,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "304502210083216e6969e068770e6d2fe5c244881002309df84d20290ddf3f858967ed010202202a479b3da5080ea475d310ff13494654b42db75886a8808bd211b4bdb9146a7a",
+ "signSignature": "3045022100e1dcab3406bbeb968146a4a391909ce41df9b71592a753b001e7c2ee1d382c5102202a74aeafd4a152ec61854636fbae829c41f1416c1e0637a0809408394973099f",
+ "id": "1e255f07dc25ce22d900ea81663c8f00d05a7b7c061e6fc3c731b05d642fa0b9",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555750,
+ "fee": 10000000,
+ "recipientId": "D7pcLJNGe197ibmWEmT8mM9KKU1htrcDyW",
+ "timestamp": 24760417,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "3045022100cd4fa9855227be11e17201419dacfbbd5d9946df8d6792a9488160025693821402207fb83969bad6a26959f437b5bb88e255b0a48eb04964d0c0d29f7ee94bd15e11",
+ "signSignature": "304402205f50c2991a17743d17ffbb09159cadc35a3f848044261842879ccf5be9d81c5e022023bf21c32fb6e94494104f15f8d3a942ab120d0abd6fb4c93790b68e1b307a79",
+ "id": "66336c61d6ec623f8a1d2fd156a0fac16a4fe93bb3fba337859355c2119923a8",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555760,
+ "fee": 10000000,
+ "recipientId": "DD4yhwzryQdNGqKtezmycToQv63g27Tqqq",
+ "timestamp": 24760418,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "30450221009c792062e13399ac6756b2e9f137194d06e106360ac0f3e24e55c7249cee0b3602205dc1d9c76d0451d1cb5a2396783a13e6d2d790ccfd49291e3d0a78349f7ea0e8",
+ "signSignature": "30440220083ba8a9af49b8be6e93794d71ec43ffc96a158375810e5d9f2478e71655315b0220278402ecaa1d224dab9f0f3b28295bbaea339c85c7400edafdc49df87439fc64",
+ "id": "78db36f7d79f51c67d7210ee3819dfb8d0d47b16a7484ebf55c5a055b17209a3",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555760,
+ "fee": 10000000,
+ "recipientId": "D5LiYGXL5keycWuTF6AFFwSRc6Mt4uEHMu",
+ "timestamp": 24760419,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "3044022063c65263e42be02bd9831b375c1d76a88332f00ed0557ecc1e7d2375ca40070902206797b5932c0bad68444beb5a38daa7cadf536ee2144e0d9777b812284d14374e",
+ "signSignature": "3045022100b04da6692f75d43229ffd8486c1517e8952d38b4c03dfac38b6b360190a5c33e0220776622e5f09f92a1258b4a011f22181c977b622b8d1bbb2f83b42f4126d00739",
+ "id": "83c80bb58777bb43f5037544b44ef69f191d3548fd1b2a00bed368f9f0d694c5",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ },
+ {
+ "type": 0,
+ "amount": 555750,
+ "fee": 10000000,
+ "recipientId": "DPopNLwMvv4zSjdZnqUk8HFH13Mcb7NbEK",
+ "timestamp": 24760416,
+ "asset": {},
+ "vendorField": "Goose Voter - True Block Weight",
+ "senderPublicKey": "0265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c0",
+ "signature": "3045022100d4513c3608c2072e38e7a0e3bb8daf2cd5f7cc6fec9a5570dccd1eda696c591902202ecbbf3c9d0757be7b23c8b1cc6481c51600d158756c47fcb6f4a7f4893e31c4",
+ "signSignature": "304402201fed4858d0806dd32220960900a871dd2f60e1f623af75feef9b1034a9a0a46402205a29b27c63fcc3e1ee1e77ecbbf4dd6e7db09901e7a09b9fd490cd68d62392cb",
+ "id": "d2faf992fdd5da96d6d15038b6ddb65230338fa2096e45e44da51daad5e2f3ca",
+ "senderId": "DB8LnnQqYvHpG4WkGJ9AJWBYEct7G3yRZg",
+ "hop": 2,
+ "broadcast": false,
+ "blockId": "7176646138626297930"
+ }
+ ]
+ },
+ "serialized": "0000000078d07901593a22002b324b8b33a85802070000007c5c3b0000000000801d2c040000000000c2eb0b00000000e00000003784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a3253045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29",
+ "serializedFull": "0000000078d07901593a22002b324b8b33a85802070000007c5c3b0000000000801d2c040000000000c2eb0b00000000e00000003784b953afcf936bdffd43fdf005b5732b49c1fc6b11e195c364c20b2eb06282020f5df4d2bc736d12ce43af5b1663885a893fade7ee5e62b3cc59315a63e6a3253045022100eee6c37b5e592e99811d588532726353592923f347c701d52912e6d583443e400220277ffe38ad31e216ba0907c4738fed19b2071246b150c72c0a52bae4477ebe29ff000000fe00000000010000ff000000ff000000ff000000ff000000ff011e0062d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e40fad23d21da7a4fd4decb5c49726ea22f5e6bf6304402204f12469157b19edd06ba25fcad3d4a5ef5b057c23f9e02de4641e6f8eef0553e022010121ab282f83efe1043de9c16bbf2c6845a03684229a0d7c965ffb9abdfb97830450221008327862f0b9178d6665f7d6674978c5caf749649558d814244b1c66cdf945c40022015918134ef01fed3fe2a2efde3327917731344332724522c75c2799a14f78717ff011e0060d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001e79c579fb08f448879c22fe965906b4e3b88d02ed304402205f82feb8c5d1d79c565c2ff7badb93e4c9827b132d135dda11cb25427d4ef8ac02205ff136f970533c4ec4c7d0cd1ea7e02d7b62629b66c6c93265f608d7f2389727304402207e912031fcc700d8a55fbc415993302a0d8e6aea128397141b640b6dba52331702201fd1ad3984e42af44f548907add6cb7ad72ca0070c8cc1d8dc9bbda208c56bd9ff011e0064d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874fa7a080000000000000000001e84fee45dde2b11525afe192a2e991d014ff93a36304502210083216e6969e068770e6d2fe5c244881002309df84d20290ddf3f858967ed010202202a479b3da5080ea475d310ff13494654b42db75886a8808bd211b4bdb9146a7a3045022100e1dcab3406bbeb968146a4a391909ce41df9b71592a753b001e7c2ee1d382c5102202a74aeafd4a152ec61854636fbae829c41f1416c1e0637a0809408394973099fff011e0061d079010
265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001e1d69583ede5ee82d220e74bffb36bae2ce762dfb3045022100cd4fa9855227be11e17201419dacfbbd5d9946df8d6792a9488160025693821402207fb83969bad6a26959f437b5bb88e255b0a48eb04964d0c0d29f7ee94bd15e11304402205f50c2991a17743d17ffbb09159cadc35a3f848044261842879ccf5be9d81c5e022023bf21c32fb6e94494104f15f8d3a942ab120d0abd6fb4c93790b68e1b307a79ff011e0062d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e56f9a37a859f4f84e93ce7593e809b15a524db2930450221009c792062e13399ac6756b2e9f137194d06e106360ac0f3e24e55c7249cee0b3602205dc1d9c76d0451d1cb5a2396783a13e6d2d790ccfd49291e3d0a78349f7ea0e830440220083ba8a9af49b8be6e93794d71ec43ffc96a158375810e5d9f2478e71655315b0220278402ecaa1d224dab9f0f3b28295bbaea339c85c7400edafdc49df87439fc64ff011e0063d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874f07a080000000000000000001e0232a083c16aba4362dddec1b3050ffdd6d43f2e3044022063c65263e42be02bd9831b375c1d76a88332f00ed0557ecc1e7d2375ca40070902206797b5932c0bad68444beb5a38daa7cadf536ee2144e0d9777b812284d14374e3045022100b04da6692f75d43229ffd8486c1517e8952d38b4c03dfac38b6b360190a5c33e0220776622e5f09f92a1258b4a011f22181c977b622b8d1bbb2f83b42f4126d00739ff011e0060d079010265c1f6b8c1966a90f3fed7bc32fd4f42238ab4938fdb2a4e7ddd01ae8b58b4c080969800000000001f476f6f736520566f746572202d205472756520426c6f636b20576569676874e67a080000000000000000001eccc4fce0dc95f9951ee40c09a7ae807746cf51403045022100d4513c3608c2072e38e7a0e3bb8daf2cd5f7cc6fec9a5570dccd1eda696c591902202ecbbf3c9d0757be7b23c8b1cc6481c51600d158756c47fcb6f4a7f4893e31c4304402201fed4858d0806dd32220960900a871dd2f60e1f623af75feef9b1034a9a0a46402205a29b27c63fcc3e1ee1e77ecbbf4dd6e7db09901e
7a09b9fd490cd68d62392cb"
}
diff --git a/packages/core-debugger-cli/__tests__/__fixtures__/identities.json b/packages/core-debugger-cli/__tests__/__fixtures__/identities.json
index 2ae9e9e891..71063bafbc 100644
--- a/packages/core-debugger-cli/__tests__/__fixtures__/identities.json
+++ b/packages/core-debugger-cli/__tests__/__fixtures__/identities.json
@@ -1,6 +1,6 @@
{
- "passphrase": "this is a top secret passphrase",
- "publicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
- "privateKey": "d8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712",
- "address": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib"
+ "passphrase": "this is a top secret passphrase",
+ "publicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ "privateKey": "d8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712",
+ "address": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib"
}
diff --git a/packages/core-debugger-cli/__tests__/__fixtures__/transaction-second.json b/packages/core-debugger-cli/__tests__/__fixtures__/transaction-second.json
index 7f6a589dca..8a0ebe0907 100644
--- a/packages/core-debugger-cli/__tests__/__fixtures__/transaction-second.json
+++ b/packages/core-debugger-cli/__tests__/__fixtures__/transaction-second.json
@@ -1,15 +1,15 @@
{
- "data": {
- "type": 0,
- "amount": 200000000,
- "fee": 10000000,
- "recipientId": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
- "timestamp": 41268430,
- "asset": {},
- "senderPublicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
- "signature": "304402206da703bfcc11ec2ccb3f363fa0e23fc64050fdf68e1f1852b7d4a5bb07824166022031ed1d86b586a79f9c1e5010dbc4f4cb36641c62a196536f90b1dfd6be1c9868",
- "signSignature": "304402200759b6f9de5257aa3fcf54b9cd7a426a00af9368b7ea3d5ea2b13a91b97fb277022076e4d2d7deb9bdd8245b2533cab1eeeef72981e18576ef8455a61ee3e6f3fb57",
- "id": "bb8054b6298d659d4b5d655e82de17b3504ba27655ec3d6e35d311f3104b1c43"
- },
- "serialized": "ff011e00ceb47502034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed19280969800000000000000c2eb0b00000000000000001e0995750207ecaf0ccf251c1265b92ad84f553662304402206da703bfcc11ec2ccb3f363fa0e23fc64050fdf68e1f1852b7d4a5bb07824166022031ed1d86b586a79f9c1e5010dbc4f4cb36641c62a196536f90b1dfd6be1c9868304402200759b6f9de5257aa3fcf54b9cd7a426a00af9368b7ea3d5ea2b13a91b97fb277022076e4d2d7deb9bdd8245b2533cab1eeeef72981e18576ef8455a61ee3e6f3fb57"
+ "data": {
+ "type": 0,
+ "amount": 200000000,
+ "fee": 10000000,
+ "recipientId": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
+ "timestamp": 41268430,
+ "asset": {},
+ "senderPublicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ "signature": "304402206da703bfcc11ec2ccb3f363fa0e23fc64050fdf68e1f1852b7d4a5bb07824166022031ed1d86b586a79f9c1e5010dbc4f4cb36641c62a196536f90b1dfd6be1c9868",
+ "signSignature": "304402200759b6f9de5257aa3fcf54b9cd7a426a00af9368b7ea3d5ea2b13a91b97fb277022076e4d2d7deb9bdd8245b2533cab1eeeef72981e18576ef8455a61ee3e6f3fb57",
+ "id": "bb8054b6298d659d4b5d655e82de17b3504ba27655ec3d6e35d311f3104b1c43"
+ },
+ "serialized": "ff011e00ceb47502034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed19280969800000000000000c2eb0b00000000000000001e0995750207ecaf0ccf251c1265b92ad84f553662304402206da703bfcc11ec2ccb3f363fa0e23fc64050fdf68e1f1852b7d4a5bb07824166022031ed1d86b586a79f9c1e5010dbc4f4cb36641c62a196536f90b1dfd6be1c9868304402200759b6f9de5257aa3fcf54b9cd7a426a00af9368b7ea3d5ea2b13a91b97fb277022076e4d2d7deb9bdd8245b2533cab1eeeef72981e18576ef8455a61ee3e6f3fb57"
}
diff --git a/packages/core-debugger-cli/__tests__/__fixtures__/transaction.json b/packages/core-debugger-cli/__tests__/__fixtures__/transaction.json
index 74f4904255..70490d28c9 100644
--- a/packages/core-debugger-cli/__tests__/__fixtures__/transaction.json
+++ b/packages/core-debugger-cli/__tests__/__fixtures__/transaction.json
@@ -1,14 +1,14 @@
{
- "data": {
- "type": 0,
- "amount": 200000000,
- "fee": 10000000,
- "recipientId": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
- "timestamp": 41268326,
- "asset": {},
- "senderPublicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
- "signature": "3044022002994b30e08b58825c8c16ebf2cc693cfe706fb26571674784ead098accc89d702205b79dedc752a84504ecfe4b9e1292997f22260ee4daa102d2d9a61432d93b286",
- "id": "da61c6cba363cc39baa0ca3f9ba2c5db81b9805045bd0b9fc58af07ad4206856"
- },
- "serialized": "ff011e0066b47502034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed19280969800000000000000c2eb0b00000000000000001e0995750207ecaf0ccf251c1265b92ad84f5536623044022002994b30e08b58825c8c16ebf2cc693cfe706fb26571674784ead098accc89d702205b79dedc752a84504ecfe4b9e1292997f22260ee4daa102d2d9a61432d93b286"
+ "data": {
+ "type": 0,
+ "amount": 200000000,
+ "fee": 10000000,
+ "recipientId": "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
+ "timestamp": 41268326,
+ "asset": {},
+ "senderPublicKey": "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ "signature": "3044022002994b30e08b58825c8c16ebf2cc693cfe706fb26571674784ead098accc89d702205b79dedc752a84504ecfe4b9e1292997f22260ee4daa102d2d9a61432d93b286",
+ "id": "da61c6cba363cc39baa0ca3f9ba2c5db81b9805045bd0b9fc58af07ad4206856"
+ },
+ "serialized": "ff011e0066b47502034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed19280969800000000000000c2eb0b00000000000000001e0995750207ecaf0ccf251c1265b92ad84f5536623044022002994b30e08b58825c8c16ebf2cc693cfe706fb26571674784ead098accc89d702205b79dedc752a84504ecfe4b9e1292997f22260ee4daa102d2d9a61432d93b286"
}
diff --git a/packages/core-debugger-cli/__tests__/commands/deserialize.test.js b/packages/core-debugger-cli/__tests__/commands/deserialize.test.js
deleted file mode 100644
index d46609efe5..0000000000
--- a/packages/core-debugger-cli/__tests__/commands/deserialize.test.js
+++ /dev/null
@@ -1,80 +0,0 @@
-const testSubject = require('../../lib/commands/deserialize')
-const fixtureBlock = require('../__fixtures__/block.json')
-const fixtureTransaction = require('../__fixtures__/transaction.json')
-
-describe('Commands - Deserialize', () => {
- it('should be a function', () => {
- expect(testSubject).toBeFunction()
- })
-
- it('should deserialize a block (not-full)', () => {
- const actual = JSON.parse(
- testSubject({
- data: fixtureBlock.serialized,
- type: 'block',
- }),
- )
-
- expect(actual.data.version).toBe(fixtureBlock.data.version)
- expect(actual.data.timestamp).toBe(fixtureBlock.data.timestamp)
- expect(actual.data.height).toBe(fixtureBlock.data.height)
- expect(actual.data.previousBlock).toBe(fixtureBlock.data.previousBlock)
- expect(actual.data.numberOfTransactions).toBe(
- fixtureBlock.data.numberOfTransactions,
- )
- expect(actual.data.totalAmount).toBe(fixtureBlock.data.totalAmount)
- expect(actual.data.totalFee).toBe(fixtureBlock.data.totalFee)
- expect(actual.data.reward).toBe(fixtureBlock.data.reward)
- expect(actual.data.payloadLength).toBe(fixtureBlock.data.payloadLength)
- expect(actual.data.payloadHash).toBe(fixtureBlock.data.payloadHash)
- expect(actual.data.generatorPublicKey).toBe(
- fixtureBlock.data.generatorPublicKey,
- )
- expect(actual.data.blockSignature).toBe(fixtureBlock.data.blockSignature)
- })
-
- it('should deserialize a block (full)', () => {
- const actual = JSON.parse(
- testSubject({
- data: fixtureBlock.serializedFull,
- type: 'block',
- }),
- )
-
- expect(actual.data.version).toBe(fixtureBlock.data.version)
- expect(actual.data.timestamp).toBe(fixtureBlock.data.timestamp)
- expect(actual.data.height).toBe(fixtureBlock.data.height)
- expect(actual.data.previousBlock).toBe(fixtureBlock.data.previousBlock)
- expect(actual.data.numberOfTransactions).toBe(
- fixtureBlock.data.numberOfTransactions,
- )
- expect(actual.data.totalAmount).toBe(fixtureBlock.data.totalAmount)
- expect(actual.data.totalFee).toBe(fixtureBlock.data.totalFee)
- expect(actual.data.reward).toBe(fixtureBlock.data.reward)
- expect(actual.data.payloadLength).toBe(fixtureBlock.data.payloadLength)
- expect(actual.data.payloadHash).toBe(fixtureBlock.data.payloadHash)
- expect(actual.data.generatorPublicKey).toBe(
- fixtureBlock.data.generatorPublicKey,
- )
- expect(actual.data.blockSignature).toBe(fixtureBlock.data.blockSignature)
- expect(actual.transactions).toHaveLength(7)
- })
-
- it('should deserialize a transaction', () => {
- const actual = JSON.parse(
- testSubject({
- data: fixtureTransaction.serialized,
- type: 'transaction',
- }),
- )
-
- expect(actual.type).toBe(fixtureTransaction.data.type)
- expect(+actual.amount).toBe(fixtureTransaction.data.amount)
- expect(+actual.fee).toBe(fixtureTransaction.data.fee)
- expect(actual.recipientId).toBe(fixtureTransaction.data.recipientId)
- expect(actual.timestamp).toBe(fixtureTransaction.data.timestamp)
- expect(actual.senderPublicKey).toBe(fixtureTransaction.data.senderPublicKey)
- expect(actual.signature).toBe(fixtureTransaction.data.signature)
- expect(actual.id).toBe(fixtureTransaction.data.id)
- })
-})
diff --git a/packages/core-debugger-cli/__tests__/commands/deserialize.test.ts b/packages/core-debugger-cli/__tests__/commands/deserialize.test.ts
new file mode 100644
index 0000000000..a7b7fa572e
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/commands/deserialize.test.ts
@@ -0,0 +1,60 @@
+import "jest-extended";
+
+import { DeserializeCommand } from "../../src/commands/deserialize";
+
+describe("Commands - Deserialize", () => {
+ const fixtureBlock = require("../__fixtures__/block.json");
+ const fixtureTransaction = require("../__fixtures__/transaction.json");
+
+ it("should deserialize a block (not-full)", async () => {
+ const actual = JSON.parse(await DeserializeCommand.run(["--data", fixtureBlock.serialized, "--type", "block"]));
+
+ expect(actual.data.version).toBe(fixtureBlock.data.version);
+ expect(actual.data.timestamp).toBe(fixtureBlock.data.timestamp);
+ expect(actual.data.height).toBe(fixtureBlock.data.height);
+ expect(actual.data.previousBlock).toBe(fixtureBlock.data.previousBlock);
+ expect(actual.data.numberOfTransactions).toBe(fixtureBlock.data.numberOfTransactions);
+ expect(actual.data.totalAmount).toBe(fixtureBlock.data.totalAmount);
+ expect(actual.data.totalFee).toBe(fixtureBlock.data.totalFee);
+ expect(actual.data.reward).toBe(fixtureBlock.data.reward);
+ expect(actual.data.payloadLength).toBe(fixtureBlock.data.payloadLength);
+ expect(actual.data.payloadHash).toBe(fixtureBlock.data.payloadHash);
+ expect(actual.data.generatorPublicKey).toBe(fixtureBlock.data.generatorPublicKey);
+ expect(actual.data.blockSignature).toBe(fixtureBlock.data.blockSignature);
+ });
+
+ it("should deserialize a block (full)", async () => {
+ const actual = JSON.parse(
+ await DeserializeCommand.run(["--data", fixtureBlock.serializedFull, "--type", "block"]),
+ );
+
+ expect(actual.data.version).toBe(fixtureBlock.data.version);
+ expect(actual.data.timestamp).toBe(fixtureBlock.data.timestamp);
+ expect(actual.data.height).toBe(fixtureBlock.data.height);
+ expect(actual.data.previousBlock).toBe(fixtureBlock.data.previousBlock);
+ expect(actual.data.numberOfTransactions).toBe(fixtureBlock.data.numberOfTransactions);
+ expect(actual.data.totalAmount).toBe(fixtureBlock.data.totalAmount);
+ expect(actual.data.totalFee).toBe(fixtureBlock.data.totalFee);
+ expect(actual.data.reward).toBe(fixtureBlock.data.reward);
+ expect(actual.data.payloadLength).toBe(fixtureBlock.data.payloadLength);
+ expect(actual.data.payloadHash).toBe(fixtureBlock.data.payloadHash);
+ expect(actual.data.generatorPublicKey).toBe(fixtureBlock.data.generatorPublicKey);
+ expect(actual.data.blockSignature).toBe(fixtureBlock.data.blockSignature);
+ expect(actual.transactions).toHaveLength(7);
+ });
+
+ it("should deserialize a transaction", async () => {
+ const actual = JSON.parse(
+ await DeserializeCommand.run(["--data", fixtureTransaction.serialized, "--type", "transaction"]),
+ );
+
+ expect(actual.type).toBe(fixtureTransaction.data.type);
+ expect(+actual.amount).toBe(fixtureTransaction.data.amount);
+ expect(+actual.fee).toBe(fixtureTransaction.data.fee);
+ expect(actual.recipientId).toBe(fixtureTransaction.data.recipientId);
+ expect(actual.timestamp).toBe(fixtureTransaction.data.timestamp);
+ expect(actual.senderPublicKey).toBe(fixtureTransaction.data.senderPublicKey);
+ expect(actual.signature).toBe(fixtureTransaction.data.signature);
+ expect(actual.id).toBe(fixtureTransaction.data.id);
+ });
+});
diff --git a/packages/core-debugger-cli/__tests__/commands/identity.test.js b/packages/core-debugger-cli/__tests__/commands/identity.test.js
deleted file mode 100644
index 1852332ce6..0000000000
--- a/packages/core-debugger-cli/__tests__/commands/identity.test.js
+++ /dev/null
@@ -1,58 +0,0 @@
-const testSubject = require('../../lib/commands/identity')
-const fixtureIdentities = require('../__fixtures__/identities.json')
-
-describe('Commands - Identity', () => {
- it('should be a function', () => {
- expect(testSubject).toBeFunction()
- })
-
- it('should return identities from passphrase', () => {
- const expected = {
- passphrase: 'this is a top secret passphrase',
- publicKey:
- '034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192',
- privateKey:
- 'd8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712',
- address: 'D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib',
- }
-
- expect(
- testSubject({
- data: fixtureIdentities.passphrase,
- type: 'passphrase',
- }),
- ).toEqual(expected)
- })
-
- it('should return identities from privateKey', () => {
- const expected = {
- publicKey:
- '034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192',
- privateKey:
- 'd8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712',
- address: 'D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib',
- }
-
- expect(
- testSubject({
- data: fixtureIdentities.privateKey,
- type: 'privateKey',
- }),
- ).toEqual(expected)
- })
-
- it('should return identities from publicKey', () => {
- const expected = {
- publicKey:
- '034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192',
- address: 'D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib',
- }
-
- expect(
- testSubject({
- data: fixtureIdentities.publicKey,
- type: 'publicKey',
- }),
- ).toEqual(expected)
- })
-})
diff --git a/packages/core-debugger-cli/__tests__/commands/identity.test.ts b/packages/core-debugger-cli/__tests__/commands/identity.test.ts
new file mode 100644
index 0000000000..2fff72ab91
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/commands/identity.test.ts
@@ -0,0 +1,43 @@
+import "jest-extended";
+
+import { IdentityCommand } from "../../src/commands/identity";
+
+describe("Commands - Identity", async () => {
+ const fixtureIdentities = require("../__fixtures__/identities.json");
+
+ it("should return identities from passphrase", async () => {
+ const expected = {
+ passphrase: "this is a top secret passphrase",
+ publicKey: "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ privateKey: "d8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712",
+ address: "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
+ };
+
+ expect(await IdentityCommand.run(["--data", fixtureIdentities.passphrase, "--type", "passphrase"])).toEqual(
+ expected,
+ );
+ });
+
+ it("should return identities from privateKey", async () => {
+ const expected = {
+ publicKey: "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ privateKey: "d8839c2432bfd0a67ef10a804ba991eabba19f154a3d707917681d45822a5712",
+ address: "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
+ };
+
+ expect(await IdentityCommand.run(["--data", fixtureIdentities.privateKey, "--type", "privateKey"])).toEqual(
+ expected,
+ );
+ });
+
+ it("should return identities from publicKey", async () => {
+ const expected = {
+ publicKey: "034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192",
+ address: "D61mfSggzbvQgTUe6JhYKH2doHaqJ3Dyib",
+ };
+
+ expect(await IdentityCommand.run(["--data", fixtureIdentities.publicKey, "--type", "publicKey"])).toEqual(
+ expected,
+ );
+ });
+});
diff --git a/packages/core-debugger-cli/__tests__/commands/serialize.test.js b/packages/core-debugger-cli/__tests__/commands/serialize.test.js
deleted file mode 100644
index 74c12f82a1..0000000000
--- a/packages/core-debugger-cli/__tests__/commands/serialize.test.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const testSubject = require('../../lib/commands/serialize')
-const fixtureBlock = require('../__fixtures__/block.json')
-const fixtureTransaction = require('../__fixtures__/transaction.json')
-
-describe('Commands - Serialize', () => {
- it('should be a function', () => {
- expect(testSubject).toBeFunction()
- })
-
- it('should serialize a block (not-full)', () => {
- expect(
- testSubject({
- data: JSON.stringify(fixtureBlock.data),
- type: 'block',
- full: false,
- }),
- ).toEqual(fixtureBlock.serialized)
- })
-
- it('should serialize a block (full)', () => {
- expect(
- testSubject({
- data: JSON.stringify(fixtureBlock.data),
- type: 'block',
- full: true,
- }),
- ).toEqual(fixtureBlock.serializedFull)
- })
-
- it('should serialize a transaction', () => {
- expect(
- testSubject({
- data: JSON.stringify(fixtureTransaction.data),
- type: 'transaction',
- }),
- ).toEqual(fixtureTransaction.serialized)
- })
-})
diff --git a/packages/core-debugger-cli/__tests__/commands/serialize.test.ts b/packages/core-debugger-cli/__tests__/commands/serialize.test.ts
new file mode 100644
index 0000000000..a2ddb006bb
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/commands/serialize.test.ts
@@ -0,0 +1,26 @@
+import "jest-extended";
+
+import { SerializeCommand } from "../../src/commands/serialize";
+
+describe("Commands - Serialize", () => {
+ const fixtureBlock = require("../__fixtures__/block.json");
+ const fixtureTransaction = require("../__fixtures__/transaction.json");
+
+ it("should serialize a block (not-full)", async () => {
+ expect(await SerializeCommand.run(["--data", JSON.stringify(fixtureBlock.data), "--type", "block"])).toEqual(
+ fixtureBlock.serialized,
+ );
+ });
+
+ it("should serialize a block (full)", async () => {
+ expect(
+ await SerializeCommand.run(["--data", JSON.stringify(fixtureBlock.data), "--type", "block", "--full"]),
+ ).toEqual(fixtureBlock.serializedFull);
+ });
+
+ it("should serialize a transaction", async () => {
+ expect(
+ await SerializeCommand.run(["--data", JSON.stringify(fixtureTransaction.data), "--type", "transaction"]),
+ ).toEqual(fixtureTransaction.serialized);
+ });
+});
diff --git a/packages/core-debugger-cli/__tests__/commands/verify-second.test.js b/packages/core-debugger-cli/__tests__/commands/verify-second.test.js
deleted file mode 100644
index 57ed595876..0000000000
--- a/packages/core-debugger-cli/__tests__/commands/verify-second.test.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const testSubject = require('../../lib/commands/verify-second')
-const fixtureTransaction = require('../__fixtures__/transaction-second.json')
-
-describe('Commands - Verify Second', () => {
- it('should be a function', () => {
- expect(testSubject).toBeFunction()
- })
-
- it('should verify a second signature', () => {
- expect(
- testSubject({
- data: fixtureTransaction.serialized,
- publicKey:
- '03699e966b2525f9088a6941d8d94f7869964a000efe65783d78ac82e1199fe609',
- }),
- ).toBeTrue()
- })
-})
diff --git a/packages/core-debugger-cli/__tests__/commands/verify-second.test.ts b/packages/core-debugger-cli/__tests__/commands/verify-second.test.ts
new file mode 100644
index 0000000000..5cb6fb2b1a
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/commands/verify-second.test.ts
@@ -0,0 +1,18 @@
+import "jest-extended";
+
+import { VerifySecondSignatureCommand } from "../../src/commands/verify-second";
+
+describe("Commands - Verify Second", () => {
+ const fixtureTransaction = require("../__fixtures__/transaction-second.json");
+
+ it("should verify a second signature", async () => {
+ expect(
+ await VerifySecondSignatureCommand.run([
+ "--data",
+ fixtureTransaction.serialized,
+ "--publicKey",
+ "03699e966b2525f9088a6941d8d94f7869964a000efe65783d78ac82e1199fe609",
+ ]),
+ ).toBeTrue();
+ });
+});
diff --git a/packages/core-debugger-cli/__tests__/commands/verify.test.js b/packages/core-debugger-cli/__tests__/commands/verify.test.js
deleted file mode 100644
index 458933ebbc..0000000000
--- a/packages/core-debugger-cli/__tests__/commands/verify.test.js
+++ /dev/null
@@ -1,27 +0,0 @@
-const testSubject = require('../../lib/commands/verify')
-const fixtureBlock = require('../__fixtures__/block.json')
-const fixtureTransaction = require('../__fixtures__/transaction.json')
-
-describe('Commands - Verify', () => {
- it('should be a function', () => {
- expect(testSubject).toBeFunction()
- })
-
- it('should verify a block', () => {
- expect(
- testSubject({
- data: fixtureBlock.serializedFull,
- type: 'block',
- }),
- ).toBeTrue()
- })
-
- it('should verify a transaction', () => {
- expect(
- testSubject({
- data: fixtureTransaction.serialized,
- type: 'transaction',
- }),
- ).toBeTrue()
- })
-})
diff --git a/packages/core-debugger-cli/__tests__/commands/verify.test.ts b/packages/core-debugger-cli/__tests__/commands/verify.test.ts
new file mode 100644
index 0000000000..8827d94bc0
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/commands/verify.test.ts
@@ -0,0 +1,16 @@
+import "jest-extended";
+
+import { VerifyCommand } from "../../src/commands/verify";
+
+describe("Commands - Verify", () => {
+ const fixtureBlock = require("../__fixtures__/block.json");
+ const fixtureTransaction = require("../__fixtures__/transaction.json");
+
+ it("should verify a block", async () => {
+ expect(await VerifyCommand.run(["--data", fixtureBlock.serializedFull, "--type", "block"])).toBeTrue();
+ });
+
+ it("should verify a transaction", async () => {
+ expect(await VerifyCommand.run(["--data", fixtureTransaction.serialized, "--type", "transaction"])).toBeTrue();
+ });
+});
diff --git a/packages/core-debugger-cli/__tests__/utils.test.ts b/packages/core-debugger-cli/__tests__/utils.test.ts
new file mode 100644
index 0000000000..b07503a359
--- /dev/null
+++ b/packages/core-debugger-cli/__tests__/utils.test.ts
@@ -0,0 +1,36 @@
+import { readSync } from "clipboardy";
+import "jest-extended";
+
+import { copyToClipboard, handleOutput } from "../src/utils";
+
+const dummyData = { hello: "world" };
+
+describe("Utils", () => {
+ describe("copyToClipboard", () => {
+ it("should contain the copied data", () => {
+ copyToClipboard(dummyData);
+
+ expect(JSON.parse(readSync())).toEqual(dummyData);
+ });
+ });
+
+ describe("handleOutput", () => {
+ it("should copy the data", () => {
+ handleOutput({ copy: true }, dummyData);
+
+ expect(JSON.parse(readSync())).toEqual(dummyData);
+ });
+
+ it("should log the data", () => {
+ const method = jest.spyOn(global.console, "log");
+
+ handleOutput({ log: true }, dummyData);
+
+ expect(method).toHaveBeenCalledWith(dummyData);
+ });
+
+ it("should return the data", () => {
+ expect(handleOutput({}, dummyData)).toEqual(dummyData);
+ });
+ });
+});
diff --git a/packages/core-debugger-cli/bin/debugger b/packages/core-debugger-cli/bin/debugger
deleted file mode 100755
index b07d0c62d5..0000000000
--- a/packages/core-debugger-cli/bin/debugger
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env node
-
-const app = require('commander')
-
-app.version(require('../package.json').version)
-
-const registerCommand = (name, description) => {
- return app
- .command(name)
- .description(description)
- .option('-l, --log', 'log the data to the console')
- .option('-c, --copy', 'copy the data to the clipboard')
-}
-
-registerCommand('ser', 'serialize the given JSON')
- .option('-d, --data ', 'JSON blob to serialize')
- .option('-t, --type ', 'transaction or block', 'transaction')
- .action(options => require('../lib/commands/serialize')(options))
-
-registerCommand('des', 'deserialize the given HEX')
- .option('-d, --data ', 'the HEX blob to deserialize')
- .option('-t, --type ', 'transaction or block', 'transaction')
- .action(options => require('../lib/commands/deserialize')(options))
-
-registerCommand('verify', 'verify the given HEX')
- .option('-d, --data ', 'the HEX blob to deserialize and verify')
- .option('-t, --type ', 'transaction or block', 'transaction')
- .action(options => require('../lib/commands/verify')(options))
-
-registerCommand('verify-second', 'verify a second signature of a transaction')
- .option('-d, --data ', 'the transaction HEX blob to deserialize and verify')
- .option('-p, --publicKey ', 'the publicKey of the second signature in HEX')
- .action(options => require('../lib/commands/verify-second')(options))
-
-registerCommand('identity', 'get identities from the given input')
- .option('-d, --data ', 'the data to get the identities from')
- .option('-t, --type ', 'the input type is either of passphrase, privateKey or publicKey', 'passphrase')
- .option('-n, --network ', 'the network version used for calculating the address.')
- .action(options => require('../lib/commands/identity')(options))
-
-app
- .command('*')
- .action(env => {
- app.help()
- })
-
-app.parse(process.argv)
-
-if (app.args.length === 0) {
- app.help()
-}
diff --git a/packages/core-debugger-cli/bin/run b/packages/core-debugger-cli/bin/run
new file mode 100755
index 0000000000..30b14e1773
--- /dev/null
+++ b/packages/core-debugger-cli/bin/run
@@ -0,0 +1,5 @@
+#!/usr/bin/env node
+
+require('@oclif/command').run()
+.then(require('@oclif/command/flush'))
+.catch(require('@oclif/errors/handle'))
diff --git a/packages/core-debugger-cli/bin/run.cmd b/packages/core-debugger-cli/bin/run.cmd
new file mode 100644
index 0000000000..968fc30758
--- /dev/null
+++ b/packages/core-debugger-cli/bin/run.cmd
@@ -0,0 +1,3 @@
+@echo off
+
+node "%~dp0\run" %*
diff --git a/packages/core-debugger-cli/jest.config.js b/packages/core-debugger-cli/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-debugger-cli/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-debugger-cli/lib/commands/deserialize.js b/packages/core-debugger-cli/lib/commands/deserialize.js
deleted file mode 100644
index 5df757b104..0000000000
--- a/packages/core-debugger-cli/lib/commands/deserialize.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const {
- models: { Block, Transaction },
-} = require('@arkecosystem/crypto')
-const handleOutput = require('../utils/handle-output')
-
-module.exports = opts => {
- let deserialized
-
- if (opts.type === 'transaction') {
- deserialized = new Transaction(opts.data)
- } else {
- deserialized = new Block(opts.data)
- }
-
- return handleOutput(opts, JSON.stringify(deserialized, null, 4))
-}
diff --git a/packages/core-debugger-cli/lib/commands/identity.js b/packages/core-debugger-cli/lib/commands/identity.js
deleted file mode 100644
index 594243da9d..0000000000
--- a/packages/core-debugger-cli/lib/commands/identity.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const { crypto } = require('@arkecosystem/crypto')
-const handleOutput = require('../utils/handle-output')
-
-module.exports = opts => {
- let output
-
- if (opts.type === 'passphrase') {
- const keys = crypto.getKeys(opts.data)
- output = {
- passphrase: opts.data,
- publicKey: keys.publicKey,
- privateKey: keys.privateKey,
- address: crypto.getAddress(keys.publicKey, opts.network),
- }
- } else if (opts.type === 'privateKey') {
- const keys = crypto.getKeysByPrivateKey(opts.data)
- output = {
- publicKey: keys.publicKey,
- privateKey: keys.privateKey,
- address: crypto.getAddress(keys.publicKey, opts.network),
- }
- } else if (opts.type === 'publicKey') {
- output = {
- publicKey: opts.data,
- address: crypto.getAddress(opts.data, opts.network),
- }
- }
-
- return handleOutput(opts, output)
-}
diff --git a/packages/core-debugger-cli/lib/commands/serialize.js b/packages/core-debugger-cli/lib/commands/serialize.js
deleted file mode 100644
index 561c32a658..0000000000
--- a/packages/core-debugger-cli/lib/commands/serialize.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const {
- models: { Block, Transaction },
-} = require('@arkecosystem/crypto')
-const handleOutput = require('../utils/handle-output')
-
-module.exports = opts => {
- const serialized = opts.type === 'transaction'
- ? Transaction.serialize(JSON.parse(opts.data))
- : Block[opts.full ? 'serializeFull' : 'serialize'](JSON.parse(opts.data))
-
- return handleOutput(opts, serialized.toString('hex'))
-}
diff --git a/packages/core-debugger-cli/lib/commands/verify-second.js b/packages/core-debugger-cli/lib/commands/verify-second.js
deleted file mode 100644
index 8a312efd52..0000000000
--- a/packages/core-debugger-cli/lib/commands/verify-second.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const {
- crypto,
- models: { Transaction },
-} = require('@arkecosystem/crypto')
-const handleOutput = require('../utils/handle-output')
-
-module.exports = opts => {
- const transaction = new Transaction(opts.data)
- const publicKey = opts.publicKey
-
- const output = crypto.verifySecondSignature(transaction, publicKey)
- return handleOutput(opts, output)
-}
diff --git a/packages/core-debugger-cli/lib/commands/verify.js b/packages/core-debugger-cli/lib/commands/verify.js
deleted file mode 100644
index 33efadd177..0000000000
--- a/packages/core-debugger-cli/lib/commands/verify.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const {
- models: { Block, Transaction },
-} = require('@arkecosystem/crypto')
-const handleOutput = require('../utils/handle-output')
-
-module.exports = opts => {
- const deserialized = opts.type === 'transaction'
- ? new Transaction(opts.data)
- : new Block(Block.deserialize(opts.data))
-
- const output = opts.type === 'transaction'
- ? deserialized.verify()
- : deserialized.verify().verified
-
- return handleOutput(opts, output)
-}
diff --git a/packages/core-debugger-cli/lib/utils/copy-to-clipboard.js b/packages/core-debugger-cli/lib/utils/copy-to-clipboard.js
deleted file mode 100644
index 68720fb77f..0000000000
--- a/packages/core-debugger-cli/lib/utils/copy-to-clipboard.js
+++ /dev/null
@@ -1,3 +0,0 @@
-const clipboardy = require('clipboardy')
-
-module.exports = data => clipboardy.writeSync(JSON.stringify(data))
diff --git a/packages/core-debugger-cli/lib/utils/handle-output.js b/packages/core-debugger-cli/lib/utils/handle-output.js
deleted file mode 100644
index 32387f9eab..0000000000
--- a/packages/core-debugger-cli/lib/utils/handle-output.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const copyToClipboard = require('../utils/copy-to-clipboard')
-
-module.exports = (opts, data) => {
- if (opts.copy) {
- return copyToClipboard(data)
- }
-
- if (opts.log) {
- return console.info(data)
- }
-
- return data
-}
diff --git a/packages/core-debugger-cli/package.json b/packages/core-debugger-cli/package.json
index 204ea13233..b7b6ba0c29 100644
--- a/packages/core-debugger-cli/package.json
+++ b/packages/core-debugger-cli/package.json
@@ -1,33 +1,63 @@
{
- "name": "@arkecosystem/core-debugger-cli",
- "description": "Debugger CLI for Ark Core",
- "version": "0.2.0",
- "contributors": [
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "bin": {
- "ark:debugger": "./bin/debugger"
- },
- "scripts": {
- "start": "./bin/debugger",
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/crypto": "~0.2",
- "clipboardy": "^1.2.3",
- "commander": "^2.19.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-debugger-cli",
+ "description": "Debugger CLI for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index.js",
+ "files": [
+ "/bin",
+ "/dist",
+ "/oclif.manifest.json"
+ ],
+ "bin": {
+ "debugger": "./bin/run"
+ },
+ "scripts": {
+ "debugger": "./bin/run",
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "yarn lint && yarn build",
+ "prepack": "oclif-dev manifest && npm shrinkwrap",
+ "postpack": "rm -f oclif.manifest.json",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "test": "cross-env CORE_ENV=test jest --runInBand --forceExit",
+ "test:coverage": "cross-env CORE_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.ts|index.ts)$' --runInBand --forceExit",
+ "test:debug": "cross-env CORE_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
+ "test:watch": "cross-env CORE_ENV=test jest --runInBand --watch",
+ "test:watch:all": "cross-env CORE_ENV=test jest --runInBand --watchAll",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/crypto": "^2.1.0",
+ "@oclif/command": "^1.5.8",
+ "@oclif/config": "^1.12.4",
+ "@oclif/plugin-help": "^2.1.6",
+ "@oclif/plugin-not-found": "^1.2.2",
+ "@types/clipboardy": "^1.1.0",
+ "clipboardy": "^1.2.3"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ },
+ "oclif": {
+ "commands": "./dist/commands",
+ "bin": "debugger",
+ "plugins": [
+ "@oclif/plugin-help",
+ "@oclif/plugin-not-found"
+ ]
+ }
}
diff --git a/packages/core-debugger-cli/src/commands/command.ts b/packages/core-debugger-cli/src/commands/command.ts
new file mode 100644
index 0000000000..4ab327459a
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/command.ts
@@ -0,0 +1,12 @@
+import Command, { flags } from "@oclif/command";
+
+export abstract class BaseCommand extends Command {
+ public static flags = {
+ log: flags.string({
+ description: "log the data to the console",
+ }),
+ copy: flags.string({
+ description: "copy the data to the clipboard",
+ }),
+ };
+}
diff --git a/packages/core-debugger-cli/src/commands/deserialize.ts b/packages/core-debugger-cli/src/commands/deserialize.ts
new file mode 100644
index 0000000000..553fe65abb
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/deserialize.ts
@@ -0,0 +1,31 @@
+import { models } from "@arkecosystem/crypto";
+import { flags } from "@oclif/command";
+import { handleOutput } from "../utils";
+import { BaseCommand } from "./command";
+
+export class DeserializeCommand extends BaseCommand {
+ public static description: string = "Deserialize the given HEX";
+
+ public static flags = {
+ ...BaseCommand.flags,
+ data: flags.string({
+ description: "the HEX blob to deserialize",
+ required: true,
+ }),
+ type: flags.string({
+ description: "transaction or block",
+ required: true,
+ default: "transaction",
+ }),
+ };
+
+ public async run(): Promise<any> {
+ // tslint:disable-next-line:no-shadowed-variable
+ const { flags } = this.parse(DeserializeCommand);
+
+ const deserialized =
+ flags.type === "transaction" ? new models.Transaction(flags.data) : new models.Block(flags.data);
+
+ return handleOutput(flags, JSON.stringify(deserialized, null, 4));
+ }
+}
diff --git a/packages/core-debugger-cli/src/commands/identity.ts b/packages/core-debugger-cli/src/commands/identity.ts
new file mode 100644
index 0000000000..0ed91c6ba3
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/identity.ts
@@ -0,0 +1,56 @@
+import { crypto } from "@arkecosystem/crypto";
+import { flags } from "@oclif/command";
+import { handleOutput } from "../utils";
+import { BaseCommand } from "./command";
+
+export class IdentityCommand extends BaseCommand {
+ public static description: string = "Get identities from the given input";
+
+ public static flags = {
+ ...BaseCommand.flags,
+ data: flags.string({
+ description: "the data to get the identities from",
+ required: true,
+ }),
+ network: flags.integer({
+ description: "the network version used for calculating the address.",
+ required: true,
+ default: 30,
+ }),
+ type: flags.string({
+ description: "the input type is either of passphrase, privateKey or publicKey",
+ required: true,
+ }),
+ };
+
+ public async run(): Promise<any> {
+ // tslint:disable-next-line:no-shadowed-variable
+ const { flags } = this.parse(IdentityCommand);
+
+ let output;
+
+ if (flags.type === "passphrase") {
+ const keys = crypto.getKeys(flags.data);
+ output = {
+ passphrase: flags.data,
+ publicKey: keys.publicKey,
+ privateKey: keys.privateKey,
+ address: crypto.getAddress(keys.publicKey, flags.network),
+ };
+ } else if (flags.type === "privateKey") {
+ const keys = crypto.getKeysByPrivateKey(flags.data);
+ output = {
+ publicKey: keys.publicKey,
+ privateKey: keys.privateKey,
+ address: crypto.getAddress(keys.publicKey, flags.network),
+ };
+ } else if (flags.type === "publicKey") {
+ output = {
+ publicKey: flags.data,
+ address: crypto.getAddress(flags.data, flags.network),
+ };
+ }
+
+ return handleOutput(flags, output);
+ }
+}
diff --git a/packages/core-debugger-cli/src/commands/serialize.ts b/packages/core-debugger-cli/src/commands/serialize.ts
new file mode 100644
index 0000000000..61c06590b6
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/serialize.ts
@@ -0,0 +1,36 @@
+import { models } from "@arkecosystem/crypto";
+import { flags } from "@oclif/command";
+import { handleOutput } from "../utils";
+import { BaseCommand } from "./command";
+
+export class SerializeCommand extends BaseCommand {
+ public static description: string = "Serialize the given JSON";
+
+ public static flags = {
+ ...BaseCommand.flags,
+ data: flags.string({
+ description: "the JSON blob to serialize",
+ required: true,
+ }),
+ type: flags.string({
+ description: "transaction or block",
+ required: true,
+ }),
+ full: flags.boolean({
+ description: "serialize a full block with transactions",
+ required: false,
+ }),
+ };
+
+ public async run(): Promise<any> {
+ // tslint:disable-next-line:no-shadowed-variable
+ const { flags } = this.parse(SerializeCommand);
+
+ const serialized: any =
+ flags.type === "transaction"
+ ? models.Transaction.serialize(JSON.parse(flags.data))
+ : models.Block[flags.full ? "serializeFull" : "serialize"](JSON.parse(flags.data));
+
+ return handleOutput(flags, serialized.toString("hex"));
+ }
+}
diff --git a/packages/core-debugger-cli/src/commands/verify-second.ts b/packages/core-debugger-cli/src/commands/verify-second.ts
new file mode 100644
index 0000000000..aeed1cadb1
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/verify-second.ts
@@ -0,0 +1,29 @@
+import { crypto, models } from "@arkecosystem/crypto";
+import { flags } from "@oclif/command";
+import { handleOutput } from "../utils";
+import { BaseCommand } from "./command";
+
+export class VerifySecondSignatureCommand extends BaseCommand {
+ public static description: string = "Verify a second signature of a transaction";
+
+ public static flags = {
+ ...BaseCommand.flags,
+ data: flags.string({
+ description: "the HEX blob to deserialize and verify",
+ required: true,
+ }),
+ publicKey: flags.string({
+ description: "the publicKey of the second signature in HEX",
+ required: true,
+ }),
+ };
+
+ public async run(): Promise<any> {
+ // tslint:disable-next-line:no-shadowed-variable
+ const { flags } = this.parse(VerifySecondSignatureCommand);
+
+ const transaction = new models.Transaction(flags.data);
+
+ return handleOutput(flags, crypto.verifySecondSignature(transaction, flags.publicKey));
+ }
+}
diff --git a/packages/core-debugger-cli/src/commands/verify.ts b/packages/core-debugger-cli/src/commands/verify.ts
new file mode 100644
index 0000000000..37deb12c65
--- /dev/null
+++ b/packages/core-debugger-cli/src/commands/verify.ts
@@ -0,0 +1,35 @@
+import { models } from "@arkecosystem/crypto";
+import { flags } from "@oclif/command";
+import { handleOutput } from "../utils";
+import { BaseCommand } from "./command";
+
+export class VerifyCommand extends BaseCommand {
+ public static description: string = "Verify the given HEX";
+
+ public static flags = {
+ ...BaseCommand.flags,
+ data: flags.string({
+ description: "the HEX blob to deserialize and verify",
+ required: true,
+ }),
+ type: flags.string({
+ description: "transaction or block",
+ required: true,
+ }),
+ };
+
+ public async run(): Promise<any> {
+ // tslint:disable-next-line:no-shadowed-variable
+ const { flags } = this.parse(VerifyCommand);
+
+ const deserialized =
+ flags.type === "transaction"
+ ? new models.Transaction(flags.data)
+ : new models.Block(models.Block.deserialize(flags.data));
+
+ const output =
+ deserialized instanceof models.Transaction ? deserialized.verify() : deserialized.verification.verified;
+
+ return handleOutput(flags, output);
+ }
+}
diff --git a/packages/core-debugger-cli/src/index.ts b/packages/core-debugger-cli/src/index.ts
new file mode 100644
index 0000000000..8bdb76f9a0
--- /dev/null
+++ b/packages/core-debugger-cli/src/index.ts
@@ -0,0 +1 @@
+export { run } from "@oclif/command";
diff --git a/packages/core-debugger-cli/src/utils.ts b/packages/core-debugger-cli/src/utils.ts
new file mode 100644
index 0000000000..2fbd8ef2ac
--- /dev/null
+++ b/packages/core-debugger-cli/src/utils.ts
@@ -0,0 +1,18 @@
+import clipboardy from "clipboardy";
+
+export function copyToClipboard(data) {
+ clipboardy.writeSync(JSON.stringify(data));
+}
+
+export function handleOutput(opts, data) {
+ if (opts.copy) {
+ return copyToClipboard(data);
+ }
+
+ if (opts.log) {
+ // tslint:disable-next-line:no-console
+ return console.log(data);
+ }
+
+ return data;
+}
diff --git a/packages/core-debugger-cli/tsconfig.json b/packages/core-debugger-cli/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-debugger-cli/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-deployer/.gitattributes b/packages/core-deployer/.gitattributes
deleted file mode 100644
index 60cc52db63..0000000000
--- a/packages/core-deployer/.gitattributes
+++ /dev/null
@@ -1,11 +0,0 @@
-# Path-based git attributes
-# https://www.kernel.org/pub/software/scm/git/docs/gitattributes.html
-
-# Ignore all test and documentation with "export-ignore".
-/.editorconfig export-ignore
-/.gitattributes export-ignore
-/.gitignore export-ignore
-/.travis.yml export-ignore
-/__tests__ export-ignore
-/docs export-ignore
-/README.md export-ignore
diff --git a/packages/core-deployer/CHANGELOG.md b/packages/core-deployer/CHANGELOG.md
deleted file mode 100644
index 7d7b5529b2..0000000000
--- a/packages/core-deployer/CHANGELOG.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.2.0 - 2018-12-03
-
-### Changed
-
-- Handle numbers as `BigNumber` instances
-- Updated `@arkecosystem/crypto` to `0.2.0`
-- Dropped node.js 9 as minimum requirement in favour of node.js 10
-
-## 0.1.1 - 2018-07-26
-
-### Added
-
-- initial release
diff --git a/packages/core-deployer/LICENSE b/packages/core-deployer/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-deployer/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-deployer/README.md b/packages/core-deployer/README.md
deleted file mode 100644
index afa712cd5c..0000000000
--- a/packages/core-deployer/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Ark Core - Deployer
-
-
-
-
-
-## Documentation
-
-You can find installation instructions and detailed instructions on how to use this package at the [dedicated documentation site](https://docs.ark.io/guidebook/core/plugins/core-deployer.html).
-
-## Security
-
-If you discover a security vulnerability within this package, please send an e-mail to security@ark.io. All security vulnerabilities will be promptly addressed.
-
-## Credits
-
-- [Brian Faust](https://github.com/faustbrian)
-- [Alex Barnsley](https://github.com/alexbarnsley)
-- [All Contributors](../../../../contributors)
-
-## License
-
-[MIT](LICENSE) © [ArkEcosystem](https://ark.io)
diff --git a/packages/core-deployer/__tests__/builder/genesis-block.test.js b/packages/core-deployer/__tests__/builder/genesis-block.test.js
deleted file mode 100644
index 7357a779b7..0000000000
--- a/packages/core-deployer/__tests__/builder/genesis-block.test.js
+++ /dev/null
@@ -1,274 +0,0 @@
-const GenesisBlockBuilder = require('../../lib/builder/genesis-block')
-const network = require('../../../crypto/lib/networks/ark/testnet')
-
-let builder
-let genesis
-let wallet
-let delegateWallet
-let delegateWallets
-
-beforeEach(() => {
- builder = new GenesisBlockBuilder(network, {
- totalPremine: 2100000000000000,
- activeDelegates: 2,
- })
-
- delegateWallets = builder.__buildDelegates()
-})
-
-describe('Genesis Block Builder', () => {
- it('should be an object', () => {
- expect(builder).toBeInstanceOf(GenesisBlockBuilder)
- })
-
- describe('generate', () => {
- it('should be a function', () => {
- expect(builder.generate).toBeFunction()
- })
-
- it('should return a genesis object', () => {
- genesis = builder.generate()
-
- expect(genesis).toContainAllKeys([
- 'genesisBlock',
- 'genesisWallet',
- 'delegatePassphrases',
- ])
- })
-
- it('should call the expected methods', () => {
- builder.__createWallet = jest.fn(builder.__createWallet)
- builder.__buildDelegates = jest.fn(builder.__buildDelegates)
- builder.__buildDelegateTransactions = jest.fn(
- builder.__buildDelegateTransactions,
- )
- builder.__createTransferTransaction = jest.fn(
- builder.__createTransferTransaction,
- )
- builder.__createGenesisBlock = jest.fn(builder.__createGenesisBlock)
-
- builder.generate()
-
- expect(builder.__createWallet).toHaveBeenCalledTimes(4)
- expect(builder.__buildDelegates).toHaveBeenCalledTimes(1)
- expect(builder.__buildDelegateTransactions).toHaveBeenCalledTimes(1)
- expect(builder.__createTransferTransaction).toHaveBeenCalledTimes(1)
- expect(builder.__createGenesisBlock).toHaveBeenCalledTimes(1)
- })
- })
-
- describe('__createWallet', () => {
- it('should be a function', () => {
- expect(builder.__createWallet).toBeFunction()
- })
-
- it('should return an object', () => {
- wallet = builder.__createWallet()
-
- expect(wallet).toBeObject()
- })
-
- it('should return a wallet object', () => {
- expect(wallet).toContainAllKeys(['address', 'keys', 'passphrase'])
- })
-
- it('should have a valid address', () => {
- expect(wallet.address).toEqual(expect.stringMatching(/^A/))
- })
- })
-
- describe('__createDelegateWallet', () => {
- it('should be a function', () => {
- expect(builder.__createDelegateWallet).toBeFunction()
- })
-
- it('should return an object', () => {
- delegateWallet = builder.__createDelegateWallet('testing')
-
- expect(delegateWallet).toBeObject()
- })
-
- it('should return a delegate wallet object', () => {
- expect(delegateWallet).toContainAllKeys([
- 'address',
- 'keys',
- 'passphrase',
- 'username',
- ])
- })
-
- it('should have a valid address', () => {
- expect(delegateWallet.address).toEqual(expect.stringMatching(/^A/))
- })
-
- it('should have a valid username', () => {
- expect(delegateWallet.username).toEqual(
- expect.stringMatching(/^[a-z0-9!@$&_.]+$/),
- )
- })
-
- it('should call the expected methods', () => {
- builder.__createWallet = jest.fn(builder.__createWallet)
-
- builder.__createDelegateWallet('testing')
-
- expect(builder.__createWallet).toHaveBeenCalledTimes(1)
- })
- })
-
- describe('__buildDelegates', () => {
- it('should be a function', () => {
- expect(builder.__buildDelegates).toBeFunction()
- })
-
- it('should return an array of 2', () => {
- expect(delegateWallets).toBeArrayOfSize(2)
- })
-
- it('should call the expected methods', () => {
- builder.__createDelegateWallet = jest.fn(builder.__createDelegateWallet)
-
- builder.__buildDelegates('testing')
-
- expect(builder.__createDelegateWallet).toHaveBeenCalledTimes(2)
- })
- })
-
- describe('__buildDelegateTransactions', () => {
- it('should be a function', () => {
- expect(builder.__buildDelegateTransactions).toBeFunction()
- })
-
- it('should return an array of 2', () => {
- const delegateTransactions = builder.__buildDelegateTransactions(
- delegateWallets,
- )
-
- expect(delegateTransactions).toBeArrayOfSize(2)
- })
-
- it('should call the expected methods', () => {
- builder.__createDelegateTransaction = jest.fn(
- builder.__createDelegateTransaction,
- )
-
- builder.__buildDelegateTransactions(delegateWallets)
-
- expect(builder.__createDelegateTransaction).toHaveBeenCalledTimes(2)
- })
- })
-
- describe('__createTransferTransaction', () => {
- it('should be a function', () => {
- expect(builder.__createTransferTransaction).toBeFunction()
- })
-
- it('should return a transaction object', () => {
- const transferTransaction = builder.__createTransferTransaction(
- delegateWallet,
- wallet,
- 10,
- )
-
- expect(transferTransaction).toContainEntries([
- ['type', 0],
- ['amount', 10],
- ['fee', 0],
- ['recipientId', wallet.address],
- ])
- })
-
- it('should call the expected methods', () => {
- builder.__formatGenesisTransaction = jest.fn(
- builder.__formatGenesisTransaction,
- )
-
- builder.__createTransferTransaction(delegateWallet, wallet, 10)
-
- expect(builder.__formatGenesisTransaction).toHaveBeenCalledTimes(1)
- })
- })
-
- describe('__createDelegateTransaction', () => {
- it('should be a function', () => {
- expect(builder.__createDelegateTransaction).toBeFunction()
- })
-
- it('should return a transaction object', () => {
- const delegateTransaction = builder.__createDelegateTransaction(
- delegateWallet,
- )
-
- expect(delegateTransaction).toContainEntries([
- ['type', 2],
- ['amount', 0],
- ['fee', 0],
- ['senderId', delegateWallet.address],
- ])
-
- expect(delegateTransaction.asset.delegate).toHaveProperty(
- 'username',
- delegateWallet.username,
- )
- expect(delegateTransaction.asset.delegate).toHaveProperty(
- 'publicKey',
- delegateWallet.keys.publicKey,
- )
- })
-
- it('should call the expected methods', () => {
- builder.__formatGenesisTransaction = jest.fn(
- builder.__formatGenesisTransaction,
- )
-
- builder.__createDelegateTransaction(delegateWallet)
-
- expect(builder.__formatGenesisTransaction).toHaveBeenCalledTimes(1)
- })
- })
-
- describe('__createGenesisBlock', () => {
- it('should be a function', () => {
- expect(builder.__createGenesisBlock).toBeFunction()
- })
-
- it('should match the expected struct', () => {
- const genesisBlock = builder.__createGenesisBlock({
- keys: wallet.keys,
- transactions: [],
- timestamp: 0,
- })
-
- expect(genesisBlock).toContainAllKeys([
- 'id',
- 'blockSignature',
- 'version',
- 'totalAmount',
- 'totalFee',
- 'reward',
- 'payloadHash',
- 'timestamp',
- 'numberOfTransactions',
- 'payloadLength',
- 'previousBlock',
- 'generatorPublicKey',
- 'transactions',
- 'height',
- ])
- })
-
- it('should call the expected methods', () => {
- builder.__getBlockId = jest.fn()
- builder.__signBlock = jest.fn()
-
- builder.__createGenesisBlock({
- keys: wallet.keys,
- transactions: [],
- timestamp: 0,
- })
-
- expect(builder.__getBlockId).toHaveBeenCalledTimes(1)
- expect(builder.__signBlock).toHaveBeenCalledTimes(1)
- })
- })
-})
diff --git a/packages/core-deployer/bin/deployer b/packages/core-deployer/bin/deployer
deleted file mode 100755
index ccee9e2f02..0000000000
--- a/packages/core-deployer/bin/deployer
+++ /dev/null
@@ -1,312 +0,0 @@
-#!/usr/bin/env node
-
-'use strict'
-
-const commander = require('commander')
-const Joi = require('joi')
-const fs = require('fs-extra')
-const os = require('os')
-const path = require('path')
-const {
- getRandomNumber,
- logger,
- updateConfig,
- writeEnv,
-} = require('../lib/utils')
-const GenesisBlockBuilder = require('../lib/builder/genesis-block')
-
-process.env.ARK_PATH_CONFIG = path.resolve(os.homedir(), '.ark')
-
-commander
- .version(require('../package.json').version)
- .option('--network ', 'Network to initially copy', 'mainnet')
- .option('--name ', 'Name', 'Bridgechain')
- .option('--nodeIp ', 'IP for node', '0.0.0.0')
- .option('--p2pPort ', 'P2P API Port', 4102)
- .option('--apiPort ', 'Public P2P Port', 4103)
- .option('--dbHost ', 'Database host', 'localhost')
- .option('--dbPort ', 'Database port', 5432)
- .option('--dbUsername ', 'Database username', 'node')
- .option('--dbPassword ', 'Database password', 'password')
- .option(
- '--dbDatabase ',
- 'Database name',
- `ark_${commander.name.toLowerCase()}`,
- )
- .option(
- '--explorerUrl ',
- 'URL to link to explorer',
- 'http://localhost:4200',
- )
- .option(
- '--activeDelegates ',
- 'How many forgers for the network [51]',
- 51,
- )
- .option('--feeTransfer ', 'Fee for sending Transaction', 10000000)
- .option('--feeVote ', 'Fee for Vote Transaction', 100000000)
- .option(
- '--feeSecondSignature ',
- 'Fee for Second Passphrase Transaction',
- 500000000,
- )
- .option(
- '--feeDelegateRegistration ',
- 'Fee for Register Delegate Transaction',
- 2500000000,
- )
- .option(
- '--feeMultiSignature ',
- 'Fee for Multisignature Transaction',
- 500000000,
- )
- .option(
- '--epoch ',
- 'Set Epoch based on time the chain was created',
- '2017-02-21T13:00:00.000Z',
- )
- .option(
- '--rewardHeight ',
- 'Block Height when Forgers receive Rewards [1]',
- 1,
- )
- .option(
- '--rewardPerBlock ',
- 'How many Rewarded Tokens per Forged Block [200000000 (2)]',
- 200000000,
- )
- .option('--blocktime ', 'Time per block (seconds) [8]', 8)
- .option('--token ', 'Token Name [CHAIN]', 'CHAIN')
- .option('--symbol ', 'Symbol for Token [C]', 'C')
- .option('--prefixHash ', 'Address Prefix Hash [28]', 28)
- .option(
- '--transactionsPerBlock ',
- 'Max Transaction count per Block [50]',
- 50,
- )
- .option(
- '--wifPrefix ',
- 'Prefix for generating a WIF [rand(1, 255)]',
- getRandomNumber(1, 255),
- )
- .option(
- '--totalPremine ',
- 'How many tokens initially added to genesis account [2100000000000000 (21 million)]',
- 2100000000000000,
- )
- // .option('--max-tokens-per-account', 'Max amount of tokens per account [12500000000000000 (125 million)]')
- .option(
- '--overwriteConfig',
- 'Overwrite current deployer config files [off]',
- false,
- )
- .option(
- '--configPath ',
- 'Deployer config path destination [~/.ark/deployer]',
- `${process.env.ARK_PATH_CONFIG}/deployer`,
- )
- .parse(process.argv)
-
-const { error, value } = Joi.validate(commander, require('../lib/schema.js'), {
- allowUnknown: true,
- convert: true,
-})
-const options = value
-
-if (error) {
- error.details.forEach(detail => logger.error(detail.message))
- process.exit(1)
-}
-
-if (fs.existsSync(options.configPath)) {
- if (options.overwriteConfig) {
- fs.removeSync(options.configPath)
- } else {
- logger.error(
- `Deployer config already exists in '${
- options.configPath
- }' - to overwrite, use the '--overwriteConfig' flag`,
- )
- process.exit(1)
- }
-}
-fs.ensureDirSync(options.configPath)
-fs.copySync(
- path.resolve(__dirname, `../../core/lib/config/${options.network}`),
- options.configPath,
-)
-const networkPath = path.resolve(
- __dirname,
- `../../crypto/lib/networks/ark/${options.network}.json`,
-)
-if (!fs.existsSync(networkPath)) {
- logger.error(`Network '${options.network}' does not exist`)
- process.exit(1)
-}
-fs.copySync(networkPath, path.resolve(options.configPath, 'network.json'))
-
-let networkConfig = {
- name: options.name.toLowerCase(),
- messagePrefix: `${options.token} message:\n`,
- pubKeyHash: options.prefixHash,
- wif: options.wifPrefix,
- constants: [
- {
- blocktime: options.blocktime,
- block: {
- version: 0,
- maxTransactions: options.transactionsPerBlock,
- maxPayload: 2097152,
- },
- epoch: options.epoch,
- activeDelegates: options.activeDelegates,
- fees: {
- dynamic: false,
- dynamicFees: {
- minFeePool: 1000,
- minFeeBroadcast: 1000,
- addonBytes: {
- transfer: 100,
- secondSignature: 250,
- delegateRegistration: 500,
- vote: 100,
- multiSignature: 500,
- ipfs: 250,
- timelockTransfer: 500,
- multiPayment: 500,
- delegateResignation: 500,
- },
- },
- staticFees: {
- transfer: options.feeTransfer,
- secondSignature: options.feeVote,
- delegateRegistration: options.feeSecondSignature,
- vote: options.feeDelegateRegistration,
- multiSignature: options.feeMultiSignature,
- ipfs: 0,
- timelockTransfer: 0,
- multiPayment: 0,
- delegateResignation: 0,
- },
- },
- },
- ],
- client: {
- token: options.token,
- symbol: options.symbol,
- explorer: options.explorerUrl,
- },
- exceptions: {},
-}
-
-const network = updateConfig('network.json', networkConfig, options.configPath)
-
-const genesis = new GenesisBlockBuilder(network, options).generate()
-
-network.nethash = genesis.genesisBlock.payloadHash
-
-if (options.rewardHeight === 1) {
- network.constants = [network.constants[0]]
- network.constants[0].height = options.rewardHeight
- network.constants[0].reward = options.rewardPerBlock
-} else {
- network.constants[1].height = options.rewardHeight
- network.constants[1].reward = options.rewardPerBlock
-}
-
-const requestNodeIp =
- options.nodeIp === '0.0.0.0' ? '127.0.0.1' : options.nodeIp
-
-updateConfig('network.json', networkConfig, options.configPath)
-updateConfig(
- 'peers.json',
- {
- minimumVersion: '>=1.3.3',
- minimumNetworkReach: 1,
- list: [
- {
- ip: requestNodeIp,
- port: options.p2pPort,
- },
- ],
- sources: [],
- },
- options.configPath,
-)
-
-updateConfig(
- 'genesisWallet.json',
- {
- address: genesis.genesisWallet.address,
- passphrase: genesis.genesisWallet.passphrase,
- },
- options.configPath,
- true,
-)
-updateConfig(
- 'genesisBlock.json',
- genesis.genesisBlock,
- options.configPath,
- true,
-)
-updateConfig(
- 'delegates.json',
- {
- secrets: genesis.delegatePassphrases,
- },
- options.configPath,
- true,
-)
-
-updateConfig(
- 'server.json',
- {
- port: options.p2pPort,
- fastRebuild: false,
- },
- options.configPath,
-)
-
-updateConfig('api/public.json', { port: options.apiPort }, options.configPath)
-
-const pluginsOriginal = require(path.resolve(options.configPath, 'plugins'))
-const plugins = {}
-for (let plugin in pluginsOriginal) {
- plugins[plugin] = {}
-}
-plugins['@arkecosystem/core-p2p'] = {
- host: options.nodeIp,
- port: options.p2pPort,
- whitelist: ['127.0.0.1', '192.168.*'],
-}
-plugins['@arkecosystem/core-api'] = {
- enabled: true,
- host: options.nodeIp,
- port: options.apiPort,
- whitelist: ['*'],
-}
-plugins['@arkecosystem/core-database-postgres'] = {
- host: options.dbHost,
- port: options.dbPort,
- username: options.dbUsername,
- password: options.dbPassword,
- database: options.dbDatabase,
-}
-plugins['@arkecosystem/core-blockchain'] = {
- fastRebuild: false,
-}
-plugins['@arkecosystem/core-forger'] = {
- hosts: [`http://${requestNodeIp}:${options.p2pPort}`],
-}
-
-updateConfig('plugins.json', plugins, options.configPath, true)
-fs.removeSync(path.resolve(options.configPath, 'plugins.js'))
-
-writeEnv(
- {
- P2P_PORT: options.p2pPort,
- API_PORT: options.apiPort,
- DB_PORT: options.dbPort,
- },
- '~/.ark/.env',
-)
diff --git a/packages/core-deployer/jest.config.js b/packages/core-deployer/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-deployer/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-deployer/lib/builder/genesis-block.js b/packages/core-deployer/lib/builder/genesis-block.js
deleted file mode 100644
index 1f6d8d0c5e..0000000000
--- a/packages/core-deployer/lib/builder/genesis-block.js
+++ /dev/null
@@ -1,303 +0,0 @@
-const { Bignum, client, crypto } = require('@arkecosystem/crypto')
-const bip39 = require('bip39')
-const ByteBuffer = require('bytebuffer')
-const { createHash } = require('crypto')
-
-module.exports = class GenesisBlockBuilder {
- /**
- * Create a new Genesis Block builder instance.
- * @param {Object} options
- * @return {void}
- */
- constructor(network, options) {
- this.network = network
- this.prefixHash = network.pubKeyHash
- this.totalPremine = options.totalPremine
- this.activeDelegates = options.activeDelegates
- }
-
- /**
- * Generate a Genesis Block.
- * @return {Object}
- */
- generate() {
- const genesisWallet = this.__createWallet()
- const premineWallet = this.__createWallet()
- const delegates = this.__buildDelegates()
- const transactions = [
- ...this.__buildDelegateTransactions(delegates),
- this.__createTransferTransaction(
- premineWallet,
- genesisWallet,
- this.totalPremine,
- ),
- ]
- const genesisBlock = this.__createGenesisBlock({
- keys: genesisWallet.keys,
- transactions,
- timestamp: 0,
- })
-
- return {
- genesisWallet,
- genesisBlock,
- delegatePassphrases: delegates.map(wallet => wallet.passphrase),
- }
- }
-
- /**
- * Generate a new random wallet.
- * @return {Object}
- */
- __createWallet() {
- const passphrase = bip39.generateMnemonic()
- const keys = crypto.getKeys(passphrase)
-
- return {
- address: crypto.getAddress(keys.publicKey, this.prefixHash),
- passphrase,
- keys,
- }
- }
-
- /**
- * Generate a random wallet and assign it a delegate username.
- * @param {String} username
- * @return {Object}
- */
- __createDelegateWallet(username) {
- const wallet = this.__createWallet()
- wallet.username = username
-
- return wallet
- }
-
- /**
- * Generate a collection of delegate wallets.
- * @return {Object[]}
- */
- __buildDelegates() {
- const wallets = []
- for (let i = 0; i < this.activeDelegates; i++) {
- wallets.push(this.__createDelegateWallet(`genesis_${i + 1}`))
- }
-
- return wallets
- }
-
- /**
- * Generate a collection of delegate registration transactions.
- * @param {Object[]} wallets
- * @return {Object[]}
- */
- __buildDelegateTransactions(wallets) {
- return wallets.map(wallet => this.__createDelegateTransaction(wallet))
- }
-
- /**
- * Create transfer transaction.
- * @param {Object} senderWallet
- * @param {Object} receiverWallet
- * @param {Number} amount
- * @return {Object}
- */
- __createTransferTransaction(senderWallet, receiverWallet, amount) {
- const { data } = client
- .getBuilder()
- .transfer()
- .recipientId(receiverWallet.address)
- .amount(amount)
- .network(this.prefixHash)
- .sign(senderWallet.passphrase)
-
- return this.__formatGenesisTransaction(data, senderWallet)
- }
-
- /**
- * Create delegate registration transaction.
- * @param {Object} wallet
- * @return {Object}
- */
- __createDelegateTransaction(wallet) {
- const { data } = client
- .getBuilder()
- .delegateRegistration()
- .usernameAsset(wallet.username)
- .sign(wallet.passphrase)
-
- return this.__formatGenesisTransaction(data, wallet)
- }
-
- /**
- * Reset transaction to be applied in the genesis block.
- * @param {Object} transaction
- * @param {Object} wallet
- * @return {Object}
- */
- __formatGenesisTransaction(transaction, wallet) {
- Object.assign(transaction, {
- fee: 0,
- timestamp: 0,
- senderId: wallet.address,
- })
- transaction.signature = crypto.sign(transaction, wallet.keys)
- transaction.id = crypto.getId(transaction)
-
- return transaction
- }
-
- /**
- * Create block based on data.
- * @param {Object} data
- * @return {Object}
- */
- __createGenesisBlock(data) {
- const transactions = data.transactions.sort((a, b) => {
- if (a.type === b.type) {
- return a.amount - b.amount
- }
-
- return a.type - b.type
- })
-
- let payloadLength = 0
- let totalFee = 0
- let totalAmount = 0
- const payloadHash = createHash('sha256')
-
- transactions.forEach(transaction => {
- const bytes = crypto.getBytes(transaction)
- payloadLength += bytes.length
- totalFee += transaction.fee
- totalAmount += transaction.amount
- payloadHash.update(bytes)
- })
-
- const block = {
- version: 0,
- totalAmount,
- totalFee,
- reward: 0,
- payloadHash: payloadHash.digest().toString('hex'),
- timestamp: data.timestamp,
- numberOfTransactions: transactions.length,
- payloadLength,
- previousBlock: null,
- generatorPublicKey: data.keys.publicKey.toString('hex'),
- transactions,
- height: 1,
- }
-
- block.id = this.__getBlockId(block)
-
- try {
- block.blockSignature = this.__signBlock(block, data.keys)
- } catch (e) {
- throw e
- }
-
- return block
- }
-
- /**
- * Work out block id for block.
- * @param {Object} block
- * @return {String}
- */
- __getBlockId(block) {
- const hash = this.__getHash(block)
- const blockBuffer = Buffer.alloc(8)
- for (let i = 0; i < 8; i++) {
- blockBuffer[i] = hash[7 - i]
- }
-
- return new Bignum(blockBuffer.toString('hex'), 16).toString()
- }
-
- /**
- * Sign block with keys.
- * @param {Object} block
- * @param {Object]} keys
- * @return {String}
- */
- __signBlock(block, keys) {
- const hash = this.__getHash(block)
- return crypto.signHash(hash, keys)
- }
-
- /**
- * Get hash of block.
- * @param {Object} block
- * @return {String}
- */
- __getHash(block) {
- return createHash('sha256')
- .update(this.__getBytes(block))
- .digest()
- }
-
- /**
- * Get block bytes.
- * @param {Object} block
- * @return {(Buffer|undefined)}
- */
- __getBytes(block) {
- const size = 4 + 4 + 4 + 8 + 4 + 4 + 8 + 8 + 4 + 4 + 4 + 32 + 32 + 64
-
- try {
- const byteBuffer = new ByteBuffer(size, true)
- byteBuffer.writeInt(block.version)
- byteBuffer.writeInt(block.timestamp)
- byteBuffer.writeInt(block.height)
-
- if (block.previousBlock) {
- const previousBlock = Buffer.from(
- new Bignum(block.previousBlock).toString(16),
- 'hex',
- )
-
- for (let i = 0; i < 8; i++) {
- byteBuffer.writeByte(previousBlock[i])
- }
- } else {
- for (let i = 0; i < 8; i++) {
- byteBuffer.writeByte(0)
- }
- }
-
- byteBuffer.writeInt(block.numberOfTransactions)
- byteBuffer.writeLong(block.totalAmount)
- byteBuffer.writeLong(block.totalFee)
- byteBuffer.writeLong(block.reward)
-
- byteBuffer.writeInt(block.payloadLength)
-
- const payloadHashBuffer = Buffer.from(block.payloadHash, 'hex')
- for (let i = 0; i < payloadHashBuffer.length; i++) {
- byteBuffer.writeByte(payloadHashBuffer[i])
- }
-
- const generatorPublicKeyBuffer = Buffer.from(
- block.generatorPublicKey,
- 'hex',
- )
- for (let i = 0; i < generatorPublicKeyBuffer.length; i++) {
- byteBuffer.writeByte(generatorPublicKeyBuffer[i])
- }
-
- if (block.blockSignature) {
- const blockSignatureBuffer = Buffer.from(block.blockSignature, 'hex')
- for (let i = 0; i < blockSignatureBuffer.length; i++) {
- byteBuffer.writeByte(blockSignatureBuffer[i])
- }
- }
-
- byteBuffer.flip()
- const buffer = byteBuffer.toBuffer()
-
- return buffer
- } catch (error) {
- throw error
- }
- }
-}
diff --git a/packages/core-deployer/lib/logger.js b/packages/core-deployer/lib/logger.js
deleted file mode 100644
index 1659945fe0..0000000000
--- a/packages/core-deployer/lib/logger.js
+++ /dev/null
@@ -1,7 +0,0 @@
-const pino = require('pino')
-
-module.exports = pino({
- name: 'ark-tester-cli',
- safe: true,
- prettyPrint: true,
-})
diff --git a/packages/core-deployer/lib/schema.js b/packages/core-deployer/lib/schema.js
deleted file mode 100644
index 621d12766a..0000000000
--- a/packages/core-deployer/lib/schema.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const Joi = require('joi')
-
-module.exports = Joi.object().keys({
- network: Joi.string().required(),
- name: Joi.string().required(),
- nodeIp: Joi.string().required(),
- p2pPort: Joi.number().required(),
- apiPort: Joi.number().required(),
- dbHost: Joi.string().required(),
- dbPort: Joi.number().required(),
- dbUsername: Joi.string().required(),
- dbPassword: Joi.string().required(),
- dbDatabase: Joi.string().required(),
- explorerUrl: Joi.string()
- .uri({ scheme: ['http', 'https'] })
- .required(),
- activeDelegates: Joi.number().required(),
- feeTransfer: Joi.number().required(),
- feeVote: Joi.number().required(),
- feeSecondSignature: Joi.number().required(),
- feeDelegateRegistration: Joi.number().required(),
- feeMultiSignature: Joi.number().required(),
- epoch: Joi.string()
- .regex(
- /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z)/,
- )
- .required(),
- rewardHeight: Joi.number()
- .integer()
- .positive()
- .required(),
- rewardPerBlock: Joi.number().required(),
- blocktime: Joi.number().required(),
- token: Joi.string().required(),
- symbol: Joi.string().required(),
- prefixHash: Joi.number().required(),
- transactionsPerBlock: Joi.number().required(),
- wifPrefix: Joi.number()
- .integer()
- .min(1)
- .max(255)
- .required(),
- totalPremine: Joi.number().required(),
- configPath: Joi.string().required(),
-})
diff --git a/packages/core-deployer/lib/utils.js b/packages/core-deployer/lib/utils.js
deleted file mode 100644
index c63a67218c..0000000000
--- a/packages/core-deployer/lib/utils.js
+++ /dev/null
@@ -1,52 +0,0 @@
-const set = require('lodash/set')
-const envfile = require('envfile')
-const expandHomeDir = require('expand-home-dir')
-const fs = require('fs-extra')
-const path = require('path')
-
-/**
- * Get a random number from range.
- * @param {Number} min
- * @param {Number} max
- * @return {Number}
- */
-exports.getRandomNumber = (min, max) =>
- Math.floor(Math.random() * (max - min) + min)
-
-exports.logger = require('./logger')
-
-/**
- * Update the contents of the given file and return config.
- * @param {String} file
- * @param {Object} values
- * @return {Object}
- */
-exports.updateConfig = (file, values, configPath, forceOverwrite) => {
- configPath = configPath || `${process.env.ARK_PATH_CONFIG}/deployer`
- configPath = path.resolve(configPath, file)
- let config
- if (fs.existsSync(configPath) && !forceOverwrite) {
- config = require(configPath)
- } else {
- config = {}
- }
-
- Object.keys(values).forEach(key => set(config, key, values[key]))
-
- fs.ensureFileSync(configPath)
- fs.writeFileSync(configPath, JSON.stringify(config, null, 2))
-
- return config
-}
-
-/**
- * Write Environment variables to file.
- * @param {Object} object
- * @param {String} path
- * @return {void}
- */
-exports.writeEnv = (object, filePath) => {
- filePath = expandHomeDir(filePath)
- fs.ensureDirSync(path.dirname(filePath))
- fs.writeFileSync(filePath, envfile.stringifySync(object))
-}
diff --git a/packages/core-deployer/package.json b/packages/core-deployer/package.json
deleted file mode 100644
index 3f2465136c..0000000000
--- a/packages/core-deployer/package.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "name": "@arkecosystem/core-deployer",
- "description": "Deployer for Ark Core",
- "version": "0.2.0",
- "contributors": [
- "Brian Faust ",
- "Alex Barnsley "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "bin": {
- "ark:deployer": "./bin/deployer"
- },
- "scripts": {
- "start": "./bin/deployer",
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/crypto": "~0.2",
- "bip39": "^2.5.0",
- "bytebuffer": "^5.0.1",
- "commander": "^2.19.0",
- "envfile": "^2.3.0",
- "expand-home-dir": "0.0.3",
- "fs-extra": "^7.0.1",
- "joi": "^14.3.0",
- "lodash.set": "^4.3.2",
- "pino": "^5.9.0",
- "pino-pretty": "^2.3.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
-}
diff --git a/packages/core-elasticsearch/CHANGELOG.md b/packages/core-elasticsearch/CHANGELOG.md
deleted file mode 100644
index 6842caf9df..0000000000
--- a/packages/core-elasticsearch/CHANGELOG.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.1.0 - 2018-12-03
-
-### Added
-
-- initial release
diff --git a/packages/core-elasticsearch/LICENSE b/packages/core-elasticsearch/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-elasticsearch/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-elasticsearch/README.md b/packages/core-elasticsearch/README.md
index 140161d7f3..7d91695924 100644
--- a/packages/core-elasticsearch/README.md
+++ b/packages/core-elasticsearch/README.md
@@ -14,8 +14,8 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-elasticsearch/jest.config.js b/packages/core-elasticsearch/jest.config.js
deleted file mode 100644
index 57770a97bb..0000000000
--- a/packages/core-elasticsearch/jest.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- testEnvironment: 'node',
- bail: false,
- verbose: true,
- testMatch: ['**/__tests__/**/*.test.js'],
- moduleFileExtensions: ['js', 'json'],
- collectCoverage: false,
- coverageDirectory: '/.coverage',
- collectCoverageFrom: ['lib/**/*.js', '!**/node_modules/**'],
- watchman: false,
- setupTestFrameworkScriptFile: 'jest-extended',
-}
diff --git a/packages/core-elasticsearch/lib/defaults.js b/packages/core-elasticsearch/lib/defaults.js
deleted file mode 100644
index 229aa2c9af..0000000000
--- a/packages/core-elasticsearch/lib/defaults.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module.exports = {
- server: {
- host: '0.0.0.0',
- port: 4007,
- whitelist: ['*'],
- },
- client: {
- host: 'localhost:9200',
- log: 'info',
- },
- chunkSize: 50000,
-}
diff --git a/packages/core-elasticsearch/lib/index.js b/packages/core-elasticsearch/lib/index.js
deleted file mode 100644
index 1546591aae..0000000000
--- a/packages/core-elasticsearch/lib/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const blockIndex = require('./index/block')
-const transactionIndex = require('./index/transaction')
-const walletIndex = require('./index/wallet')
-const roundIndex = require('./index/round')
-const client = require('./services/client')
-const storage = require('./services/storage')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'elasticsearch',
- async register(container, options) {
- const logger = container.resolvePlugin('logger')
-
- logger.info('[Elasticsearch] Initialising History :hourglass:')
- storage.ensure('history')
-
- logger.info('[Elasticsearch] Initialising Client :joystick:')
- await client.setUp(options.client)
-
- blockIndex.setUp(options.chunkSize)
- transactionIndex.setUp(options.chunkSize)
- walletIndex.setUp(options.chunkSize)
- roundIndex.setUp(options.chunkSize)
-
- return require('./server')(options.server)
- },
- async deregister(container, options) {
- container
- .resolvePlugin('logger')
- .info('[Elasticsearch] Stopping API :warning:')
-
- return container.resolvePlugin('elasticsearch').stop()
- },
-}
diff --git a/packages/core-elasticsearch/lib/index/block.js b/packages/core-elasticsearch/lib/index/block.js
deleted file mode 100644
index c72cf21e9b..0000000000
--- a/packages/core-elasticsearch/lib/index/block.js
+++ /dev/null
@@ -1,87 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const first = require('lodash/first')
-const last = require('lodash/last')
-const app = require('@arkecosystem/core-container')
-
-const database = app.resolvePlugin('database')
-const logger = app.resolvePlugin('logger')
-const Index = require('./index')
-const client = require('../services/client')
-const storage = require('../services/storage')
-
-class BlockIndex extends Index {
- /**
- * Index blocks using the specified chunk size.
- * @return {void}
- */
- async index() {
- const { count } = await this.__count()
-
- const queries = Math.ceil(count / this.chunkSize)
-
- for (let i = 0; i < queries; i++) {
- const modelQuery = this.__createQuery()
-
- const query = modelQuery
- .select()
- .from(modelQuery)
- .where(modelQuery.timestamp.gte(storage.get('history', 'lastBlock')))
- .order(modelQuery.height.asc)
- .limit(this.chunkSize)
- .offset(this.chunkSize * i)
-
- const rows = await database.query.manyOrNone(query.toQuery())
-
- if (!rows.length) {
- continue
- }
-
- const heights = rows.map(row => row.height)
- logger.info(
- `[Elasticsearch] Indexing blocks from height ${first(
- heights,
- )} to ${last(heights)} :card_index_dividers:`,
- )
-
- try {
- await client.bulk(this._buildBulkUpsert(rows))
-
- storage.update('history', {
- lastBlock: last(heights),
- })
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- }
- }
-
- /**
- * Register listeners for "block.*" events.
- * @return {void}
- */
- listen() {
- this._registerCreateListener('block.applied')
- // this._registerCreateListener('block.forged')
-
- this._registerDeleteListener('block.reverted')
- }
-
- /**
- * Get the document index.
- * @return {String}
- */
- getIndex() {
- return 'blocks'
- }
-
- /**
- * Get the document type.
- * @return {String}
- */
- getType() {
- return 'block'
- }
-}
-
-module.exports = new BlockIndex()
diff --git a/packages/core-elasticsearch/lib/index/index.js b/packages/core-elasticsearch/lib/index/index.js
deleted file mode 100644
index 27c67a9817..0000000000
--- a/packages/core-elasticsearch/lib/index/index.js
+++ /dev/null
@@ -1,181 +0,0 @@
-/* eslint camelcase: "off" */
-
-const app = require('@arkecosystem/core-container')
-
-const emitter = app.resolvePlugin('event-emitter')
-const logger = app.resolvePlugin('logger')
-const database = app.resolvePlugin('database')
-const client = require('../services/client')
-const storage = require('../services/storage')
-
-module.exports = class Index {
- /**
- * Create a new index instance.
- * @param {Number} chunkSize
- * @return {void}
- */
- setUp(chunkSize) {
- logger.info(`[Elasticsearch] Initialising ${this.getType()} index :scroll:`)
- this.chunkSize = chunkSize
-
- logger.info(
- `[Elasticsearch] Initialising ${this.getType()} listener :radio:`,
- )
- this.listen()
-
- logger.info(`[Elasticsearch] Indexing ${this.getIndex()} :bookmark:`)
- this.index()
- }
-
- /**
- * Register a new "CREATE" operation listener.
- * @param {String} event
- * @return {void}
- */
- _registerCreateListener(event) {
- emitter.on(event, async doc => {
- try {
- const exists = await this._exists(doc)
-
- if (!exists) {
- await this._create(doc)
- }
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- })
- }
-
- /**
- * Register a new "DELETE" operation listener.
- * @param {String} event
- * @return {void}
- */
- _registerDeleteListener(event) {
- emitter.on(event, async doc => {
- try {
- const exists = await this._exists(doc)
-
- if (exists) {
- await this._delete(doc)
- }
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- })
- }
-
- /**
- * Check if the specified document exists.
- * @param {String} doc
- * @return {Promise}
- */
- _exists(doc) {
- return client.exists(this._getReadQuery(doc))
- }
-
- /**
- * Create a new document.
- * @param {String} doc
- * @return {Promise}
- */
- _create(doc) {
- logger.info(`[Elasticsearch] Creating ${this.getType()} with ID ${doc.id}`)
-
- if (this.getType() === 'block') {
- storage.update('history', { lastBlock: doc.height })
- } else {
- storage.update('history', { lastTransaction: doc.timestamp })
- }
-
- return client.create(this._getWriteQuery(doc))
- }
-
- /**
- * Delete the specified document.
- * @param {String} doc
- * @return {Promise}
- */
- _delete(doc) {
- logger.info(`[Elasticsearch] Deleting ${this.getType()} with ID ${doc.id}`)
-
- return client.delete(this._getReadQuery(doc))
- }
-
- /**
- * Get a query for a "WRITE" operation.
- * @param {String} doc
- * @return {Object}
- */
- _getWriteQuery(doc) {
- return {
- index: this.getIndex(),
- type: this.getType(),
- id: doc.id,
- body: doc,
- }
- }
-
- /**
- * Get a query for a "READ" operation.
- * @param {String} doc
- * @return {Object}
- */
- _getReadQuery(doc) {
- return {
- index: this.getIndex(),
- type: this.getType(),
- id: doc.id,
- }
- }
-
- /**
- * Get a query for a "READ" operation.
- * @param {String} doc
- * @return {Object}
- */
- _getUpsertQuery(doc) {
- return {
- action: {
- update: {
- _index: this.getIndex(),
- _type: this.getType(),
- _id: doc.id,
- },
- },
- document: {
- doc,
- doc_as_upsert: true,
- },
- }
- }
-
- /**
- * Get a query for a "READ" operation.
- * @param {Array} items
- * @return {Object}
- */
- _buildBulkUpsert(items) {
- const actions = []
-
- items.forEach(item => {
- const query = this._getUpsertQuery(item)
- actions.push(query.action)
- actions.push(query.document)
- })
-
- return actions
- }
-
- __createQuery() {
- return database.models[this.getType()].query()
- }
-
- __count() {
- const modelQuery = this.__createQuery()
-
- const query = modelQuery.select(modelQuery.count('count')).from(modelQuery)
-
- return database.query.one(query.toQuery())
- }
-}
diff --git a/packages/core-elasticsearch/lib/index/round.js b/packages/core-elasticsearch/lib/index/round.js
deleted file mode 100644
index c0ee818141..0000000000
--- a/packages/core-elasticsearch/lib/index/round.js
+++ /dev/null
@@ -1,85 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const first = require('lodash/first')
-const last = require('lodash/last')
-const app = require('@arkecosystem/core-container')
-
-const emitter = app.resolvePlugin('event-emitter')
-const database = app.resolvePlugin('database')
-const logger = app.resolvePlugin('logger')
-const Index = require('./index')
-const client = require('../services/client')
-const storage = require('../services/storage')
-
-class RoundIndex extends Index {
- /**
- * Index rounds using the specified chunk size.
- * @return {void}
- */
- async index() {
- const { count } = await this.__count()
-
- const queries = Math.ceil(count / this.chunkSize)
-
- for (let i = 0; i < queries; i++) {
- const modelQuery = this.__createQuery()
-
- const query = modelQuery
- .select()
- .from(modelQuery)
- .where(modelQuery.round.gte(storage.get('history', 'lastRound')))
- .order(modelQuery.round.asc)
- .limit(this.chunkSize)
- .offset(this.chunkSize * i)
-
- const rows = await database.query.manyOrNone(query.toQuery())
-
- if (!rows.length) {
- continue
- }
-
- const roundIds = rows.map(row => row.round)
- logger.info(
- `[Elasticsearch] Indexing rounds from ${first(roundIds)} to ${last(
- roundIds,
- )} :card_index_dividers:`,
- )
-
- try {
- await client.bulk(this._buildBulkUpsert(rows))
-
- storage.update('history', {
- lastRound: last(roundIds),
- })
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- }
- }
-
- /**
- * Register listeners for "round.*" events.
- * @return {void}
- */
- listen() {
- emitter.on('round.created', data => this.index())
- }
-
- /**
- * Get the document index.
- * @return {String}
- */
- getIndex() {
- return 'rounds'
- }
-
- /**
- * Get the document type.
- * @return {String}
- */
- getType() {
- return 'round'
- }
-}
-
-module.exports = new RoundIndex()
diff --git a/packages/core-elasticsearch/lib/index/transaction.js b/packages/core-elasticsearch/lib/index/transaction.js
deleted file mode 100644
index 903013867d..0000000000
--- a/packages/core-elasticsearch/lib/index/transaction.js
+++ /dev/null
@@ -1,98 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const first = require('lodash/first')
-const last = require('lodash/last')
-const app = require('@arkecosystem/core-container')
-
-const database = app.resolvePlugin('database')
-const logger = app.resolvePlugin('logger')
-const { Transaction } = require('@arkecosystem/crypto').models
-const Index = require('./index')
-const client = require('../services/client')
-const storage = require('../services/storage')
-
-class TransactionIndex extends Index {
- /**
- * Index transactions using the specified chunk size.
- * @return {void}
- */
- async index() {
- const { count } = await this.__count()
-
- const queries = Math.ceil(count / this.chunkSize)
-
- for (let i = 0; i < queries; i++) {
- const modelQuery = this.__createQuery()
-
- const query = modelQuery
- .select(modelQuery.block_id, modelQuery.serialized)
- .from(modelQuery)
- .where(
- modelQuery.timestamp.gte(storage.get('history', 'lastTransaction')),
- )
- .order(modelQuery.timestamp.asc)
- .limit(this.chunkSize)
- .offset(this.chunkSize * i)
-
- let rows = await database.query.manyOrNone(query.toQuery())
-
- if (!rows.length) {
- continue
- }
-
- rows = rows.map(row => {
- const transaction = new Transaction(row.serialized.toString('hex'))
- transaction.blockId = row.blockId
-
- return transaction
- })
-
- const blockIds = rows.map(row => row.blockId)
- logger.info(
- `[Elasticsearch] Indexing transactions from block ${first(
- blockIds,
- )} to ${last(blockIds)} :card_index_dividers:`,
- )
-
- try {
- await client.bulk(this._buildBulkUpsert(rows))
-
- storage.update('history', {
- lastTransaction: last(rows.map(row => row.timestamp)),
- })
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- }
- }
-
- /**
- * Register listeners for "transaction.*" events.
- * @return {void}
- */
- listen() {
- this._registerCreateListener('transaction.applied')
- this._registerCreateListener('transaction.forged')
-
- this._registerDeleteListener('transaction.expired')
- this._registerDeleteListener('transaction.reverted')
- }
-
- /**
- * Get the document index.
- * @return {String}
- */
- getIndex() {
- return 'transactions'
- }
-
- /**
- * Get the document type.
- * @return {String}
- */
- getType() {
- return 'transaction'
- }
-}
-
-module.exports = new TransactionIndex()
diff --git a/packages/core-elasticsearch/lib/index/wallet.js b/packages/core-elasticsearch/lib/index/wallet.js
deleted file mode 100644
index 57f305b91d..0000000000
--- a/packages/core-elasticsearch/lib/index/wallet.js
+++ /dev/null
@@ -1,77 +0,0 @@
-/* eslint no-await-in-loop: "off" */
-
-const app = require('@arkecosystem/core-container')
-
-const emitter = app.resolvePlugin('event-emitter')
-const database = app.resolvePlugin('database')
-const logger = app.resolvePlugin('logger')
-const Index = require('./index')
-const client = require('../services/client')
-
-class WalletIndex extends Index {
- /**
- * Index wallets using the specified chunk size.
- * @return {void}
- */
- async index() {
- const { count } = await this.__count()
-
- const queries = Math.ceil(count / this.chunkSize)
-
- for (let i = 0; i < queries; i++) {
- const modelQuery = this.__createQuery()
-
- const query = modelQuery
- .select()
- .from(modelQuery)
- .limit(this.chunkSize)
- .offset(this.chunkSize * i)
-
- const rows = await database.query.manyOrNone(query.toQuery())
-
- if (!rows.length) {
- continue
- }
-
- logger.info(
- `[Elasticsearch] Indexing ${rows.length} wallets :card_index_dividers:`,
- )
-
- try {
- rows.forEach(row => {
- row.id = row.address
- })
-
- await client.bulk(this._buildBulkUpsert(rows))
- } catch (error) {
- logger.error(`[Elasticsearch] ${error.message} :exclamation:`)
- }
- }
- }
-
- /**
- * Register listeners for "wallet.*" events.
- * @return {void}
- */
- listen() {
- emitter.on('wallets:updated', data => this.index())
- }
-
- /**
- * Get the document index.
- * @return {String}
- */
- getIndex() {
- return 'wallets'
- }
-
- /**
- * Get the document type.
- * @return {String}
- */
- getType() {
- return 'wallet'
- }
-}
-
-module.exports = new WalletIndex()
diff --git a/packages/core-elasticsearch/lib/server/handler.js b/packages/core-elasticsearch/lib/server/handler.js
deleted file mode 100644
index 4f762ef63e..0000000000
--- a/packages/core-elasticsearch/lib/server/handler.js
+++ /dev/null
@@ -1,68 +0,0 @@
-const Joi = require('joi')
-const client = require('../services/client')
-
-/**
- * @type {Object}
- */
-exports.index = {
- /**
- * @param {Hapi.Request} request
- * @param {Hapi.Toolkit} h
- * @return {Hapi.Response}
- */
- async handler(request, h) {
- const query = await client.search(request.payload)
-
- return {
- meta: {
- count: query.hits.total,
- },
- data: query.hits.hits.map(result => result._source),
- }
- },
- options: {
- validate: {
- payload: {
- analyzer: Joi.string(),
- analyzeWildcard: Joi.boolean(),
- defaultOperator: Joi.string(),
- df: Joi.string(),
- explain: Joi.boolean(),
- storedFields: Joi.any(),
- docvalueFields: Joi.any(),
- from: Joi.number(),
- allowNoIndices: Joi.boolean(),
- expandWildcards: Joi.string(),
- lenient: Joi.boolean(),
- preference: Joi.string(),
- q: Joi.string(),
- routing: Joi.any(),
- scroll: Joi.string(),
- searchType: Joi.string(),
- size: Joi.number().default(10),
- sort: Joi.any(),
- _source: Joi.any(),
- _sourceExclude: Joi.any(),
- _sourceInclude: Joi.any(),
- terminateAfter: Joi.number(),
- stats: Joi.any(),
- suggestField: Joi.string(),
- suggestMode: Joi.string(),
- suggestSize: Joi.number(),
- suggestText: Joi.string(),
- timeout: Joi.string(),
- trackScores: Joi.boolean(),
- trackTotalHits: Joi.boolean(),
- typedKeys: Joi.boolean(),
- version: Joi.boolean(),
- requestCache: Joi.boolean(),
- batchedReduceSize: Joi.number(),
- maxConcurrentShardRequests: Joi.number(),
- preFilterShardSize: Joi.number(),
- index: Joi.any(),
- type: Joi.any(),
- body: Joi.object(),
- },
- },
- },
-}
diff --git a/packages/core-elasticsearch/lib/server/index.js b/packages/core-elasticsearch/lib/server/index.js
deleted file mode 100644
index fbb0e30a85..0000000000
--- a/packages/core-elasticsearch/lib/server/index.js
+++ /dev/null
@@ -1,36 +0,0 @@
-const {
- createServer,
- mountServer,
- plugins,
-} = require('@arkecosystem/core-http-utils')
-
-/**
- * Creates a new hapi.js server.
- * @param {Object} config
- * @return {Hapi.Server}
- */
-module.exports = async config => {
- const server = await createServer({
- host: config.host,
- port: config.port,
- routes: {
- validate: {
- async failAction(request, h, err) {
- throw err
- },
- },
- },
- })
-
- await server.register({
- plugin: plugins.whitelist,
- options: {
- whitelist: config.whitelist,
- name: 'Elasticsearch API',
- },
- })
-
- await server.register(require('./routes'))
-
- return mountServer('Elasticsearch API', server)
-}
diff --git a/packages/core-elasticsearch/lib/server/routes.js b/packages/core-elasticsearch/lib/server/routes.js
deleted file mode 100644
index f44a24626f..0000000000
--- a/packages/core-elasticsearch/lib/server/routes.js
+++ /dev/null
@@ -1,27 +0,0 @@
-const handler = require('./handler')
-
-/**
- * Register search routes.
- * @param {Hapi.Server} server
- * @param {Object} options
- * @return {void}
- */
-const register = async (server, options) => {
- server.route([
- {
- method: 'POST',
- path: '/',
- ...handler.index,
- },
- ])
-}
-
-/**
- * The struct used by hapi.js.
- * @type {Object}
- */
-exports.plugin = {
- name: 'routes',
- version: '0.1.0',
- register,
-}
diff --git a/packages/core-elasticsearch/lib/services/client.js b/packages/core-elasticsearch/lib/services/client.js
deleted file mode 100644
index 5b392b8193..0000000000
--- a/packages/core-elasticsearch/lib/services/client.js
+++ /dev/null
@@ -1,84 +0,0 @@
-const elasticsearch = require('elasticsearch')
-
-class Client {
- /**
- * Create a new client instance.
- * @param {Object} options
- */
- async setUp(options) {
- this.client = new elasticsearch.Client(options)
- }
-
- /**
- * Get the elasticsearch client.
- * @return {elasticsearch.Client}
- */
- async getClient() {
- return this.client
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} body
- * @return {Promise}
- */
- async bulk(body) {
- return this.client.bulk({ body })
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async count(params) {
- return this.client.count(params)
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async search(params) {
- return this.client.search(params)
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async create(params) {
- return this.client.create(params)
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async update(params) {
- return this.client.update(params)
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async delete(params) {
- return this.client.delete(params)
- }
-
- /**
- * Perform an "UPDATE" operation.
- * @param {Object} params
- * @return {Promise}
- */
- async exists(params) {
- return this.client.exists(params)
- }
-}
-
-module.exports = new Client()
diff --git a/packages/core-elasticsearch/lib/services/storage.js b/packages/core-elasticsearch/lib/services/storage.js
deleted file mode 100644
index bf55f1b35f..0000000000
--- a/packages/core-elasticsearch/lib/services/storage.js
+++ /dev/null
@@ -1,96 +0,0 @@
-const fs = require('fs-extra')
-const loget = require('lodash/get')
-
-class Storage {
- /**
- * Create a new storage instance.
- * @return {void}
- */
- constructor() {
- this.base = `${process.env.ARK_PATH_DATA}/plugins/core-elasticsearch`
- }
-
- /**
- * Read & parse the specified file.
- * @param {String} file
- * @return {Object}
- */
- read(file) {
- return this.exists(file)
- ? JSON.parse(fs.readFileSync(`${this.base}/${file}.json`))
- : {}
- }
-
- /**
- * Write the specified data to the specified file.
- * @param {String} file
- * @param {Object} data
- * @return {void}
- */
- write(file, data) {
- fs.ensureFileSync(`${this.base}/${file}.json`)
-
- fs.writeFileSync(`${this.base}/${file}.json`, JSON.stringify(data, null, 2))
- }
-
- /**
- * Update the specified data in the specified file.
- * @param {String} file
- * @param {Object} data
- * @return {void}
- */
- update(file, data) {
- fs.ensureFileSync(`${this.base}/${file}.json`)
-
- data = Object.assign(this.read(file), data)
-
- fs.writeFileSync(`${this.base}/${file}.json`, JSON.stringify(data, null, 2))
- }
-
- /**
- * Update the specified data in the specified file.
- * @param {String} file
- * @param {Object} data
- * @return {void}
- */
- ensure(file) {
- if (!this.exists(file)) {
- fs.ensureFileSync(`${this.base}/${file}.json`)
-
- fs.writeFileSync(
- `${this.base}/${file}.json`,
- JSON.stringify(
- {
- lastRound: 0,
- lastBlock: 0,
- lastTransaction: 0,
- },
- null,
- 2,
- ),
- )
- }
- }
-
- /**
- * Determine if the specified file exists.
- * @param {String} file
- * @return {Boolean}
- */
- exists(file) {
- return fs.existsSync(`${this.base}/${file}.json`)
- }
-
- /**
- * Get a value from the specified file for the specified key.
- * @param {String} file
- * @param {String} key
- * @param {*} key
- * @return {*}
- */
- get(file, key, defaultValue = null) {
- return loget(this.read(file), key, defaultValue)
- }
-}
-
-module.exports = new Storage()
diff --git a/packages/core-elasticsearch/package.json b/packages/core-elasticsearch/package.json
index 8481b00f7e..fc485a5fe2 100644
--- a/packages/core-elasticsearch/package.json
+++ b/packages/core-elasticsearch/package.json
@@ -1,35 +1,51 @@
{
- "name": "@arkecosystem/core-elasticsearch",
- "description": "A powerful Elasticsearch integration for Ark Core",
- "version": "0.1.0",
- "contributors": [
- "Brian Faust "
- ],
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test": "cross-env ARK_ENV=test jest --runInBand --detectOpenHandles",
- "test:coverage": "cross-env ARK_ENV=test jest --coverage --coveragePathIgnorePatterns='/(defaults.js|index.js)$' --runInBand --detectOpenHandles",
- "test:debug": "cross-env ARK_ENV=test node --inspect-brk ../../node_modules/.bin/jest --runInBand",
- "test:watch": "cross-env ARK_ENV=test jest --runInBand --watch",
- "test:watch:all": "cross-env ARK_ENV=test jest --runInBand --watchAll",
- "lint": "eslint ./ --fix"
- },
- "dependencies": {
- "@arkecosystem/core-container": "~0.2",
- "@arkecosystem/core-http-utils": "~0.2",
- "@arkecosystem/crypto": "~0.2",
- "elasticsearch": "^15.2.0",
- "fs-extra": "^7.0.1",
- "joi": "^14.3.0",
- "lodash.first": "^3.0.0",
- "lodash.get": "^4.4.2",
- "lodash.last": "^3.0.0"
- },
- "publishConfig": {
- "access": "public"
- },
- "engines": {
- "node": ">=10.x"
- }
+ "name": "@arkecosystem/core-elasticsearch",
+ "description": "A powerful Elasticsearch integration for Ark Core",
+ "version": "2.1.0",
+ "contributors": [
+ "Brian Faust "
+ ],
+ "license": "MIT",
+ "main": "dist/index.js",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prepublishOnly": "yarn test && yarn build",
+ "pretest": "yarn lint && yarn build",
+ "compile": "../../node_modules/typescript/bin/tsc",
+ "build": "yarn clean && yarn compile",
+ "build:watch": "yarn clean && yarn compile -w",
+ "clean": "del dist",
+ "docs": "../../node_modules/typedoc/bin/typedoc src --out docs",
+ "lint": "../../node_modules/tslint/bin/tslint -c ../../tslint.json 'src/**/*.ts' '__tests__/**/*.ts' --fix",
+ "updates": "../../node_modules/npm-check-updates/bin/npm-check-updates -a"
+ },
+ "dependencies": {
+ "@arkecosystem/core-interfaces": "^2.1.0",
+ "@arkecosystem/core-container": "^2.1.0",
+ "@arkecosystem/core-http-utils": "^2.1.0",
+ "@arkecosystem/crypto": "^2.1.0",
+ "@types/elasticsearch": "^5.0.30",
+ "@types/fs-extra": "^5.0.4",
+ "@types/joi": "^14.0.1",
+ "@types/lodash.first": "^3.0.4",
+ "@types/lodash.get": "^4.4.4",
+ "@types/lodash.last": "^3.0.4",
+ "elasticsearch": "^15.2.0",
+ "fs-extra": "^7.0.1",
+ "joi": "^14.3.0",
+ "lodash.first": "^3.0.0",
+ "lodash.get": "^4.4.2",
+ "lodash.last": "^3.0.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10.x"
+ },
+ "jest": {
+ "preset": "../../jest-preset.json"
+ }
}
diff --git a/packages/core-elasticsearch/src/defaults.ts b/packages/core-elasticsearch/src/defaults.ts
new file mode 100644
index 0000000000..5dc306b667
--- /dev/null
+++ b/packages/core-elasticsearch/src/defaults.ts
@@ -0,0 +1,12 @@
+export const defaults = {
+ server: {
+ host: "0.0.0.0",
+ port: 4007,
+ whitelist: ["*"],
+ },
+ client: {
+ host: "localhost:9200",
+ log: "info",
+ },
+ chunkSize: 50000,
+};
diff --git a/packages/core-elasticsearch/src/index.ts b/packages/core-elasticsearch/src/index.ts
new file mode 100644
index 0000000000..2ca124d76e
--- /dev/null
+++ b/packages/core-elasticsearch/src/index.ts
@@ -0,0 +1,36 @@
+import { Container, Logger } from "@arkecosystem/core-interfaces";
+import { defaults } from "./defaults";
+import { blockIndex } from "./index/block";
+import { roundIndex } from "./index/round";
+import { transactionIndex } from "./index/transaction";
+import { walletIndex } from "./index/wallet";
+import { startServer } from "./server";
+import { client } from "./services/client";
+import { storage } from "./services/storage";
+
+export const plugin: Container.PluginDescriptor = {
+ pkg: require("../package.json"),
+ defaults,
+ alias: "elasticsearch",
+ async register(container: Container.IContainer, options) {
+ const logger = container.resolvePlugin("logger");
+
+ logger.info("[Elasticsearch] Initialising History :hourglass:");
+ storage.ensure("history");
+
+ logger.info("[Elasticsearch] Initialising Client :joystick:");
+ await client.setUp(options.client);
+
+ blockIndex.setUp(options.chunkSize);
+ transactionIndex.setUp(options.chunkSize);
+ walletIndex.setUp(options.chunkSize);
+ roundIndex.setUp(options.chunkSize);
+
+ return startServer(options.server);
+ },
+ async deregister(container: Container.IContainer, options) {
+ container.resolvePlugin("logger").info("[Elasticsearch] Stopping API :warning:");
+
+ return container.resolvePlugin("elasticsearch").stop();
+ },
+};
diff --git a/packages/core-elasticsearch/src/index/block.ts b/packages/core-elasticsearch/src/index/block.ts
new file mode 100644
index 0000000000..c1516d02a1
--- /dev/null
+++ b/packages/core-elasticsearch/src/index/block.ts
@@ -0,0 +1,86 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, Logger } from "@arkecosystem/core-interfaces";
+import first from "lodash/first";
+import last from "lodash/last";
+import { client } from "../services/client";
+import { storage } from "../services/storage";
+import { Index } from "./index";
+
+const logger = app.resolvePlugin("logger");
+const databaseService = app.resolvePlugin("database");
+
+class BlockIndex extends Index {
+ /**
+ * Index blocks using the specified chunk size.
+ * @return {void}
+ */
+ public async index() {
+ const { count } = await this.__count();
+
+ const queries = Math.ceil(count / this.chunkSize);
+
+ for (let i = 0; i < queries; i++) {
+ const modelQuery = this.__createQuery();
+
+ const query = modelQuery
+ .select()
+ .from(modelQuery)
+ .where(modelQuery.timestamp.gte(storage.get("history", "lastBlock")))
+ .order(modelQuery.height.asc)
+ .limit(this.chunkSize)
+ .offset(this.chunkSize * i);
+
+ const rows = await (databaseService.connection as any).query.manyOrNone(query.toQuery());
+
+ if (!rows.length) {
+ continue;
+ }
+
+ const heights = rows.map(row => row.height);
+ logger.info(
+ `[Elasticsearch] Indexing blocks from height ${first(heights)} to ${last(
+ heights,
+ )} :card_index_dividers:`,
+ );
+
+ try {
+ await client.bulk(this._buildBulkUpsert(rows));
+
+ storage.update("history", {
+ lastBlock: last(heights),
+ });
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ }
+ }
+
+ /**
+ * Register listeners for "block.*" events.
+ * @return {void}
+ */
+ public listen() {
+ this._registerCreateListener("block.applied");
+ // this._registerCreateListener('block.forged')
+
+ this._registerDeleteListener("block.reverted");
+ }
+
+ /**
+ * Get the document index.
+ * @return {String}
+ */
+ public getIndex() {
+ return "blocks";
+ }
+
+ /**
+ * Get the document type.
+ * @return {String}
+ */
+ public getType() {
+ return "block";
+ }
+}
+
+export const blockIndex = new BlockIndex();
diff --git a/packages/core-elasticsearch/src/index/index.ts b/packages/core-elasticsearch/src/index/index.ts
new file mode 100644
index 0000000000..d2b7fc9f04
--- /dev/null
+++ b/packages/core-elasticsearch/src/index/index.ts
@@ -0,0 +1,185 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import { client } from "../services/client";
+import { storage } from "../services/storage";
+
+const emitter = app.resolvePlugin("event-emitter");
+const logger = app.resolvePlugin("logger");
+const databaseService = app.resolvePlugin("database");
+
+export abstract class Index {
+ public chunkSize: any;
+
+ public abstract getType(): any;
+ public abstract getIndex(): any;
+ public abstract index(): any;
+ public abstract listen(): any;
+
+ /**
+ * Create a new index instance.
+ * @param {Number} chunkSize
+ * @return {void}
+ */
+ public setUp(chunkSize) {
+ logger.info(`[Elasticsearch] Initialising ${this.getType()} index :scroll:`);
+ this.chunkSize = chunkSize;
+
+ logger.info(`[Elasticsearch] Initialising ${this.getType()} listener :radio:`);
+ this.listen();
+
+ logger.info(`[Elasticsearch] Indexing ${this.getIndex()} :bookmark:`);
+ this.index();
+ }
+
+ /**
+ * Register a new "CREATE" operation listener.
+ * @param {String} event
+ * @return {void}
+ */
+ public _registerCreateListener(event) {
+ emitter.on(event, async doc => {
+ try {
+ const exists = await this._exists(doc);
+
+ if (!exists) {
+ await this._create(doc);
+ }
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ });
+ }
+
+ /**
+ * Register a new "DELETE" operation listener.
+ * @param {String} event
+ * @return {void}
+ */
+ public _registerDeleteListener(event) {
+ emitter.on(event, async doc => {
+ try {
+ const exists = await this._exists(doc);
+
+ if (exists) {
+ await this._delete(doc);
+ }
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ });
+ }
+
+ /**
+ * Check if the specified document exists.
+ * @param {String} doc
+ * @return {Promise}
+ */
+ public _exists(doc) {
+ return client.exists(this._getReadQuery(doc));
+ }
+
+ /**
+ * Create a new document.
+ * @param {String} doc
+ * @return {Promise}
+ */
+ public _create(doc) {
+ logger.info(`[Elasticsearch] Creating ${this.getType()} with ID ${doc.id}`);
+
+ if (this.getType() === "block") {
+ storage.update("history", { lastBlock: doc.height });
+ } else {
+ storage.update("history", { lastTransaction: doc.timestamp });
+ }
+
+ return client.create(this._getWriteQuery(doc));
+ }
+
+ /**
+ * Delete the specified document.
+ * @param {String} doc
+ * @return {Promise}
+ */
+ public _delete(doc) {
+ logger.info(`[Elasticsearch] Deleting ${this.getType()} with ID ${doc.id}`);
+
+ return client.delete(this._getReadQuery(doc));
+ }
+
+ /**
+ * Get a query for a "WRITE" operation.
+ * @param {String} doc
+ * @return {Object}
+ */
+ public _getWriteQuery(doc) {
+ return {
+ index: this.getIndex(),
+ type: this.getType(),
+ id: doc.id,
+ body: doc,
+ };
+ }
+
+ /**
+ * Get a query for a "READ" operation.
+ * @param {String} doc
+ * @return {Object}
+ */
+ public _getReadQuery(doc) {
+ return {
+ index: this.getIndex(),
+ type: this.getType(),
+ id: doc.id,
+ };
+ }
+
+ /**
+ * Get a query for a "READ" operation.
+ * @param {String} doc
+ * @return {Object}
+ */
+ public _getUpsertQuery(doc) {
+ return {
+ action: {
+ update: {
+ _index: this.getIndex(),
+ _type: this.getType(),
+ _id: doc.id,
+ },
+ },
+ document: {
+ doc,
+ doc_as_upsert: true,
+ },
+ };
+ }
+
+ /**
+ * Get a query for a "READ" operation.
+ * @param {Array} items
+ * @return {Object}
+ */
+ public _buildBulkUpsert(items) {
+ const actions = [];
+
+ items.forEach(item => {
+ const query = this._getUpsertQuery(item);
+ actions.push(query.action);
+ actions.push(query.document);
+ });
+
+ return actions;
+ }
+
+ public __createQuery() {
+ return (databaseService.connection as any).models[this.getType()].query();
+ }
+
+ public __count() {
+ const modelQuery = this.__createQuery();
+
+ const query = modelQuery.select(modelQuery.count("count")).from(modelQuery);
+
+ return (databaseService.connection as any).query.one(query.toQuery());
+ }
+}
diff --git a/packages/core-elasticsearch/src/index/round.ts b/packages/core-elasticsearch/src/index/round.ts
new file mode 100644
index 0000000000..cae825e467
--- /dev/null
+++ b/packages/core-elasticsearch/src/index/round.ts
@@ -0,0 +1,82 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import first from "lodash/first";
+import last from "lodash/last";
+import { client } from "../services/client";
+import { storage } from "../services/storage";
+import { Index } from "./index";
+
+const emitter = app.resolvePlugin("event-emitter");
+const logger = app.resolvePlugin("logger");
+const databaseService = app.resolvePlugin("database");
+
+class RoundIndex extends Index {
+ /**
+ * Index rounds using the specified chunk size.
+ * @return {void}
+ */
+ public async index() {
+ const { count } = await this.__count();
+
+ const queries = Math.ceil(count / this.chunkSize);
+
+ for (let i = 0; i < queries; i++) {
+ const modelQuery = this.__createQuery();
+
+ const query = modelQuery
+ .select()
+ .from(modelQuery)
+ .where(modelQuery.round.gte(storage.get("history", "lastRound")))
+ .order(modelQuery.round.asc)
+ .limit(this.chunkSize)
+ .offset(this.chunkSize * i);
+
+ const rows = await (databaseService.connection as any).query.manyOrNone(query.toQuery());
+
+ if (!rows.length) {
+ continue;
+ }
+
+ const roundIds = rows.map(row => row.round);
+ logger.info(
+ `[Elasticsearch] Indexing rounds from ${first(roundIds)} to ${last(roundIds)} :card_index_dividers:`,
+ );
+
+ try {
+ await client.bulk(this._buildBulkUpsert(rows));
+
+ storage.update("history", {
+ lastRound: last(roundIds),
+ });
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ }
+ }
+
+ /**
+ * Register listeners for "round.*" events.
+ * @return {void}
+ */
+ public listen() {
+ emitter.on("round.created", data => this.index());
+ }
+
+ /**
+ * Get the document index.
+ * @return {String}
+ */
+ public getIndex() {
+ return "rounds";
+ }
+
+ /**
+ * Get the document type.
+ * @return {String}
+ */
+ public getType() {
+ return "round";
+ }
+}
+
+export const roundIndex = new RoundIndex();
diff --git a/packages/core-elasticsearch/src/index/transaction.ts b/packages/core-elasticsearch/src/index/transaction.ts
new file mode 100644
index 0000000000..04fb7e4677
--- /dev/null
+++ b/packages/core-elasticsearch/src/index/transaction.ts
@@ -0,0 +1,97 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import first from "lodash/first";
+import last from "lodash/last";
+import { client } from "../services/client";
+import { storage } from "../services/storage";
+import { Index } from "./index";
+
+import { models } from "@arkecosystem/crypto";
+const { Transaction } = models;
+
+const logger = app.resolvePlugin("logger");
+const databaseService = app.resolvePlugin("database");
+
+class TransactionIndex extends Index {
+ /**
+ * Index transactions using the specified chunk size.
+ * @return {void}
+ */
+ public async index() {
+ const { count } = await this.__count();
+
+ const queries = Math.ceil(count / this.chunkSize);
+
+ for (let i = 0; i < queries; i++) {
+ const modelQuery = this.__createQuery();
+
+ const query = modelQuery
+ .select(modelQuery.block_id, modelQuery.serialized)
+ .from(modelQuery)
+ .where(modelQuery.timestamp.gte(storage.get("history", "lastTransaction")))
+ .order(modelQuery.timestamp.asc)
+ .limit(this.chunkSize)
+ .offset(this.chunkSize * i);
+
+ let rows = await (databaseService.connection as any).query.manyOrNone(query.toQuery());
+
+ if (!rows.length) {
+ continue;
+ }
+
+ rows = rows.map(row => {
+ const transaction: any = new Transaction(row.serialized.toString("hex"));
+ transaction.blockId = row.blockId;
+
+ return transaction;
+ });
+
+ const blockIds = rows.map(row => row.blockId);
+ logger.info(
+ `[Elasticsearch] Indexing transactions from block ${first(blockIds)} to ${last(
+ blockIds,
+ )} :card_index_dividers:`,
+ );
+
+ try {
+ await client.bulk(this._buildBulkUpsert(rows));
+
+ storage.update("history", {
+ lastTransaction: last(rows.map(row => row.timestamp)),
+ });
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ }
+ }
+
+ /**
+ * Register listeners for "transaction.*" events.
+ * @return {void}
+ */
+ public listen() {
+ this._registerCreateListener("transaction.applied");
+ this._registerCreateListener("transaction.forged");
+
+ this._registerDeleteListener("transaction.expired");
+ this._registerDeleteListener("transaction.reverted");
+ }
+
+ /**
+ * Get the document index.
+ * @return {String}
+ */
+ public getIndex() {
+ return "transactions";
+ }
+
+ /**
+ * Get the document type.
+ * @return {String}
+ */
+ public getType() {
+ return "transaction";
+ }
+}
+
+export const transactionIndex = new TransactionIndex();
diff --git a/packages/core-elasticsearch/src/index/wallet.ts b/packages/core-elasticsearch/src/index/wallet.ts
new file mode 100644
index 0000000000..2707be2d2b
--- /dev/null
+++ b/packages/core-elasticsearch/src/index/wallet.ts
@@ -0,0 +1,74 @@
+import { app } from "@arkecosystem/core-container";
+import { Database, EventEmitter, Logger } from "@arkecosystem/core-interfaces";
+import { client } from "../services/client";
+import { Index } from "./index";
+
+const emitter = app.resolvePlugin("event-emitter");
+const logger = app.resolvePlugin("logger");
+const databaseService = app.resolvePlugin("database");
+
+class WalletIndex extends Index {
+ /**
+ * Index wallets using the specified chunk size.
+ * @return {void}
+ */
+ public async index() {
+ const { count } = await this.__count();
+
+ const queries = Math.ceil(count / this.chunkSize);
+
+ for (let i = 0; i < queries; i++) {
+ const modelQuery = this.__createQuery();
+
+ const query = modelQuery
+ .select()
+ .from(modelQuery)
+ .limit(this.chunkSize)
+ .offset(this.chunkSize * i);
+
+ const rows = await (databaseService.connection as any).query.manyOrNone(query.toQuery());
+
+ if (!rows.length) {
+ continue;
+ }
+
+ logger.info(`[Elasticsearch] Indexing ${rows.length} wallets :card_index_dividers:`);
+
+ try {
+ rows.forEach(row => {
+ row.id = row.address;
+ });
+
+ await client.bulk(this._buildBulkUpsert(rows));
+ } catch (error) {
+ logger.error(`[Elasticsearch] ${error.message} :exclamation:`);
+ }
+ }
+ }
+
+ /**
+ * Register listeners for "wallet.*" events.
+ * @return {void}
+ */
+ public listen() {
+ emitter.on("wallets:updated", data => this.index());
+ }
+
+ /**
+ * Get the document index.
+ * @return {String}
+ */
+ public getIndex() {
+ return "wallets";
+ }
+
+ /**
+ * Get the document type.
+ * @return {String}
+ */
+ public getType() {
+ return "wallet";
+ }
+}
+
+export const walletIndex = new WalletIndex();
diff --git a/packages/core-elasticsearch/src/server/handler.ts b/packages/core-elasticsearch/src/server/handler.ts
new file mode 100644
index 0000000000..7b99ea77ac
--- /dev/null
+++ b/packages/core-elasticsearch/src/server/handler.ts
@@ -0,0 +1,60 @@
+import Joi from "joi";
+import { client } from "../services/client";
+
+export const index = {
+ async handler(request, h) {
+ const query = await client.search(request.payload);
+
+ return {
+ meta: {
+ count: query.hits.total,
+ },
+ data: query.hits.hits.map(result => result._source),
+ };
+ },
+ options: {
+ validate: {
+ payload: {
+ analyzer: Joi.string(),
+ analyzeWildcard: Joi.boolean(),
+ defaultOperator: Joi.string(),
+ df: Joi.string(),
+ explain: Joi.boolean(),
+ storedFields: Joi.any(),
+ docvalueFields: Joi.any(),
+ from: Joi.number(),
+ allowNoIndices: Joi.boolean(),
+ expandWildcards: Joi.string(),
+ lenient: Joi.boolean(),
+ preference: Joi.string(),
+ q: Joi.string(),
+ routing: Joi.any(),
+ scroll: Joi.string(),
+ searchType: Joi.string(),
+ size: Joi.number().default(10),
+ sort: Joi.any(),
+ _source: Joi.any(),
+ _sourceExclude: Joi.any(),
+ _sourceInclude: Joi.any(),
+ terminateAfter: Joi.number(),
+ stats: Joi.any(),
+ suggestField: Joi.string(),
+ suggestMode: Joi.string(),
+ suggestSize: Joi.number(),
+ suggestText: Joi.string(),
+ timeout: Joi.string(),
+ trackScores: Joi.boolean(),
+ trackTotalHits: Joi.boolean(),
+ typedKeys: Joi.boolean(),
+ version: Joi.boolean(),
+ requestCache: Joi.boolean(),
+ batchedReduceSize: Joi.number(),
+ maxConcurrentShardRequests: Joi.number(),
+ preFilterShardSize: Joi.number(),
+ index: Joi.any(),
+ type: Joi.any(),
+ body: Joi.object(),
+ },
+ },
+ },
+};
diff --git a/packages/core-elasticsearch/src/server/index.ts b/packages/core-elasticsearch/src/server/index.ts
new file mode 100644
index 0000000000..eddc4c4ff7
--- /dev/null
+++ b/packages/core-elasticsearch/src/server/index.ts
@@ -0,0 +1,28 @@
+import { createServer, mountServer, plugins } from "@arkecosystem/core-http-utils";
+import { routes } from "./routes";
+
+export async function startServer(config) {
+ const server = await createServer({
+ host: config.host,
+ port: config.port,
+ routes: {
+ validate: {
+ async failAction(request, h, err) {
+ throw err;
+ },
+ },
+ },
+ });
+
+ await server.register({
+ plugin: plugins.whitelist,
+ options: {
+ whitelist: config.whitelist,
+ name: "Elasticsearch API",
+ },
+ });
+
+ await server.register(routes);
+
+ return mountServer("Elasticsearch API", server);
+}
diff --git a/packages/core-elasticsearch/src/server/routes.ts b/packages/core-elasticsearch/src/server/routes.ts
new file mode 100644
index 0000000000..24efd09ff3
--- /dev/null
+++ b/packages/core-elasticsearch/src/server/routes.ts
@@ -0,0 +1,15 @@
+import { index } from "./handler";
+
+export const routes = {
+ name: "routes",
+ version: "0.1.0",
+ async register(server, options) {
+ server.route([
+ {
+ method: "POST",
+ path: "/",
+ ...index,
+ },
+ ]);
+ },
+};
diff --git a/packages/core-elasticsearch/src/services/client.ts b/packages/core-elasticsearch/src/services/client.ts
new file mode 100644
index 0000000000..f622a30f12
--- /dev/null
+++ b/packages/core-elasticsearch/src/services/client.ts
@@ -0,0 +1,86 @@
+import elasticsearch from "elasticsearch";
+
+class Client {
+ private client: elasticsearch.Client;
+
+ /**
+ * Create a new client instance.
+ * @param {Object} options
+ */
+ public async setUp(options) {
+ this.client = new elasticsearch.Client(options);
+ }
+
+ /**
+ * Get the elasticsearch client.
+ * @return {elasticsearch.Client}
+ */
+ public async getClient() {
+ return this.client;
+ }
+
+ /**
+     * Perform a "BULK" operation.
+ * @param {Object} body
+ * @return {Promise}
+ */
+ public async bulk(body) {
+ return this.client.bulk({ body });
+ }
+
+ /**
+     * Perform a "COUNT" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async count(params) {
+ return this.client.count(params);
+ }
+
+ /**
+     * Perform a "SEARCH" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async search(params) {
+ return this.client.search(params);
+ }
+
+ /**
+     * Perform a "CREATE" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async create(params) {
+ return this.client.create(params);
+ }
+
+ /**
+ * Perform an "UPDATE" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async update(params) {
+ return this.client.update(params);
+ }
+
+ /**
+     * Perform a "DELETE" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async delete(params) {
+ return this.client.delete(params);
+ }
+
+ /**
+     * Perform an "EXISTS" operation.
+ * @param {Object} params
+ * @return {Promise}
+ */
+ public async exists(params) {
+ return this.client.exists(params);
+ }
+}
+
+export const client = new Client();
diff --git a/packages/core-elasticsearch/src/services/storage.ts b/packages/core-elasticsearch/src/services/storage.ts
new file mode 100644
index 0000000000..41b7ce195e
--- /dev/null
+++ b/packages/core-elasticsearch/src/services/storage.ts
@@ -0,0 +1,100 @@
+import { ensureFileSync, existsSync, readFileSync, writeFileSync } from "fs-extra";
+import get from "lodash/get";
+
+class Storage {
+ private base: string;
+
+ /**
+ * Create a new storage instance.
+ * @return {void}
+ */
+ constructor() {
+ this.base = `${process.env.CORE_PATH_DATA}/plugins/core-elasticsearch`;
+ }
+
+ /**
+ * Read & parse the specified file.
+ * @param {String} file
+ * @return {Object}
+ */
+ public read(file) {
+ if (!this.exists(file)) {
+ return {};
+ }
+
+ return JSON.parse(readFileSync(`${this.base}/${file}.json`).toString());
+ }
+
+ /**
+ * Write the specified data to the specified file.
+ * @param {String} file
+ * @param {Object} data
+ * @return {void}
+ */
+ public write(file, data) {
+ ensureFileSync(`${this.base}/${file}.json`);
+
+ writeFileSync(`${this.base}/${file}.json`, JSON.stringify(data, null, 2));
+ }
+
+ /**
+ * Update the specified data in the specified file.
+ * @param {String} file
+ * @param {Object} data
+ * @return {void}
+ */
+ public update(file, data) {
+ ensureFileSync(`${this.base}/${file}.json`);
+
+ data = Object.assign(this.read(file), data);
+
+ writeFileSync(`${this.base}/${file}.json`, JSON.stringify(data, null, 2));
+ }
+
+ /**
+     * Ensure the specified file exists, seeding it with default values if not.
+ * @param {String} file
+ * @return {void}
+ */
+ public ensure(file) {
+ if (!this.exists(file)) {
+ ensureFileSync(`${this.base}/${file}.json`);
+
+ writeFileSync(
+ `${this.base}/${file}.json`,
+ JSON.stringify(
+ {
+ lastRound: 0,
+ lastBlock: 0,
+ lastTransaction: 0,
+ },
+ null,
+ 2,
+ ),
+ );
+ }
+ }
+
+ /**
+ * Determine if the specified file exists.
+ * @param {String} file
+ * @return {Boolean}
+ */
+ public exists(file) {
+ return existsSync(`${this.base}/${file}.json`);
+ }
+
+ /**
+ * Get a value from the specified file for the specified key.
+ * @param {String} file
+ * @param {String} key
+     * @param {*} defaultValue
+     *   The value returned when the key is absent from the file.
+     * @return {*}
+ */
+ public get(file, key, defaultValue = null) {
+ return get(this.read(file), key, defaultValue);
+ }
+}
+
+export const storage = new Storage();
diff --git a/packages/core-elasticsearch/tsconfig.json b/packages/core-elasticsearch/tsconfig.json
new file mode 100644
index 0000000000..0b089c5fa8
--- /dev/null
+++ b/packages/core-elasticsearch/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist"
+ },
+ "include": ["src/**/**.ts"]
+}
diff --git a/packages/core-error-tracker-bugsnag/CHANGELOG.md b/packages/core-error-tracker-bugsnag/CHANGELOG.md
deleted file mode 100644
index 6842caf9df..0000000000
--- a/packages/core-error-tracker-bugsnag/CHANGELOG.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
-
-## Unreleased
-
-## 0.1.0 - 2018-12-03
-
-### Added
-
-- initial release
diff --git a/packages/core-error-tracker-bugsnag/LICENSE b/packages/core-error-tracker-bugsnag/LICENSE
deleted file mode 100644
index d6dd75272f..0000000000
--- a/packages/core-error-tracker-bugsnag/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Ark Ecosystem
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/core-error-tracker-bugsnag/README.md b/packages/core-error-tracker-bugsnag/README.md
index 4bac94326b..fda421356b 100644
--- a/packages/core-error-tracker-bugsnag/README.md
+++ b/packages/core-error-tracker-bugsnag/README.md
@@ -14,8 +14,8 @@ If you discover a security vulnerability within this package, please send an e-m
## Credits
-- [Brian Faust](https://github.com/faustbrian)
-- [All Contributors](../../../../contributors)
+- [Brian Faust](https://github.com/faustbrian)
+- [All Contributors](../../../../contributors)
## License
diff --git a/packages/core-error-tracker-bugsnag/lib/defaults.js b/packages/core-error-tracker-bugsnag/lib/defaults.js
deleted file mode 100644
index 712d000cc7..0000000000
--- a/packages/core-error-tracker-bugsnag/lib/defaults.js
+++ /dev/null
@@ -1,8 +0,0 @@
-module.exports = {
- apiKey: process.env.ARK_ERROR_TRACKER_BUGSNAG_API_KEY,
- configuration: {
- metaData: {
- network: process.env.ARK_NETWORK_NAME,
- },
- },
-}
diff --git a/packages/core-error-tracker-bugsnag/lib/index.js b/packages/core-error-tracker-bugsnag/lib/index.js
deleted file mode 100644
index 23c21c4035..0000000000
--- a/packages/core-error-tracker-bugsnag/lib/index.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const bugsnag = require('bugsnag')
-
-/**
- * The struct used by the plugin container.
- * @type {Object}
- */
-exports.plugin = {
- pkg: require('../package.json'),
- defaults: require('./defaults'),
- alias: 'error-tracker',
- async register(container, options) {
- bugsnag.register(options.apiKey, options.configuration)
-
- return bugsnag
- },
-}
diff --git a/packages/core-error-tracker-bugsnag/package.json b/packages/core-error-tracker-bugsnag/package.json
index da1468b57e..9e65072457 100644
--- a/packages/core-error-tracker-bugsnag/package.json
+++ b/packages/core-error-tracker-bugsnag/package.json
@@ -1,22 +1,37 @@
{
- "name": "@arkecosystem/core-error-tracker-bugsnag",
- "description": "Bugsnag error tracker integration for Ark Core.",
- "version": "0.1.0",
- "contributors": [
- "Brian Faust