Skip to content

Commit

Permalink
test: withdrawal operation test
Browse files Browse the repository at this point in the history
  • Loading branch information
AlanVerbner authored and Juan Cruz committed Nov 24, 2020
1 parent c2647ad commit 5b3583f
Show file tree
Hide file tree
Showing 10 changed files with 362 additions and 32 deletions.
8 changes: 4 additions & 4 deletions cardano-rosetta-server/test/e2e/account/account-api.test.ts
Expand Up @@ -3,9 +3,9 @@
import { FastifyInstance } from 'fastify';
import StatusCodes from 'http-status-codes';
import { Pool } from 'pg';
import { setupDatabase, setupServer } from '../utils/test-utils';
import { CARDANO } from '../../../src/server/utils/constants';
import { latestBlock } from '../fixture-data';
import { latestBlockIdentifier } from '../fixture-data';
import { setupDatabase, setupServer } from '../utils/test-utils';

const generatePayload = (
blockchain: string,
Expand Down Expand Up @@ -77,7 +77,7 @@ describe('/account/balance endpoint', () => {
expect(response.statusCode).toEqual(StatusCodes.OK);
expect(response.json()).toEqual({
balances: [{ currency: { decimals: 6, symbol: 'ADA' }, value: '21063' }],
block_identifier: latestBlock.block.block_identifier,
block_identifier: latestBlockIdentifier,
coins: [
{
coin_identifier: { identifier: 'af0dd90debb1fbaf3854b90686ba2d6f7c95416080e8cda18d9ea3cb6bb195ad:0' },
Expand Down Expand Up @@ -178,7 +178,7 @@ describe('/account/balance endpoint', () => {
});
expect(response.statusCode).toEqual(StatusCodes.OK);
expect(response.json()).toEqual({
block_identifier: latestBlock.block.block_identifier,
block_identifier: latestBlockIdentifier,
balances: [
{
value: '11509379714',
Expand Down
4 changes: 2 additions & 2 deletions cardano-rosetta-server/test/e2e/block/block-api.test.ts
Expand Up @@ -9,7 +9,7 @@ import {
block7134WithTxs,
blockWith8Txs,
GENESIS_HASH,
latestBlock
latestBlockIdentifier
} from '../fixture-data';
import { setupDatabase, setupServer } from '../utils/test-utils';

Expand Down Expand Up @@ -90,7 +90,7 @@ describe('/block endpoint', () => {
});

expect(response.statusCode).toEqual(StatusCodes.OK);
expect(response.json()).toEqual(latestBlock);
expect(response.json().block.block_identifier).toEqual(latestBlockIdentifier);
});

test('should properly return a block with transactions', async () => {
Expand Down
Expand Up @@ -2,7 +2,11 @@
import { FastifyInstance } from 'fastify';
import StatusCodes from 'http-status-codes';
import { Pool } from 'pg';
import { block23236WithTransactions, transaction987aOnGenesis } from '../fixture-data';
import {
block23236WithTransactions,
transaction987aOnGenesis,
transactionBlock4876885WithWithdrawals
} from '../fixture-data';
import { setupDatabase, setupServer } from '../utils/test-utils';

const TRANSACTION_NOT_FOUND = 'Transaction not found';
Expand Down Expand Up @@ -171,4 +175,21 @@ describe('/block/transactions endpoint', () => {
expect(response.statusCode).toEqual(StatusCodes.OK);
expect(response.json()).toEqual(transaction987aOnGenesis);
});

// Block 4876885 holds tx 8e071ca5...152a7e, whose mapped operations (including the
// reward-account withdrawals) must match the recorded fixture exactly.
test('should return transaction withdrawals', async () => {
  const withdrawalTxHash = '8e071ca57cd7bc53fc333a26df09a2ae1016458a3ed2300699e6fb7608152a7e';
  const requestPayload = {
    ...generatePayload(4876885, '8633863f0fc42a0436c2754ce70684a902e2f7b2349a080321e5c3f5e11fd184'),
    // eslint-disable-next-line camelcase
    transaction_identifier: { hash: withdrawalTxHash }
  };
  const response = await server.inject({
    method: 'post',
    url: BLOCK_TRANSACTION_ENDPOINT,
    payload: requestPayload
  });
  expect(response.statusCode).toEqual(StatusCodes.OK);
  expect(response.json()).toEqual(transactionBlock4876885WithWithdrawals);
});
});
90 changes: 90 additions & 0 deletions cardano-rosetta-server/test/e2e/block/dump_blocks.sh
@@ -0,0 +1,90 @@
#!/usr/bin/env bash

# As Cardano Rosetta is mostly some queries and data mapping it makes no sense to mock repository
# queries as, after that, it's just some data mapping and that's it. In order to test our queries
# we can populate the test db with some real mainnet data. We are already importing a mainnet snapshot
# but using a whole mainnet snapshot will be huge (+3GB) so, alternatively, we are selecting and importing
# some blocks data. It's not an ideal solution as we need to relax some constraints to do so
# but still is better than inserting data manually. This script uses a similar process as
# `pg_dump` using COPY statements
#
# This script helps dumping some information based on block ids.
#
# To run this file, grab a mainnet db-sync postgres db and execute
#
# $ bash dump_blocks.sh
#
# A resulting file like `fixture_data.sql` can then be either imported or compressed with `tar` to be used
# in our e2e tests

# Abort on the first failed command or unset variable so a partial dump is not mistaken for a complete one
set -euo pipefail

OUT_FILE='/tmp/fixture_data.sql'
DB='cexplorer'

# Block Ids. Ideally we need to export them in batches of 3 as when we skip Epoch Boundary Blocks checking 3 blocks
# before the one we are interested, so, if you are willing to fetch a block, please state B-2, B-1, B
# See: cardano-rosetta-server/src/server/db/queries/blockchain-queries.ts#findBlock
BLOCKS_TO_EXPORT="4877060, 4877061, 4877062"
echo "-- Dumping blocks with id $BLOCKS_TO_EXPORT" > "$OUT_FILE";

echo "ALTER TABLE public.block DISABLE TRIGGER ALL;" >> "$OUT_FILE";
echo 'COPY public.block (id, hash, epoch_no, slot_no, epoch_slot_no, block_no, previous_id, merkel_root, slot_leader_id, size, "time", tx_count, proto_major, proto_minor, vrf_key, op_cert) FROM stdin WITH CSV;' >> "$OUT_FILE"
psql -c "\copy (SELECT * from block WHERE id in ($BLOCKS_TO_EXPORT)) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

echo "-- Dumping transactions" >> "$OUT_FILE";
echo "ALTER TABLE public.tx DISABLE TRIGGER ALL;" >> "$OUT_FILE";
echo 'COPY public.tx (id, hash, block_id, block_index, out_sum, fee, deposit, size) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT * from tx WHERE block_id in ($BLOCKS_TO_EXPORT)) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

# Subquery reused below to scope every other table to the transactions of the exported blocks
SELECT_TX_ID="(SELECT id from tx WHERE block_id IN ($BLOCKS_TO_EXPORT))"

echo "-- Dumping transaction inputs" >> "$OUT_FILE";
echo "ALTER TABLE public.tx_in DISABLE TRIGGER ALL;" >> "$OUT_FILE";
echo 'COPY public.tx_in (id, tx_in_id, tx_out_id, tx_out_index) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT * from tx_in WHERE tx_in_id IN $SELECT_TX_ID) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

# Inputs require the source tx to be able to compute the amount
# TODO: Check if this query can be improved as it's a copy from the one we use to query the data
INPUT_TX_QUERY="
tx
JOIN tx_in
ON tx_in.tx_in_id = tx.id
JOIN tx_out as source_tx_out
ON tx_in.tx_out_id = source_tx_out.tx_id
AND tx_in.tx_out_index = source_tx_out.index
JOIN tx as source_tx
ON source_tx_out.tx_id = source_tx.id
WHERE
tx.id = ANY ($SELECT_TX_ID) AND
source_tx.id NOT IN ($SELECT_TX_ID)
"

echo "-- Dumping transaction inputs references where spent outputs were defined" >> "$OUT_FILE";
echo 'COPY public.tx (id, hash, block_id, block_index, out_sum, fee, deposit, size) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT source_tx.* FROM $INPUT_TX_QUERY) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

echo "ALTER TABLE public.tx_out DISABLE TRIGGER ALL;" >> "$OUT_FILE";

echo "-- Dumping spent outputs" >> "$OUT_FILE";
echo 'COPY public.tx_out (id, tx_id, index, address, address_raw, payment_cred, stake_address_id, value) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT source_tx_out.* FROM $INPUT_TX_QUERY) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

echo "-- Dumping transactions outputs" >> "$OUT_FILE";
echo 'COPY public.tx_out (id, tx_id, index, address, address_raw, payment_cred, stake_address_id, value) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT * from tx_out WHERE tx_id IN $SELECT_TX_ID) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

echo "-- Dumping transactions withdrawals" >> "$OUT_FILE";
echo "ALTER TABLE public.withdrawal DISABLE TRIGGER ALL;" >> "$OUT_FILE";
echo 'COPY public.withdrawal (id, addr_id, amount, tx_id) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT * from withdrawal WHERE tx_id IN $SELECT_TX_ID) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
echo "\." >> "$OUT_FILE";

echo "-- Dumping Block transaction withdrawals stake addresses" >> "$OUT_FILE";
echo "ALTER TABLE public.stake_address DISABLE TRIGGER ALL;" >> "$OUT_FILE";
echo 'COPY public.stake_address (id, hash_raw, view, registered_tx_id) FROM stdin WITH CSV;' >> "$OUT_FILE";
psql -c "\copy (SELECT * from stake_address WHERE id IN (SELECT addr_id from withdrawal WHERE tx_id IN $SELECT_TX_ID)) to STDOUT WITH CSV" "$DB" >> "$OUT_FILE";
# Fix: every COPY ... FROM stdin block must be closed with the end-of-data marker;
# the original script left this last one unterminated
echo "\." >> "$OUT_FILE";
Expand Up @@ -4,7 +4,7 @@ import StatusCodes from 'http-status-codes';
import { Pool } from 'pg';
import { FastifyInstance } from 'fastify';
import { linearFeeParameters, setupDatabase, setupServer, testInvalidNetworkParameters } from '../utils/test-utils';
import { latestBlock, SIGNED_TRANSACTION, TRANSACTION_SIZE_IN_BYTES } from '../fixture-data';
import { latestBlockSlot, SIGNED_TRANSACTION, TRANSACTION_SIZE_IN_BYTES } from '../fixture-data';

const CONSTRUCTION_METADATA_ENDPOINT = '/construction/metadata';

Expand Down Expand Up @@ -47,7 +47,7 @@ describe(CONSTRUCTION_METADATA_ENDPOINT, () => {
expect(response.statusCode).toEqual(StatusCodes.OK);
expect(response.json()).toEqual({
metadata: {
ttl: (latestBlock.block.metadata.slotNo + relativeTtl).toString()
ttl: (latestBlockSlot + relativeTtl).toString()
},
suggested_fee: [
{
Expand Down
169 changes: 149 additions & 20 deletions cardano-rosetta-server/test/e2e/fixture-data.ts
Expand Up @@ -26,26 +26,11 @@ export const block1000WithoutTxs = {
}
};

export const latestBlock = {
block: {
block_identifier: {
index: 67984,
hash: 'a2ee76dfea1dae6eda713246df08e96dabc33fc15ac25e3108bb230765c9559c'
},
parent_block_identifier: {
index: 67983,
hash: 'c4bc9aebc008d270cdd6ee0a0746ee9113ace144e8eba4e7bae7f5d0b03e20d5'
},
timestamp: 1507563311000,
transactions: [],
metadata: {
transactionsCount: 0,
createdBy: 'ByronGenesis-64c61078e9577d3b',
size: 671,
epochNo: 3,
slotNo: 68011
}
}
// Hash of the most recent block loaded into the e2e fixture database
export const latestBlockHash = '8633863f0fc42a0436c2754ce70684a902e2f7b2349a080321e5c3f5e11fd184';
// Absolute slot number of that latest fixture block
export const latestBlockSlot = 12312818;
// Rosetta block identifier for the latest fixture block; tests compare
// responses against this instead of a full block fixture
export const latestBlockIdentifier = {
  hash: latestBlockHash,
  index: 4876885
};

export const block23236WithTransactions = {
Expand Down Expand Up @@ -354,6 +339,150 @@ export const blockWith8Txs = {
]
};

// Expected /block/transaction response for tx
// 8e071ca57cd7bc53fc333a26df09a2ae1016458a3ed2300699e6fb7608152a7e (block 4876885).
// It contains one input, two reward-account withdrawals — modelled as `withdrawal`
// operations carrying a negative `metadata.withdrawalAmount` — and two outputs
// whose `related_operations` point back to operations 0-2.
export const transactionBlock4876885WithWithdrawals = {
  transaction: {
    operations: [
      // Input: spends UTxO 82d0dd...b819:0 (value expressed as a negative amount)
      {
        account: {
          address:
            'addr1qx4tp3cdytmuhjgal8gjczm5zss8kcau5lg2q2tq89cjns2aaqy0ysmvwmp8pm0njegu4dd32gwzpdz74vpnczwuqdyqgsyztm'
        },
        amount: {
          currency: {
            decimals: 6,
            symbol: 'ADA'
          },
          value: '-19600000'
        },
        coin_change: {
          coin_action: 'coin_spent',
          coin_identifier: {
            identifier: '82d0ddd8940ec05edc9dbeff3a8647fb9b2aeba85c8573e5c958c3a57b97b819:0'
          }
        },
        operation_identifier: {
          index: 0
        },
        status: 'success',
        type: 'input'
      },
      // Withdrawal from the first stake (reward) address
      {
        account: {
          address: 'stake1u8djnja6l00lmc7gjdlzmj2htq06gjlckyzh82h3wwxh46s4s70j0'
        },
        metadata: {
          withdrawalAmount: {
            currency: {
              decimals: 6,
              symbol: 'ADA'
            },
            value: '-11748906033'
          }
        },
        operation_identifier: {
          index: 1
        },
        status: 'success',
        type: 'withdrawal'
      },
      // Withdrawal from the second stake (reward) address
      {
        account: {
          address: 'stake1u86xjjg2khnwc0mc3xefwk0y5s95w0qzmffp23a3cvp8urcxwqcp0'
        },
        metadata: {
          withdrawalAmount: {
            currency: {
              decimals: 6,
              symbol: 'ADA'
            },
            value: '-2525964'
          }
        },
        operation_identifier: {
          index: 2
        },
        status: 'success',
        type: 'withdrawal'
      },
      // Output 0: change back to the sending address
      {
        account: {
          address:
            'addr1qx4tp3cdytmuhjgal8gjczm5zss8kcau5lg2q2tq89cjns2aaqy0ysmvwmp8pm0njegu4dd32gwzpdz74vpnczwuqdyqgsyztm'
        },
        amount: {
          currency: {
            decimals: 6,
            symbol: 'ADA'
          },
          value: '19400000'
        },
        coin_change: {
          coin_action: 'coin_created',
          coin_identifier: {
            identifier: '8e071ca57cd7bc53fc333a26df09a2ae1016458a3ed2300699e6fb7608152a7e:0'
          }
        },
        operation_identifier: {
          index: 3,
          network_index: 0
        },
        related_operations: [
          {
            index: 0
          },
          {
            index: 1
          },
          {
            index: 2
          }
        ],
        status: 'success',
        type: 'output'
      },
      // Output 1: carries the bulk of the withdrawn rewards
      {
        account: {
          address:
            'addr1q9l5wwxzq9mypttyqd0duqgqhzzsfpz7qhzdtsr2pt99the82jganjlfyetr45prwl7tjmnla0av2d3arfw8l5p5ay4sq6j4ev'
        },
        amount: {
          currency: {
            decimals: 6,
            symbol: 'ADA'
          },
          value: '11751431997'
        },
        coin_change: {
          coin_action: 'coin_created',
          coin_identifier: {
            identifier: '8e071ca57cd7bc53fc333a26df09a2ae1016458a3ed2300699e6fb7608152a7e:1'
          }
        },
        operation_identifier: {
          index: 4,
          network_index: 1
        },
        related_operations: [
          {
            index: 0
          },
          {
            index: 1
          },
          {
            index: 2
          }
        ],
        status: 'success',
        type: 'output'
      }
    ],
    transaction_identifier: {
      hash: '8e071ca57cd7bc53fc333a26df09a2ae1016458a3ed2300699e6fb7608152a7e'
    }
  }
};

export const GENESIS_HASH = '5f20df933584822601f9e3f8c024eb5eb252fe8cefb24d1317dc3d432e940ebb';

export const block1 = {
Expand Down
11 changes: 11 additions & 0 deletions cardano-rosetta-server/test/e2e/jest-setup/docker.ts
Expand Up @@ -45,6 +45,11 @@ export const setupPostgresContainer = async (
User: 'root'
});

await container.putArchive(path.join(__dirname, 'fixture_data.tar'), {
path: CONTAINER_TEMP_DIR,
User: 'root'
});

// Wait for the db service to be running (container started event is not enough)
await containerExec(container, [
'bash',
Expand All @@ -54,4 +59,10 @@ export const setupPostgresContainer = async (

// Execute backup restore
await containerExec(container, ['bash', '-c', `cat ${CONTAINER_TEMP_DIR}/db.bak | psql -U ${user} ${database}`]);

await containerExec(container, [
'bash',
'-c',
`cat ${CONTAINER_TEMP_DIR}/fixture_data.sql | psql -U ${user} ${database}`
]);
};

0 comments on commit 5b3583f

Please sign in to comment.