Merge bitcoin#15773: test: Add BitcoinTestFramework::sync_* methods
fafe5f0 test: Remove unused imports (MarcoFalke)
fa16a09 scripted-diff: use self.sync_* methods (MarcoFalke)
faf77f9 test: Pass self to test_simple_bumpfee_succeeds (MarcoFalke)
fa6dc7c test: Add BitcoinTestFramework::sync_* methods (MarcoFalke)
fafe008 test: Pass at most one node group to sync_all (MarcoFalke)
fa4680e scripted-diff: Rename sync_blocks to send_blocks to avoid name collisions and confusion (MarcoFalke)

Pull request description:

  This adds methods to the test framework that can be called simply as `self.sync_*()`.

  This avoids having to import the underlying util methods. Also, in the default case, where all nodes are to be synced, it avoids having to pass `self.nodes` explicitly. (A minimal sketch of these wrapper methods follows the changed-files summary below.)

  So the effective changes are:

  ```diff
  @@
  -from test_framework.util import sync_blocks, sync_mempools
  @@
  -        sync_blocks(self.nodes)
  +        self.sync_blocks()
  @@
  -        sync_mempools(self.nodes)
  +        self.sync_mempools()
  ```

ACKs for commit fafe5f:
  promag:
    utACK fafe5f0.
  jonatack:
    ACK bitcoin@fafe5f0, nice simplification.

Tree-SHA512: 5c81840edf9fb3c5de2d7bf95ca36a5a8d23567cd1479a0f4044547c2080e9a3c5cf375357bc8eebb5b68829be050a171ab2512cfd47b89feed51fe3bad2cd72
MarcoFalke committed Apr 11, 2019
2 parents bb68abe + fafe5f0 commit 0e9cb2d
Showing 32 changed files with 331 additions and 288 deletions.
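
For reference, here is a minimal sketch of what the new `self.sync_*` wrappers amount to (an approximation based on the description above; the actual code in `test_framework/test_framework.py` may differ in details):

```python
from test_framework import util


class BitcoinTestFramework:
    # ... the rest of the framework is omitted in this sketch ...

    def sync_blocks(self, nodes=None, **kwargs):
        # Wait until the given node group (default: all nodes of the test)
        # agrees on the best block, forwarding extra args such as wait/timeout.
        util.sync_blocks(nodes or self.nodes, **kwargs)

    def sync_mempools(self, nodes=None, **kwargs):
        # Same idea for mempool contents.
        util.sync_mempools(nodes or self.nodes, **kwargs)

    def sync_all(self, nodes=None, **kwargs):
        # Accepts at most one node group now, e.g. self.sync_all(self.nodes[0:2]),
        # instead of a list of groups as before.
        self.sync_blocks(nodes, **kwargs)
        self.sync_mempools(nodes, **kwargs)
```

Keyword arguments still pass straight through to the util helpers, so calls such as `self.sync_blocks(self.nodes[0:3], wait=.1)` in `feature_fee_estimation.py` below behave as before.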
4 changes: 2 additions & 2 deletions test/functional/example_test.py
@@ -117,7 +117,7 @@ def setup_network(self):
# sync_all() should not include node2, since we're not expecting it to
# sync.
connect_nodes(self.nodes[0], 1)
self.sync_all([self.nodes[0:2]])
self.sync_all(self.nodes[0:2])

# Use setup_nodes() to customize the node start behaviour (for example if
# you don't want to start all nodes at the start of the test).
@@ -141,7 +141,7 @@ def run_test(self):

# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
self.sync_all([self.nodes[0:2]])
self.sync_all(self.nodes[0:2])

# Notice above how we called an RPC by calling a method with the same
# name on the node object. Notice also how we used a keyword argument
10 changes: 8 additions & 2 deletions test/functional/feature_bip68_sequence.py
@@ -10,7 +10,13 @@
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex
from test_framework.script import CScript
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, get_bip9_status, satoshi_round, sync_blocks
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
get_bip9_status,
satoshi_round,
)

SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
@@ -385,7 +391,7 @@ def activateCSV(self):
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
sync_blocks(self.nodes)
self.sync_blocks()

# Use self.nodes[1] to test that version 2 transactions are standard.
def test_version2_relay(self):
198 changes: 99 additions & 99 deletions test/functional/feature_block.py

Large diffs are not rendered by default.

62 changes: 31 additions & 31 deletions test/functional/feature_csv_activation.py
@@ -168,7 +168,7 @@ def create_test_block(self, txs, version=536870912):
block.solve()
return block

def sync_blocks(self, blocks, success=True):
def send_blocks(self, blocks, success=True):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
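
The rename to `send_blocks` matters here because this test subclasses `BitcoinTestFramework`, so a method still named `sync_blocks` would shadow the framework helper added in this PR. Below is a rough, purely illustrative sketch of the behaviour the docstring describes; the real method delivers the blocks over the test's P2P connection rather than via the `submitblock` RPC used in the sketch:

```python
from test_framework.messages import ToHex
from test_framework.util import assert_equal

def send_blocks(self, blocks, success=True):
    """Illustrative sketch: submit blocks, then check whether the tip advanced."""
    for block in blocks:
        self.nodes[0].submitblock(ToHex(block))
    if success:
        # The tip is expected to advance to the last block sent.
        assert_equal(self.nodes[0].getbestblockhash(), blocks[-1].hash)
    else:
        # The last block is invalid or premature, so the tip must not move to it.
        assert self.nodes[0].getbestblockhash() != blocks[-1].hash
```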
@@ -190,7 +190,7 @@ def run_test(self):
self.log.info("Test that the csv softfork is DEFINED")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
test_blocks = self.generate_blocks(61, 4)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

self.log.info("Advance from DEFINED to STARTED, height = 143")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
@@ -202,7 +202,7 @@ def run_test(self):
test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

self.log.info("Failed to advance past STARTED, height = 287")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
@@ -214,14 +214,14 @@ def run_test(self):
test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

# Generate 140 more version 4 blocks
test_blocks = self.generate_blocks(140, 4)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

# Inputs at height = 572
#
@@ -264,7 +264,7 @@ def run_test(self):

# 2 more version 4 blocks
test_blocks = self.generate_blocks(2, 4)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
@@ -318,7 +318,7 @@ def run_test(self):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

self.log.info("Test version 2 txs")
@@ -337,12 +337,12 @@ def run_test(self):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
test_blocks = self.generate_blocks(1, 4)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

self.log.info("Post-Soft Fork Tests.")
@@ -354,74 +354,74 @@ def run_test(self):
bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
self.sync_blocks([self.create_test_block([bip113tx])], success=False)
self.send_blocks([self.create_test_block([bip113tx])], success=False)
# BIP 113 tests should now pass if the locktime is < MTP
bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
self.sync_blocks([self.create_test_block([bip113tx])])
self.send_blocks([self.create_test_block([bip113tx])])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# Next block height = 580 after 4 blocks of random version
test_blocks = self.generate_blocks(4, 1234)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

self.log.info("BIP 68 tests")
self.log.info("Test version 1 txs - all should still pass")

success_txs = []
success_txs.extend(all_rlt_txs(bip68txs_v1))
self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

self.log.info("Test version 2 txs")

# All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
self.sync_blocks([self.create_test_block(bip68success_txs)])
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
for tx in bip68timetxs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
for tx in bip68heighttxs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

# Advance one block to 581
test_blocks = self.generate_blocks(1, 1234)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

# Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
bip68success_txs.extend(bip68timetxs)
self.sync_blocks([self.create_test_block(bip68success_txs)])
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
for tx in bip68heighttxs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

# Advance one block to 582
test_blocks = self.generate_blocks(1, 1234)
self.sync_blocks(test_blocks)
self.send_blocks(test_blocks)

# All BIP 68 txs should pass
bip68success_txs.extend(bip68heighttxs)
self.sync_blocks([self.create_test_block(bip68success_txs)])
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

self.log.info("BIP 112 tests")
self.log.info("Test version 1 txs")

# -1 OP_CSV tx should fail
self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
self.send_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass

success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
@@ -430,18 +430,18 @@ def run_test(self):
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
for tx in fail_txs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

self.log.info("Test version 2 txs")

# -1 OP_CSV tx should fail
self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)
self.send_blocks([self.create_test_block([bip112tx_special_v2])], success=False)

# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]

self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
@@ -450,23 +450,23 @@ def run_test(self):
fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
for tx in fail_txs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
for tx in fail_txs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

# If sequencelock types mismatch, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
for tx in fail_txs:
self.sync_blocks([self.create_test_block([tx])], success=False)
self.send_blocks([self.create_test_block([tx])], success=False)

# Remaining txs should pass, just test masking works properly
success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
self.sync_blocks([self.create_test_block(success_txs)])
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# Additional test, of checking that comparison of two time types works properly
@@ -476,7 +476,7 @@ def run_test(self):
signtx = sign_transaction(self.nodes[0], tx)
time_txs.append(signtx)

self.sync_blocks([self.create_test_block(time_txs)])
self.send_blocks([self.create_test_block(time_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

# TODO: Test empty stack fails
10 changes: 4 additions & 6 deletions test/functional/feature_fee_estimation.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fee estimation code."""
@@ -15,8 +15,6 @@
assert_greater_than_or_equal,
connect_nodes,
satoshi_round,
sync_blocks,
sync_mempools,
)

# Construct 2 trivial P2SH's and the ScriptSigs that spend them
@@ -162,9 +160,9 @@ def transact_and_mine(self, numblocks, mining_node):
self.memutxo, Decimal("0.005"), min_fee, min_fee)
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee) / tx_kbytes)
sync_mempools(self.nodes[0:3], wait=.1)
self.sync_mempools(self.nodes[0:3], wait=.1)
mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
sync_blocks(self.nodes[0:3], wait=.1)
self.sync_blocks(self.nodes[0:3], wait=.1)
# update which txouts are confirmed
newmem = []
for utx in self.memutxo:
@@ -237,7 +235,7 @@ def run_test(self):
while len(self.nodes[1].getrawmempool()) > 0:
self.nodes[1].generate(1)

sync_blocks(self.nodes[0:3], wait=.1)
self.sync_blocks(self.nodes[0:3], wait=.1)
self.log.info("Final estimates after emptying mempools")
check_estimates(self.nodes[1], self.fees_per_kb)

21 changes: 14 additions & 7 deletions test/functional/feature_pruning.py
@@ -14,7 +14,14 @@
from test_framework.messages import CBlock, ToHex
from test_framework.script import CScript, OP_RETURN, OP_NOP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, connect_nodes, disconnect_nodes, sync_blocks, wait_until
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
connect_nodes,
disconnect_nodes,
wait_until,
)

MIN_BLOCKS_TO_KEEP = 288

@@ -100,7 +107,7 @@ def setup_network(self):
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
connect_nodes(self.nodes[0], 4)
sync_blocks(self.nodes[0:5])
self.sync_blocks(self.nodes[0:5])

def setup_nodes(self):
self.add_nodes(self.num_nodes, self.extra_args)
@@ -111,13 +118,13 @@ def setup_nodes(self):
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.nodes[1].generate(200)
sync_blocks(self.nodes[0:2])
self.sync_blocks(self.nodes[0:2])
self.nodes[0].generate(150)

# Then mine enough full blocks to create more than 550MiB of data
mine_large_blocks(self.nodes[0], 645)

sync_blocks(self.nodes[0:5])
self.sync_blocks(self.nodes[0:5])

def test_height_min(self):
assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
@@ -153,7 +160,7 @@ def create_chain_with_staleblocks(self):
# Create connections in the order so both nodes can see the reorg at the same time
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
sync_blocks(self.nodes[0:3])
self.sync_blocks(self.nodes[0:3])

self.log.info("Usage can be over target because of high stale rate: %d" % calc_usage(self.prunedir))

@@ -190,7 +197,7 @@ def reorg_test(self):
self.log.info("Reconnect nodes")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
sync_blocks(self.nodes[0:3], timeout=120)
self.sync_blocks(self.nodes[0:3], timeout=120)

self.log.info("Verify height on node 2: %d" % self.nodes[2].getblockcount())
self.log.info("Usage possibly still high because of stale blocks in block files: %d" % calc_usage(self.prunedir))
@@ -345,7 +352,7 @@ def wallet_test(self):
self.log.info("Syncing node 5 to test wallet")
connect_nodes(self.nodes[0], 5)
nds = [self.nodes[0], self.nodes[5]]
sync_blocks(nds, wait=5, timeout=300)
self.sync_blocks(nds, wait=5, timeout=300)
self.stop_node(5) # stop and start to trigger rescan
self.start_node(5, extra_args=["-prune=550"])
self.log.info("Success")
