diff --git a/test/functional/bip9-softforks.py b/test/functional/bip9-softforks.py index b90b0ca62842e..f00232c9ffe03 100755 --- a/test/functional/bip9-softforks.py +++ b/test/functional/bip9-softforks.py @@ -15,6 +15,10 @@ test that enforcement has not triggered (which triggers ACTIVE) test that enforcement has triggered """ +from io import BytesIO +import shutil +import time +import itertools from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * @@ -22,9 +26,6 @@ from test_framework.blocktools import create_coinbase, create_block from test_framework.comptool import TestInstance, TestManager from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP -from io import BytesIO -import time -import itertools class BIP9SoftForksTest(ComparisonTestFramework): diff --git a/test/functional/blockchain.py b/test/functional/blockchain.py index eeef05efd22a1..a7034e6bcdbab 100755 --- a/test/functional/blockchain.py +++ b/test/functional/blockchain.py @@ -21,15 +21,13 @@ import http.client import subprocess -from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import (BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT) from test_framework.util import ( assert_equal, assert_raises, assert_raises_jsonrpc, assert_is_hex_string, assert_is_hash_string, - bitcoind_processes, - BITCOIND_PROC_WAIT_TIMEOUT, ) @@ -141,13 +139,13 @@ def _test_stopatheight(self): self.nodes[0].generate(6) assert_equal(self.nodes[0].getblockcount(), 206) self.log.debug('Node should not stop at this height') - assert_raises(subprocess.TimeoutExpired, lambda: bitcoind_processes[0].wait(timeout=3)) + assert_raises(subprocess.TimeoutExpired, lambda: self.bitcoind_processes[0].wait(timeout=3)) try: self.nodes[0].generate(1) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') - bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) assert_equal(self.nodes[0].getblockcount(), 207) diff --git a/test/functional/bumpfee.py b/test/functional/bumpfee.py index 569db7ced5b1d..9237f0924012b 100755 --- a/test/functional/bumpfee.py +++ b/test/functional/bumpfee.py @@ -42,7 +42,7 @@ def setup_network(self, split=False): # Encrypt wallet for test_locked_wallet_fails test self.nodes[1].encryptwallet(WALLET_PASSPHRASE) - bitcoind_processes[1].wait() + self.bitcoind_processes[1].wait() self.nodes[1] = self.start_node(1, self.options.tmpdir, extra_args[1]) self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT) diff --git a/test/functional/dbcrash.py b/test/functional/dbcrash.py index 4a10743f04792..6f877f8362fe0 100755 --- a/test/functional/dbcrash.py +++ b/test/functional/dbcrash.py @@ -88,7 +88,7 @@ def restart_node(self, node_index, expected_tip): # An exception here should mean the node is about to crash. # If bitcoind exits, then try again. wait_for_node_exit() # should raise an exception if bitcoind doesn't exit. - wait_for_node_exit(node_index, timeout=10) + self.wait_for_node_exit(node_index, timeout=10) self.crashed_on_restart += 1 time.sleep(1) @@ -140,7 +140,7 @@ def sync_node3blocks(self, block_hashes): if not self.submit_block_catch_error(i, block): # TODO: more carefully check that the crash is due to -dbcrashratio # (change the exit code perhaps, and check that here?) 
- wait_for_node_exit(i, timeout=30) + self.wait_for_node_exit(i, timeout=30) self.log.debug("Restarting node %d after block hash %s", i, block_hash) nodei_utxo_hash = self.restart_node(i, block_hash) assert nodei_utxo_hash is not None diff --git a/test/functional/fundrawtransaction.py b/test/functional/fundrawtransaction.py index 0a3166b89bb7a..0baab6d01cf3c 100755 --- a/test/functional/fundrawtransaction.py +++ b/test/functional/fundrawtransaction.py @@ -4,7 +4,7 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the fundrawtransaction RPC.""" -from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT from test_framework.util import * @@ -452,7 +452,7 @@ def run_test(self): self.stop_node(2) self.stop_node(3) self.nodes[1].encryptwallet("test") - bitcoind_processes[1].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[1].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) # This test is not meant to test fee estimation and we'd like diff --git a/test/functional/keypool.py b/test/functional/keypool.py index f23a427d1fff7..e8be559918d52 100755 --- a/test/functional/keypool.py +++ b/test/functional/keypool.py @@ -18,7 +18,7 @@ def run_test(self): # Encrypt wallet and wait to terminate nodes[0].encryptwallet('test') - bitcoind_processes[0].wait() + self.bitcoind_processes[0].wait() # Restart node 0 nodes[0] = self.start_node(0, self.options.tmpdir) # Keep creating keys diff --git a/test/functional/listtransactions.py b/test/functional/listtransactions.py index f69f1c5724ef3..f75a8e29cc7a0 100755 --- a/test/functional/listtransactions.py +++ b/test/functional/listtransactions.py @@ -23,7 +23,7 @@ def __init__(self): def setup_nodes(self): #This test requires mocktime - enable_mocktime() + self.enable_mocktime() self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) def run_test(self): diff --git a/test/functional/receivedby.py b/test/functional/receivedby.py index 2cad6269acb7e..19d99c9c9e975 100755 --- a/test/functional/receivedby.py +++ b/test/functional/receivedby.py @@ -31,7 +31,7 @@ def __init__(self): def setup_nodes(self): #This test requires mocktime - enable_mocktime() + self.enable_mocktime() self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) def run_test(self): diff --git a/test/functional/rpcbind_test.py b/test/functional/rpcbind_test.py index 198599010e414..951685aa7606a 100755 --- a/test/functional/rpcbind_test.py +++ b/test/functional/rpcbind_test.py @@ -37,7 +37,7 @@ def run_bind_test(self, allow_ips, connect_to, addresses, expected): base_args += ['-rpcallowip=' + x for x in allow_ips] binds = ['-rpcbind='+addr for addr in addresses] self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to) - pid = bitcoind_processes[0].pid + pid = self.bitcoind_processes[0].pid assert_equal(set(get_bind_addrs(pid)), set(expected)) self.stop_nodes() diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index ac0fbe61f89c2..8d698a73276a1 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -5,7 +5,9 @@ """Base class for RPC testing.""" from collections import deque +import errno from enum import Enum +import http.client import logging import optparse import os @@ -14,18 +16,17 @@ import sys import tempfile 
import time +import traceback +from .authproxy import JSONRPCException +from . import coverage from .util import ( - PortSeed, MAX_NODES, - bitcoind_processes, + PortSeed, + assert_equal, check_json_precision, connect_nodes_bi, - disable_mocktime, disconnect_nodes, - enable_coverage, - enable_mocktime, - get_mocktime, get_rpc_proxy, initialize_datadir, get_datadir_path, @@ -33,15 +34,9 @@ p2p_port, rpc_url, set_node_times, - _start_node, - _start_nodes, - _stop_node, - _stop_nodes, sync_blocks, sync_mempools, - wait_for_bitcoind_start, ) -from .authproxy import JSONRPCException class TestStatus(Enum): PASSED = 1 @@ -52,6 +47,8 @@ class TestStatus(Enum): TEST_EXIT_FAILED = 1 TEST_EXIT_SKIPPED = 77 +BITCOIND_PROC_WAIT_TIMEOUT = 60 + class BitcoinTestFramework(object): """Base class for a bitcoin test script. @@ -71,13 +68,15 @@ class BitcoinTestFramework(object): def __init__(self): self.num_nodes = 4 self.setup_clean_chain = False - self.nodes = None + self.nodes = [] + self.bitcoind_processes = {} + self.mocktime = 0 def add_options(self, parser): pass def setup_chain(self): - self.log.info("Initializing test directory "+self.options.tmpdir) + self.log.info("Initializing test directory " + self.options.tmpdir) if self.setup_clean_chain: self._initialize_chain_clean(self.options.tmpdir, self.num_nodes) else: @@ -97,7 +96,7 @@ def setup_nodes(self): extra_args = None if hasattr(self, "extra_args"): extra_args = self.extra_args - self.nodes = _start_nodes(self.num_nodes, self.options.tmpdir, extra_args) + self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args) def run_test(self): raise NotImplementedError @@ -111,9 +110,9 @@ def main(self): help="Leave bitcoinds and test.* datadir on exit or error") parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true", help="Don't stop bitcoinds after the test execution") - parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"), + parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"), help="Source directory containing bitcoind/bitcoin-cli (default: %default)") - parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"), + parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), help="Directory for caching pregenerated datadirs") parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs") parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO", @@ -129,12 +128,9 @@ def main(self): self.add_options(parser) (self.options, self.args) = parser.parse_args() - if self.options.coveragedir: - enable_coverage(self.options.coveragedir) - PortSeed.n = self.options.port_seed - os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH'] + os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH'] check_json_precision() @@ -188,7 +184,7 @@ def main(self): for fn in filenames: try: with open(fn, 'r') as f: - print("From" , fn, ":") + print("From", fn, ":") print("".join(deque(f, MAX_LINES_TO_PRINT))) except OSError: print("Opening file %s failed." % fn) @@ -208,16 +204,88 @@ def main(self): # Public helper methods. These can be accessed by the subclass test scripts. 
def start_node(self, i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): - return _start_node(i, dirname, extra_args, rpchost, timewait, binary, stderr) + """Start a bitcoind and return RPC connection to it""" + + datadir = os.path.join(dirname, "node" + str(i)) + if binary is None: + binary = os.getenv("BITCOIND", "bitcoind") + args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(self.mocktime), "-uacomment=testnode%d" % i] + if extra_args is not None: + args.extend(extra_args) + self.bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) + self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up") + self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i, rpchost) + self.log.debug("initialize_chain: RPC successfully started") + proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait) + + if self.options.coveragedir: + coverage.write_all_rpc_commands(self.options.coveragedir, proxy) + + return proxy def start_nodes(self, num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): - return _start_nodes(num_nodes, dirname, extra_args, rpchost, timewait, binary) + """Start multiple bitcoinds, return RPC connections to them""" + + if extra_args is None: + extra_args = [None] * num_nodes + if binary is None: + binary = [None] * num_nodes + assert_equal(len(extra_args), num_nodes) + assert_equal(len(binary), num_nodes) + rpcs = [] + try: + for i in range(num_nodes): + rpcs.append(self.start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) + except: + # If one node failed to start, stop the others + # TODO: abusing self.nodes in this way is a little hacky. 
+ # Eventually we should do a better job of tracking nodes + self.nodes.extend(rpcs) + self.stop_nodes() + self.nodes = [] + raise + return rpcs + + def stop_node(self, i): + """Stop a bitcoind test node""" - def stop_node(self, num_node): - _stop_node(self.nodes[num_node], num_node) + self.log.debug("Stopping node %d" % i) + try: + self.nodes[i].stop() + except http.client.CannotSendRequest as e: + self.log.exception("Unable to stop node") + return_code = self.bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + del self.bitcoind_processes[i] + assert_equal(return_code, 0) def stop_nodes(self): - _stop_nodes(self.nodes) + """Stop multiple bitcoind test nodes""" + + for i in range(len(self.nodes)): + self.stop_node(i) + assert not self.bitcoind_processes.values() # All connections must be gone now + + def assert_start_raises_init_error(self, i, dirname, extra_args=None, expected_msg=None): + with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr: + try: + self.start_node(i, dirname, extra_args, stderr=log_stderr) + self.stop_node(i) + except Exception as e: + assert 'bitcoind exited' in str(e) # node must have shutdown + if expected_msg is not None: + log_stderr.seek(0) + stderr = log_stderr.read().decode('utf-8') + if expected_msg not in stderr: + raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr) + else: + if expected_msg is None: + assert_msg = "bitcoind should have exited with an error" + else: + assert_msg = "bitcoind should have exited with expected error " + expected_msg + raise AssertionError(assert_msg) + + def wait_for_node_exit(self, i, timeout): + self.bitcoind_processes[i].wait(timeout) def split_network(self): """ @@ -242,6 +310,21 @@ def sync_all(self, node_groups=None): sync_blocks(group) sync_mempools(group) + def enable_mocktime(self): + """Enable mocktime for the script. + + mocktime may be needed for scripts that use the cached version of the + blockchain. If the cached version of the blockchain is used without + mocktime then the mempools will not sync due to IBD. + + For backward compatibility of the python scripts with previous + versions of the cache, this helper function sets mocktime to Jan 1, + 2014 + (201 * 10 * 60)""" + self.mocktime = 1388534400 + (201 * 10 * 60) + + def disable_mocktime(self): + self.mocktime = 0 + # Private helper methods. These should not be accessed by the subclass test scripts. 
def _start_logging(self): @@ -257,7 +340,7 @@ def _start_logging(self): ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper() ch.setLevel(ll) # Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted) - formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S') + formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S') formatter.converter = time.gmtime fh.setFormatter(formatter) ch.setFormatter(formatter) @@ -299,9 +382,9 @@ def _initialize_chain(self, test_dir, num_nodes, cachedir): args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"] if i > 0: args.append("-connect=127.0.0.1:" + str(p2p_port(0))) - bitcoind_processes[i] = subprocess.Popen(args) + self.bitcoind_processes[i] = subprocess.Popen(args) self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up") - wait_for_bitcoind_start(bitcoind_processes[i], datadir, i) + self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i) self.log.debug("initialize_chain: RPC successfully started") self.nodes = [] @@ -319,8 +402,8 @@ def _initialize_chain(self, test_dir, num_nodes, cachedir): # # blocks are created with timestamps 10 minutes apart # starting from 2010 minutes in the past - enable_mocktime() - block_time = get_mocktime() - (201 * 10 * 60) + self.enable_mocktime() + block_time = self.mocktime - (201 * 10 * 60) for i in range(2): for peer in range(4): for j in range(25): @@ -333,7 +416,7 @@ def _initialize_chain(self, test_dir, num_nodes, cachedir): # Shut them down, and clean up cache directories: self.stop_nodes() self.nodes = [] - disable_mocktime() + self.disable_mocktime() for i in range(MAX_NODES): os.remove(log_filename(cachedir, i, "debug.log")) os.remove(log_filename(cachedir, i, "db.log")) @@ -354,18 +437,37 @@ def _initialize_chain_clean(self, test_dir, num_nodes): for i in range(num_nodes): initialize_datadir(test_dir, i) -# Test framework for doing p2p comparison testing, which sets up some bitcoind -# binaries: -# 1 binary: test binary -# 2 binaries: 1 test binary, 1 ref binary -# n>2 binaries: 1 test binary, n-1 ref binaries - -class SkipTest(Exception): - """This exception is raised to skip a test""" - def __init__(self, message): - self.message = message + def _wait_for_bitcoind_start(self, process, datadir, i, rpchost=None): + """Wait for bitcoind to start. + + This means that RPC is accessible and fully initialized. + Raise an exception if bitcoind exits during initialization.""" + while True: + if process.poll() is not None: + raise Exception('bitcoind exited with status %i during initialization' % process.returncode) + try: + # Check if .cookie file to be created + rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, coveragedir=self.options.coveragedir) + rpc.getblockcount() + break # break out of loop on success + except IOError as e: + if e.errno != errno.ECONNREFUSED: # Port not yet open? + raise # unknown IO error + except JSONRPCException as e: # Initialization phase + if e.error['code'] != -28: # RPC in warmup? + raise # unknown JSON RPC exception + except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
bitcoind still starting + if "No RPC credentials" not in str(e): + raise + time.sleep(0.25) class ComparisonTestFramework(BitcoinTestFramework): + """Test framework for doing p2p comparison testing + + Sets up some bitcoind binaries: + - 1 binary: test binary + - 2 binaries: 1 test binary, 1 ref binary + - n>2 binaries: 1 test binary, n-1 ref binaries""" def __init__(self): super().__init__() @@ -387,4 +489,9 @@ def setup_network(self): self.nodes = self.start_nodes( self.num_nodes, self.options.tmpdir, extra_args, binary=[self.options.testbinary] + - [self.options.refbinary]*(self.num_nodes-1)) + [self.options.refbinary] * (self.num_nodes - 1)) + +class SkipTest(Exception): + """This exception is raised to skip a test""" + def __init__(self, message): + self.message = message diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 2a4f3104aa0d1..3c918b48fbd87 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -4,30 +4,162 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Helpful routines for regression testing.""" -import os -import sys - -from binascii import hexlify, unhexlify from base64 import b64encode +from binascii import hexlify, unhexlify from decimal import Decimal, ROUND_DOWN import json -import http.client +import logging +import os import random -import shutil -import subprocess -import tempfile -import time import re -import errno -import logging +import time from . import coverage from .authproxy import AuthServiceProxy, JSONRPCException -COVERAGE_DIR = None - logger = logging.getLogger("TestFramework.utils") +# Assert functions +################## + +def assert_fee_amount(fee, tx_size, fee_per_kB): + """Assert the fee was in range""" + target_fee = tx_size * fee_per_kB / 1000 + if fee < target_fee: + raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee))) + # allow the wallet's estimation to be at most 2 bytes off + if fee > (tx_size + 2) * fee_per_kB / 1000: + raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee))) + +def assert_equal(thing1, thing2, *args): + if thing1 != thing2 or any(thing1 != arg for arg in args): + raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args)) + +def assert_greater_than(thing1, thing2): + if thing1 <= thing2: + raise AssertionError("%s <= %s" % (str(thing1), str(thing2))) + +def assert_greater_than_or_equal(thing1, thing2): + if thing1 < thing2: + raise AssertionError("%s < %s" % (str(thing1), str(thing2))) + +def assert_raises(exc, fun, *args, **kwds): + assert_raises_message(exc, None, fun, *args, **kwds) + +def assert_raises_message(exc, message, fun, *args, **kwds): + try: + fun(*args, **kwds) + except exc as e: + if message is not None and message not in e.error['message']: + raise AssertionError("Expected substring not found:" + e.error['message']) + except Exception as e: + raise AssertionError("Unexpected exception raised: " + type(e).__name__) + else: + raise AssertionError("No exception raised") + +def assert_raises_jsonrpc(code, message, fun, *args, **kwds): + """Run an RPC and verify that a specific JSONRPC exception code and message is raised. + + Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException + and verifies that the error code and message are as expected. Throws AssertionError if + no JSONRPCException was returned or if the error code/message are not as expected. 
+ + Args: + code (int), optional: the error code returned by the RPC call (defined + in src/rpc/protocol.h). Set to None if checking the error code is not required. + message (string), optional: [a substring of] the error string returned by the + RPC call. Set to None if checking the error string is not required + fun (function): the function to call. This should be the name of an RPC. + args*: positional arguments for the function. + kwds**: named arguments for the function. + """ + try: + fun(*args, **kwds) + except JSONRPCException as e: + # JSONRPCException was thrown as expected. Check the code and message values are correct. + if (code is not None) and (code != e.error["code"]): + raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"]) + if (message is not None) and (message not in e.error['message']): + raise AssertionError("Expected substring not found:" + e.error['message']) + except Exception as e: + raise AssertionError("Unexpected exception raised: " + type(e).__name__) + else: + raise AssertionError("No exception raised") + +def assert_is_hex_string(string): + try: + int(string, 16) + except Exception as e: + raise AssertionError( + "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) + +def assert_is_hash_string(string, length=64): + if not isinstance(string, str): + raise AssertionError("Expected a string, got type %r" % type(string)) + elif length and len(string) != length: + raise AssertionError( + "String of length %d expected; got %d" % (length, len(string))) + elif not re.match('[abcdef0-9]+$', string): + raise AssertionError( + "String %r contains invalid characters for a hash." % string) + +def assert_array_result(object_array, to_match, expected, should_not_find=False): + """ + Pass in array of JSON objects, a dictionary with key/value pairs + to match against, and another dictionary with expected key/value + pairs. 
+ If the should_not_find flag is true, to_match should not be found + in object_array + """ + if should_not_find: + assert_equal(expected, {}) + num_matched = 0 + for item in object_array: + all_match = True + for key, value in to_match.items(): + if item[key] != value: + all_match = False + if not all_match: + continue + elif should_not_find: + num_matched = num_matched + 1 + for key, value in expected.items(): + if item[key] != value: + raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value))) + num_matched = num_matched + 1 + if num_matched == 0 and not should_not_find: + raise AssertionError("No objects matched %s" % (str(to_match))) + if num_matched > 0 and should_not_find: + raise AssertionError("Objects were found %s" % (str(to_match))) + +# Utility functions +################### + +def check_json_precision(): + """Make sure json library being used does not lose precision converting BTC values""" + n = Decimal("20000000.00000003") + satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8) + if satoshis != 2000000000000003: + raise RuntimeError("JSON encode/decode loses precision") + +def count_bytes(hex_string): + return len(bytearray.fromhex(hex_string)) + +def bytes_to_hex_str(byte_str): + return hexlify(byte_str).decode('ascii') + +def hex_str_to_bytes(hex_str): + return unhexlify(hex_str.encode('ascii')) + +def str_to_b64str(string): + return b64encode(string.encode('utf-8')).decode('ascii') + +def satoshi_round(amount): + return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + +# RPC/P2P connection constants and functions +############################################ + # The maximum number of nodes a single test can spawn MAX_NODES = 8 # Don't assign rpc or p2p ports lower than this @@ -35,41 +167,11 @@ # The number of ports to "reserve" for p2p and rpc, each PORT_RANGE = 5000 -BITCOIND_PROC_WAIT_TIMEOUT = 60 - - class PortSeed: # Must be initialized with a unique integer for each process n = None -#Set Mocktime default to OFF. -#MOCKTIME is only needed for scripts that use the -#cached version of the blockchain. If the cached -#version of the blockchain is used without MOCKTIME -#then the mempools will not sync due to IBD. 
-MOCKTIME = 0 - -def enable_mocktime(): - #For backwared compatibility of the python scripts - #with previous versions of the cache, set MOCKTIME - #to Jan 1, 2014 + (201 * 10 * 60) - global MOCKTIME - MOCKTIME = 1388534400 + (201 * 10 * 60) - -def disable_mocktime(): - global MOCKTIME - MOCKTIME = 0 - -def get_mocktime(): - return MOCKTIME - -def enable_coverage(dirname): - """Maintain a log of which RPC calls are made during testing.""" - global COVERAGE_DIR - COVERAGE_DIR = dirname - - -def get_rpc_proxy(url, node_number, timeout=None): +def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): """ Args: url (str): URL of the RPC server to call @@ -90,11 +192,10 @@ def get_rpc_proxy(url, node_number, timeout=None): proxy.url = url # store URL on proxy for info coverage_logfile = coverage.get_filename( - COVERAGE_DIR, node_number) if COVERAGE_DIR else None + coveragedir, node_number) if coveragedir else None return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) - def p2p_port(n): assert(n <= MAX_NODES) return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) @@ -102,94 +203,34 @@ def p2p_port(n): def rpc_port(n): return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) -def check_json_precision(): - """Make sure json library being used does not lose precision converting BTC values""" - n = Decimal("20000000.00000003") - satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) - if satoshis != 2000000000000003: - raise RuntimeError("JSON encode/decode loses precision") - -def count_bytes(hex_string): - return len(bytearray.fromhex(hex_string)) - -def bytes_to_hex_str(byte_str): - return hexlify(byte_str).decode('ascii') - -def hex_str_to_bytes(hex_str): - return unhexlify(hex_str.encode('ascii')) - -def str_to_b64str(string): - return b64encode(string.encode('utf-8')).decode('ascii') - -def sync_blocks(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same tip. - - sync_blocks needs to be called with an rpc_connections set that has least - one node already synced to the latest, stable tip, otherwise there's a - chance it might return before all nodes are stably synced. - """ - # Use getblockcount() instead of waitforblockheight() to determine the - # initial max height because the two RPCs look at different internal global - # variables (chainActive vs latestBlock) and the former gets updated - # earlier. 
- maxheight = max(x.getblockcount() for x in rpc_connections) - start_time = cur_time = time.time() - while cur_time <= start_time + timeout: - tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] - if all(t["height"] == maxheight for t in tips): - if all(t["hash"] == tips[0]["hash"] for t in tips): - return - raise AssertionError("Block sync failed, mismatched block hashes:{}".format( - "".join("\n {!r}".format(tip) for tip in tips))) - cur_time = time.time() - raise AssertionError("Block sync to height {} timed out:{}".format( - maxheight, "".join("\n {!r}".format(tip) for tip in tips))) - -def sync_chain(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same best block - """ - while timeout > 0: - best_hash = [x.getbestblockhash() for x in rpc_connections] - if best_hash == [best_hash[0]]*len(best_hash): - return - time.sleep(wait) - timeout -= wait - raise AssertionError("Chain sync failed: Best block hashes don't match") - -def sync_mempools(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same transactions in their memory - pools - """ - while timeout > 0: - pool = set(rpc_connections[0].getrawmempool()) - num_match = 1 - for i in range(1, len(rpc_connections)): - if set(rpc_connections[i].getrawmempool()) == pool: - num_match = num_match+1 - if num_match == len(rpc_connections): - return - time.sleep(wait) - timeout -= wait - raise AssertionError("Mempool sync failed") +def rpc_url(datadir, i, rpchost=None): + rpc_u, rpc_p = get_auth_cookie(datadir, i) + host = '127.0.0.1' + port = rpc_port(i) + if rpchost: + parts = rpchost.split(':') + if len(parts) == 2: + host, port = parts + else: + host = rpchost + return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) -bitcoind_processes = {} +# Node functions +################ def initialize_datadir(dirname, n): - datadir = os.path.join(dirname, "node"+str(n)) + datadir = os.path.join(dirname, "node" + str(n)) if not os.path.isdir(datadir): os.makedirs(datadir) with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: f.write("regtest=1\n") - f.write("port="+str(p2p_port(n))+"\n") - f.write("rpcport="+str(rpc_port(n))+"\n") + f.write("port=" + str(p2p_port(n)) + "\n") + f.write("rpcport=" + str(rpc_port(n)) + "\n") f.write("listenonion=0\n") return datadir def get_datadir_path(dirname, n): - return os.path.join(dirname, "node"+str(n)) + return os.path.join(dirname, "node" + str(n)) def get_auth_cookie(datadir, n): user = None @@ -198,10 +239,10 @@ def get_auth_cookie(datadir, n): with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f: for line in f: if line.startswith("rpcuser="): - assert user is None # Ensure that there is only one rpcuser line + assert user is None # Ensure that there is only one rpcuser line user = line.split("=")[1].strip("\n") if line.startswith("rpcpassword="): - assert password is None # Ensure that there is only one rpcpassword line + assert password is None # Ensure that there is only one rpcpassword line password = line.split("=")[1].strip("\n") if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f: @@ -213,128 +254,12 @@ def get_auth_cookie(datadir, n): raise ValueError("No RPC credentials") return user, password -def rpc_url(datadir, i, rpchost=None): - rpc_u, rpc_p = get_auth_cookie(datadir, i) - host = '127.0.0.1' - port = rpc_port(i) - if rpchost: - parts = rpchost.split(':') - if len(parts) == 2: - host, port = parts - 
else: - host = rpchost - return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) - -def wait_for_bitcoind_start(process, datadir, i, rpchost=None): - ''' - Wait for bitcoind to start. This means that RPC is accessible and fully initialized. - Raise an exception if bitcoind exits during initialization. - ''' - while True: - if process.poll() is not None: - raise Exception('bitcoind exited with status %i during initialization' % process.returncode) - try: - # Check if .cookie file to be created - rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i) - blocks = rpc.getblockcount() - break # break out of loop on success - except IOError as e: - if e.errno != errno.ECONNREFUSED: # Port not yet open? - raise # unknown IO error - except JSONRPCException as e: # Initialization phase - if e.error['code'] != -28: # RPC in warmup? - raise # unknown JSON RPC exception - except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting - if "No RPC credentials" not in str(e): - raise - time.sleep(0.25) - -def wait_for_node_exit(node_index, timeout): - bitcoind_processes[node_index].wait(timeout) - -def _start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): - """Start a bitcoind and return RPC connection to it - - This function should only be called from within test_framework, not by individual test scripts.""" - - datadir = os.path.join(dirname, "node"+str(i)) - if binary is None: - binary = os.getenv("BITCOIND", "bitcoind") - args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i] - if extra_args is not None: args.extend(extra_args) - bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) - logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up") - wait_for_bitcoind_start(bitcoind_processes[i], datadir, i, rpchost) - logger.debug("initialize_chain: RPC successfully started") - proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait) - - if COVERAGE_DIR: - coverage.write_all_rpc_commands(COVERAGE_DIR, proxy) - - return proxy - -def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=None): - with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr: - try: - node = _start_node(i, dirname, extra_args, stderr=log_stderr) - _stop_node(node, i) - except Exception as e: - assert 'bitcoind exited' in str(e) #node must have shutdown - if expected_msg is not None: - log_stderr.seek(0) - stderr = log_stderr.read().decode('utf-8') - if expected_msg not in stderr: - raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr) - else: - if expected_msg is None: - assert_msg = "bitcoind should have exited with an error" - else: - assert_msg = "bitcoind should have exited with expected error " + expected_msg - raise AssertionError(assert_msg) - -def _start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): - """Start multiple bitcoinds, return RPC connections to them - - This function should only be called from within test_framework, not by individual test scripts.""" - - if extra_args is None: extra_args = [ None for _ in range(num_nodes) ] - if binary is None: binary = [ None for _ in range(num_nodes) ] - assert_equal(len(extra_args), num_nodes) - assert_equal(len(binary), num_nodes) - rpcs = [] - try: - for i in 
range(num_nodes): - rpcs.append(_start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) - except: # If one node failed to start, stop the others - _stop_nodes(rpcs) - raise - return rpcs - def log_filename(dirname, n_node, logname): - return os.path.join(dirname, "node"+str(n_node), "regtest", logname) - -def _stop_node(node, i): - """Stop a bitcoind test node - - This function should only be called from within test_framework, not by individual test scripts.""" - - logger.debug("Stopping node %d" % i) - try: - node.stop() - except http.client.CannotSendRequest as e: - logger.exception("Unable to stop node") - return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) - del bitcoind_processes[i] - assert_equal(return_code, 0) - -def _stop_nodes(nodes): - """Stop multiple bitcoind test nodes - - This function should only be called from within test_framework, not by individual test scripts.""" + return os.path.join(dirname, "node" + str(n_node), "regtest", logname) - for i, node in enumerate(nodes): - _stop_node(node, i) - assert not bitcoind_processes.values() # All connections must be gone now +def get_bip9_status(node, key): + info = node.getblockchaininfo() + return info['bip9_softforks'][key] def set_node_times(nodes, t): for node in nodes: @@ -352,7 +277,7 @@ def disconnect_nodes(from_connection, node_num): raise AssertionError("timed out waiting for disconnect") def connect_nodes(from_connection, node_num): - ip_port = "127.0.0.1:"+str(p2p_port(node_num)) + ip_port = "127.0.0.1:" + str(p2p_port(node_num)) from_connection.addnode(ip_port, "onetry") # poll until version handshake complete to avoid race conditions # with transaction relaying @@ -363,6 +288,63 @@ def connect_nodes_bi(nodes, a, b): connect_nodes(nodes[a], b) connect_nodes(nodes[b], a) +def sync_blocks(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same tip. + + sync_blocks needs to be called with an rpc_connections set that has least + one node already synced to the latest, stable tip, otherwise there's a + chance it might return before all nodes are stably synced. + """ + # Use getblockcount() instead of waitforblockheight() to determine the + # initial max height because the two RPCs look at different internal global + # variables (chainActive vs latestBlock) and the former gets updated + # earlier. 
+ maxheight = max(x.getblockcount() for x in rpc_connections) + start_time = cur_time = time.time() + while cur_time <= start_time + timeout: + tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] + if all(t["height"] == maxheight for t in tips): + if all(t["hash"] == tips[0]["hash"] for t in tips): + return + raise AssertionError("Block sync failed, mismatched block hashes:{}".format( + "".join("\n {!r}".format(tip) for tip in tips))) + cur_time = time.time() + raise AssertionError("Block sync to height {} timed out:{}".format( + maxheight, "".join("\n {!r}".format(tip) for tip in tips))) + +def sync_chain(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same best block + """ + while timeout > 0: + best_hash = [x.getbestblockhash() for x in rpc_connections] + if best_hash == [best_hash[0]] * len(best_hash): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Chain sync failed: Best block hashes don't match") + +def sync_mempools(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same transactions in their memory + pools + """ + while timeout > 0: + pool = set(rpc_connections[0].getrawmempool()) + num_match = 1 + for i in range(1, len(rpc_connections)): + if set(rpc_connections[i].getrawmempool()) == pool: + num_match = num_match + 1 + if num_match == len(rpc_connections): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Mempool sync failed") + +# Transaction/Block functions +############################# + def find_output(node, txid, amount): """ Return index to output of txid with value amount @@ -372,14 +354,13 @@ def find_output(node, txid, amount): for i in range(len(txdata["vout"])): if txdata["vout"][i]["value"] == amount: return i - raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount))) - + raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount))) def gather_inputs(from_node, amount_needed, confirmations_required=1): """ Return a random set of unspent txouts that are enough to pay amount_needed """ - assert(confirmations_required >=0) + assert(confirmations_required >= 0) utxo = from_node.listunspent(confirmations_required) random.shuffle(utxo) inputs = [] @@ -387,9 +368,9 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1): while total_in < amount_needed and len(utxo) > 0: t = utxo.pop() total_in += t["amount"] - inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } ) + inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]}) if total_in < amount_needed: - raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in)) + raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in)) return (total_in, inputs) def make_change(from_node, amount_in, amount_out, fee): @@ -397,13 +378,13 @@ def make_change(from_node, amount_in, amount_out, fee): Create change output(s), return them """ outputs = {} - amount = amount_out+fee + amount = amount_out + fee change = amount_in - amount - if change > amount*2: + if change > amount * 2: # Create an extra change output to break up big inputs change_address = from_node.getnewaddress() # Split change in two, being careful of rounding: - outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) change = amount_in - amount - 
outputs[change_address] if change > 0: outputs[from_node.getnewaddress()] = change @@ -416,9 +397,9 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): """ from_node = random.choice(nodes) to_node = random.choice(nodes) - fee = min_fee + fee_increment*random.randint(0,fee_variants) + fee = min_fee + fee_increment * random.randint(0, fee_variants) - (total_in, inputs) = gather_inputs(from_node, amount+fee) + (total_in, inputs) = gather_inputs(from_node, amount + fee) outputs = make_change(from_node, total_in, amount, fee) outputs[to_node.getnewaddress()] = float(amount) @@ -428,123 +409,10 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): return (txid, signresult["hex"], fee) -def assert_fee_amount(fee, tx_size, fee_per_kB): - """Assert the fee was in range""" - target_fee = tx_size * fee_per_kB / 1000 - if fee < target_fee: - raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee))) - # allow the wallet's estimation to be at most 2 bytes off - if fee > (tx_size + 2) * fee_per_kB / 1000: - raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee))) - -def assert_equal(thing1, thing2, *args): - if thing1 != thing2 or any(thing1 != arg for arg in args): - raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args)) - -def assert_greater_than(thing1, thing2): - if thing1 <= thing2: - raise AssertionError("%s <= %s"%(str(thing1),str(thing2))) - -def assert_greater_than_or_equal(thing1, thing2): - if thing1 < thing2: - raise AssertionError("%s < %s"%(str(thing1),str(thing2))) - -def assert_raises(exc, fun, *args, **kwds): - assert_raises_message(exc, None, fun, *args, **kwds) - -def assert_raises_message(exc, message, fun, *args, **kwds): - try: - fun(*args, **kwds) - except exc as e: - if message is not None and message not in e.error['message']: - raise AssertionError("Expected substring not found:"+e.error['message']) - except Exception as e: - raise AssertionError("Unexpected exception raised: "+type(e).__name__) - else: - raise AssertionError("No exception raised") - -def assert_raises_jsonrpc(code, message, fun, *args, **kwds): - """Run an RPC and verify that a specific JSONRPC exception code and message is raised. - - Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException - and verifies that the error code and message are as expected. Throws AssertionError if - no JSONRPCException was returned or if the error code/message are not as expected. - - Args: - code (int), optional: the error code returned by the RPC call (defined - in src/rpc/protocol.h). Set to None if checking the error code is not required. - message (string), optional: [a substring of] the error string returned by the - RPC call. Set to None if checking the error string is not required - fun (function): the function to call. This should be the name of an RPC. - args*: positional arguments for the function. - kwds**: named arguments for the function. - """ - try: - fun(*args, **kwds) - except JSONRPCException as e: - # JSONRPCException was thrown as expected. Check the code and message values are correct. 
- if (code is not None) and (code != e.error["code"]): - raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"]) - if (message is not None) and (message not in e.error['message']): - raise AssertionError("Expected substring not found:"+e.error['message']) - except Exception as e: - raise AssertionError("Unexpected exception raised: "+type(e).__name__) - else: - raise AssertionError("No exception raised") - -def assert_is_hex_string(string): - try: - int(string, 16) - except Exception as e: - raise AssertionError( - "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) - -def assert_is_hash_string(string, length=64): - if not isinstance(string, str): - raise AssertionError("Expected a string, got type %r" % type(string)) - elif length and len(string) != length: - raise AssertionError( - "String of length %d expected; got %d" % (length, len(string))) - elif not re.match('[abcdef0-9]+$', string): - raise AssertionError( - "String %r contains invalid characters for a hash." % string) - -def assert_array_result(object_array, to_match, expected, should_not_find = False): - """ - Pass in array of JSON objects, a dictionary with key/value pairs - to match against, and another dictionary with expected key/value - pairs. - If the should_not_find flag is true, to_match should not be found - in object_array - """ - if should_not_find == True: - assert_equal(expected, { }) - num_matched = 0 - for item in object_array: - all_match = True - for key,value in to_match.items(): - if item[key] != value: - all_match = False - if not all_match: - continue - elif should_not_find == True: - num_matched = num_matched+1 - for key,value in expected.items(): - if item[key] != value: - raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) - num_matched = num_matched+1 - if num_matched == 0 and should_not_find != True: - raise AssertionError("No objects matched %s"%(str(to_match))) - if num_matched > 0 and should_not_find == True: - raise AssertionError("Objects were found %s"%(str(to_match))) - -def satoshi_round(amount): - return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) - # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. 
def create_confirmed_utxos(fee, node, count): - node.generate(int(0.5*count)+101) + node.generate(int(0.5 * count) + 101) utxos = node.listunspent() iterations = count - len(utxos) addr1 = node.getnewaddress() @@ -554,14 +422,14 @@ def create_confirmed_utxos(fee, node, count): for i in range(iterations): t = utxos.pop() inputs = [] - inputs.append({ "txid" : t["txid"], "vout" : t["vout"]}) + inputs.append({"txid": t["txid"], "vout": t["vout"]}) outputs = {} send_value = t['amount'] - fee - outputs[addr1] = satoshi_round(send_value/2) - outputs[addr2] = satoshi_round(send_value/2) + outputs[addr1] = satoshi_round(send_value / 2) + outputs[addr2] = satoshi_round(send_value / 2) raw_tx = node.createrawtransaction(inputs, outputs) signed_tx = node.signrawtransaction(raw_tx)["hex"] - txid = node.sendrawtransaction(signed_tx) + node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): node.generate(1) @@ -576,8 +444,8 @@ def gen_return_txouts(): # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create # So we have big transactions (and therefore can't fit very many into each block) # create one script_pubkey - script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes - for i in range (512): + script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes + for i in range(512): script_pubkey = script_pubkey + "01" # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change txouts = "81" @@ -591,8 +459,8 @@ def gen_return_txouts(): return txouts def create_tx(node, coinbase, to_address, amount): - inputs = [{ "txid" : coinbase, "vout" : 0}] - outputs = { to_address : amount } + inputs = [{"txid": coinbase, "vout": 0}] + outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransaction(rawtx) assert_equal(signresult["complete"], True) @@ -605,7 +473,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee): txids = [] for _ in range(num): t = utxos.pop() - inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}] + inputs = [{"txid": t["txid"], "vout": t["vout"]}] outputs = {} change = t['amount'] - fee outputs[addr] = satoshi_round(change) @@ -630,7 +498,3 @@ def mine_large_block(node, utxos=None): fee = 100 * node.getnetworkinfo()["relayfee"] create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) node.generate(1) - -def get_bip9_status(node, key): - info = node.getblockchaininfo() - return info['bip9_softforks'][key] diff --git a/test/functional/wallet-dump.py b/test/functional/wallet-dump.py index 9cb32d465023a..569cc46e6cf92 100755 --- a/test/functional/wallet-dump.py +++ b/test/functional/wallet-dump.py @@ -7,7 +7,7 @@ import os from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import (assert_equal, bitcoind_processes) +from test_framework.util import assert_equal def read_dump(file_name, addrs, hd_master_addr_old): @@ -95,7 +95,7 @@ def run_test (self): #encrypt wallet, restart, unlock and dump self.nodes[0].encryptwallet('test') - bitcoind_processes[0].wait() + self.bitcoind_processes[0].wait() self.nodes[0] = self.start_node(0, self.options.tmpdir, self.extra_args[0]) self.nodes[0].walletpassphrase('test', 10) # Should be a no-op: diff --git a/test/functional/wallet-encryption.py b/test/functional/wallet-encryption.py index 33872e3c94650..ba72918fe1564 100755 --- a/test/functional/wallet-encryption.py +++ b/test/functional/wallet-encryption.py @@ -6,12 +6,10 @@ import time -from 
test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT from test_framework.util import ( assert_equal, assert_raises_jsonrpc, - bitcoind_processes, - BITCOIND_PROC_WAIT_TIMEOUT, ) class WalletEncryptionTest(BitcoinTestFramework): @@ -33,7 +31,7 @@ def run_test(self): # Encrypt the wallet self.nodes[0].encryptwallet(passphrase) - bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) # Test that the wallet is encrypted diff --git a/test/functional/wallet-hd.py b/test/functional/wallet-hd.py index e7ec72a2484e5..dfd3dc83c523a 100755 --- a/test/functional/wallet-hd.py +++ b/test/functional/wallet-hd.py @@ -8,7 +8,6 @@ from test_framework.util import ( assert_equal, connect_nodes_bi, - assert_start_raises_init_error ) import os import shutil @@ -27,7 +26,7 @@ def run_test (self): # Make sure can't switch off usehd after wallet creation self.stop_node(1) - assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet') + self.assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet') self.nodes[1] = self.start_node(1, self.options.tmpdir, self.extra_args[1]) connect_nodes_bi(self.nodes, 0, 1) diff --git a/test/functional/walletbackup.py b/test/functional/walletbackup.py index a4507182a24df..ff51cba4b3f64 100755 --- a/test/functional/walletbackup.py +++ b/test/functional/walletbackup.py @@ -30,10 +30,11 @@ Shutdown again, restore using importwallet, and confirm again balances are correct. """ +from random import randint +import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * -from random import randint class WalletBackupTest(BitcoinTestFramework):
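The script below is a minimal, hypothetical example (it is not part of the patch; the name ExampleTest and its body are illustrative only). It sketches how a functional test would be written against the refactored framework, assuming the modules changed above: the node helpers that previously lived in test_framework.util (_start_node, _stop_node, the bitcoind_processes dict, enable_mocktime) are now instance methods and attributes on BitcoinTestFramework.

    #!/usr/bin/env python3
    # Hypothetical example, not part of this patch: a minimal functional test
    # written against the refactored framework.
    from test_framework.test_framework import BitcoinTestFramework
    from test_framework.util import assert_equal

    class ExampleTest(BitcoinTestFramework):
        def __init__(self):
            super().__init__()
            self.num_nodes = 1
            self.setup_clean_chain = True

        def run_test(self):
            self.nodes[0].generate(1)
            assert_equal(self.nodes[0].getblockcount(), 1)

            # Node lifecycle helpers are instance methods now, and the Popen
            # handles are tracked in self.bitcoind_processes instead of the
            # module-level util.bitcoind_processes dict.
            self.stop_node(0)
            assert_equal(len(self.bitcoind_processes), 0)
            self.nodes[0] = self.start_node(0, self.options.tmpdir)

            # Mocktime is per-instance state: enable_mocktime() sets
            # self.mocktime to the cached-chain timestamp and
            # disable_mocktime() resets it to 0.
            self.enable_mocktime()
            assert_equal(self.mocktime, 1388534400 + (201 * 10 * 60))
            self.disable_mocktime()

    if __name__ == '__main__':
        ExampleTest().main()

This is the same mechanical translation the per-test diffs above apply: bare calls such as bitcoind_processes[0].wait() and enable_mocktime() become self.bitcoind_processes[0].wait() and self.enable_mocktime().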