Diffstat (limited to 'test/functional')
46 files changed, 893 insertions, 472 deletions
diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py index 596ff206f2..5cddd6527e 100755 --- a/test/functional/feature_backwards_compatibility.py +++ b/test/functional/feature_backwards_compatibility.py @@ -26,10 +26,7 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.descriptors import descsum_create from test_framework.util import ( - adjust_bitcoin_conf_for_pre_17, assert_equal, - sync_blocks, - sync_mempools, ) @@ -60,15 +57,13 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): 170100, 160300, ]) - # adapt bitcoin.conf, because older bitcoind's don't recognize config sections - adjust_bitcoin_conf_for_pre_17(self.nodes[5].bitcoinconf) self.start_nodes() def run_test(self): self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress()) - sync_blocks(self.nodes) + self.sync_blocks() # Sanity check the test framework: res = self.nodes[self.num_nodes - 1].getblockchaininfo() @@ -93,17 +88,17 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): # Create a confirmed transaction, receiving coins address = wallet.getnewaddress() self.nodes[0].sendtoaddress(address, 10) - sync_mempools(self.nodes) + self.sync_mempools() self.nodes[0].generate(1) - sync_blocks(self.nodes) + self.sync_blocks() # Create a conflicting transaction using RBF return_address = self.nodes[0].getnewaddress() tx1_id = self.nodes[1].sendtoaddress(return_address, 1) tx2_id = self.nodes[1].bumpfee(tx1_id)["txid"] # Confirm the transaction - sync_mempools(self.nodes) + self.sync_mempools() self.nodes[0].generate(1) - sync_blocks(self.nodes) + self.sync_blocks() # Create another conflicting transaction using RBF tx3_id = self.nodes[1].sendtoaddress(return_address, 1) tx4_id = self.nodes[1].bumpfee(tx3_id)["txid"] @@ -316,12 +311,19 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): self.start_node(4) # Open most recent wallet in v0.16 (no loadwallet RPC) - self.stop_node(5) - self.start_node(5, extra_args=["-wallet=w2"]) + self.restart_node(5, extra_args=["-wallet=w2"]) wallet = node_v16.get_wallet_rpc("w2") info = wallet.getwalletinfo() assert info['keypoolsize'] == 1 + # Create upgrade wallet in v0.16 + self.restart_node(-1, extra_args=["-wallet=u1_v16"]) + wallet = node_v16.get_wallet_rpc("u1_v16") + v16_addr = wallet.getnewaddress('', "bech32") + v16_info = wallet.validateaddress(v16_addr) + v16_pubkey = v16_info['pubkey'] + self.stop_node(-1) + self.log.info("Test wallet upgrade path...") # u1: regular wallet, created with v0.17 node_v17.rpc.createwallet(wallet_name="u1_v17") @@ -331,6 +333,30 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): hdkeypath = v17_info["hdkeypath"] pubkey = v17_info["pubkey"] + # Copy the 0.16 wallet to the last Bitcoin Core version and open it: + shutil.copyfile( + os.path.join(node_v16_wallets_dir, "wallets/u1_v16"), + os.path.join(node_master_wallets_dir, "u1_v16") + ) + load_res = node_master.loadwallet("u1_v16") + # Make sure this wallet opens without warnings. 
See https://github.com/bitcoin/bitcoin/pull/19054 + assert_equal(load_res['warning'], '') + wallet = node_master.get_wallet_rpc("u1_v16") + info = wallet.getaddressinfo(v16_addr) + descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v16_pubkey + ")" + assert_equal(info["desc"], descsum_create(descriptor)) + + # Now copy that same wallet back to 0.16 to make sure no automatic upgrade breaks it + os.remove(os.path.join(node_v16_wallets_dir, "wallets/u1_v16")) + shutil.copyfile( + os.path.join(node_master_wallets_dir, "u1_v16"), + os.path.join(node_v16_wallets_dir, "wallets/u1_v16") + ) + self.start_node(-1, extra_args=["-wallet=u1_v16"]) + wallet = node_v16.get_wallet_rpc("u1_v16") + info = wallet.validateaddress(v16_addr) + assert_equal(info, v16_info) + # Copy the 0.17 wallet to the last Bitcoin Core version and open it: node_v17.unloadwallet("u1_v17") shutil.copytree( diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index 82f1331685..0a457ca17f 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -71,8 +71,7 @@ class LoadblockTest(BitcoinTestFramework): check=True) self.log.info("Restart second, unsynced node with bootstrap file") - self.stop_node(1) - self.start_node(1, ["-loadblock=" + bootstrap_file]) + self.restart_node(1, extra_args=["-loadblock=" + bootstrap_file]) assert_equal(self.nodes[1].getblockcount(), 100) # start_node is blocking on all block files being imported assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100) diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py index e4bf2d849d..afcbcf099a 100755 --- a/test/functional/feature_logging.py +++ b/test/functional/feature_logging.py @@ -67,8 +67,7 @@ class LoggingTest(BitcoinTestFramework): assert not os.path.isfile(default_log_path) # just sanity check no crash here - self.stop_node(0) - self.start_node(0, ["-debuglogfile=%s" % os.devnull]) + self.restart_node(0, ["-debuglogfile=%s" % os.devnull]) if __name__ == '__main__': diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index 33a308ad1b..dd4c318cee 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -93,6 +93,7 @@ class NotificationsTest(BitcoinTestFramework): self.nodes[0].sethdseed(seed=self.nodes[1].dumpprivkey(keyhash_to_p2pkh(hex_str_to_bytes(self.nodes[1].getwalletinfo()['hdseedid'])[::-1]))) self.nodes[0].rescanblockchain() self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE) + self.sync_blocks() # Generate transaction on node 0, sync mempools, and check for # notification on node 1. 
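The hunks above, and several of the ones that follow, collapse a stop_node/start_node pair into a single restart_node call and swap the free functions sync_blocks()/sync_mempools() from test_framework.util for the framework methods self.sync_blocks()/self.sync_mempools(). As a rough sketch only (the real helper lives in the test framework and is not part of this diff), the wrapper these tests now rely on amounts to:

    # Illustrative sketch inside a BitcoinTestFramework subclass; the signature is
    # inferred from the calls in these hunks, not copied from the framework source.
    def restart_node(self, i, extra_args=None):
        """Stop test node i, then start it again, optionally with fresh extra_args."""
        self.stop_node(i)
        self.start_node(i, extra_args)

A call such as self.restart_node(5, extra_args=["-wallet=w2"]) then behaves like the two-call pattern it replaces.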
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index c9362cf5aa..e46e5aacc8 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -263,8 +263,7 @@ class PruneTest(BitcoinTestFramework): assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500) # now re-start in manual pruning mode - self.stop_node(node_number) - self.start_node(node_number, extra_args=["-prune=1"]) + self.restart_node(node_number, extra_args=["-prune=1"]) node = self.nodes[node_number] assert_equal(node.getblockcount(), 995) @@ -326,16 +325,14 @@ class PruneTest(BitcoinTestFramework): assert not has_block(3), "blk00003.dat is still there, should be pruned by now" # stop node, start back up with auto-prune at 550 MiB, make sure still runs - self.stop_node(node_number) - self.start_node(node_number, extra_args=["-prune=550"]) + self.restart_node(node_number, extra_args=["-prune=550"]) self.log.info("Success") def wallet_test(self): # check that the pruning node's wallet is still in good shape self.log.info("Stop and start pruning node to trigger wallet rescan") - self.stop_node(2) - self.start_node(2, extra_args=["-prune=550"]) + self.restart_node(2, extra_args=["-prune=550"]) self.log.info("Success") # check that wallet loads successfully when restarting a pruned node after IBD. @@ -344,8 +341,7 @@ class PruneTest(BitcoinTestFramework): connect_nodes(self.nodes[0], 5) nds = [self.nodes[0], self.nodes[5]] self.sync_blocks(nds, wait=5, timeout=300) - self.stop_node(5) # stop and start to trigger rescan - self.start_node(5, extra_args=["-prune=550"]) + self.restart_node(5, extra_args=["-prune=550"]) # restart to trigger rescan self.log.info("Success") def run_test(self): diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py index 2298485640..5195d20dcb 100755 --- a/test/functional/feature_segwit.py +++ b/test/functional/feature_segwit.py @@ -559,8 +559,7 @@ class SegWitTest(BitcoinTestFramework): assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid) # Assert it is properly saved - self.stop_node(1) - self.start_node(1) + self.restart_node(1) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid) diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py index 7530e7daf6..80003aca0d 100755 --- a/test/functional/interface_bitcoin_cli.py +++ b/test/functional/interface_bitcoin_cli.py @@ -3,9 +3,15 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoin-cli""" + from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie +from test_framework.util import ( + assert_equal, + assert_raises_process_error, + assert_raises_rpc_error, + get_auth_cookie, +) # The block reward of coinbaseoutput.nValue (50) BTC/block matures after # COINBASE_MATURITY (100) blocks. 
Therefore, after mining 101 blocks we expect @@ -13,6 +19,12 @@ from test_framework.util import assert_equal, assert_raises_process_error, get_a BLOCKS = 101 BALANCE = (BLOCKS - 100) * 50 +JSON_PARSING_ERROR = 'error: Error parsing JSON: foo' +BLOCKS_VALUE_OF_ZERO = 'error: the first argument (number of blocks to generate, default: 1) must be an integer value greater than zero' +TOO_MANY_ARGS = 'error: too many arguments (maximum 2 for nblocks and maxtries)' +WALLET_NOT_LOADED = 'Requested wallet does not exist or is not loaded' +WALLET_NOT_SPECIFIED = 'Wallet file not specified' + class TestBitcoinCli(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -75,7 +87,7 @@ class TestBitcoinCli(BitcoinTestFramework): assert_equal(cli_get_info['relayfee'], network_info['relayfee']) assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info) - # Setup to test -getinfo and -rpcwallet= with multiple wallets. + # Setup to test -getinfo, -generate, and -rpcwallet= with multiple wallets. wallets = ['', 'Encrypted', 'secret'] amounts = [BALANCE + Decimal('9.999928'), Decimal(9), Decimal(31)] self.nodes[0].createwallet(wallet_name=wallets[1]) @@ -83,6 +95,8 @@ class TestBitcoinCli(BitcoinTestFramework): w1 = self.nodes[0].get_wallet_rpc(wallets[0]) w2 = self.nodes[0].get_wallet_rpc(wallets[1]) w3 = self.nodes[0].get_wallet_rpc(wallets[2]) + rpcwallet2 = '-rpcwallet={}'.format(wallets[1]) + rpcwallet3 = '-rpcwallet={}'.format(wallets[2]) w1.walletpassphrase(password, self.rpc_timeout) w2.encryptwallet(password) w1.sendtoaddress(w2.getnewaddress(), amounts[1]) @@ -123,17 +137,93 @@ class TestBitcoinCli(BitcoinTestFramework): assert_equal(cli_get_info['balance'], amounts[1]) self.log.info("Test -getinfo with -rpcwallet=remaining-non-default-wallet returns only its balance") - cli_get_info = self.nodes[0].cli('-getinfo', '-rpcwallet={}'.format(wallets[1])).send_cli() + cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet2).send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[1]) self.log.info("Test -getinfo with -rpcwallet=unloaded wallet returns no balances") - cli_get_info = self.nodes[0].cli('-getinfo', '-rpcwallet={}'.format(wallets[2])).send_cli() + cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli() assert 'balance' not in cli_get_info_keys assert 'balances' not in cli_get_info_keys + + # Test bitcoin-cli -generate. 
+ n1 = 3 + n2 = 4 + w2.walletpassphrase(password, self.rpc_timeout) + blocks = self.nodes[0].getblockcount() + + self.log.info('Test -generate with no args') + generate = self.nodes[0].cli('-generate').send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), 1) + assert_equal(self.nodes[0].getblockcount(), blocks + 1) + + self.log.info('Test -generate with bad args') + assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli('-generate', 'foo').echo) + assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli('-generate', 0).echo) + assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli('-generate', 1, 2, 3).echo) + + self.log.info('Test -generate with nblocks') + generate = self.nodes[0].cli('-generate', n1).send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), n1) + assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1) + + self.log.info('Test -generate with nblocks and maxtries') + generate = self.nodes[0].cli('-generate', n2, 1000000).send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), n2) + assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1 + n2) + + self.log.info('Test -generate -rpcwallet in single-wallet mode') + generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), 1) + assert_equal(self.nodes[0].getblockcount(), blocks + 2 + n1 + n2) + + self.log.info('Test -generate -rpcwallet=unloaded wallet raises RPC error') + assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate').echo) + assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 'foo').echo) + assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 0).echo) + assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 1, 2, 3).echo) + + # Test bitcoin-cli -generate with -rpcwallet in multiwallet mode. 
+ self.nodes[0].loadwallet(wallets[2]) + n3 = 4 + n4 = 10 + blocks = self.nodes[0].getblockcount() + + self.log.info('Test -generate -rpcwallet with no args') + generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), 1) + assert_equal(self.nodes[0].getblockcount(), blocks + 1) + + self.log.info('Test -generate -rpcwallet with bad args') + assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli(rpcwallet2, '-generate', 'foo').echo) + assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli(rpcwallet2, '-generate', 0).echo) + assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli(rpcwallet2, '-generate', 1, 2, 3).echo) + + self.log.info('Test -generate -rpcwallet with nblocks') + generate = self.nodes[0].cli(rpcwallet2, '-generate', n3).send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), n3) + assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3) + + self.log.info('Test -generate -rpcwallet with nblocks and maxtries') + generate = self.nodes[0].cli(rpcwallet2, '-generate', n4, 1000000).send_cli() + assert_equal(set(generate.keys()), {'address', 'blocks'}) + assert_equal(len(generate["blocks"]), n4) + assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3 + n4) + + self.log.info('Test -generate without -rpcwallet in multiwallet mode raises RPC error') + assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate').echo) + assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 'foo').echo) + assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 0).echo) + assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo) else: self.log.info("*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped") - self.nodes[0].generate(1) # maintain block parity with the wallet_compiled conditional branch + self.nodes[0].generate(25) # maintain block parity with the wallet_compiled conditional branch self.log.info("Test -version with node stopped") self.stop_node(0) @@ -145,7 +235,7 @@ class TestBitcoinCli(BitcoinTestFramework): self.nodes[0].wait_for_cookie_credentials() # ensure cookie file is available to avoid race condition blocks = self.nodes[0].cli('-rpcwait').send_cli('getblockcount') self.nodes[0].wait_for_rpc_connection() - assert_equal(blocks, BLOCKS + 1) + assert_equal(blocks, BLOCKS + 25) if __name__ == '__main__': diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py new file mode 100755 index 0000000000..999399dec0 --- /dev/null +++ b/test/functional/mempool_compatibility.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2020 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test that mempool.dat is both backward and forward compatible between versions + +NOTE: The test is designed to prevent cases when compatibility is broken accidentally. +In case we need to break mempool compatibility we can continue to use the test by just bumping the version number. + +Download node binaries: +contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2 + +Only v0.15.2 is required by this test. The rest is used in other backwards compatibility tests. 
+""" + +import os + +from test_framework.test_framework import BitcoinTestFramework + + +class MempoolCompatibilityTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + self.skip_if_no_previous_releases() + + def setup_network(self): + self.add_nodes(self.num_nodes, versions=[ + 150200, # oldest version supported by the test framework + None, + ]) + self.start_nodes() + self.import_deterministic_coinbase_privkeys() + + def run_test(self): + self.log.info("Test that mempool.dat is compatible between versions") + + old_node = self.nodes[0] + new_node = self.nodes[1] + recipient = old_node.getnewaddress() + self.stop_node(1) + + self.log.info("Add a transaction to mempool on old node and shutdown") + old_tx_hash = old_node.sendtoaddress(recipient, 0.0001) + assert old_tx_hash in old_node.getrawmempool() + self.stop_node(0) + + self.log.info("Move mempool.dat from old to new node") + old_node_mempool = os.path.join(old_node.datadir, self.chain, 'mempool.dat') + new_node_mempool = os.path.join(new_node.datadir, self.chain, 'mempool.dat') + os.rename(old_node_mempool, new_node_mempool) + + self.log.info("Start new node and verify mempool contains the tx") + self.start_node(1) + assert old_tx_hash in new_node.getrawmempool() + + self.log.info("Add unbroadcasted tx to mempool on new node and shutdown") + unbroadcasted_tx_hash = new_node.sendtoaddress(recipient, 0.0001) + assert unbroadcasted_tx_hash in new_node.getrawmempool() + mempool = new_node.getrawmempool(True) + assert mempool[unbroadcasted_tx_hash]['unbroadcast'] + self.stop_node(1) + + self.log.info("Move mempool.dat from new to old node") + os.rename(new_node_mempool, old_node_mempool) + + self.log.info("Start old node again and verify mempool contains both txs") + self.start_node(0, ['-nowallet']) + assert old_tx_hash in old_node.getrawmempool() + assert unbroadcasted_tx_hash in old_node.getrawmempool() + +if __name__ == "__main__": + MempoolCompatibilityTest().main() diff --git a/test/functional/p2p_addr_relay.py b/test/functional/p2p_addr_relay.py index 6046237101..5c7e27a3a8 100755 --- a/test/functional/p2p_addr_relay.py +++ b/test/functional/p2p_addr_relay.py @@ -49,9 +49,9 @@ class AddrTest(BitcoinTestFramework): addr_source = self.nodes[0].add_p2p_connection(P2PInterface()) msg = msg_addr() - self.log.info('Send too large addr message') + self.log.info('Send too-large addr message') msg.addrs = ADDRS * 101 - with self.nodes[0].assert_debug_log(['message addr size() = 1010']): + with self.nodes[0].assert_debug_log(['addr message size = 1010']): addr_source.send_and_ping(msg) self.log.info('Check that addr message content is relayed and added to addrman') diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py index d77a744758..0b3738b572 100755 --- a/test/functional/p2p_compactblocks.py +++ b/test/functional/p2p_compactblocks.py @@ -305,10 +305,9 @@ class CompactBlocksTest(BitcoinTestFramework): self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block) # Now fetch the compact block using a normal non-announce getdata - with mininode_lock: - test_node.clear_block_announcement() - inv = CInv(MSG_CMPCT_BLOCK, block_hash) - test_node.send_message(msg_getdata([inv])) + test_node.clear_block_announcement() + inv = CInv(MSG_CMPCT_BLOCK, block_hash) + test_node.send_message(msg_getdata([inv])) wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock) 
diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py index 9047fc6828..09b9ebeb2d 100755 --- a/test/functional/p2p_disconnect_ban.py +++ b/test/functional/p2p_disconnect_ban.py @@ -69,8 +69,7 @@ class DisconnectBanTest(BitcoinTestFramework): self.nodes[1].setmocktime(old_time + 3) assert_equal(len(self.nodes[1].listbanned()), 3) - self.stop_node(1) - self.start_node(1) + self.restart_node(1) listAfterShutdown = self.nodes[1].listbanned() assert_equal("127.0.0.0/24", listAfterShutdown[0]['address']) diff --git a/test/functional/p2p_eviction.py b/test/functional/p2p_eviction.py new file mode 100755 index 0000000000..b2b3a89aab --- /dev/null +++ b/test/functional/p2p_eviction.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python3 +# Copyright (c) 2019 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +""" Test node eviction logic + +When the number of peers has reached the limit of maximum connections, +the next connecting inbound peer will trigger the eviction mechanism. +We cannot currently test the parts of the eviction logic that are based on +address/netgroup since in the current framework, all peers are connecting from +the same local address. See Issue #14210 for more info. +Therefore, this test is limited to the remaining protection criteria. +""" + +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.mininode import P2PInterface, P2PDataStore +from test_framework.util import assert_equal, wait_until +from test_framework.blocktools import create_block, create_coinbase +from test_framework.messages import CTransaction, FromHex, msg_pong, msg_tx + + +class SlowP2PDataStore(P2PDataStore): + def on_ping(self, message): + time.sleep(0.1) + self.send_message(msg_pong(message.nonce)) + +class SlowP2PInterface(P2PInterface): + def on_ping(self, message): + time.sleep(0.1) + self.send_message(msg_pong(message.nonce)) + +class P2PEvict(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + # The choice of maxconnections=32 results in a maximum of 21 inbound connections + # (32 - 10 outbound - 1 feeler). 
20 inbound peers are protected from eviction: + # 4 by netgroup, 4 that sent us blocks, 4 that sent us transactions and 8 via lowest ping time + self.extra_args = [['-maxconnections=32']] + + def run_test(self): + protected_peers = set() # peers that we expect to be protected from eviction + current_peer = -1 + node = self.nodes[0] + node.generatetoaddress(101, node.get_deterministic_priv_key().address) + + self.log.info("Create 4 peers and protect them from eviction by sending us a block") + for _ in range(4): + block_peer = node.add_p2p_connection(SlowP2PDataStore()) + current_peer += 1 + block_peer.sync_with_ping() + best_block = node.getbestblockhash() + tip = int(best_block, 16) + best_block_time = node.getblock(best_block)['time'] + block = create_block(tip, create_coinbase(node.getblockcount() + 1), best_block_time + 1) + block.solve() + block_peer.send_blocks_and_test([block], node, success=True) + protected_peers.add(current_peer) + + self.log.info("Create 5 slow-pinging peers, making them eviction candidates") + for _ in range(5): + node.add_p2p_connection(SlowP2PInterface()) + current_peer += 1 + + self.log.info("Create 4 peers and protect them from eviction by sending us a tx") + for i in range(4): + txpeer = node.add_p2p_connection(SlowP2PInterface()) + current_peer += 1 + txpeer.sync_with_ping() + + prevtx = node.getblock(node.getblockhash(i + 1), 2)['tx'][0] + rawtx = node.createrawtransaction( + inputs=[{'txid': prevtx['txid'], 'vout': 0}], + outputs=[{node.get_deterministic_priv_key().address: 50 - 0.00125}], + ) + sigtx = node.signrawtransactionwithkey( + hexstring=rawtx, + privkeys=[node.get_deterministic_priv_key().key], + prevtxs=[{ + 'txid': prevtx['txid'], + 'vout': 0, + 'scriptPubKey': prevtx['vout'][0]['scriptPubKey']['hex'], + }], + )['hex'] + txpeer.send_message(msg_tx(FromHex(CTransaction(), sigtx))) + protected_peers.add(current_peer) + + self.log.info("Create 8 peers and protect them from eviction by having faster pings") + for _ in range(8): + fastpeer = node.add_p2p_connection(P2PInterface()) + current_peer += 1 + wait_until(lambda: "ping" in fastpeer.last_message, timeout=10) + + # Make sure by asking the node what the actual min pings are + peerinfo = node.getpeerinfo() + pings = {} + for i in range(len(peerinfo)): + pings[i] = peerinfo[i]['minping'] if 'minping' in peerinfo[i] else 1000000 + sorted_pings = sorted(pings.items(), key=lambda x: x[1]) + + # Usually the 8 fast peers are protected. In rare case of unreliable pings, + # one of the slower peers might have a faster min ping though. + for i in range(8): + protected_peers.add(sorted_pings[i][0]) + + self.log.info("Create peer that triggers the eviction mechanism") + node.add_p2p_connection(SlowP2PInterface()) + + # One of the non-protected peers must be evicted. We can't be sure which one because + # 4 peers are protected via netgroup, which is identical for all peers, + # and the eviction mechanism doesn't preserve the order of identical elements. 
+ evicted_peers = [] + for i in range(len(node.p2ps)): + if not node.p2ps[i].is_connected: + evicted_peers.append(i) + + self.log.info("Test that one peer was evicted") + self.log.debug("{} evicted peer: {}".format(len(evicted_peers), set(evicted_peers))) + assert_equal(len(evicted_peers), 1) + + self.log.info("Test that no peer expected to be protected was evicted") + self.log.debug("{} protected peers: {}".format(len(protected_peers), protected_peers)) + assert evicted_peers[0] not in protected_peers + +if __name__ == '__main__': + P2PEvict().main() diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py index 805cb1e84f..f939ea965c 100755 --- a/test/functional/p2p_feefilter.py +++ b/test/functional/p2p_feefilter.py @@ -10,11 +10,13 @@ import time from test_framework.messages import MSG_TX, msg_feefilter from test_framework.mininode import mininode_lock, P2PInterface from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal def hashToHex(hash): return format(hash, '064x') + # Wait up to 60 secs to see if the testnode has received all the expected invs def allInvsMatch(invsExpected, testnode): for x in range(60): @@ -24,6 +26,18 @@ def allInvsMatch(invsExpected, testnode): time.sleep(1) return False + +class FeefilterConn(P2PInterface): + feefilter_received = False + + def on_feefilter(self, message): + self.feefilter_received = True + + def assert_feefilter_received(self, recv: bool): + with mininode_lock: + assert_equal(self.feefilter_received, recv) + + class TestP2PConn(P2PInterface): def __init__(self): super().__init__() @@ -38,6 +52,7 @@ class TestP2PConn(P2PInterface): with mininode_lock: self.txinvs = [] + class FeeFilterTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 @@ -46,41 +61,54 @@ class FeeFilterTest(BitcoinTestFramework): # mempool and wallet feerate calculation based on GetFee # rounding down 3 places, leading to stranded transactions. 
# See issue #16499 - self.extra_args = [["-minrelaytxfee=0.00000100", "-mintxfee=0.00000100"]]*self.num_nodes + self.extra_args = [["-minrelaytxfee=0.00000100", "-mintxfee=0.00000100"]] * self.num_nodes def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): + self.test_feefilter_forcerelay() + self.test_feefilter() + + def test_feefilter_forcerelay(self): + self.log.info('Check that peers without forcerelay permission (default) get a feefilter message') + self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(True) + + self.log.info('Check that peers with forcerelay permission do not get a feefilter message') + self.restart_node(0, extra_args=['-whitelist=forcerelay@127.0.0.1']) + self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(False) + + # Restart to disconnect peers and load default extra_args + self.restart_node(0) + self.connect_nodes(1, 0) + + def test_feefilter(self): node1 = self.nodes[1] node0 = self.nodes[0] - # Get out of IBD - node1.generate(1) - self.sync_blocks() - self.nodes[0].add_p2p_connection(TestP2PConn()) + conn = self.nodes[0].add_p2p_connection(TestP2PConn()) # Test that invs are received by test connection for all txs at # feerate of .2 sat/byte node1.settxfee(Decimal("0.00000200")) txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] - assert allInvsMatch(txids, self.nodes[0].p2p) - self.nodes[0].p2p.clear_invs() + assert allInvsMatch(txids, conn) + conn.clear_invs() # Set a filter of .15 sat/byte on test connection - self.nodes[0].p2p.send_and_ping(msg_feefilter(150)) + conn.send_and_ping(msg_feefilter(150)) # Test that txs are still being received by test connection (paying .15 sat/byte) node1.settxfee(Decimal("0.00000150")) txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] - assert allInvsMatch(txids, self.nodes[0].p2p) - self.nodes[0].p2p.clear_invs() + assert allInvsMatch(txids, conn) + conn.clear_invs() # Change tx fee rate to .1 sat/byte and test they are no longer received # by the test connection node1.settxfee(Decimal("0.00000100")) [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] - self.sync_mempools() # must be sure node 0 has received all txs + self.sync_mempools() # must be sure node 0 has received all txs # Send one transaction from node0 that should be received, so that we # we can sync the test on receipt (if node1's txs were relayed, they'd @@ -91,14 +119,15 @@ class FeeFilterTest(BitcoinTestFramework): # as well. 
node0.settxfee(Decimal("0.00020000")) txids = [node0.sendtoaddress(node0.getnewaddress(), 1)] - assert allInvsMatch(txids, self.nodes[0].p2p) - self.nodes[0].p2p.clear_invs() + assert allInvsMatch(txids, conn) + conn.clear_invs() # Remove fee filter and check that txs are received again - self.nodes[0].p2p.send_and_ping(msg_feefilter(0)) + conn.send_and_ping(msg_feefilter(0)) txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] - assert allInvsMatch(txids, self.nodes[0].p2p) - self.nodes[0].p2p.clear_invs() + assert allInvsMatch(txids, conn) + conn.clear_invs() + if __name__ == '__main__': FeeFilterTest().main() diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py index 5726a73e40..741da3be31 100755 --- a/test/functional/p2p_filter.py +++ b/test/functional/p2p_filter.py @@ -124,11 +124,11 @@ class FilterTest(BitcoinTestFramework): self.log.info("Check that a node with bloom filters enabled services p2p mempool messages") filter_peer = P2PBloomFilter() - self.log.info("Create a tx relevant to the peer before connecting") + self.log.debug("Create a tx relevant to the peer before connecting") filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0] txid = self.nodes[0].sendtoaddress(filter_address, 90) - self.log.info("Send a mempool msg after connecting and check that the tx is received") + self.log.debug("Send a mempool msg after connecting and check that the tx is received") self.nodes[0].add_p2p_connection(filter_peer) filter_peer.send_and_ping(filter_peer.watch_filter_init) self.nodes[0].p2p.send_message(msg_mempool()) @@ -227,8 +227,8 @@ class FilterTest(BitcoinTestFramework): self.test_frelay_false(filter_peer_without_nrelay) self.test_filter(filter_peer_without_nrelay) - self.log.info('Test msg_mempool') self.test_msg_mempool() + if __name__ == '__main__': FilterTest().main() diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py index 81302374c9..d9a9ae5188 100755 --- a/test/functional/p2p_invalid_messages.py +++ b/test/functional/p2p_invalid_messages.py @@ -3,13 +3,13 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test node responses to invalid network messages.""" -import asyncio -import struct -import sys from test_framework.messages import ( CBlockHeader, CInv, + MAX_HEADERS_RESULTS, + MAX_INV_SIZE, + MAX_PROTOCOL_MESSAGE_LENGTH, msg_getdata, msg_headers, msg_inv, @@ -18,12 +18,16 @@ from test_framework.messages import ( ser_string, ) from test_framework.mininode import ( - NetworkThread, P2PDataStore, P2PInterface, ) from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + wait_until, +) +VALID_DATA_LIMIT = MAX_PROTOCOL_MESSAGE_LENGTH - 5 # Account for the 5-byte length prefix class msg_unrecognized: """Nonsensical message. Modeled after similar types in test_framework.messages.""" @@ -46,161 +50,71 @@ class InvalidMessagesTest(BitcoinTestFramework): self.setup_clean_chain = True def run_test(self): - """ - . Test msg header - 0. Send a bunch of large (4MB) messages of an unrecognized type. Check to see - that it isn't an effective DoS against the node. - - 1. Send an oversized (4MB+) message and check that we're disconnected. - - 2. Send a few messages with an incorrect data size in the header, ensure the - messages are ignored. 
- """ + self.test_buffer() self.test_magic_bytes() self.test_checksum() self.test_size() self.test_msgtype() - self.test_large_inv() - - node = self.nodes[0] - self.node = node - node.add_p2p_connection(P2PDataStore()) - conn2 = node.add_p2p_connection(P2PDataStore()) - - msg_limit = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH - valid_data_limit = msg_limit - 5 # Account for the 4-byte length prefix - - # - # 0. - # - # Send as large a message as is valid, ensure we aren't disconnected but - # also can't exhaust resources. - # - msg_at_size = msg_unrecognized(str_data="b" * valid_data_limit) - assert len(msg_at_size.serialize()) == msg_limit + self.test_oversized_inv_msg() + self.test_oversized_getdata_msg() + self.test_oversized_headers_msg() + self.test_resource_exhaustion() - self.log.info("Sending a bunch of large, junk messages to test memory exhaustion. May take a bit...") - - # Run a bunch of times to test for memory exhaustion. - for _ in range(80): - node.p2p.send_message(msg_at_size) - - # Check that, even though the node is being hammered by nonsense from one - # connection, it can still service other peers in a timely way. - for _ in range(20): - conn2.sync_with_ping(timeout=2) - - # Peer 1, despite serving up a bunch of nonsense, should still be connected. - self.log.info("Waiting for node to drop junk messages.") - node.p2p.sync_with_ping(timeout=400) - assert node.p2p.is_connected - - # - # 1. - # - # Send an oversized message, ensure we're disconnected. - # - # Under macOS this test is skipped due to an unexpected error code - # returned from the closing socket which python/asyncio does not - # yet know how to handle. - # - if sys.platform != 'darwin': - msg_over_size = msg_unrecognized(str_data="b" * (valid_data_limit + 1)) - assert len(msg_over_size.serialize()) == (msg_limit + 1) - - # An unknown message type (or *any* message type) over - # MAX_PROTOCOL_MESSAGE_LENGTH should result in a disconnect. - node.p2p.send_message(msg_over_size) - node.p2p.wait_for_disconnect(timeout=4) - - node.disconnect_p2ps() - conn = node.add_p2p_connection(P2PDataStore()) - conn.wait_for_verack() - else: - self.log.info("Skipping test p2p_invalid_messages/1 (oversized message) under macOS") - - # - # 2. - # - # Send messages with an incorrect data size in the header. - # - actual_size = 100 - msg = msg_unrecognized(str_data="b" * actual_size) - - # TODO: handle larger-than cases. I haven't been able to pin down what behavior to expect. - for wrong_size in (2, 77, 78, 79): - self.log.info("Sending a message with incorrect size of {}".format(wrong_size)) - - # Unmodified message should submit okay. - node.p2p.send_and_ping(msg) - - # A message lying about its data size results in a disconnect when the incorrect - # data size is less than the actual size. - # - # TODO: why does behavior change at 78 bytes? - # - node.p2p.send_raw_message(self._tweak_msg_data_size(msg, wrong_size)) - - # For some reason unknown to me, we sometimes have to push additional data to the - # peer in order for it to realize a disconnect. - try: - node.p2p.send_message(msg_ping(nonce=123123)) - except IOError: - pass - - node.p2p.wait_for_disconnect(timeout=10) - node.disconnect_p2ps() - node.add_p2p_connection(P2PDataStore()) - - # Node is still up. 
- conn = node.add_p2p_connection(P2PDataStore()) + def test_buffer(self): + self.log.info("Test message with header split across two buffers is received") + conn = self.nodes[0].add_p2p_connection(P2PDataStore()) + # Create valid message + msg = conn.build_message(msg_ping(nonce=12345)) + cut_pos = 12 # Chosen at an arbitrary position within the header + # Send message in two pieces + before = int(self.nodes[0].getnettotals()['totalbytesrecv']) + conn.send_raw_message(msg[:cut_pos]) + # Wait until node has processed the first half of the message + wait_until(lambda: int(self.nodes[0].getnettotals()['totalbytesrecv']) != before) + middle = int(self.nodes[0].getnettotals()['totalbytesrecv']) + # If this assert fails, we've hit an unlikely race + # where the test framework sent a message in between the two halves + assert_equal(middle, before + cut_pos) + conn.send_raw_message(msg[cut_pos:]) + conn.sync_with_ping(timeout=1) + self.nodes[0].disconnect_p2ps() def test_magic_bytes(self): + self.log.info("Test message with invalid magic bytes disconnects peer") conn = self.nodes[0].add_p2p_connection(P2PDataStore()) - - async def swap_magic_bytes(): - conn._on_data = lambda: None # Need to ignore all incoming messages from now, since they come with "invalid" magic bytes - conn.magic_bytes = b'\x00\x11\x22\x32' - - # Call .result() to block until the atomic swap is complete, otherwise - # we might run into races later on - asyncio.run_coroutine_threadsafe(swap_magic_bytes(), NetworkThread.network_event_loop).result() - - with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART ping']): - conn.send_message(msg_ping(nonce=0xff)) + with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART badmsg']): + msg = conn.build_message(msg_unrecognized(str_data="d")) + # modify magic bytes + msg = b'\xff' * 4 + msg[4:] + conn.send_raw_message(msg) conn.wait_for_disconnect(timeout=1) - self.nodes[0].disconnect_p2ps() + self.nodes[0].disconnect_p2ps() def test_checksum(self): + self.log.info("Test message with invalid checksum logs an error") conn = self.nodes[0].add_p2p_connection(P2PDataStore()) with self.nodes[0].assert_debug_log(['CHECKSUM ERROR (badmsg, 2 bytes), expected 78df0a04 was ffffffff']): msg = conn.build_message(msg_unrecognized(str_data="d")) - cut_len = ( - 4 + # magic - 12 + # msgtype - 4 #len - ) + # Checksum is after start bytes (4B), message type (12B), len (4B) + cut_len = 4 + 12 + 4 # modify checksum msg = msg[:cut_len] + b'\xff' * 4 + msg[cut_len + 4:] - self.nodes[0].p2p.send_raw_message(msg) + conn.send_raw_message(msg) conn.sync_with_ping(timeout=1) - self.nodes[0].disconnect_p2ps() + self.nodes[0].disconnect_p2ps() def test_size(self): + self.log.info("Test message with oversized payload disconnects peer") conn = self.nodes[0].add_p2p_connection(P2PDataStore()) with self.nodes[0].assert_debug_log(['']): - msg = conn.build_message(msg_unrecognized(str_data="d")) - cut_len = ( - 4 + # magic - 12 # msgtype - ) - # modify len to MAX_SIZE + 1 - msg = msg[:cut_len] + struct.pack("<I", 0x02000000 + 1) + msg[cut_len + 4:] - self.nodes[0].p2p.send_raw_message(msg) + msg = msg_unrecognized(str_data="d" * (VALID_DATA_LIMIT + 1)) + msg = conn.build_message(msg) + conn.send_raw_message(msg) conn.wait_for_disconnect(timeout=1) - self.nodes[0].disconnect_p2ps() + self.nodes[0].disconnect_p2ps() def test_msgtype(self): + self.log.info("Test message with invalid message type logs an error") conn = self.nodes[0].add_p2p_connection(P2PDataStore()) with 
self.nodes[0].assert_debug_log(['PROCESSMESSAGE: ERRORS IN HEADER']): msg = msg_unrecognized(str_data="d") @@ -208,42 +122,53 @@ class InvalidMessagesTest(BitcoinTestFramework): msg = conn.build_message(msg) # Modify msgtype msg = msg[:7] + b'\x00' + msg[7 + 1:] - self.nodes[0].p2p.send_raw_message(msg) + conn.send_raw_message(msg) conn.sync_with_ping(timeout=1) - self.nodes[0].disconnect_p2ps() - - def test_large_inv(self): - conn = self.nodes[0].add_p2p_connection(P2PInterface()) - with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (0 -> 20): message inv size() = 50001']): - msg = msg_inv([CInv(MSG_TX, 1)] * 50001) - conn.send_and_ping(msg) - with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (20 -> 40): message getdata size() = 50001']): - msg = msg_getdata([CInv(MSG_TX, 1)] * 50001) - conn.send_and_ping(msg) - with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (40 -> 60): headers message size = 2001']): - msg = msg_headers([CBlockHeader()] * 2001) - conn.send_and_ping(msg) self.nodes[0].disconnect_p2ps() - def _tweak_msg_data_size(self, message, wrong_size): - """ - Return a raw message based on another message but with an incorrect data size in - the message header. - """ - raw_msg = self.node.p2p.build_message(message) - - bad_size_bytes = struct.pack("<I", wrong_size) - num_header_bytes_before_size = 4 + 12 - - # Replace the correct data size in the message with an incorrect one. - raw_msg_with_wrong_size = ( - raw_msg[:num_header_bytes_before_size] + - bad_size_bytes + - raw_msg[(num_header_bytes_before_size + len(bad_size_bytes)):] - ) - assert len(raw_msg) == len(raw_msg_with_wrong_size) - - return raw_msg_with_wrong_size + def test_oversized_msg(self, msg, size): + msg_type = msg.msgtype.decode('ascii') + self.log.info("Test {} message of size {} is logged as misbehaving".format(msg_type, size)) + with self.nodes[0].assert_debug_log(['Misbehaving', '{} message size = {}'.format(msg_type, size)]): + self.nodes[0].add_p2p_connection(P2PInterface()).send_and_ping(msg) + self.nodes[0].disconnect_p2ps() + + def test_oversized_inv_msg(self): + size = MAX_INV_SIZE + 1 + self.test_oversized_msg(msg_inv([CInv(MSG_TX, 1)] * size), size) + + def test_oversized_getdata_msg(self): + size = MAX_INV_SIZE + 1 + self.test_oversized_msg(msg_getdata([CInv(MSG_TX, 1)] * size), size) + + def test_oversized_headers_msg(self): + size = MAX_HEADERS_RESULTS + 1 + self.test_oversized_msg(msg_headers([CBlockHeader()] * size), size) + + def test_resource_exhaustion(self): + self.log.info("Test node stays up despite many large junk messages") + conn = self.nodes[0].add_p2p_connection(P2PDataStore()) + conn2 = self.nodes[0].add_p2p_connection(P2PDataStore()) + msg_at_size = msg_unrecognized(str_data="b" * VALID_DATA_LIMIT) + assert len(msg_at_size.serialize()) == MAX_PROTOCOL_MESSAGE_LENGTH + + self.log.info("(a) Send 80 messages, each of maximum valid data size (4MB)") + for _ in range(80): + conn.send_message(msg_at_size) + + # Check that, even though the node is being hammered by nonsense from one + # connection, it can still service other peers in a timely way. + self.log.info("(b) Check node still services peers in a timely way") + for _ in range(20): + conn2.sync_with_ping(timeout=2) + + self.log.info("(c) Wait for node to drop junk messages, while remaining connected") + conn.sync_with_ping(timeout=400) + + # Despite being served up a bunch of nonsense, the peers should still be connected. 
+ assert conn.is_connected + assert conn2.is_connected + self.nodes[0].disconnect_p2ps() if __name__ == '__main__': diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py index 157af68203..3b3dbd08f2 100755 --- a/test/functional/p2p_leak.py +++ b/test/functional/p2p_leak.py @@ -132,9 +132,6 @@ class P2PLeakTest(BitcoinTestFramework): self.nodes[0].disconnect_p2ps() - # Wait until all connections are closed - wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0) - # Make sure no unexpected messages came in assert no_version_bannode.unexpected_msg == False assert no_version_idlenode.unexpected_msg == False diff --git a/test/functional/p2p_nobloomfilter_messages.py b/test/functional/p2p_nobloomfilter_messages.py index 41af74ebb8..accc5dc23c 100755 --- a/test/functional/p2p_nobloomfilter_messages.py +++ b/test/functional/p2p_nobloomfilter_messages.py @@ -4,40 +4,45 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test invalid p2p messages for nodes with bloom filters disabled. -Test that, when bloom filters are not enabled, nodes are disconnected if: +Test that, when bloom filters are not enabled, peers are disconnected if: 1. They send a p2p mempool message 2. They send a p2p filterload message 3. They send a p2p filteradd message +4. They send a p2p filterclear message """ -from test_framework.messages import msg_mempool, msg_filteradd, msg_filterload +from test_framework.messages import msg_mempool, msg_filteradd, msg_filterload, msg_filterclear from test_framework.mininode import P2PInterface from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal -class P2PNobloomfilterMessages(BitcoinTestFramework): +class P2PNoBloomFilterMessages(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.extra_args = [["-peerbloomfilters=0"]] def test_message_causes_disconnect(self, message): - # Add a p2p connection that sends a message and check that it disconnects + """Add a p2p connection that sends a message and check that it disconnects.""" peer = self.nodes[0].add_p2p_connection(P2PInterface()) peer.send_message(message) peer.wait_for_disconnect() - assert_equal(len(self.nodes[0].getpeerinfo()), 0) + assert_equal(self.nodes[0].getconnectioncount(), 0) def run_test(self): - self.log.info("Test that node is disconnected if it sends mempool message") + self.log.info("Test that peer is disconnected if it sends mempool message") self.test_message_causes_disconnect(msg_mempool()) - self.log.info("Test that node is disconnected if it sends filterload message") + self.log.info("Test that peer is disconnected if it sends filterload message") self.test_message_causes_disconnect(msg_filterload()) - self.log.info("Test that node is disconnected if it sends filteradd message") + self.log.info("Test that peer is disconnected if it sends filteradd message") self.test_message_causes_disconnect(msg_filteradd(data=b'\xcc')) + self.log.info("Test that peer is disconnected if it sends a filterclear message") + self.test_message_causes_disconnect(msg_filterclear()) + + if __name__ == '__main__': - P2PNobloomfilterMessages().main() + P2PNoBloomFilterMessages().main() diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py index ed3429a037..a2f6ea538c 100755 --- a/test/functional/p2p_node_network_limited.py +++ b/test/functional/p2p_node_network_limited.py @@ -83,7 +83,6 @@ class NodeNetworkLimitedTest(BitcoinTestFramework): 
assert_equal(node1.firstAddrnServices, expected_services) self.nodes[0].disconnect_p2ps() - node1.wait_for_disconnect() # connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer # because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py index 2c200fccad..bea202855d 100755 --- a/test/functional/p2p_permissions.py +++ b/test/functional/p2p_permissions.py @@ -43,6 +43,12 @@ class P2PPermissionsTests(BitcoinTestFramework): True) self.checkpermission( + # no permission (even with forcerelay) + ["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"], + [], + False) + + self.checkpermission( # relay permission removed (no specific permissions) ["-whitelist=127.0.0.1", "-whitelistrelay=0"], ["noban", "mempool"], diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py index 8803086213..25dd765442 100755 --- a/test/functional/p2p_segwit.py +++ b/test/functional/p2p_segwit.py @@ -1898,8 +1898,7 @@ class SegWitTest(BitcoinTestFramework): def test_upgrade_after_activation(self): """Test the behavior of starting up a segwit-aware node after the softfork has activated.""" - self.stop_node(2) - self.start_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)]) + self.restart_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)]) connect_nodes(self.nodes[0], 2) # We reconnect more than 100 blocks, give it plenty of time diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py index 4bc4913bda..57c8f511ac 100755 --- a/test/functional/rpc_fundrawtransaction.py +++ b/test/functional/rpc_fundrawtransaction.py @@ -271,7 +271,11 @@ class RawTransactionsTest(BitcoinTestFramework): assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) + # Should fail without add_inputs: + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False}) + # add_inputs is enabled by default rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 @@ -299,7 +303,10 @@ class RawTransactionsTest(BitcoinTestFramework): dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) - rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + # Should fail without add_inputs: + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False}) + rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True}) + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 @@ -330,7 +337,10 @@ class RawTransactionsTest(BitcoinTestFramework): dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) - rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + # Should fail without add_inputs: + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False}) + rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True}) + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 diff --git a/test/functional/rpc_getaddressinfo_label_deprecation.py b/test/functional/rpc_getaddressinfo_label_deprecation.py deleted file mode 100755 index 09545ebce7..0000000000 --- 
a/test/functional/rpc_getaddressinfo_label_deprecation.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2020-2019 The Bitcoin Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. -""" -Test deprecation of the RPC getaddressinfo `label` field. It has been -superseded by the `labels` field. - -""" -from test_framework.test_framework import BitcoinTestFramework - -class GetAddressInfoLabelDeprecationTest(BitcoinTestFramework): - def set_test_params(self): - self.num_nodes = 2 - self.setup_clean_chain = False - # Start node[0] with -deprecatedrpc=label, and node[1] without. - self.extra_args = [["-deprecatedrpc=label"], []] - - def skip_test_if_missing_module(self): - self.skip_if_no_wallet() - - def test_label_with_deprecatedrpc_flag(self): - self.log.info("Test getaddressinfo label with -deprecatedrpc flag") - node = self.nodes[0] - address = node.getnewaddress() - info = node.getaddressinfo(address) - assert "label" in info - - def test_label_without_deprecatedrpc_flag(self): - self.log.info("Test getaddressinfo label without -deprecatedrpc flag") - node = self.nodes[1] - address = node.getnewaddress() - info = node.getaddressinfo(address) - assert "label" not in info - - def run_test(self): - """Test getaddressinfo label with and without -deprecatedrpc flag.""" - self.test_label_with_deprecatedrpc_flag() - self.test_label_without_deprecatedrpc_flag() - - -if __name__ == '__main__': - GetAddressInfoLabelDeprecationTest().main() diff --git a/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py b/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py deleted file mode 100755 index 903f5536b9..0000000000 --- a/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2020 The Bitcoin Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. -""" -Test deprecation of RPC getaddressinfo `labels` returning an array -containing a JSON object of `name` and purpose` key-value pairs. It now -returns an array containing only the label name. - -""" -from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import assert_equal - -LABELS_TO_TEST = frozenset({"" , "New 𝅘𝅥𝅯 $<#>&!рыба Label"}) - -class GetAddressInfoLabelsPurposeDeprecationTest(BitcoinTestFramework): - def set_test_params(self): - self.num_nodes = 2 - self.setup_clean_chain = False - # Start node[0] with -deprecatedrpc=labelspurpose and node[1] without. 
- self.extra_args = [["-deprecatedrpc=labelspurpose"], []] - - def skip_test_if_missing_module(self): - self.skip_if_no_wallet() - - def test_labels(self, node_num, label_name, expected_value): - node = self.nodes[node_num] - address = node.getnewaddress() - if label_name != "": - node.setlabel(address, label_name) - self.log.info(" set label to {}".format(label_name)) - labels = node.getaddressinfo(address)["labels"] - self.log.info(" labels = {}".format(labels)) - assert_equal(labels, expected_value) - - def run_test(self): - """Test getaddressinfo labels with and without -deprecatedrpc flag.""" - self.log.info("Test getaddressinfo labels with -deprecatedrpc flag") - for label in LABELS_TO_TEST: - self.test_labels(node_num=0, label_name=label, expected_value=[{"name": label, "purpose": "receive"}]) - - self.log.info("Test getaddressinfo labels without -deprecatedrpc flag") - for label in LABELS_TO_TEST: - self.test_labels(node_num=1, label_name=label, expected_value=[label]) - - -if __name__ == '__main__': - GetAddressInfoLabelsPurposeDeprecationTest().main() diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py index bd93b6f7a4..8fa36445cd 100755 --- a/test/functional/rpc_getblockfilter.py +++ b/test/functional/rpc_getblockfilter.py @@ -7,7 +7,7 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_is_hex_string, assert_raises_rpc_error, - connect_nodes, disconnect_nodes, sync_blocks + connect_nodes, disconnect_nodes ) FILTER_TYPES = ["basic"] @@ -30,7 +30,7 @@ class GetBlockFilterTest(BitcoinTestFramework): # Reorg node 0 to a new chain connect_nodes(self.nodes[0], 1) - sync_blocks(self.nodes) + self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 4) chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)] diff --git a/test/functional/rpc_help.py b/test/functional/rpc_help.py index 027ae368e7..9b981b864e 100755 --- a/test/functional/rpc_help.py +++ b/test/functional/rpc_help.py @@ -18,6 +18,8 @@ class HelpRpcTest(BitcoinTestFramework): def run_test(self): self.test_categories() self.dump_help() + if self.is_wallet_compiled(): + self.wallet_help() def test_categories(self): node = self.nodes[0] @@ -53,6 +55,11 @@ class HelpRpcTest(BitcoinTestFramework): # Make sure the node can generate the help at runtime without crashing f.write(self.nodes[0].help(call)) + def wallet_help(self): + assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress') + self.restart_node(0, extra_args=['-nowallet=1']) + assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress') + if __name__ == '__main__': HelpRpcTest().main() diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py index 58d8c4abe1..ca26152e7e 100755 --- a/test/functional/rpc_net.py +++ b/test/functional/rpc_net.py @@ -46,10 +46,12 @@ class NetTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 - self.extra_args = [["-minrelaytxfee=0.00001000"],["-minrelaytxfee=0.00000500"]] + self.extra_args = [["-minrelaytxfee=0.00001000"], ["-minrelaytxfee=0.00000500"]] self.supports_cli = False def run_test(self): + self.log.info('Get out of IBD for the minfeefilter test') + self.nodes[0].generate(1) self.log.info('Connect nodes both way') connect_nodes(self.nodes[0], 1) connect_nodes(self.nodes[1], 0) diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py index 
9b07c39606..e5e62fd646 100755 --- a/test/functional/rpc_psbt.py +++ b/test/functional/rpc_psbt.py @@ -8,6 +8,7 @@ from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( + assert_approx, assert_equal, assert_greater_than, assert_raises_rpc_error, @@ -37,6 +38,7 @@ class PSBTTest(BitcoinTestFramework): def skip_test_if_missing_module(self): self.skip_if_no_wallet() + # TODO: Re-enable this test with segwit v1 def test_utxo_conversion(self): mining_node = self.nodes[2] offline_node = self.nodes[0] @@ -85,6 +87,13 @@ class PSBTTest(BitcoinTestFramework): # Create and fund a raw tx for sending 10 BTC psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt'] + # If inputs are specified, do not automatically add more: + utxo1 = self.nodes[0].listunspent()[0] + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[0].walletcreatefundedpsbt, [{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}) + + psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt'] + assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2) + # Node 1 should not be able to add anything to it but still return the psbtx same as before psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt'] assert_equal(psbtx1, psbtx) @@ -148,17 +157,21 @@ class PSBTTest(BitcoinTestFramework): # spend single key from node 1 rawtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99})['psbt'] walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(rawtx) + # Make sure it has both types of UTXOs + decoded = self.nodes[1].decodepsbt(walletprocesspsbt_out['psbt']) + assert 'non_witness_utxo' in decoded['inputs'][0] + assert 'witness_utxo' in decoded['inputs'][0] assert_equal(walletprocesspsbt_out['complete'], True) self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex']) # feeRate of 0.1 BTC / KB produces a total fee slightly below -maxtxfee (~0.05280000): - res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1}) - assert_greater_than(res["fee"], 0.05) - assert_greater_than(0.06, res["fee"]) + res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1, "add_inputs": True}) + assert_approx(res["fee"], 0.055, 0.005) # feeRate of 10 BTC / KB produces a total fee well above -maxtxfee # previously this was silently capped at -maxtxfee - assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10}) + assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10, "add_inputs": True}) + assert_raises_rpc_error(-4, "Fee exceeds 
maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():1}, 0, {"feeRate": 10, "add_inputs": False}) # partially sign multisig things with node 1 psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt'] @@ -239,7 +252,7 @@ class PSBTTest(BitcoinTestFramework): # replaceable arg block_height = self.nodes[0].getblockcount() unspent = self.nodes[0].listunspent()[0] - psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False}, False) + psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False, "add_inputs": True}, False) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) @@ -247,7 +260,7 @@ class PSBTTest(BitcoinTestFramework): assert_equal(decoded_psbt["tx"]["locktime"], block_height+2) # Same construction with only locktime set and RBF explicitly enabled - psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True}, True) + psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True, "add_inputs": True}, True) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) @@ -255,7 +268,7 @@ class PSBTTest(BitcoinTestFramework): assert_equal(decoded_psbt["tx"]["locktime"], block_height) # Same construction without optional arguments - psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}]) + psbtx_info = self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}]) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) @@ -264,7 +277,7 @@ class PSBTTest(BitcoinTestFramework): # Same construction without optional arguments, for a node with -walletrbf=0 unspent1 = self.nodes[1].listunspent()[0] - psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height) + psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height, {"add_inputs": True}) decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) @@ 
-275,7 +288,7 @@ class PSBTTest(BitcoinTestFramework): self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False) # Regression test for 14473 (mishandling of already-signed witness transaction): - psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}]) + psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True}) complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"]) double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"]) assert_equal(complete_psbt, double_processed_psbt) @@ -344,7 +357,8 @@ class PSBTTest(BitcoinTestFramework): for i, signer in enumerate(signers): self.nodes[2].unloadwallet("wallet{}".format(i)) - self.test_utxo_conversion() + # TODO: Re-enable this for segwit v1 + # self.test_utxo_conversion() # Test that psbts with p2pkh outputs are created properly p2pkh = self.nodes[0].getnewaddress(address_type='legacy') @@ -467,7 +481,7 @@ class PSBTTest(BitcoinTestFramework): assert_equal(analysis['next'], 'creator') assert_equal(analysis['error'], 'PSBT is not valid. Input 0 specifies invalid prevout') - assert_raises_rpc_error(-25, 'Missing inputs', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==') + assert_raises_rpc_error(-25, 'Inputs missing or spent', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==') if __name__ == '__main__': PSBTTest().main() diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py index 05308931e3..81eb881234 100644 --- a/test/functional/test_framework/authproxy.py +++ b/test/functional/test_framework/authproxy.py @@ -115,6 +115,8 @@ class AuthServiceProxy(): except OSError as e: retry = ( '[WinError 10053] An established connection was aborted by the software in your host machine' in str(e)) + # Workaround for a bug on macOS. 
See https://bugs.python.org/issue33450 + retry = retry or ('[Errno 41] Protocol wrong type for socket' in str(e)) if retry: self.__conn.close() self.__conn.request(method, path, postdata, headers) diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py index 4d1dd4422e..eb1244035f 100755 --- a/test/functional/test_framework/messages.py +++ b/test/functional/test_framework/messages.py @@ -45,6 +45,10 @@ MAX_MONEY = 21000000 * COIN BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out +MAX_PROTOCOL_MESSAGE_LENGTH = 4000000 # Maximum length of incoming protocol messages +MAX_HEADERS_RESULTS = 2000 # Number of headers sent in one getheaders result +MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message + NODE_NETWORK = (1 << 0) NODE_GETUTXO = (1 << 1) NODE_BLOOM = (1 << 2) diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py index 45063aaff2..e6da33763d 100755 --- a/test/functional/test_framework/mininode.py +++ b/test/functional/test_framework/mininode.py @@ -26,6 +26,7 @@ import threading from test_framework.messages import ( CBlockHeader, + MAX_HEADERS_RESULTS, MIN_VERSION_SUPPORTED, msg_addr, msg_block, @@ -492,7 +493,7 @@ class P2PInterface(P2PConnection): # P2PConnection acquires this lock whenever delivering a message to a P2PInterface. # This lock should be acquired in the thread running the test logic to synchronize # access to any data shared with the P2PInterface or P2PConnection. -mininode_lock = threading.RLock() +mininode_lock = threading.Lock() class NetworkThread(threading.Thread): @@ -553,7 +554,6 @@ class P2PDataStore(P2PInterface): return headers_list = [self.block_store[self.last_block_hash]] - maxheaders = 2000 while headers_list[-1].sha256 not in locator.vHave: # Walk back through the block store, adding headers to headers_list # as we go. @@ -569,7 +569,7 @@ class P2PDataStore(P2PInterface): break # Truncate the list if there are too many headers - headers_list = headers_list[:-maxheaders - 1:-1] + headers_list = headers_list[:-MAX_HEADERS_RESULTS - 1:-1] response = msg_headers(headers_list) if response is not None: @@ -658,8 +658,6 @@ class P2PTxInvStore(P2PInterface): # save txid self.tx_invs_received[i.hash] += 1 - super().on_inv(message) - def get_invs(self): with mininode_lock: return list(self.tx_invs_received.keys()) @@ -669,6 +667,6 @@ class P2PTxInvStore(P2PInterface): The mempool should mark unbroadcast=False for these transactions. """ # Wait until invs have been received (and getdatas sent) for each txid. - self.wait_until(lambda: set(self.get_invs()) == set([int(tx, 16) for tx in txns]), timeout) + self.wait_until(lambda: set(self.tx_invs_received.keys()) == set([int(tx, 16) for tx in txns]), timeout) # Flush messages and wait for the getdatas to be processed self.sync_with_ping() diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 9f5e9e5f0d..9d9e065158 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -31,8 +31,6 @@ from .util import ( disconnect_nodes, get_datadir_path, initialize_datadir, - sync_blocks, - sync_mempools, ) @@ -355,9 +353,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): # See fPreferredDownload in net_processing. # # If further outbound connections are needed, they can be added at the beginning of the test with e.g. 
- # connect_nodes(self.nodes[1], 2) + # self.connect_nodes(1, 2) for i in range(self.num_nodes - 1): - connect_nodes(self.nodes[i + 1], i) + self.connect_nodes(i + 1, i) self.sync_all() def setup_nodes(self): @@ -452,7 +450,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): assert_equal(len(binary), num_nodes) assert_equal(len(binary_cli), num_nodes) for i in range(num_nodes): - self.nodes.append(TestNode( + test_node_i = TestNode( i, get_datadir_path(self.options.tmpdir, i), chain=self.chain, @@ -470,7 +468,15 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): start_perf=self.options.perf, use_valgrind=self.options.valgrind, descriptors=self.options.descriptors, - )) + ) + self.nodes.append(test_node_i) + if not test_node_i.version_is_at_least(170000): + # adjust conf for pre 17 + conf_file = test_node_i.bitcoinconf + with open(conf_file, 'r', encoding='utf8') as conf: + conf_data = conf.read() + with open(conf_file, 'w', encoding='utf8') as conf: + conf.write(conf_data.replace('[regtest]', '')) def start_node(self, i, *args, **kwargs): """Start a bitcoind""" @@ -526,11 +532,17 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): def wait_for_node_exit(self, i, timeout): self.nodes[i].process.wait(timeout) + def connect_nodes(self, a, b): + connect_nodes(self.nodes[a], b) + + def disconnect_nodes(self, a, b): + disconnect_nodes(self.nodes[a], b) + def split_network(self): """ Split the network of four nodes into nodes 0/1 and 2/3. """ - disconnect_nodes(self.nodes[1], 2) + self.disconnect_nodes(1, 2) self.sync_all(self.nodes[:2]) self.sync_all(self.nodes[2:]) @@ -538,18 +550,57 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): """ Join the (previously split) network halves together. """ - connect_nodes(self.nodes[1], 2) + self.connect_nodes(1, 2) self.sync_all() - def sync_blocks(self, nodes=None, **kwargs): - sync_blocks(nodes or self.nodes, **kwargs) - - def sync_mempools(self, nodes=None, **kwargs): - sync_mempools(nodes or self.nodes, **kwargs) - - def sync_all(self, nodes=None, **kwargs): - self.sync_blocks(nodes, **kwargs) - self.sync_mempools(nodes, **kwargs) + def sync_blocks(self, nodes=None, wait=1, timeout=60): + """ + Wait until everybody has the same tip. + sync_blocks needs to be called with an rpc_connections set that has least + one node already synced to the latest, stable tip, otherwise there's a + chance it might return before all nodes are stably synced. 
+ """ + rpc_connections = nodes or self.nodes + timeout = int(timeout * self.options.timeout_factor) + stop_time = time.time() + timeout + while time.time() <= stop_time: + best_hash = [x.getbestblockhash() for x in rpc_connections] + if best_hash.count(best_hash[0]) == len(rpc_connections): + return + # Check that each peer has at least one connection + assert (all([len(x.getpeerinfo()) for x in rpc_connections])) + time.sleep(wait) + raise AssertionError("Block sync timed out after {}s:{}".format( + timeout, + "".join("\n {!r}".format(b) for b in best_hash), + )) + + def sync_mempools(self, nodes=None, wait=1, timeout=60, flush_scheduler=True): + """ + Wait until everybody has the same transactions in their memory + pools + """ + rpc_connections = nodes or self.nodes + timeout = int(timeout * self.options.timeout_factor) + stop_time = time.time() + timeout + while time.time() <= stop_time: + pool = [set(r.getrawmempool()) for r in rpc_connections] + if pool.count(pool[0]) == len(rpc_connections): + if flush_scheduler: + for r in rpc_connections: + r.syncwithvalidationinterfacequeue() + return + # Check that each peer has at least one connection + assert (all([len(x.getpeerinfo()) for x in rpc_connections])) + time.sleep(wait) + raise AssertionError("Mempool sync timed out after {}s:{}".format( + timeout, + "".join("\n {!r}".format(m) for m in pool), + )) + + def sync_all(self, nodes=None): + self.sync_blocks(nodes) + self.sync_mempools(nodes) # Private helper methods. These should not be accessed by the subclass test scripts. diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index ebc0501e11..66bb2c89b5 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -23,6 +23,7 @@ import sys from .authproxy import JSONRPCException from .descriptors import descsum_create +from .messages import MY_SUBVERSION from .util import ( MAX_NODES, append_config, @@ -549,11 +550,16 @@ class TestNode(): assert self.p2ps, self._node_msg("No p2p connection") return self.p2ps[0] + def num_connected_mininodes(self): + """Return number of test framework p2p connections to the node.""" + return len([peer for peer in self.getpeerinfo() if peer['subver'] == MY_SUBVERSION]) + def disconnect_p2ps(self): """Close all p2p connections to the node.""" for p in self.p2ps: p.peer_disconnect() del self.p2ps[:] + wait_until(lambda: self.num_connected_mininodes() == 0) class TestNodeCLIAttr: diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 52306c8c3d..506057f1fa 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -25,6 +25,7 @@ logger = logging.getLogger("TestFramework.utils") # Assert functions ################## + def assert_approx(v, vexp, vspan=0.00001): """Assert that `v` is within `vspan` of `vexp`""" if v < vexp - vspan: @@ -32,6 +33,7 @@ def assert_approx(v, vexp, vspan=0.00001): if v > vexp + vspan: raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan))) + def assert_fee_amount(fee, tx_size, fee_per_kB): """Assert the fee was in range""" target_fee = round(tx_size * fee_per_kB / 1000, 8) @@ -41,21 +43,26 @@ def assert_fee_amount(fee, tx_size, fee_per_kB): if fee > (tx_size + 2) * fee_per_kB / 1000: raise AssertionError("Fee of %s BTC too high! 
(Should be %s BTC)" % (str(fee), str(target_fee))) + def assert_equal(thing1, thing2, *args): if thing1 != thing2 or any(thing1 != arg for arg in args): raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args)) + def assert_greater_than(thing1, thing2): if thing1 <= thing2: raise AssertionError("%s <= %s" % (str(thing1), str(thing2))) + def assert_greater_than_or_equal(thing1, thing2): if thing1 < thing2: raise AssertionError("%s < %s" % (str(thing1), str(thing2))) + def assert_raises(exc, fun, *args, **kwds): assert_raises_message(exc, None, fun, *args, **kwds) + def assert_raises_message(exc, message, fun, *args, **kwds): try: fun(*args, **kwds) @@ -71,6 +78,7 @@ def assert_raises_message(exc, message, fun, *args, **kwds): else: raise AssertionError("No exception raised") + def assert_raises_process_error(returncode, output, fun, *args, **kwds): """Execute a process and asserts the process return code and output. @@ -95,6 +103,7 @@ def assert_raises_process_error(returncode, output, fun, *args, **kwds): else: raise AssertionError("No exception raised") + def assert_raises_rpc_error(code, message, fun, *args, **kwds): """Run an RPC and verify that a specific JSONRPC exception code and message is raised. @@ -113,6 +122,7 @@ def assert_raises_rpc_error(code, message, fun, *args, **kwds): """ assert try_rpc(code, message, fun, *args, **kwds), "No exception raised" + def try_rpc(code, message, fun, *args, **kwds): """Tries to run an rpc command. @@ -134,22 +144,22 @@ def try_rpc(code, message, fun, *args, **kwds): else: return False + def assert_is_hex_string(string): try: int(string, 16) except Exception as e: - raise AssertionError( - "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) + raise AssertionError("Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) + def assert_is_hash_string(string, length=64): if not isinstance(string, str): raise AssertionError("Expected a string, got type %r" % type(string)) elif length and len(string) != length: - raise AssertionError( - "String of length %d expected; got %d" % (length, len(string))) + raise AssertionError("String of length %d expected; got %d" % (length, len(string))) elif not re.match('[abcdef0-9]+$', string): - raise AssertionError( - "String %r contains invalid characters for a hash." % string) + raise AssertionError("String %r contains invalid characters for a hash." 
% string) + def assert_array_result(object_array, to_match, expected, should_not_find=False): """ @@ -180,9 +190,11 @@ def assert_array_result(object_array, to_match, expected, should_not_find=False) if num_matched > 0 and should_not_find: raise AssertionError("Objects were found %s" % (str(to_match))) + # Utility functions ################### + def check_json_precision(): """Make sure json library being used does not lose precision converting BTC values""" n = Decimal("20000000.00000003") @@ -190,11 +202,13 @@ def check_json_precision(): if satoshis != 2000000000000003: raise RuntimeError("JSON encode/decode loses precision") + def EncodeDecimal(o): if isinstance(o, Decimal): return str(o) raise TypeError(repr(o) + " is not JSON serializable") + def count_bytes(hex_string): return len(bytearray.fromhex(hex_string)) @@ -202,12 +216,15 @@ def count_bytes(hex_string): def hex_str_to_bytes(hex_str): return unhexlify(hex_str.encode('ascii')) + def str_to_b64str(string): return b64encode(string.encode('utf-8')).decode('ascii') + def satoshi_round(amount): return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0): if attempts == float('inf') and timeout == float('inf'): timeout = 60 @@ -235,6 +252,7 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout)) raise RuntimeError('Unreachable') + # RPC/P2P connection constants and functions ############################################ @@ -250,6 +268,7 @@ class PortSeed: # Must be initialized with a unique integer for each process n = None + def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None): """ Args: @@ -271,18 +290,20 @@ def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None): proxy = AuthServiceProxy(url, **proxy_kwargs) proxy.url = url # store URL on proxy for info - coverage_logfile = coverage.get_filename( - coveragedir, node_number) if coveragedir else None + coverage_logfile = coverage.get_filename(coveragedir, node_number) if coveragedir else None return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) + def p2p_port(n): assert n <= MAX_NODES return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + def rpc_port(n): return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + def rpc_url(datadir, i, chain, rpchost): rpc_u, rpc_p = get_auth_cookie(datadir, chain) host = '127.0.0.1' @@ -295,9 +316,11 @@ def rpc_url(datadir, i, chain, rpchost): host = rpchost return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) + # Node functions ################ + def initialize_datadir(dirname, n, chain): datadir = get_datadir_path(dirname, n) if not os.path.isdir(datadir): @@ -327,21 +350,17 @@ def initialize_datadir(dirname, n, chain): os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True) return datadir -def adjust_bitcoin_conf_for_pre_17(conf_file): - with open(conf_file,'r', encoding='utf8') as conf: - conf_data = conf.read() - with open(conf_file, 'w', encoding='utf8') as conf: - conf_data_changed = conf_data.replace('[regtest]', '') - conf.write(conf_data_changed) def get_datadir_path(dirname, n): return os.path.join(dirname, "node" + str(n)) + def append_config(datadir, options): with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f: for option in options: f.write(option + 
"\n") + def get_auth_cookie(datadir, chain): user = None password = None @@ -366,20 +385,24 @@ def get_auth_cookie(datadir, chain): raise ValueError("No RPC credentials") return user, password + # If a cookie file exists in the given datadir, delete it. def delete_cookie_file(datadir, chain): if os.path.isfile(os.path.join(datadir, chain, ".cookie")): logger.debug("Deleting leftover cookie file") os.remove(os.path.join(datadir, chain, ".cookie")) + def softfork_active(node, key): """Return whether a softfork is active.""" return node.getblockchaininfo()['softforks'][key]['active'] + def set_node_times(nodes, t): for node in nodes: node.setmocktime(t) + def disconnect_nodes(from_connection, node_num): def get_peer_ids(): result = [] @@ -392,7 +415,7 @@ def disconnect_nodes(from_connection, node_num): if not peer_ids: logger.warning("disconnect_nodes: {} and {} were not connected".format( from_connection.index, - node_num + node_num, )) return for peer_id in peer_ids: @@ -402,12 +425,13 @@ def disconnect_nodes(from_connection, node_num): # If this node is disconnected between calculating the peer id # and issuing the disconnect, don't worry about it. # This avoids a race condition if we're mass-disconnecting peers. - if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED + if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED raise # wait to disconnect wait_until(lambda: not get_peer_ids(), timeout=5) + def connect_nodes(from_connection, node_num): ip_port = "127.0.0.1:" + str(p2p_port(node_num)) from_connection.addnode(ip_port, "onetry") @@ -420,50 +444,6 @@ def connect_nodes(from_connection, node_num): wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo())) -def sync_blocks(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same tip. - - sync_blocks needs to be called with an rpc_connections set that has least - one node already synced to the latest, stable tip, otherwise there's a - chance it might return before all nodes are stably synced. 
- """ - stop_time = time.time() + timeout - while time.time() <= stop_time: - best_hash = [x.getbestblockhash() for x in rpc_connections] - if best_hash.count(best_hash[0]) == len(rpc_connections): - return - # Check that each peer has at least one connection - assert (all([len(x.getpeerinfo()) for x in rpc_connections])) - time.sleep(wait) - raise AssertionError("Block sync timed out after {}s:{}".format( - timeout, - "".join("\n {!r}".format(b) for b in best_hash), - )) - - -def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True): - """ - Wait until everybody has the same transactions in their memory - pools - """ - stop_time = time.time() + timeout - while time.time() <= stop_time: - pool = [set(r.getrawmempool()) for r in rpc_connections] - if pool.count(pool[0]) == len(rpc_connections): - if flush_scheduler: - for r in rpc_connections: - r.syncwithvalidationinterfacequeue() - return - # Check that each peer has at least one connection - assert (all([len(x.getpeerinfo()) for x in rpc_connections])) - time.sleep(wait) - raise AssertionError("Mempool sync timed out after {}s:{}".format( - timeout, - "".join("\n {!r}".format(m) for m in pool), - )) - - # Transaction/Block functions ############################# @@ -479,6 +459,7 @@ def find_output(node, txid, amount, *, blockhash=None): return i raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount))) + def gather_inputs(from_node, amount_needed, confirmations_required=1): """ Return a random set of unspent txouts that are enough to pay amount_needed @@ -496,6 +477,7 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1): raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in)) return (total_in, inputs) + def make_change(from_node, amount_in, amount_out, fee): """ Create change output(s), return them @@ -513,6 +495,7 @@ def make_change(from_node, amount_in, amount_out, fee): outputs[from_node.getnewaddress()] = change return outputs + def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): """ Create a random transaction. @@ -532,6 +515,7 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): return (txid, signresult["hex"], fee) + # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. def create_confirmed_utxos(fee, node, count): @@ -564,6 +548,7 @@ def create_confirmed_utxos(fee, node, count): assert len(utxos) >= count return utxos + # Create large OP_RETURN txouts that can be appended to a transaction # to make it large (helper for constructing large transactions). def gen_return_txouts(): @@ -583,6 +568,7 @@ def gen_return_txouts(): txouts.append(txout) return txouts + # Create a spend of each passed-in utxo, splicing in "txouts" to each raw # transaction to make it large. See gen_return_txouts() above. 
def create_lots_of_big_transactions(node, txouts, utxos, num, fee): @@ -606,6 +592,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee): txids.append(txid) return txids + def mine_large_block(node, utxos=None): # generate a 66k transaction, # and 14 of them is close to the 1MB block limit @@ -619,6 +606,7 @@ def mine_large_block(node, utxos=None): create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) node.generate(1) + def find_vout_for_address(node, txid, addr): """ Locate the vout index of the given transaction sending to the diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py index c0bc471d5a..2fa48006f4 100755 --- a/test/functional/test_runner.py +++ b/test/functional/test_runner.py @@ -190,6 +190,7 @@ BASE_SCRIPTS = [ 'rpc_preciousblock.py', 'wallet_importprunedfunds.py', 'p2p_leak_tx.py', + 'p2p_eviction.py', 'rpc_signmessage.py', 'rpc_generateblock.py', 'wallet_balance.py', @@ -232,6 +233,7 @@ BASE_SCRIPTS = [ 'feature_includeconf.py', 'feature_asmap.py', 'mempool_unbroadcast.py', + 'mempool_compatibility.py', 'rpc_deriveaddresses.py', 'rpc_deriveaddresses.py --usecli', 'rpc_scantxoutset.py', @@ -240,8 +242,6 @@ BASE_SCRIPTS = [ 'p2p_permissions.py', 'feature_blocksdir.py', 'feature_config_args.py', - 'rpc_getaddressinfo_labels_purpose_deprecation.py', - 'rpc_getaddressinfo_label_deprecation.py', 'rpc_getdescriptorinfo.py', 'rpc_help.py', 'feature_help.py', @@ -396,11 +396,12 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage= args = args or [] # Warn if bitcoind is already running - # pidof might fail or return an empty string if bitcoind is not running try: - if subprocess.check_output(["pidof", "bitcoind"]) not in [b'']: + # pgrep exits with code zero when one or more matching processes found + if subprocess.run(["pgrep", "-x", "bitcoind"], stdout=subprocess.DEVNULL).returncode == 0: print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0])) - except (OSError, subprocess.SubprocessError): + except OSError: + # pgrep not supported pass # Warn if there is a cache directory diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py index 524e1593ba..18f0beb598 100755 --- a/test/functional/tool_wallet.py +++ b/test/functional/tool_wallet.py @@ -71,8 +71,7 @@ class ToolWalletTest(BitcoinTestFramework): self.assert_raises_tool_error('Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create') self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo') self.assert_raises_tool_error( - 'Error initializing wallet database environment "{}"!\nError loading wallet.dat. Is wallet being used by other process?' - .format(os.path.join(self.nodes[0].datadir, self.chain, 'wallets')), + 'Error loading wallet.dat. 
Is wallet being used by another process?', '-wallet=wallet.dat', 'info', ) diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py index 90d17a806c..8837e13005 100755 --- a/test/functional/wallet_abandonconflict.py +++ b/test/functional/wallet_abandonconflict.py @@ -95,8 +95,7 @@ class AbandonConflictTest(BitcoinTestFramework): # Restart the node with a higher min relay fee so the parent tx is no longer in mempool # TODO: redo with eviction - self.stop_node(0) - self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) + self.restart_node(0, extra_args=["-minrelaytxfee=0.0001"]) assert self.nodes[0].getmempoolinfo()['loaded'] # Verify txs no longer in either node's mempool @@ -123,8 +122,7 @@ class AbandonConflictTest(BitcoinTestFramework): balance = newbalance # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned - self.stop_node(0) - self.start_node(0, extra_args=["-minrelaytxfee=0.00001"]) + self.restart_node(0, extra_args=["-minrelaytxfee=0.00001"]) assert self.nodes[0].getmempoolinfo()['loaded'] assert_equal(len(self.nodes[0].getrawmempool()), 0) @@ -145,8 +143,7 @@ class AbandonConflictTest(BitcoinTestFramework): balance = newbalance # Remove using high relay fee again - self.stop_node(0) - self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) + self.restart_node(0, extra_args=["-minrelaytxfee=0.0001"]) assert self.nodes[0].getmempoolinfo()['loaded'] assert_equal(len(self.nodes[0].getrawmempool()), 0) newbalance = self.nodes[0].getbalance() diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py index 780cce9d02..eddd938847 100755 --- a/test/functional/wallet_avoidreuse.py +++ b/test/functional/wallet_avoidreuse.py @@ -110,9 +110,7 @@ class AvoidReuseTest(BitcoinTestFramework): assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False) assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True) - # Stop and restart node 1 - self.stop_node(1) - self.start_node(1) + self.restart_node(1) connect_nodes(self.nodes[0], 1) # Flags should still be node1.avoid_reuse=false, node2.avoid_reuse=true diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py index 8efa66a856..31829a18b3 100755 --- a/test/functional/wallet_balance.py +++ b/test/functional/wallet_balance.py @@ -12,7 +12,6 @@ from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, - sync_blocks, ) @@ -264,7 +263,7 @@ class WalletTest(BitcoinTestFramework): # Now confirm tx_orig self.restart_node(1, ['-persistmempool=0']) connect_nodes(self.nodes[0], 1) - sync_blocks(self.nodes) + self.sync_blocks() self.nodes[1].sendrawtransaction(tx_orig) self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY) self.sync_all() diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py index 797c903dd3..8962362276 100755 --- a/test/functional/wallet_basic.py +++ b/test/functional/wallet_basic.py @@ -219,6 +219,60 @@ class WalletTest(BitcoinTestFramework): assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) + # Sendmany with explicit fee (BTC/kB) + # Throw if no conf_target provided + assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate", + self.nodes[2].sendmany, + amounts={ address: 10 }, + estimate_mode='bTc/kB') + # Throw if negative feerate + 
assert_raises_rpc_error(-3, "Amount out of range", + self.nodes[2].sendmany, + amounts={ address: 10 }, + conf_target=-1, + estimate_mode='bTc/kB') + fee_per_kb = 0.0002500 + explicit_fee_per_byte = Decimal(fee_per_kb) / 1000 + txid = self.nodes[2].sendmany( + amounts={ address: 10 }, + conf_target=fee_per_kb, + estimate_mode='bTc/kB', + ) + self.nodes[2].generate(1) + self.sync_all(self.nodes[0:3]) + node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), explicit_fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) + assert_equal(self.nodes[2].getbalance(), node_2_bal) + node_0_bal += Decimal('10') + assert_equal(self.nodes[0].getbalance(), node_0_bal) + + # Sendmany with explicit fee (SAT/B) + # Throw if no conf_target provided + assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate", + self.nodes[2].sendmany, + amounts={ address: 10 }, + estimate_mode='sat/b') + # Throw if negative feerate + assert_raises_rpc_error(-3, "Amount out of range", + self.nodes[2].sendmany, + amounts={ address: 10 }, + conf_target=-1, + estimate_mode='sat/b') + fee_sat_per_b = 2 + fee_per_kb = fee_sat_per_b / 100000.0 + explicit_fee_per_byte = Decimal(fee_per_kb) / 1000 + txid = self.nodes[2].sendmany( + amounts={ address: 10 }, + conf_target=fee_sat_per_b, + estimate_mode='sAT/b', + ) + self.nodes[2].generate(1) + self.sync_all(self.nodes[0:3]) + balance = self.nodes[2].getbalance() + node_2_bal = self.check_fee_amount(balance, node_2_bal - Decimal('10'), explicit_fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) + assert_equal(balance, node_2_bal) + node_0_bal += Decimal('10') + assert_equal(self.nodes[0].getbalance(), node_0_bal) + self.start_node(3, self.nodes[3].extra_args) connect_nodes(self.nodes[0], 3) self.sync_all() @@ -349,6 +403,74 @@ class WalletTest(BitcoinTestFramework): self.nodes[0].generate(1) self.sync_all(self.nodes[0:3]) + # send with explicit btc/kb fee + self.log.info("test explicit fee (sendtoaddress as btc/kb)") + self.nodes[0].generate(1) + self.sync_all(self.nodes[0:3]) + prebalance = self.nodes[2].getbalance() + assert prebalance > 2 + address = self.nodes[1].getnewaddress() + # Throw if no conf_target provided + assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate", + self.nodes[2].sendtoaddress, + address=address, + amount=1.0, + estimate_mode='BTc/Kb') + # Throw if negative feerate + assert_raises_rpc_error(-3, "Amount out of range", + self.nodes[2].sendtoaddress, + address=address, + amount=1.0, + conf_target=-1, + estimate_mode='btc/kb') + txid = self.nodes[2].sendtoaddress( + address=address, + amount=1.0, + conf_target=0.00002500, + estimate_mode='btc/kb', + ) + tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) + self.sync_all(self.nodes[0:3]) + self.nodes[0].generate(1) + self.sync_all(self.nodes[0:3]) + postbalance = self.nodes[2].getbalance() + fee = prebalance - postbalance - Decimal('1') + assert_fee_amount(fee, tx_size, Decimal('0.00002500')) + + # send with explicit sat/b fee + self.sync_all(self.nodes[0:3]) + self.log.info("test explicit fee (sendtoaddress as sat/b)") + self.nodes[0].generate(1) + prebalance = self.nodes[2].getbalance() + assert prebalance > 2 + address = self.nodes[1].getnewaddress() + # Throw if no conf_target provided + assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate", + self.nodes[2].sendtoaddress, + address=address, + amount=1.0, + estimate_mode='SAT/b') + # Throw if negative feerate + 
assert_raises_rpc_error(-3, "Amount out of range", + self.nodes[2].sendtoaddress, + address=address, + amount=1.0, + conf_target=-1, + estimate_mode='SAT/b') + txid = self.nodes[2].sendtoaddress( + address=address, + amount=1.0, + conf_target=2, + estimate_mode='SAT/B', + ) + tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) + self.sync_all(self.nodes[0:3]) + self.nodes[0].generate(1) + self.sync_all(self.nodes[0:3]) + postbalance = self.nodes[2].getbalance() + fee = prebalance - postbalance - Decimal('1') + assert_fee_amount(fee, tx_size, Decimal('0.00002000')) + # 2. Import address from node2 to node1 self.nodes[1].importaddress(address_to_import) diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py index 27197e3b6d..72c85b8832 100755 --- a/test/functional/wallet_bumpfee.py +++ b/test/functional/wallet_bumpfee.py @@ -71,6 +71,7 @@ class BumpFeeTest(BitcoinTestFramework): self.log.info("Running tests") dest_address = peer_node.getnewaddress() + test_invalid_parameters(rbf_node, dest_address) test_simple_bumpfee_succeeds(self, "default", rbf_node, peer_node, dest_address) test_simple_bumpfee_succeeds(self, "fee_rate", rbf_node, peer_node, dest_address) test_feerate_args(self, rbf_node, peer_node, dest_address) @@ -92,6 +93,28 @@ class BumpFeeTest(BitcoinTestFramework): test_small_output_with_feerate_succeeds(self, rbf_node, dest_address) test_no_more_inputs_fails(self, rbf_node, dest_address) +def test_invalid_parameters(node, dest_address): + txid = spend_one_input(node, dest_address) + # invalid estimate mode + assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, { + "estimate_mode": "moo", + }) + assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, { + "estimate_mode": 38, + }) + assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, { + "estimate_mode": { + "foo": "bar", + }, + }) + assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, { + "estimate_mode": Decimal("3.141592"), + }) + # confTarget and conf_target + assert_raises_rpc_error(-8, "confTarget and conf_target options should not both be set", node.bumpfee, txid, { + "confTarget": 123, + "conf_target": 456, + }) def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address): self.log.info('Test simple bumpfee: {}'.format(mode)) @@ -127,9 +150,10 @@ def test_feerate_args(self, rbf_node, peer_node, dest_address): self.sync_mempools((rbf_node, peer_node)) assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool() - assert_raises_rpc_error(-8, "confTarget can't be set with fee_rate. Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL, "confTarget": 1}) + assert_raises_rpc_error(-8, "conf_target can't be set with fee_rate. Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL, "confTarget": 1}) assert_raises_rpc_error(-3, "Unexpected key totalFee", rbf_node.bumpfee, rbfid, {"totalFee": NORMAL}) + assert_raises_rpc_error(-8, "conf_target can't be set with fee_rate. 
Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate":0.00001, "confTarget": 1}) # Bumping to just above minrelay should fail to increase total fee enough, at least assert_raises_rpc_error(-8, "Insufficient total fee", rbf_node.bumpfee, rbfid, {"fee_rate": INSUFFICIENT}) diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py index cc349c7bc5..ba1e494d9a 100755 --- a/test/functional/wallet_dump.py +++ b/test/functional/wallet_dump.py @@ -190,8 +190,7 @@ class WalletDumpTest(BitcoinTestFramework): assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump)) # Restart node with new wallet, and test importwallet - self.stop_node(0) - self.start_node(0, ['-wallet=w2']) + self.restart_node(0, ['-wallet=w2']) # Make sure the address is not IsMine before import result = self.nodes[0].getaddressinfo(multisig_addr) diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py index c441b75652..3c336623e2 100755 --- a/test/functional/wallet_hd.py +++ b/test/functional/wallet_hd.py @@ -103,8 +103,7 @@ class WalletHDTest(BitcoinTestFramework): self.sync_all() # Needs rescan - self.stop_node(1) - self.start_node(1, extra_args=self.extra_args[1] + ['-rescan']) + self.restart_node(1, extra_args=self.extra_args[1] + ['-rescan']) assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) # Try a RPC based rescan @@ -183,8 +182,7 @@ class WalletHDTest(BitcoinTestFramework): self.nodes[0].generate(10) # Restart node 1 with keypool of 3 and a different wallet self.nodes[1].createwallet(wallet_name='origin', blank=True) - self.stop_node(1) - self.start_node(1, extra_args=['-keypool=3', '-wallet=origin']) + self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin']) connect_nodes(self.nodes[0], 1) # sethdseed restoring and seeing txs to addresses out of the keypool diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index ff9ff34185..88beef1034 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -7,19 +7,36 @@ Verify that a bitcoind node can load multiple wallet files """ from decimal import Decimal +from threading import Thread import os import shutil import time +from test_framework.authproxy import JSONRPCException from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch from test_framework.util import ( assert_equal, assert_raises_rpc_error, + get_rpc_proxy, ) FEATURE_LATEST = 169900 +got_loading_error = False +def test_load_unload(node, name): + global got_loading_error + for i in range(10): + if got_loading_error: + return + try: + node.loadwallet(name) + node.unloadwallet(name) + except JSONRPCException as e: + if e.error['code'] == -4 and 'Wallet already being loading' in e.error['message']: + got_loading_error = True + return + class MultiWalletTest(BitcoinTestFramework): def set_test_params(self): @@ -212,6 +229,18 @@ class MultiWalletTest(BitcoinTestFramework): w2 = node.get_wallet_rpc(wallet_names[1]) w2.getwalletinfo() + self.log.info("Concurrent wallet loading") + threads = [] + for _ in range(3): + n = node.cli if self.options.usecli else get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir) + t = Thread(target=test_load_unload, args=(n, wallet_names[2], )) + t.start() + threads.append(t) + for t in threads: + t.join() + global got_loading_error + assert_equal(got_loading_error, 
True) + self.log.info("Load remaining wallets") for wallet_name in wallet_names[2:]: loadwallet_name = self.nodes[0].loadwallet(wallet_name) diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index 497a5dd95e..455f1fc5e8 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -77,8 +77,7 @@ class ReorgsRestoreTest(BitcoinTestFramework): assert_equal(conflicted["walletconflicts"][0], conflicting["txid"]) # Node0 wallet is shutdown - self.stop_node(0) - self.start_node(0) + self.restart_node(0) # The block chain re-orgs and the tx is included in a different block self.nodes[1].generate(9) diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py index bb81746715..cc2139a027 100755 --- a/test/functional/wallet_upgradewallet.py +++ b/test/functional/wallet_upgradewallet.py @@ -16,7 +16,6 @@ import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( - adjust_bitcoin_conf_for_pre_17, assert_equal, assert_greater_than, assert_is_hex_string, @@ -46,9 +45,6 @@ class UpgradeWalletTest(BitcoinTestFramework): 160300, 150200, ]) - # adapt bitcoin.conf, because older bitcoind's don't recognize config sections - adjust_bitcoin_conf_for_pre_17(self.nodes[1].bitcoinconf) - adjust_bitcoin_conf_for_pre_17(self.nodes[2].bitcoinconf) self.start_nodes() def dumb_sync_blocks(self): diff --git a/test/functional/wallet_zapwallettxes.py b/test/functional/wallet_zapwallettxes.py index adebff360a..7f1cdbd20b 100755 --- a/test/functional/wallet_zapwallettxes.py +++ b/test/functional/wallet_zapwallettxes.py @@ -49,17 +49,15 @@ class ZapWalletTXesTest (BitcoinTestFramework): assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) - # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet. - self.stop_node(0) - self.start_node(0) + # Restart node0. Both confirmed and unconfirmed transactions remain in the wallet. + self.restart_node(0) assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) - # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed + # Restart node0 with zapwallettxes and persistmempool. The unconfirmed # transaction is zapped from the wallet, but is re-added when the mempool is reloaded. - self.stop_node(0) - self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"]) + self.restart_node(0, ["-persistmempool=1", "-zapwallettxes=2"]) wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3) self.nodes[0].syncwithvalidationinterfacequeue() # Flush mempool to wallet @@ -67,10 +65,9 @@ class ZapWalletTXesTest (BitcoinTestFramework): assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) - # Stop node0 and restart with zapwallettxes, but not persistmempool. + # Restart node0 with zapwallettxes, but not persistmempool. # The unconfirmed transaction is zapped and is no longer in the wallet. - self.stop_node(0) - self.start_node(0, ["-zapwallettxes=2"]) + self.restart_node(0, ["-zapwallettxes=2"]) # tx1 is still be available because it was confirmed assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) |
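An illustrative sketch (not part of the diff above) of the test pattern this changeset converges on, using only the framework helpers it introduces or already relies on — self.connect_nodes/self.disconnect_nodes, self.sync_blocks, and self.restart_node; the test class name is hypothetical:

#!/usr/bin/env python3
# Illustrative sketch only; not taken from the diff above. The class name is hypothetical.
from test_framework.test_framework import BitcoinTestFramework


class ExampleFrameworkHelpersTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2

    def run_test(self):
        # Previously: disconnect_nodes(self.nodes[0], 1) from test_framework.util
        self.disconnect_nodes(0, 1)
        self.nodes[0].generate(1)
        # Previously: connect_nodes(self.nodes[0], 1)
        self.connect_nodes(0, 1)
        # Previously: sync_blocks(self.nodes); the framework method also honors --timeout-factor
        self.sync_blocks()
        # Previously: self.stop_node(0) followed by self.start_node(0, ...)
        self.restart_node(0, extra_args=["-minrelaytxfee=0.0001"])


if __name__ == '__main__':
    ExampleFrameworkHelpersTest().main()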