path: root/test/functional
Diffstat (limited to 'test/functional')
-rwxr-xr-x  test/functional/feature_backwards_compatibility.py    36
-rwxr-xr-x  test/functional/feature_notifications.py                5
-rwxr-xr-x  test/functional/feature_segwit.py                        9
-rwxr-xr-x  test/functional/mempool_compatibility.py                75
-rwxr-xr-x  test/functional/mining_basic.py                         32
-rwxr-xr-x  test/functional/p2p_compactblocks.py                     7
-rwxr-xr-x  test/functional/p2p_eviction.py                        129
-rwxr-xr-x  test/functional/p2p_filter.py                          166
-rwxr-xr-x  test/functional/p2p_invalid_messages.py                212
-rwxr-xr-x  test/functional/p2p_leak.py                              3
-rwxr-xr-x  test/functional/p2p_mempool.py                          34
-rwxr-xr-x  test/functional/p2p_nobloomfilter_messages.py           48
-rwxr-xr-x  test/functional/p2p_node_network_limited.py              4
-rwxr-xr-x  test/functional/rpc_createmultisig.py                   14
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py               14
-rwxr-xr-x  test/functional/rpc_help.py                              7
-rwxr-xr-x  test/functional/rpc_psbt.py                             30
-rw-r--r--  test/functional/test_framework/key.py                   13
-rwxr-xr-x  test/functional/test_framework/mininode.py               6
-rwxr-xr-x  test/functional/test_framework/test_framework.py        13
-rwxr-xr-x  test/functional/test_framework/test_node.py              6
-rw-r--r--  test/functional/test_framework/util.py                  80
-rwxr-xr-x  test/functional/test_framework/wallet_util.py           17
-rwxr-xr-x  test/functional/test_runner.py                           4
-rwxr-xr-x  test/functional/wallet_txn_clone.py                      1
-rwxr-xr-x  test/functional/wallet_txn_doublespend.py                1
-rwxr-xr-x  test/functional/wallet_upgradewallet.py                  4
27 files changed, 644 insertions, 326 deletions
diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py
index 596ff206f2..cd380997c1 100755
--- a/test/functional/feature_backwards_compatibility.py
+++ b/test/functional/feature_backwards_compatibility.py
@@ -26,7 +26,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
- adjust_bitcoin_conf_for_pre_17,
assert_equal,
sync_blocks,
sync_mempools,
@@ -60,8 +59,6 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
170100,
160300,
])
- # adapt bitcoin.conf, because older bitcoind's don't recognize config sections
- adjust_bitcoin_conf_for_pre_17(self.nodes[5].bitcoinconf)
self.start_nodes()
@@ -322,6 +319,15 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
info = wallet.getwalletinfo()
assert info['keypoolsize'] == 1
+ # Create upgrade wallet in v0.16
+ self.stop_node(-1)
+ self.start_node(-1, extra_args=["-wallet=u1_v16"])
+ wallet = node_v16.get_wallet_rpc("u1_v16")
+ v16_addr = wallet.getnewaddress('', "bech32")
+ v16_info = wallet.validateaddress(v16_addr)
+ v16_pubkey = v16_info['pubkey']
+ self.stop_node(-1)
+
self.log.info("Test wallet upgrade path...")
# u1: regular wallet, created with v0.17
node_v17.rpc.createwallet(wallet_name="u1_v17")
@@ -331,6 +337,30 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
hdkeypath = v17_info["hdkeypath"]
pubkey = v17_info["pubkey"]
+ # Copy the 0.16 wallet to the last Bitcoin Core version and open it:
+ shutil.copyfile(
+ os.path.join(node_v16_wallets_dir, "wallets/u1_v16"),
+ os.path.join(node_master_wallets_dir, "u1_v16")
+ )
+ load_res = node_master.loadwallet("u1_v16")
+ # Make sure this wallet opens without warnings. See https://github.com/bitcoin/bitcoin/pull/19054
+ assert_equal(load_res['warning'], '')
+ wallet = node_master.get_wallet_rpc("u1_v16")
+ info = wallet.getaddressinfo(v16_addr)
+ descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v16_pubkey + ")"
+ assert_equal(info["desc"], descsum_create(descriptor))
+
+ # Now copy that same wallet back to 0.16 to make sure no automatic upgrade breaks it
+ os.remove(os.path.join(node_v16_wallets_dir, "wallets/u1_v16"))
+ shutil.copyfile(
+ os.path.join(node_master_wallets_dir, "u1_v16"),
+ os.path.join(node_v16_wallets_dir, "wallets/u1_v16")
+ )
+ self.start_node(-1, extra_args=["-wallet=u1_v16"])
+ wallet = node_v16.get_wallet_rpc("u1_v16")
+ info = wallet.validateaddress(v16_addr)
+ assert_equal(info, v16_info)
+
# Copy the 0.17 wallet to the last Bitcoin Core version and open it:
node_v17.unloadwallet("u1_v17")
shutil.copytree(
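
The descriptor assembled in the hunk above follows the key-origin form wpkh([<fingerprint><path>]<pubkey>), and descsum_create() appends a '#' plus an eight-character checksum. A minimal standalone illustration, using hypothetical fingerprint, path and pubkey values rather than the test's:

# Hypothetical example values; the real test uses the v0.16 wallet's data.
hdmasterfingerprint = "d34db33f"
hdkeypath = "m/0'/0'/0'"            # as returned by validateaddress/getaddressinfo
pubkey = "02" + "11" * 32           # placeholder compressed pubkey (hex)

# Same string construction as in the test: strip the leading "m" from the path.
descriptor = "wpkh([" + hdmasterfingerprint + hdkeypath[1:] + "]" + pubkey + ")"
assert descriptor == "wpkh([d34db33f/0'/0'/0']02" + "11" * 32 + ")"
# descsum_create(descriptor) would append "#" followed by an 8-character checksum.
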
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index fb0c7ceed4..dd4c318cee 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -19,7 +19,7 @@ from test_framework.util import (
# Windows disallows control characters (0-31) and /\?%*:|"<>
FILE_CHAR_START = 32 if os.name == 'nt' else 1
FILE_CHAR_END = 128
-FILE_CHAR_BLACKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
+FILE_CHAR_BLOCKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
def notify_outputname(walletname, txid):
@@ -32,7 +32,7 @@ class NotificationsTest(BitcoinTestFramework):
self.setup_clean_chain = True
def setup_network(self):
- self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLACKLIST)
+ self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLOCKLIST)
self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
@@ -93,6 +93,7 @@ class NotificationsTest(BitcoinTestFramework):
self.nodes[0].sethdseed(seed=self.nodes[1].dumpprivkey(keyhash_to_p2pkh(hex_str_to_bytes(self.nodes[1].getwalletinfo()['hdseedid'])[::-1])))
self.nodes[0].rescanblockchain()
self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_blocks()
# Generate transaction on node 0, sync mempools, and check for
# notification on node 1.
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 24c357091f..2298485640 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -20,6 +20,7 @@ from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash16
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_is_hex_string,
assert_raises_rpc_error,
connect_nodes,
hex_str_to_bytes,
@@ -188,6 +189,14 @@ class SegWitTest(BitcoinTestFramework):
assert self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].gettransaction(tx_id)["hex"]
assert self.nodes[0].getrawtransaction(tx_id, False, blockhash) == tx.serialize_without_witness().hex()
+ # The coinbase transaction contains the witness commitment nonce; check that the RPC exposes it
+ coinbase_txid = self.nodes[2].getblock(blockhash)['tx'][0]
+ coinbase_tx = self.nodes[2].gettransaction(txid=coinbase_txid, verbose=True)
+ witnesses = coinbase_tx["decoded"]["vin"][0]["txinwitness"]
+ assert_equal(len(witnesses), 1)
+ assert_is_hex_string(witnesses[0])
+ assert_equal(witnesses[0], '00'*32)
+
self.log.info("Verify witness txs without witness data are invalid after the fork")
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py
new file mode 100755
index 0000000000..999399dec0
--- /dev/null
+++ b/test/functional/mempool_compatibility.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test that mempool.dat is both backward and forward compatible between versions
+
+NOTE: The test is designed to catch cases where compatibility is accidentally broken.
+If mempool compatibility ever needs to be broken deliberately, the test can continue to be used by simply bumping the version number.
+
+Download node binaries:
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+Only v0.15.2 is required by this test. The rest is used in other backwards compatibility tests.
+"""
+
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+
+
+class MempoolCompatibilityTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
+
+ def setup_network(self):
+ self.add_nodes(self.num_nodes, versions=[
+ 150200, # oldest version supported by the test framework
+ None,
+ ])
+ self.start_nodes()
+ self.import_deterministic_coinbase_privkeys()
+
+ def run_test(self):
+ self.log.info("Test that mempool.dat is compatible between versions")
+
+ old_node = self.nodes[0]
+ new_node = self.nodes[1]
+ recipient = old_node.getnewaddress()
+ self.stop_node(1)
+
+ self.log.info("Add a transaction to mempool on old node and shutdown")
+ old_tx_hash = old_node.sendtoaddress(recipient, 0.0001)
+ assert old_tx_hash in old_node.getrawmempool()
+ self.stop_node(0)
+
+ self.log.info("Move mempool.dat from old to new node")
+ old_node_mempool = os.path.join(old_node.datadir, self.chain, 'mempool.dat')
+ new_node_mempool = os.path.join(new_node.datadir, self.chain, 'mempool.dat')
+ os.rename(old_node_mempool, new_node_mempool)
+
+ self.log.info("Start new node and verify mempool contains the tx")
+ self.start_node(1)
+ assert old_tx_hash in new_node.getrawmempool()
+
+ self.log.info("Add unbroadcasted tx to mempool on new node and shutdown")
+ unbroadcasted_tx_hash = new_node.sendtoaddress(recipient, 0.0001)
+ assert unbroadcasted_tx_hash in new_node.getrawmempool()
+ mempool = new_node.getrawmempool(True)
+ assert mempool[unbroadcasted_tx_hash]['unbroadcast']
+ self.stop_node(1)
+
+ self.log.info("Move mempool.dat from new to old node")
+ os.rename(new_node_mempool, old_node_mempool)
+
+ self.log.info("Start old node again and verify mempool contains both txs")
+ self.start_node(0, ['-nowallet'])
+ assert old_tx_hash in old_node.getrawmempool()
+ assert unbroadcasted_tx_hash in old_node.getrawmempool()
+
+if __name__ == "__main__":
+ MempoolCompatibilityTest().main()
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index 86d7c78d63..63d1ccfb36 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -18,11 +18,9 @@ from test_framework.blocktools import (
from test_framework.messages import (
CBlock,
CBlockHeader,
- BLOCK_HEADER_SIZE
-)
-from test_framework.mininode import (
- P2PDataStore,
+ BLOCK_HEADER_SIZE,
)
+from test_framework.mininode import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
@@ -30,10 +28,15 @@ from test_framework.util import (
connect_nodes,
)
+
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
- rsp = node.getblocktemplate(template_request={'data': block.serialize().hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ rsp = node.getblocktemplate(template_request={
+ 'data': block.serialize().hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
assert_equal(rsp, expect)
@@ -85,10 +88,9 @@ class MiningTest(BitcoinTestFramework):
next_height = int(tmpl["height"])
coinbase_tx = create_coinbase(height=next_height)
# sequence numbers must not be max for nLockTime to have effect
- coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
+ coinbase_tx.vin[0].nSequence = 2**32 - 2
coinbase_tx.rehash()
- # round-trip the encoded bip34 block height commitment
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
@@ -116,7 +118,11 @@ class MiningTest(BitcoinTestFramework):
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex())
self.log.info("getblocktemplate: Test truncated final transaction")
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': block.serialize()[:-1].hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
+ 'data': block.serialize()[:-1].hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
@@ -135,7 +141,7 @@ class MiningTest(BitcoinTestFramework):
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
- bad_block.vtx[0].nLockTime = 2 ** 32 - 1
+ bad_block.vtx[0].nLockTime = 2**32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
assert_submitblock(bad_block, 'bad-txns-nonfinal')
@@ -145,7 +151,11 @@ class MiningTest(BitcoinTestFramework):
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1)
bad_block_sn[BLOCK_HEADER_SIZE] += 1
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': bad_block_sn.hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
+ 'data': bad_block_sn.hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
@@ -160,7 +170,7 @@ class MiningTest(BitcoinTestFramework):
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
- bad_block.nTime = 2 ** 31 - 1
+ bad_block.nTime = 2**31 - 1
assert_template(node, bad_block, 'time-too-new')
assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
bad_block.nTime = 0
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py
index d77a744758..0b3738b572 100755
--- a/test/functional/p2p_compactblocks.py
+++ b/test/functional/p2p_compactblocks.py
@@ -305,10 +305,9 @@ class CompactBlocksTest(BitcoinTestFramework):
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
# Now fetch the compact block using a normal non-announce getdata
- with mininode_lock:
- test_node.clear_block_announcement()
- inv = CInv(MSG_CMPCT_BLOCK, block_hash)
- test_node.send_message(msg_getdata([inv]))
+ test_node.clear_block_announcement()
+ inv = CInv(MSG_CMPCT_BLOCK, block_hash)
+ test_node.send_message(msg_getdata([inv]))
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
diff --git a/test/functional/p2p_eviction.py b/test/functional/p2p_eviction.py
new file mode 100755
index 0000000000..b2b3a89aab
--- /dev/null
+++ b/test/functional/p2p_eviction.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+""" Test node eviction logic
+
+When the number of peers has reached the limit of maximum connections,
+the next connecting inbound peer will trigger the eviction mechanism.
+We cannot currently test the parts of the eviction logic that are based on
+address/netgroup since in the current framework, all peers are connecting from
+the same local address. See Issue #14210 for more info.
+Therefore, this test is limited to the remaining protection criteria.
+"""
+
+import time
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.mininode import P2PInterface, P2PDataStore
+from test_framework.util import assert_equal, wait_until
+from test_framework.blocktools import create_block, create_coinbase
+from test_framework.messages import CTransaction, FromHex, msg_pong, msg_tx
+
+
+class SlowP2PDataStore(P2PDataStore):
+ def on_ping(self, message):
+ time.sleep(0.1)
+ self.send_message(msg_pong(message.nonce))
+
+class SlowP2PInterface(P2PInterface):
+ def on_ping(self, message):
+ time.sleep(0.1)
+ self.send_message(msg_pong(message.nonce))
+
+class P2PEvict(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ # The choice of maxconnections=32 results in a maximum of 21 inbound connections
+ # (32 - 10 outbound - 1 feeler). 20 inbound peers are protected from eviction:
+ # 4 by netgroup, 4 that sent us blocks, 4 that sent us transactions and 8 via lowest ping time
+ self.extra_args = [['-maxconnections=32']]
+
+ def run_test(self):
+ protected_peers = set() # peers that we expect to be protected from eviction
+ current_peer = -1
+ node = self.nodes[0]
+ node.generatetoaddress(101, node.get_deterministic_priv_key().address)
+
+ self.log.info("Create 4 peers and protect them from eviction by sending us a block")
+ for _ in range(4):
+ block_peer = node.add_p2p_connection(SlowP2PDataStore())
+ current_peer += 1
+ block_peer.sync_with_ping()
+ best_block = node.getbestblockhash()
+ tip = int(best_block, 16)
+ best_block_time = node.getblock(best_block)['time']
+ block = create_block(tip, create_coinbase(node.getblockcount() + 1), best_block_time + 1)
+ block.solve()
+ block_peer.send_blocks_and_test([block], node, success=True)
+ protected_peers.add(current_peer)
+
+ self.log.info("Create 5 slow-pinging peers, making them eviction candidates")
+ for _ in range(5):
+ node.add_p2p_connection(SlowP2PInterface())
+ current_peer += 1
+
+ self.log.info("Create 4 peers and protect them from eviction by sending us a tx")
+ for i in range(4):
+ txpeer = node.add_p2p_connection(SlowP2PInterface())
+ current_peer += 1
+ txpeer.sync_with_ping()
+
+ prevtx = node.getblock(node.getblockhash(i + 1), 2)['tx'][0]
+ rawtx = node.createrawtransaction(
+ inputs=[{'txid': prevtx['txid'], 'vout': 0}],
+ outputs=[{node.get_deterministic_priv_key().address: 50 - 0.00125}],
+ )
+ sigtx = node.signrawtransactionwithkey(
+ hexstring=rawtx,
+ privkeys=[node.get_deterministic_priv_key().key],
+ prevtxs=[{
+ 'txid': prevtx['txid'],
+ 'vout': 0,
+ 'scriptPubKey': prevtx['vout'][0]['scriptPubKey']['hex'],
+ }],
+ )['hex']
+ txpeer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
+ protected_peers.add(current_peer)
+
+ self.log.info("Create 8 peers and protect them from eviction by having faster pings")
+ for _ in range(8):
+ fastpeer = node.add_p2p_connection(P2PInterface())
+ current_peer += 1
+ wait_until(lambda: "ping" in fastpeer.last_message, timeout=10)
+
+ # Verify the actual minimum ping times by asking the node
+ peerinfo = node.getpeerinfo()
+ pings = {}
+ for i in range(len(peerinfo)):
+ pings[i] = peerinfo[i]['minping'] if 'minping' in peerinfo[i] else 1000000
+ sorted_pings = sorted(pings.items(), key=lambda x: x[1])
+
+ # Usually the 8 fast peers are protected. In the rare case of unreliable pings,
+ # one of the slower peers might have a faster min ping though.
+ for i in range(8):
+ protected_peers.add(sorted_pings[i][0])
+
+ self.log.info("Create peer that triggers the eviction mechanism")
+ node.add_p2p_connection(SlowP2PInterface())
+
+ # One of the non-protected peers must be evicted. We can't be sure which one because
+ # 4 peers are protected via netgroup, which is identical for all peers,
+ # and the eviction mechanism doesn't preserve the order of identical elements.
+ evicted_peers = []
+ for i in range(len(node.p2ps)):
+ if not node.p2ps[i].is_connected:
+ evicted_peers.append(i)
+
+ self.log.info("Test that one peer was evicted")
+ self.log.debug("{} evicted peer: {}".format(len(evicted_peers), set(evicted_peers)))
+ assert_equal(len(evicted_peers), 1)
+
+ self.log.info("Test that no peer expected to be protected was evicted")
+ self.log.debug("{} protected peers: {}".format(len(protected_peers), protected_peers))
+ assert evicted_peers[0] not in protected_peers
+
+if __name__ == '__main__':
+ P2PEvict().main()
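
As a cross-check of the slot arithmetic in the set_test_params comment above, a small standalone sketch (illustrative only, mirroring the numbers stated in that comment):

# Mirrors the numbers given in the comment: 10 outbound connections and 1 feeler
# leave 21 inbound slots; 4 + 4 + 4 + 8 = 20 of them are protected from eviction.
MAX_CONNECTIONS = 32
INBOUND_SLOTS = MAX_CONNECTIONS - 10 - 1
PROTECTED = 4 + 4 + 4 + 8
assert INBOUND_SLOTS == 21
assert INBOUND_SLOTS - PROTECTED == 1  # only one of the test's 21 inbound peers is evictable
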
diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py
index 15955a938c..741da3be31 100755
--- a/test/functional/p2p_filter.py
+++ b/test/functional/p2p_filter.py
@@ -16,13 +16,15 @@ from test_framework.messages import (
msg_filterclear,
msg_filterload,
msg_getdata,
+ msg_mempool,
+ msg_version,
)
-from test_framework.mininode import P2PInterface
+from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.script import MAX_SCRIPT_ELEMENT_SIZE
from test_framework.test_framework import BitcoinTestFramework
-class FilterNode(P2PInterface):
+class P2PBloomFilter(P2PInterface):
# This is a P2SH watch-only wallet
watch_script_pubkey = 'a914ffffffffffffffffffffffffffffffffffffffff87'
# The initial filter (n=10, fp=0.000001) with just the above scriptPubKey added
@@ -34,6 +36,11 @@ class FilterNode(P2PInterface):
nFlags=1,
)
+ def __init__(self):
+ super().__init__()
+ self._tx_received = False
+ self._merkleblock_received = False
+
def on_inv(self, message):
want = msg_getdata()
for i in message.inv:
@@ -46,10 +53,30 @@ class FilterNode(P2PInterface):
self.send_message(want)
def on_merkleblock(self, message):
- self.merkleblock_received = True
+ self._merkleblock_received = True
def on_tx(self, message):
- self.tx_received = True
+ self._tx_received = True
+
+ @property
+ def tx_received(self):
+ with mininode_lock:
+ return self._tx_received
+
+ @tx_received.setter
+ def tx_received(self, value):
+ with mininode_lock:
+ self._tx_received = value
+
+ @property
+ def merkleblock_received(self):
+ with mininode_lock:
+ return self._merkleblock_received
+
+ @merkleblock_received.setter
+ def merkleblock_received(self, value):
+ with mininode_lock:
+ self._merkleblock_received = value
class FilterTest(BitcoinTestFramework):
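
The received-flags of P2PBloomFilter are now exposed as properties so that every read and write happens under mininode_lock: the network thread sets them in on_tx/on_merkleblock while the test thread polls them. A self-contained sketch of the same lock-guarded-flag pattern, using a plain threading.Lock rather than the framework's mininode_lock:

import threading

_lock = threading.Lock()  # stands in for mininode_lock

class FlagHolder:
    def __init__(self):
        self._tx_received = False

    @property
    def tx_received(self):
        with _lock:               # reads take the lock...
            return self._tx_received

    @tx_received.setter
    def tx_received(self, value):
        with _lock:               # ...and so do writes from the test thread
            self._tx_received = value

peer = FlagHolder()
peer.tx_received = True           # setter acquires the lock
assert peer.tx_received           # getter acquires the lock
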
@@ -64,94 +91,143 @@ class FilterTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def test_size_limits(self, filter_node):
+ def test_size_limits(self, filter_peer):
self.log.info('Check that too large filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
- filter_node.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE+1)))
+ filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE+1)))
self.log.info('Check that max size filter is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
- filter_node.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE)))
- filter_node.send_and_ping(msg_filterclear())
+ filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE)))
+ filter_peer.send_and_ping(msg_filterclear())
self.log.info('Check that filter with too many hash functions is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
- filter_node.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
+ filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
self.log.info('Check that filter with max hash functions is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
- filter_node.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
+ filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
# Don't send filterclear until next two filteradd checks are done
self.log.info('Check that max size data element to add to the filter is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
- filter_node.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE)))
+ filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE)))
self.log.info('Check that too large data element to add to the filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
- filter_node.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE+1)))
+ filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE+1)))
- filter_node.send_and_ping(msg_filterclear())
+ filter_peer.send_and_ping(msg_filterclear())
- def run_test(self):
- filter_node = self.nodes[0].add_p2p_connection(FilterNode())
+ def test_msg_mempool(self):
+ self.log.info("Check that a node with bloom filters enabled services p2p mempool messages")
+ filter_peer = P2PBloomFilter()
- self.test_size_limits(filter_node)
+ self.log.debug("Create a tx relevant to the peer before connecting")
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
+ txid = self.nodes[0].sendtoaddress(filter_address, 90)
- self.log.info('Add filtered P2P connection to the node')
- filter_node.send_and_ping(filter_node.watch_filter_init)
- filter_address = self.nodes[0].decodescript(filter_node.watch_script_pubkey)['addresses'][0]
+ self.log.debug("Send a mempool msg after connecting and check that the tx is received")
+ self.nodes[0].add_p2p_connection(filter_peer)
+ filter_peer.send_and_ping(filter_peer.watch_filter_init)
+ self.nodes[0].p2p.send_message(msg_mempool())
+ filter_peer.wait_for_tx(txid)
+
+ def test_frelay_false(self, filter_peer):
+ self.log.info("Check that a node with fRelay set to false does not receive invs until the filter is set")
+ filter_peer.tx_received = False
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
+ self.nodes[0].sendtoaddress(filter_address, 90)
+ # Sync to make sure the reason filter_peer doesn't receive the tx is not p2p delays
+ filter_peer.sync_with_ping()
+ assert not filter_peer.tx_received
+
+ # Clear the mempool so that this transaction does not impact subsequent tests
+ self.nodes[0].generate(1)
+
+ def test_filter(self, filter_peer):
+ # Set the bloomfilter using filterload
+ filter_peer.send_and_ping(filter_peer.watch_filter_init)
+ # If fRelay is not already True, sending filterload sets it to True
+ assert self.nodes[0].getpeerinfo()[0]['relaytxes']
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
self.log.info('Check that we receive merkleblock and tx if the filter matches a tx in a block')
block_hash = self.nodes[0].generatetoaddress(1, filter_address)[0]
txid = self.nodes[0].getblock(block_hash)['tx'][0]
- filter_node.wait_for_merkleblock(block_hash)
- filter_node.wait_for_tx(txid)
+ filter_peer.wait_for_merkleblock(block_hash)
+ filter_peer.wait_for_tx(txid)
self.log.info('Check that we only receive a merkleblock if the filter does not match a tx in a block')
- filter_node.tx_received = False
+ filter_peer.tx_received = False
block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
- filter_node.wait_for_merkleblock(block_hash)
- assert not filter_node.tx_received
+ filter_peer.wait_for_merkleblock(block_hash)
+ assert not filter_peer.tx_received
self.log.info('Check that we do not receive a tx if the filter does not match a mempool tx')
- filter_node.merkleblock_received = False
- filter_node.tx_received = False
+ filter_peer.merkleblock_received = False
+ filter_peer.tx_received = False
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 90)
- filter_node.sync_with_ping()
- filter_node.sync_with_ping()
- assert not filter_node.merkleblock_received
- assert not filter_node.tx_received
+ filter_peer.sync_with_ping()
+ filter_peer.sync_with_ping()
+ assert not filter_peer.merkleblock_received
+ assert not filter_peer.tx_received
- self.log.info('Check that we receive a tx in reply to a mempool msg if the filter matches a mempool tx')
- filter_node.merkleblock_received = False
+ self.log.info('Check that we receive a tx if the filter matches a mempool tx')
+ filter_peer.merkleblock_received = False
txid = self.nodes[0].sendtoaddress(filter_address, 90)
- filter_node.wait_for_tx(txid)
- assert not filter_node.merkleblock_received
+ filter_peer.wait_for_tx(txid)
+ assert not filter_peer.merkleblock_received
self.log.info('Check that after deleting filter all txs get relayed again')
- filter_node.send_and_ping(msg_filterclear())
+ filter_peer.send_and_ping(msg_filterclear())
for _ in range(5):
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 7)
- filter_node.wait_for_tx(txid)
+ filter_peer.wait_for_tx(txid)
self.log.info('Check that request for filtered blocks is ignored if no filter is set')
- filter_node.merkleblock_received = False
- filter_node.tx_received = False
+ filter_peer.merkleblock_received = False
+ filter_peer.tx_received = False
with self.nodes[0].assert_debug_log(expected_msgs=['received getdata']):
block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
- filter_node.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
- filter_node.sync_with_ping()
- assert not filter_node.merkleblock_received
- assert not filter_node.tx_received
+ filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
+ filter_peer.sync_with_ping()
+ assert not filter_peer.merkleblock_received
+ assert not filter_peer.tx_received
self.log.info('Check that sending "filteradd" if no filter is set is treated as misbehavior')
with self.nodes[0].assert_debug_log(['Misbehaving']):
- filter_node.send_and_ping(msg_filteradd(data=b'letsmisbehave'))
+ filter_peer.send_and_ping(msg_filteradd(data=b'letsmisbehave'))
self.log.info("Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed")
- filter_node.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
- filter_node.send_and_ping(msg_filteradd(data=b'letstrytocrashthisnode'))
+ filter_peer.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
+ filter_peer.send_and_ping(msg_filteradd(data=b'letstrytocrashthisnode'))
+ self.nodes[0].disconnect_p2ps()
+
+ def run_test(self):
+ filter_peer = self.nodes[0].add_p2p_connection(P2PBloomFilter())
+ self.log.info('Test filter size limits')
+ self.test_size_limits(filter_peer)
+
+ self.log.info('Test BIP 37 for a node with fRelay = True (default)')
+ self.test_filter(filter_peer)
+ self.nodes[0].disconnect_p2ps()
+
+ self.log.info('Test BIP 37 for a node with fRelay = False')
+ # Add peer but do not send version yet
+ filter_peer_without_nrelay = self.nodes[0].add_p2p_connection(P2PBloomFilter(), send_version=False, wait_for_verack=False)
+ # Send version with fRelay=False
+ filter_peer_without_nrelay.wait_until(lambda: filter_peer_without_nrelay.is_connected, timeout=10)
+ version_without_fRelay = msg_version()
+ version_without_fRelay.nRelay = 0
+ filter_peer_without_nrelay.send_message(version_without_fRelay)
+ filter_peer_without_nrelay.wait_for_verack()
+ assert not self.nodes[0].getpeerinfo()[0]['relaytxes']
+ self.test_frelay_false(filter_peer_without_nrelay)
+ self.test_filter(filter_peer_without_nrelay)
+
+ self.test_msg_mempool()
if __name__ == '__main__':
diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py
index 81302374c9..d99bc621de 100755
--- a/test/functional/p2p_invalid_messages.py
+++ b/test/functional/p2p_invalid_messages.py
@@ -3,9 +3,6 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid network messages."""
-import asyncio
-import struct
-import sys
from test_framework.messages import (
CBlockHeader,
@@ -18,12 +15,17 @@ from test_framework.messages import (
ser_string,
)
from test_framework.mininode import (
- NetworkThread,
P2PDataStore,
P2PInterface,
)
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ wait_until,
+)
+MSG_LIMIT = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH
+VALID_DATA_LIMIT = MSG_LIMIT - 5 # Account for the 5-byte length prefix
class msg_unrecognized:
"""Nonsensical message. Modeled after similar types in test_framework.messages."""
@@ -46,128 +48,40 @@ class InvalidMessagesTest(BitcoinTestFramework):
self.setup_clean_chain = True
def run_test(self):
- """
- . Test msg header
- 0. Send a bunch of large (4MB) messages of an unrecognized type. Check to see
- that it isn't an effective DoS against the node.
-
- 1. Send an oversized (4MB+) message and check that we're disconnected.
-
- 2. Send a few messages with an incorrect data size in the header, ensure the
- messages are ignored.
- """
+ self.test_buffer()
self.test_magic_bytes()
self.test_checksum()
self.test_size()
self.test_msgtype()
self.test_large_inv()
+ self.test_resource_exhaustion()
- node = self.nodes[0]
- self.node = node
- node.add_p2p_connection(P2PDataStore())
- conn2 = node.add_p2p_connection(P2PDataStore())
-
- msg_limit = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH
- valid_data_limit = msg_limit - 5 # Account for the 4-byte length prefix
-
- #
- # 0.
- #
- # Send as large a message as is valid, ensure we aren't disconnected but
- # also can't exhaust resources.
- #
- msg_at_size = msg_unrecognized(str_data="b" * valid_data_limit)
- assert len(msg_at_size.serialize()) == msg_limit
-
- self.log.info("Sending a bunch of large, junk messages to test memory exhaustion. May take a bit...")
-
- # Run a bunch of times to test for memory exhaustion.
- for _ in range(80):
- node.p2p.send_message(msg_at_size)
-
- # Check that, even though the node is being hammered by nonsense from one
- # connection, it can still service other peers in a timely way.
- for _ in range(20):
- conn2.sync_with_ping(timeout=2)
-
- # Peer 1, despite serving up a bunch of nonsense, should still be connected.
- self.log.info("Waiting for node to drop junk messages.")
- node.p2p.sync_with_ping(timeout=400)
- assert node.p2p.is_connected
-
- #
- # 1.
- #
- # Send an oversized message, ensure we're disconnected.
- #
- # Under macOS this test is skipped due to an unexpected error code
- # returned from the closing socket which python/asyncio does not
- # yet know how to handle.
- #
- if sys.platform != 'darwin':
- msg_over_size = msg_unrecognized(str_data="b" * (valid_data_limit + 1))
- assert len(msg_over_size.serialize()) == (msg_limit + 1)
-
- # An unknown message type (or *any* message type) over
- # MAX_PROTOCOL_MESSAGE_LENGTH should result in a disconnect.
- node.p2p.send_message(msg_over_size)
- node.p2p.wait_for_disconnect(timeout=4)
-
- node.disconnect_p2ps()
- conn = node.add_p2p_connection(P2PDataStore())
- conn.wait_for_verack()
- else:
- self.log.info("Skipping test p2p_invalid_messages/1 (oversized message) under macOS")
-
- #
- # 2.
- #
- # Send messages with an incorrect data size in the header.
- #
- actual_size = 100
- msg = msg_unrecognized(str_data="b" * actual_size)
-
- # TODO: handle larger-than cases. I haven't been able to pin down what behavior to expect.
- for wrong_size in (2, 77, 78, 79):
- self.log.info("Sending a message with incorrect size of {}".format(wrong_size))
-
- # Unmodified message should submit okay.
- node.p2p.send_and_ping(msg)
-
- # A message lying about its data size results in a disconnect when the incorrect
- # data size is less than the actual size.
- #
- # TODO: why does behavior change at 78 bytes?
- #
- node.p2p.send_raw_message(self._tweak_msg_data_size(msg, wrong_size))
-
- # For some reason unknown to me, we sometimes have to push additional data to the
- # peer in order for it to realize a disconnect.
- try:
- node.p2p.send_message(msg_ping(nonce=123123))
- except IOError:
- pass
-
- node.p2p.wait_for_disconnect(timeout=10)
- node.disconnect_p2ps()
- node.add_p2p_connection(P2PDataStore())
-
- # Node is still up.
- conn = node.add_p2p_connection(P2PDataStore())
+ def test_buffer(self):
+ self.log.info("Test message with header split across two buffers, should be received")
+ conn = self.nodes[0].add_p2p_connection(P2PDataStore())
+ # Create valid message
+ msg = conn.build_message(msg_ping(nonce=12345))
+ cut_pos = 12 # Chosen at an arbitrary position within the header
+ # Send message in two pieces
+ before = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ conn.send_raw_message(msg[:cut_pos])
+ # Wait until node has processed the first half of the message
+ wait_until(lambda: int(self.nodes[0].getnettotals()['totalbytesrecv']) != before)
+ middle = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ # If this assert fails, we've hit an unlikely race
+ # where the test framework sent a message in between the two halves
+ assert_equal(middle, before + cut_pos)
+ conn.send_raw_message(msg[cut_pos:])
+ conn.sync_with_ping(timeout=1)
+ self.nodes[0].disconnect_p2ps()
def test_magic_bytes(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
-
- async def swap_magic_bytes():
- conn._on_data = lambda: None # Need to ignore all incoming messages from now, since they come with "invalid" magic bytes
- conn.magic_bytes = b'\x00\x11\x22\x32'
-
- # Call .result() to block until the atomic swap is complete, otherwise
- # we might run into races later on
- asyncio.run_coroutine_threadsafe(swap_magic_bytes(), NetworkThread.network_event_loop).result()
-
- with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART ping']):
- conn.send_message(msg_ping(nonce=0xff))
+ with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART badmsg']):
+ msg = conn.build_message(msg_unrecognized(str_data="d"))
+ # modify magic bytes
+ msg = b'\xff' * 4 + msg[4:]
+ conn.send_raw_message(msg)
conn.wait_for_disconnect(timeout=1)
self.nodes[0].disconnect_p2ps()
@@ -175,11 +89,8 @@ class InvalidMessagesTest(BitcoinTestFramework):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['CHECKSUM ERROR (badmsg, 2 bytes), expected 78df0a04 was ffffffff']):
msg = conn.build_message(msg_unrecognized(str_data="d"))
- cut_len = (
- 4 + # magic
- 12 + # msgtype
- 4 #len
- )
+ # Checksum is after start bytes (4B), message type (12B), len (4B)
+ cut_len = 4 + 12 + 4
# modify checksum
msg = msg[:cut_len] + b'\xff' * 4 + msg[cut_len + 4:]
self.nodes[0].p2p.send_raw_message(msg)
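
The cut_pos=12 in test_buffer and the cut_len of 4 + 12 + 4 = 20 here both follow from the 24-byte v1 P2P message header: 4 bytes of network magic, a 12-byte null-padded message type, a 4-byte little-endian payload length and a 4-byte checksum (the first four bytes of the double-SHA256 of the payload). A minimal sketch that assembles such a message; the magic value is an assumption (regtest's start string), not something taken from this diff:

import hashlib

def build_p2p_message(msgtype, payload, magic=b'\xfa\xbf\xb5\xda'):
    """Assemble a v1 P2P message: 24-byte header followed by the payload (sketch)."""
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    return (magic                                 # bytes 0-3: network start string
            + msgtype.ljust(12, b'\x00')          # bytes 4-15: message type
            + len(payload).to_bytes(4, 'little')  # bytes 16-19: payload length
            + checksum                            # bytes 20-23: payload checksum
            + payload)

msg = build_p2p_message(b'badmsg', b'\x01d')      # payload: CompactSize(1) + b'd'
assert len(msg) == 24 + 2
# The checksum starts at offset 4 + 12 + 4 = 20, the cut_len used above;
# cut_pos=12 in test_buffer splits the stream inside the message-type field.
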
@@ -189,13 +100,9 @@ class InvalidMessagesTest(BitcoinTestFramework):
def test_size(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['']):
- msg = conn.build_message(msg_unrecognized(str_data="d"))
- cut_len = (
- 4 + # magic
- 12 # msgtype
- )
- # modify len to MAX_SIZE + 1
- msg = msg[:cut_len] + struct.pack("<I", 0x02000000 + 1) + msg[cut_len + 4:]
+ # Create a message with oversized payload
+ msg = msg_unrecognized(str_data="d" * (VALID_DATA_LIMIT + 1))
+ msg = conn.build_message(msg)
self.nodes[0].p2p.send_raw_message(msg)
conn.wait_for_disconnect(timeout=1)
self.nodes[0].disconnect_p2ps()
@@ -214,36 +121,39 @@ class InvalidMessagesTest(BitcoinTestFramework):
def test_large_inv(self):
conn = self.nodes[0].add_p2p_connection(P2PInterface())
- with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (0 -> 20): message inv size() = 50001']):
+ with self.nodes[0].assert_debug_log(['Misbehaving', '(0 -> 20): message inv size() = 50001']):
msg = msg_inv([CInv(MSG_TX, 1)] * 50001)
conn.send_and_ping(msg)
- with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (20 -> 40): message getdata size() = 50001']):
+ with self.nodes[0].assert_debug_log(['Misbehaving', '(20 -> 40): message getdata size() = 50001']):
msg = msg_getdata([CInv(MSG_TX, 1)] * 50001)
conn.send_and_ping(msg)
- with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (40 -> 60): headers message size = 2001']):
+ with self.nodes[0].assert_debug_log(['Misbehaving', '(40 -> 60): headers message size = 2001']):
msg = msg_headers([CBlockHeader()] * 2001)
conn.send_and_ping(msg)
self.nodes[0].disconnect_p2ps()
- def _tweak_msg_data_size(self, message, wrong_size):
- """
- Return a raw message based on another message but with an incorrect data size in
- the message header.
- """
- raw_msg = self.node.p2p.build_message(message)
-
- bad_size_bytes = struct.pack("<I", wrong_size)
- num_header_bytes_before_size = 4 + 12
-
- # Replace the correct data size in the message with an incorrect one.
- raw_msg_with_wrong_size = (
- raw_msg[:num_header_bytes_before_size] +
- bad_size_bytes +
- raw_msg[(num_header_bytes_before_size + len(bad_size_bytes)):]
- )
- assert len(raw_msg) == len(raw_msg_with_wrong_size)
-
- return raw_msg_with_wrong_size
+ def test_resource_exhaustion(self):
+ conn = self.nodes[0].add_p2p_connection(P2PDataStore())
+ conn2 = self.nodes[0].add_p2p_connection(P2PDataStore())
+ msg_at_size = msg_unrecognized(str_data="b" * VALID_DATA_LIMIT)
+ assert len(msg_at_size.serialize()) == MSG_LIMIT
+
+ self.log.info("Sending a bunch of large, junk messages to test memory exhaustion. May take a bit...")
+
+ # Run a bunch of times to test for memory exhaustion.
+ for _ in range(80):
+ conn.send_message(msg_at_size)
+
+ # Check that, even though the node is being hammered by nonsense from one
+ # connection, it can still service other peers in a timely way.
+ for _ in range(20):
+ conn2.sync_with_ping(timeout=2)
+
+ # Peer 1, despite being served up a bunch of nonsense, should still be connected.
+ self.log.info("Waiting for node to drop junk messages.")
+ conn.sync_with_ping(timeout=400)
+ assert conn.is_connected
+ self.nodes[0].disconnect_p2ps()
if __name__ == '__main__':
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index 157af68203..3b3dbd08f2 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -132,9 +132,6 @@ class P2PLeakTest(BitcoinTestFramework):
self.nodes[0].disconnect_p2ps()
- # Wait until all connections are closed
- wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
-
# Make sure no unexpected messages came in
assert no_version_bannode.unexpected_msg == False
assert no_version_idlenode.unexpected_msg == False
diff --git a/test/functional/p2p_mempool.py b/test/functional/p2p_mempool.py
deleted file mode 100755
index a8fcb181e6..0000000000
--- a/test/functional/p2p_mempool.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2018 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test p2p mempool message.
-
-Test that nodes are disconnected if they send mempool messages when bloom
-filters are not enabled.
-"""
-
-from test_framework.messages import msg_mempool
-from test_framework.mininode import P2PInterface
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-
-class P2PMempoolTests(BitcoinTestFramework):
- def set_test_params(self):
- self.setup_clean_chain = True
- self.num_nodes = 1
- self.extra_args = [["-peerbloomfilters=0"]]
-
- def run_test(self):
- # Add a p2p connection
- self.nodes[0].add_p2p_connection(P2PInterface())
-
- #request mempool
- self.nodes[0].p2p.send_message(msg_mempool())
- self.nodes[0].p2p.wait_for_disconnect()
-
- #mininode must be disconnected at this point
- assert_equal(len(self.nodes[0].getpeerinfo()), 0)
-
-if __name__ == '__main__':
- P2PMempoolTests().main()
diff --git a/test/functional/p2p_nobloomfilter_messages.py b/test/functional/p2p_nobloomfilter_messages.py
new file mode 100755
index 0000000000..accc5dc23c
--- /dev/null
+++ b/test/functional/p2p_nobloomfilter_messages.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test invalid p2p messages for nodes with bloom filters disabled.
+
+Test that, when bloom filters are not enabled, peers are disconnected if:
+1. They send a p2p mempool message
+2. They send a p2p filterload message
+3. They send a p2p filteradd message
+4. They send a p2p filterclear message
+"""
+
+from test_framework.messages import msg_mempool, msg_filteradd, msg_filterload, msg_filterclear
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class P2PNoBloomFilterMessages(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [["-peerbloomfilters=0"]]
+
+ def test_message_causes_disconnect(self, message):
+ """Add a p2p connection that sends a message and check that it disconnects."""
+ peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer.send_message(message)
+ peer.wait_for_disconnect()
+ assert_equal(self.nodes[0].getconnectioncount(), 0)
+
+ def run_test(self):
+ self.log.info("Test that peer is disconnected if it sends mempool message")
+ self.test_message_causes_disconnect(msg_mempool())
+
+ self.log.info("Test that peer is disconnected if it sends filterload message")
+ self.test_message_causes_disconnect(msg_filterload())
+
+ self.log.info("Test that peer is disconnected if it sends filteradd message")
+ self.test_message_causes_disconnect(msg_filteradd(data=b'\xcc'))
+
+ self.log.info("Test that peer is disconnected if it sends a filterclear message")
+ self.test_message_causes_disconnect(msg_filterclear())
+
+
+if __name__ == '__main__':
+ P2PNoBloomFilterMessages().main()
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index 9c8c36c89e..a2f6ea538c 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -42,9 +42,6 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
def disconnect_all(self):
disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[1], 0)
- disconnect_nodes(self.nodes[2], 1)
- disconnect_nodes(self.nodes[2], 0)
disconnect_nodes(self.nodes[0], 2)
disconnect_nodes(self.nodes[1], 2)
@@ -86,7 +83,6 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
assert_equal(node1.firstAddrnServices, expected_services)
self.nodes[0].disconnect_p2ps()
- node1.wait_for_disconnect()
# connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
# because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py
index 56e9ecfcc2..3c81a4a4e2 100755
--- a/test/functional/rpc_createmultisig.py
+++ b/test/functional/rpc_createmultisig.py
@@ -3,21 +3,21 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multisig RPCs"""
+import binascii
+import decimal
+import itertools
+import json
+import os
from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create, drop_origins
+from test_framework.key import ECPubKey, ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
-from test_framework.key import ECPubKey, ECKey, bytes_to_wif
-
-import binascii
-import decimal
-import itertools
-import json
-import os
+from test_framework.wallet_util import bytes_to_wif
class RpcCreateMultiSigTest(BitcoinTestFramework):
def set_test_params(self):
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 4bc4913bda..57c8f511ac 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -271,7 +271,11 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ # add_inputs is enabled by default
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
@@ -299,7 +303,10 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True})
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
@@ -330,7 +337,10 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True})
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
diff --git a/test/functional/rpc_help.py b/test/functional/rpc_help.py
index 027ae368e7..9b981b864e 100755
--- a/test/functional/rpc_help.py
+++ b/test/functional/rpc_help.py
@@ -18,6 +18,8 @@ class HelpRpcTest(BitcoinTestFramework):
def run_test(self):
self.test_categories()
self.dump_help()
+ if self.is_wallet_compiled():
+ self.wallet_help()
def test_categories(self):
node = self.nodes[0]
@@ -53,6 +55,11 @@ class HelpRpcTest(BitcoinTestFramework):
# Make sure the node can generate the help at runtime without crashing
f.write(self.nodes[0].help(call))
+ def wallet_help(self):
+ assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
+ self.restart_node(0, extra_args=['-nowallet=1'])
+ assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
+
if __name__ == '__main__':
HelpRpcTest().main()
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index 51d136d26a..c20f813d14 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -8,6 +8,7 @@
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
+ assert_approx,
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
@@ -43,10 +44,8 @@ class PSBTTest(BitcoinTestFramework):
online_node = self.nodes[1]
# Disconnect offline node from others
+ # Topology of test network is linear, so this one call is enough
disconnect_nodes(offline_node, 1)
- disconnect_nodes(online_node, 0)
- disconnect_nodes(offline_node, 2)
- disconnect_nodes(mining_node, 0)
# Create watchonly on online_node
online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
@@ -87,6 +86,13 @@ class PSBTTest(BitcoinTestFramework):
# Create and fund a raw tx for sending 10 BTC
psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt']
+ # If inputs are specified, do not automatically add more:
+ utxo1 = self.nodes[0].listunspent()[0]
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[0].walletcreatefundedpsbt, [{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90})
+
+ psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt']
+ assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2)
+
# Node 1 should not be able to add anything to it but still return the psbtx same as before
psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt']
assert_equal(psbtx1, psbtx)
@@ -154,13 +160,13 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
# feeRate of 0.1 BTC / KB produces a total fee slightly below -maxtxfee (~0.05280000):
- res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1})
- assert_greater_than(res["fee"], 0.05)
- assert_greater_than(0.06, res["fee"])
+ res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1, "add_inputs": True})
+ assert_approx(res["fee"], 0.055, 0.005)
# feeRate of 10 BTC / KB produces a total fee well above -maxtxfee
# previously this was silently capped at -maxtxfee
- assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10, "add_inputs": True})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():1}, 0, {"feeRate": 10, "add_inputs": False})
# partially sign multisig things with node 1
psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt']
@@ -241,7 +247,7 @@ class PSBTTest(BitcoinTestFramework):
# replaceable arg
block_height = self.nodes[0].getblockcount()
unspent = self.nodes[0].listunspent()[0]
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False}, False)
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False, "add_inputs": True}, False)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -249,7 +255,7 @@ class PSBTTest(BitcoinTestFramework):
assert_equal(decoded_psbt["tx"]["locktime"], block_height+2)
# Same construction with only locktime set and RBF explicitly enabled
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True}, True)
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True, "add_inputs": True}, True)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -257,7 +263,7 @@ class PSBTTest(BitcoinTestFramework):
assert_equal(decoded_psbt["tx"]["locktime"], block_height)
# Same construction without optional arguments
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -266,7 +272,7 @@ class PSBTTest(BitcoinTestFramework):
# Same construction without optional arguments, for a node with -walletrbf=0
unspent1 = self.nodes[1].listunspent()[0]
- psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height)
+ psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height, {"add_inputs": True})
decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -277,7 +283,7 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
# Regression test for 14473 (mishandling of already-signed witness transaction):
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True})
complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"])
assert_equal(complete_psbt, double_processed_psbt)
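The PSBT hunks above add "add_inputs": True because automatic input selection became opt-in whenever inputs are preselected. A minimal sketch of the opt-in calling pattern, assuming a funded regtest node reachable through the framework's RPC proxy (fund_psbt_with_extra_inputs, node and dest_addr are illustrative names, not part of the test suite):

def fund_psbt_with_extra_inputs(node, dest_addr):
    # Preselect one UTXO but deliberately ask for more than it holds, so the
    # wallet has to add further inputs; "add_inputs": True allows that.
    utxo = node.listunspent()[0]
    res = node.walletcreatefundedpsbt(
        [{"txid": utxo["txid"], "vout": utxo["vout"]}],  # preselected input
        [{dest_addr: utxo["amount"] + 1}],               # outputs exceed the input
        0,                                               # locktime
        {"add_inputs": True},                            # let coin selection top up
    )
    # decodepsbt exposes the selected inputs, sequence numbers and locktime.
    return node.decodepsbt(res["psbt"])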
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index f2d6fba4a6..912c0ca978 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -8,8 +8,6 @@ keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
import random
-from .address import byte_to_base58
-
def modinv(a, n):
"""Compute the modular inverse of a modulo n
@@ -386,14 +384,3 @@ class ECKey():
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
-
-def bytes_to_wif(b, compressed=True):
- if compressed:
- b += b'\x01'
- return byte_to_base58(b, 239)
-
-def generate_wif_key():
- # Makes a WIF privkey for imports
- k = ECKey()
- k.generate()
- return bytes_to_wif(k.get_bytes(), k.is_compressed)
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 45063aaff2..b6c37bc7e0 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -492,7 +492,7 @@ class P2PInterface(P2PConnection):
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface.
# This lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
-mininode_lock = threading.RLock()
+mininode_lock = threading.Lock()
class NetworkThread(threading.Thread):
@@ -658,8 +658,6 @@ class P2PTxInvStore(P2PInterface):
# save txid
self.tx_invs_received[i.hash] += 1
- super().on_inv(message)
-
def get_invs(self):
with mininode_lock:
return list(self.tx_invs_received.keys())
@@ -669,6 +667,6 @@ class P2PTxInvStore(P2PInterface):
The mempool should mark unbroadcast=False for these transactions.
"""
# Wait until invs have been received (and getdatas sent) for each txid.
- self.wait_until(lambda: set(self.get_invs()) == set([int(tx, 16) for tx in txns]), timeout)
+ self.wait_until(lambda: set(self.tx_invs_received.keys()) == set([int(tx, 16) for tx in txns]), timeout)
# Flush messages and wait for the getdatas to be processed
self.sync_with_ping()
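mininode_lock is now a plain (non-reentrant) threading.Lock, which is why the wait_for_broadcast predicate reads self.tx_invs_received directly: P2PInterface.wait_until evaluates the predicate while already holding mininode_lock, and get_invs() would try to take it again. A small standalone illustration of the difference, unrelated to the framework itself:

import threading

r = threading.RLock()
with r:
    with r:  # fine: an RLock may be re-acquired by the thread that owns it
        print("re-acquired RLock")

l = threading.Lock()
with l:
    # A plain Lock is not reentrant; a blocking re-acquire here would deadlock,
    # so probe it without blocking instead.
    print("second acquire of Lock succeeded?", l.acquire(blocking=False))  # False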
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 8e0da5295d..c9fad91481 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -452,7 +452,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
assert_equal(len(binary), num_nodes)
assert_equal(len(binary_cli), num_nodes)
for i in range(num_nodes):
- self.nodes.append(TestNode(
+ test_node_i = TestNode(
i,
get_datadir_path(self.options.tmpdir, i),
chain=self.chain,
@@ -470,7 +470,15 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
descriptors=self.options.descriptors,
- ))
+ )
+ self.nodes.append(test_node_i)
+ if not test_node_i.version_is_at_least(170000):
+ # adjust bitcoin.conf, because pre-0.17 bitcoind does not recognize config sections

+ conf_file = test_node_i.bitcoinconf
+ with open(conf_file, 'r', encoding='utf8') as conf:
+ conf_data = conf.read()
+ with open(conf_file, 'w', encoding='utf8') as conf:
+ conf.write(conf_data.replace('[regtest]', ''))
def start_node(self, i, *args, **kwargs):
"""Start a bitcoind"""
@@ -531,7 +539,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
self.sync_all(self.nodes[:2])
self.sync_all(self.nodes[2:])
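adjust_bitcoin_conf_for_pre_17 is gone from util.py; add_nodes now rewrites bitcoin.conf itself for any node whose version predates 0.17, since older bitcoind releases do not recognize config sections such as [regtest]. A standalone sketch of that rewrite on a throwaway file (strip_regtest_section and the file contents are made up for illustration):

import os
import tempfile

def strip_regtest_section(conf_path):
    # Pre-0.17 bitcoind does not understand section headers, so drop '[regtest]'
    # and let the options below it apply globally.
    with open(conf_path, 'r', encoding='utf8') as f:
        data = f.read()
    with open(conf_path, 'w', encoding='utf8') as f:
        f.write(data.replace('[regtest]', ''))

with tempfile.TemporaryDirectory() as tmpdir:
    conf = os.path.join(tmpdir, 'bitcoin.conf')
    with open(conf, 'w', encoding='utf8') as f:
        f.write('regtest=1\n[regtest]\nrpcport=18443\n')
    strip_regtest_section(conf)
    with open(conf, encoding='utf8') as f:
        print(f.read())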
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index ebc0501e11..66bb2c89b5 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -23,6 +23,7 @@ import sys
from .authproxy import JSONRPCException
from .descriptors import descsum_create
+from .messages import MY_SUBVERSION
from .util import (
MAX_NODES,
append_config,
@@ -549,11 +550,16 @@ class TestNode():
assert self.p2ps, self._node_msg("No p2p connection")
return self.p2ps[0]
+ def num_connected_mininodes(self):
+ """Return number of test framework p2p connections to the node."""
+ return len([peer for peer in self.getpeerinfo() if peer['subver'] == MY_SUBVERSION.decode("utf-8")])
+
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
+ wait_until(lambda: self.num_connected_mininodes() == 0)
class TestNodeCLIAttr:
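disconnect_p2ps now waits until the node itself reports no remaining framework peers, identified by the mininode user agent in getpeerinfo. A sketch of performing the same check from a test, assuming the usual framework imports are on the path (framework_peers is an illustrative helper, not part of the framework):

from test_framework.messages import MY_SUBVERSION

def framework_peers(node):
    # getpeerinfo reports 'subver' as a string, so decode the bytes constant.
    subver = MY_SUBVERSION.decode("utf-8")
    return [peer['id'] for peer in node.getpeerinfo() if peer['subver'] == subver]

# Typical use after tearing down connections in a test:
#   node.disconnect_p2ps()
#   assert not framework_peers(node)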
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 6dfea7efd2..17b2cbb971 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -25,6 +25,7 @@ logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
+
def assert_approx(v, vexp, vspan=0.00001):
"""Assert that `v` is within `vspan` of `vexp`"""
if v < vexp - vspan:
@@ -32,6 +33,7 @@ def assert_approx(v, vexp, vspan=0.00001):
if v > vexp + vspan:
raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
+
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
@@ -41,21 +43,26 @@ def assert_fee_amount(fee, tx_size, fee_per_kB):
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
+
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
+
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
+
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
+
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
+
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
@@ -71,6 +78,7 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
else:
raise AssertionError("No exception raised")
+
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
@@ -95,6 +103,7 @@ def assert_raises_process_error(returncode, output, fun, *args, **kwds):
else:
raise AssertionError("No exception raised")
+
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
@@ -113,6 +122,7 @@ def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
+
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
@@ -134,22 +144,22 @@ def try_rpc(code, message, fun, *args, **kwds):
else:
return False
+
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
- raise AssertionError(
- "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
+ raise AssertionError("Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
+
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
- raise AssertionError(
- "String of length %d expected; got %d" % (length, len(string)))
+ raise AssertionError("String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
- raise AssertionError(
- "String %r contains invalid characters for a hash." % string)
+ raise AssertionError("String %r contains invalid characters for a hash." % string)
+
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
@@ -180,9 +190,11 @@ def assert_array_result(object_array, to_match, expected, should_not_find=False)
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
+
# Utility functions
###################
+
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
@@ -190,11 +202,13 @@ def check_json_precision():
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
+
def EncodeDecimal(o):
if isinstance(o, Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
+
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
@@ -202,12 +216,15 @@ def count_bytes(hex_string):
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
+
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
+
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
@@ -235,6 +252,7 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N
raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
+
# RPC/P2P connection constants and functions
############################################
@@ -250,6 +268,7 @@ class PortSeed:
# Must be initialized with a unique integer for each process
n = None
+
def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
Args:
@@ -271,18 +290,20 @@ def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
- coverage_logfile = coverage.get_filename(
- coveragedir, node_number) if coveragedir else None
+ coverage_logfile = coverage.get_filename(coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
+
def p2p_port(n):
assert n <= MAX_NODES
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
+
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
+
def rpc_url(datadir, i, chain, rpchost):
rpc_u, rpc_p = get_auth_cookie(datadir, chain)
host = '127.0.0.1'
@@ -295,9 +316,11 @@ def rpc_url(datadir, i, chain, rpchost):
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
+
# Node functions
################
+
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
@@ -327,21 +350,17 @@ def initialize_datadir(dirname, n, chain):
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
-def adjust_bitcoin_conf_for_pre_17(conf_file):
- with open(conf_file,'r', encoding='utf8') as conf:
- conf_data = conf.read()
- with open(conf_file, 'w', encoding='utf8') as conf:
- conf_data_changed = conf_data.replace('[regtest]', '')
- conf.write(conf_data_changed)
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
+
def append_config(datadir, options):
with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f:
for option in options:
f.write(option + "\n")
+
def get_auth_cookie(datadir, chain):
user = None
password = None
@@ -366,33 +385,52 @@ def get_auth_cookie(datadir, chain):
raise ValueError("No RPC credentials")
return user, password
+
# If a cookie file exists in the given datadir, delete it.
def delete_cookie_file(datadir, chain):
if os.path.isfile(os.path.join(datadir, chain, ".cookie")):
logger.debug("Deleting leftover cookie file")
os.remove(os.path.join(datadir, chain, ".cookie"))
+
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getblockchaininfo()['softforks'][key]['active']
+
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
+
def disconnect_nodes(from_connection, node_num):
- for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
+ def get_peer_ids():
+ result = []
+ for peer in from_connection.getpeerinfo():
+ if "testnode{}".format(node_num) in peer['subver']:
+ result.append(peer['id'])
+ return result
+
+ peer_ids = get_peer_ids()
+ if not peer_ids:
+ logger.warning("disconnect_nodes: {} and {} were not connected".format(
+ from_connection.index,
+ node_num,
+ ))
+ return
+ for peer_id in peer_ids:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
- if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
+ if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
- wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
+ wait_until(lambda: not get_peer_ids(), timeout=5)
+
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
@@ -465,6 +503,7 @@ def find_output(node, txid, amount, *, blockhash=None):
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
+
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
@@ -482,6 +521,7 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1):
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
+
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
@@ -499,6 +539,7 @@ def make_change(from_node, amount_in, amount_out, fee):
outputs[from_node.getnewaddress()] = change
return outputs
+
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
@@ -518,6 +559,7 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
return (txid, signresult["hex"], fee)
+
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
@@ -550,6 +592,7 @@ def create_confirmed_utxos(fee, node, count):
assert len(utxos) >= count
return utxos
+
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
@@ -569,6 +612,7 @@ def gen_return_txouts():
txouts.append(txout)
return txouts
+
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
@@ -592,6 +636,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
txids.append(txid)
return txids
+
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
# and 14 of them is close to the 1MB block limit
@@ -605,6 +650,7 @@ def mine_large_block(node, utxos=None):
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
+
def find_vout_for_address(node, txid, addr):
"""
Locate the vout index of the given transaction sending to the
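The rewritten disconnect_nodes returns early with a warning when no matching peers exist and otherwise waits on wait_until(lambda: not get_peer_ids(), timeout=5). For readers unfamiliar with that helper, a stripped-down version of the polling pattern it implements (poll_until is hypothetical, not the framework's wait_until):

import time

def poll_until(predicate, *, timeout=5.0, interval=0.05):
    # Re-evaluate the predicate until it is truthy or the timeout expires.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return
        time.sleep(interval)
    raise AssertionError("predicate not true after {} seconds".format(timeout))

# e.g. poll_until(lambda: not get_peer_ids(), timeout=5)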
diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py
index 1b6686ff45..b9c0fb6691 100755
--- a/test/functional/test_framework/wallet_util.py
+++ b/test/functional/test_framework/wallet_util.py
@@ -6,6 +6,7 @@
from collections import namedtuple
from test_framework.address import (
+ byte_to_base58,
key_to_p2pkh,
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
@@ -13,10 +14,7 @@ from test_framework.address import (
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
-from test_framework.key import (
- bytes_to_wif,
- ECKey,
-)
+from test_framework.key import ECKey
from test_framework.script import (
CScript,
OP_0,
@@ -120,3 +118,14 @@ def test_address(node, address, **kwargs):
raise AssertionError("key {} unexpectedly returned in getaddressinfo.".format(key))
elif addr_info[key] != value:
raise AssertionError("key {} value {} did not match expected value {}".format(key, addr_info[key], value))
+
+def bytes_to_wif(b, compressed=True):
+ if compressed:
+ b += b'\x01'
+ return byte_to_base58(b, 239)
+
+def generate_wif_key():
+ # Makes a WIF privkey for imports
+ k = ECKey()
+ k.generate()
+ return bytes_to_wif(k.get_bytes(), k.is_compressed)
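bytes_to_wif now lives beside the other wallet helpers and delegates the base58 step to byte_to_base58. For reference, a self-contained sketch of the same WIF encoding (testnet/regtest version byte 239, optional 0x01 suffix for compressed keys, double-SHA256 checksum) using only the standard library; wif_from_privkey is an illustrative name:

import hashlib

B58_CHARS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def wif_from_privkey(privkey32, compressed=True, version=239):
    payload = bytes([version]) + privkey32 + (b'\x01' if compressed else b'')
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    value = int.from_bytes(payload + checksum, 'big')
    encoded = ''
    while value > 0:
        value, rem = divmod(value, 58)
        encoded = B58_CHARS[rem] + encoded
    # Each leading zero byte of the payload is encoded as the digit '1'.
    for byte in payload + checksum:
        if byte != 0:
            break
        encoded = '1' + encoded
    return encoded

print(wif_from_privkey(bytes(range(1, 33))))  # throwaway 32-byte key, illustration only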
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 9cb1dee0d9..41f9bde183 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -164,7 +164,7 @@ BASE_SCRIPTS = [
'wallet_keypool.py',
'wallet_keypool.py --descriptors',
'wallet_descriptor.py',
- 'p2p_mempool.py',
+ 'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
@@ -190,6 +190,7 @@ BASE_SCRIPTS = [
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'p2p_leak_tx.py',
+ 'p2p_eviction.py',
'rpc_signmessage.py',
'rpc_generateblock.py',
'wallet_balance.py',
@@ -232,6 +233,7 @@ BASE_SCRIPTS = [
'feature_includeconf.py',
'feature_asmap.py',
'mempool_unbroadcast.py',
+ 'mempool_compatibility.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'rpc_scantxoutset.py',
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index ad23206c90..5e1a804d33 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -31,7 +31,6 @@ class TxnMallTest(BitcoinTestFramework):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
def run_test(self):
if self.options.segwit:
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py
index 1891cd9190..cac58aeaf2 100755
--- a/test/functional/wallet_txn_doublespend.py
+++ b/test/functional/wallet_txn_doublespend.py
@@ -29,7 +29,6 @@ class TxnMallTest(BitcoinTestFramework):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 BTC:
diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py
index bb81746715..cc2139a027 100755
--- a/test/functional/wallet_upgradewallet.py
+++ b/test/functional/wallet_upgradewallet.py
@@ -16,7 +16,6 @@ import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
- adjust_bitcoin_conf_for_pre_17,
assert_equal,
assert_greater_than,
assert_is_hex_string,
@@ -46,9 +45,6 @@ class UpgradeWalletTest(BitcoinTestFramework):
160300,
150200,
])
- # adapt bitcoin.conf, because older bitcoind's don't recognize config sections
- adjust_bitcoin_conf_for_pre_17(self.nodes[1].bitcoinconf)
- adjust_bitcoin_conf_for_pre_17(self.nodes[2].bitcoinconf)
self.start_nodes()
def dumb_sync_blocks(self):