Diffstat (limited to 'test')
-rwxr-xr-x  test/functional/feature_notifications.py       |  54
-rwxr-xr-x  test/functional/feature_taproot.py              |  20
-rwxr-xr-x  test/functional/interface_rpc.py                |  26
-rwxr-xr-x  test/functional/p2p_invalid_tx.py               |   2
-rwxr-xr-x  test/functional/p2p_leak.py                     |  64
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py       |  22
-rwxr-xr-x  test/functional/rpc_getblockfilter.py           |   6
-rwxr-xr-x  test/functional/rpc_invalid_address_message.py  |  20
-rw-r--r--  test/functional/test_framework/script.py        |   6
-rw-r--r--  test/functional/test_framework/segwit_addr.py   |  54
-rwxr-xr-x  test/functional/wallet_groups.py                |  16
-rwxr-xr-x  test/functional/wallet_labels.py                |  10
-rwxr-xr-x  test/functional/wallet_multiwallet.py           |   2
-rwxr-xr-x  test/fuzz/test_runner.py                        |  54
14 files changed, 245 insertions, 111 deletions
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index b068ce612c..4e2de1daf4 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -17,7 +17,7 @@ from test_framework.util import (
FILE_CHAR_START = 32 if os.name == 'nt' else 1
FILE_CHAR_END = 128
FILE_CHARS_DISALLOWED = '/\\?%*:|"<>' if os.name == 'nt' else '/'
-
+UNCONFIRMED_HASH_STRING = 'unconfirmed'
def notify_outputname(walletname, txid):
return txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)
@@ -43,7 +43,7 @@ class NotificationsTest(BitcoinTestFramework):
"-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s')),
], [
"-rescan",
- "-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))),
+ "-walletnotify=echo %h_%b > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))),
]]
self.wallet_names = [self.default_wallet_name, self.wallet]
super().setup_network()
@@ -90,11 +90,9 @@ class NotificationsTest(BitcoinTestFramework):
self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
- txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
- assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
+ tx_details = list(map(lambda t: (t['txid'], t['blockheight'], t['blockhash']), self.nodes[1].listtransactions("*", block_count)))
self.stop_node(1)
- for tx_file in os.listdir(self.walletnotify_dir):
- os.remove(os.path.join(self.walletnotify_dir, tx_file))
+ self.expect_wallet_notify(tx_details)
self.log.info("test -walletnotify after rescan")
# restart node to rescan to force wallet notifications
@@ -104,10 +102,8 @@ class NotificationsTest(BitcoinTestFramework):
self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
- txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
- assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
- for tx_file in os.listdir(self.walletnotify_dir):
- os.remove(os.path.join(self.walletnotify_dir, tx_file))
+ tx_details = list(map(lambda t: (t['txid'], t['blockheight'], t['blockhash']), self.nodes[1].listtransactions("*", block_count)))
+ self.expect_wallet_notify(tx_details)
# Conflicting transactions tests.
# Generate spends from node 0, and check notifications
@@ -122,7 +118,7 @@ class NotificationsTest(BitcoinTestFramework):
tx1 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
self.sync_mempools()
- self.expect_wallet_notify([tx1])
+ self.expect_wallet_notify([(tx1, -1, UNCONFIRMED_HASH_STRING)])
# Generate bump transaction, sync mempools, and check for bump1
# notification. In the future, per
@@ -131,39 +127,57 @@ class NotificationsTest(BitcoinTestFramework):
bump1 = self.nodes[0].bumpfee(tx1)["txid"]
assert_equal(bump1 in self.nodes[0].getrawmempool(), True)
self.sync_mempools()
- self.expect_wallet_notify([bump1])
+ self.expect_wallet_notify([(bump1, -1, UNCONFIRMED_HASH_STRING)])
# Add bump1 transaction to new block, checking for a notification
# and the correct number of confirmations.
- self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ blockhash1 = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+ blockheight1 = self.nodes[0].getblockcount()
self.sync_blocks()
- self.expect_wallet_notify([bump1])
+ self.expect_wallet_notify([(bump1, blockheight1, blockhash1)])
assert_equal(self.nodes[1].gettransaction(bump1)["confirmations"], 1)
# Generate a second transaction to be bumped.
tx2 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
assert_equal(tx2 in self.nodes[0].getrawmempool(), True)
self.sync_mempools()
- self.expect_wallet_notify([tx2])
+ self.expect_wallet_notify([(tx2, -1, UNCONFIRMED_HASH_STRING)])
# Bump tx2 as bump2 and generate a block on node 0 while
# disconnected, then reconnect and check for notifications on node 1
# about newly confirmed bump2 and newly conflicted tx2.
self.disconnect_nodes(0, 1)
bump2 = self.nodes[0].bumpfee(tx2)["txid"]
- self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ blockhash2 = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+ blockheight2 = self.nodes[0].getblockcount()
assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
self.connect_nodes(0, 1)
self.sync_blocks()
- self.expect_wallet_notify([bump2, tx2])
+ self.expect_wallet_notify([(bump2, blockheight2, blockhash2), (tx2, -1, UNCONFIRMED_HASH_STRING)])
assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)
# TODO: add test for `-alertnotify` large fork notifications
- def expect_wallet_notify(self, tx_ids):
- self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids), timeout=10)
- assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id in tx_ids), sorted(os.listdir(self.walletnotify_dir)))
+ def expect_wallet_notify(self, tx_details):
+ self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_details), timeout=10)
+ # Should have no more and no fewer files than expected
+ assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id, _, _ in tx_details), sorted(os.listdir(self.walletnotify_dir)))
+ # Now verify the contents of each file
+ for tx_id, blockheight, blockhash in tx_details:
+ fname = os.path.join(self.walletnotify_dir, notify_outputname(self.wallet, tx_id))
+ with open(fname, 'rt', encoding='utf-8') as f:
+ text = f.read()
+ # Universal newline ensures '\n' on 'nt'
+ assert_equal(text[-1], '\n')
+ text = text[:-1]
+ if os.name == 'nt':
+ # On Windows, echo as above appends a trailing space
+ assert_equal(text[-1], ' ')
+ text = text[:-1]
+ expected = str(blockheight) + '_' + blockhash
+ assert_equal(text, expected)
+
for tx_file in os.listdir(self.walletnotify_dir):
os.remove(os.path.join(self.walletnotify_dir, tx_file))
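
For reference, a minimal sketch (not part of the patch) of the naming and content conventions the updated notifications test relies on: -walletnotify writes a file named after notify_outputname('%w', '%s') whose body is the echoed "%h_%b" string, with height -1 and the literal 'unconfirmed' hash for mempool transactions. The wallet name and txid below are placeholders.

import os

def expected_notification(walletname, txid, blockheight, blockhash):
    # Filename convention mirrored from notify_outputname(): txid only on Windows.
    fname = txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)
    # Body written by `echo %h_%b`: height and hash joined by '_', then a newline
    # (Windows echo also leaves a trailing space before the newline).
    sep = ' \n' if os.name == 'nt' else '\n'
    body = '{}_{}'.format(blockheight, blockhash) + sep
    return fname, body

# Unconfirmed transactions report height -1 and the 'unconfirmed' placeholder hash.
print(expected_notification('test_wallet', 'ff' * 32, -1, 'unconfirmed'))
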
diff --git a/test/functional/feature_taproot.py b/test/functional/feature_taproot.py
index 5027a9828f..183a43abd4 100755
--- a/test/functional/feature_taproot.py
+++ b/test/functional/feature_taproot.py
@@ -177,17 +177,17 @@ def default_negflag(ctx):
"""Default expression for "negflag": tap.negflag."""
return get(ctx, "tap").negflag
-def default_pubkey_inner(ctx):
- """Default expression for "pubkey_inner": tap.inner_pubkey."""
- return get(ctx, "tap").inner_pubkey
+def default_pubkey_internal(ctx):
+ """Default expression for "pubkey_internal": tap.internal_pubkey."""
+ return get(ctx, "tap").internal_pubkey
def default_merklebranch(ctx):
"""Default expression for "merklebranch": tapleaf.merklebranch."""
return get(ctx, "tapleaf").merklebranch
def default_controlblock(ctx):
- """Default expression for "controlblock": combine leafversion, negflag, pubkey_inner, merklebranch."""
- return bytes([get(ctx, "leafversion") + get(ctx, "negflag")]) + get(ctx, "pubkey_inner") + get(ctx, "merklebranch")
+ """Default expression for "controlblock": combine leafversion, negflag, pubkey_internal, merklebranch."""
+ return bytes([get(ctx, "leafversion") + get(ctx, "negflag")]) + get(ctx, "pubkey_internal") + get(ctx, "merklebranch")
def default_sighash(ctx):
"""Default expression for "sighash": depending on mode, compute BIP341, BIP143, or legacy sighash."""
@@ -341,9 +341,9 @@ DEFAULT_CONTEXT = {
"tapleaf": default_tapleaf,
# The script to push, and include in the sighash, for a taproot script path spend.
"script_taproot": default_script_taproot,
- # The inner pubkey for a taproot script path spend (32 bytes).
- "pubkey_inner": default_pubkey_inner,
- # The negation flag of the inner pubkey for a taproot script path spend.
+ # The internal pubkey for a taproot script path spend (32 bytes).
+ "pubkey_internal": default_pubkey_internal,
+ # The negation flag of the internal pubkey for a taproot script path spend.
"negflag": default_negflag,
# The leaf version to include in the sighash (this does not affect the one in the control block).
"leafversion": default_leafversion,
@@ -780,8 +780,8 @@ def spenders_taproot_active():
add_spender(spenders, "spendpath/negflag", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"negflag": lambda ctx: 1 - default_negflag(ctx)}, **ERR_WITNESS_PROGRAM_MISMATCH)
# Test that bitflips in the Merkle branch invalidate it.
add_spender(spenders, "spendpath/bitflipmerkle", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"merklebranch": bitflipper(default_merklebranch)}, **ERR_WITNESS_PROGRAM_MISMATCH)
- # Test that bitflips in the inner pubkey invalidate it.
- add_spender(spenders, "spendpath/bitflippubkey", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"pubkey_inner": bitflipper(default_pubkey_inner)}, **ERR_WITNESS_PROGRAM_MISMATCH)
+ # Test that bitflips in the internal pubkey invalidate it.
+ add_spender(spenders, "spendpath/bitflippubkey", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"pubkey_internal": bitflipper(default_pubkey_internal)}, **ERR_WITNESS_PROGRAM_MISMATCH)
# Test that empty witnesses are invalid.
add_spender(spenders, "spendpath/emptywit", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"witness": []}, **ERR_EMPTY_WITNESS)
# Test that adding garbage to the control block invalidates it.
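
As an aside on the pubkey_inner -> pubkey_internal rename, a short sketch (not part of the patch) of the BIP341 control block layout that default_controlblock assembles; the byte values are placeholders, not a usable spend.

leafversion = 0xc0            # tapscript leaf version (low bit must be clear)
negflag = 1                   # negation/parity flag carried in the first byte
internal_pubkey = bytes(32)   # 32-byte x-only internal pubkey (placeholder)
merklebranch = bytes(32)      # concatenated 32-byte sibling hashes (placeholder: depth 1)

controlblock = bytes([leafversion + negflag]) + internal_pubkey + merklebranch
assert len(controlblock) == 33 + 32           # 33 bytes plus 32 per tree level
assert (len(controlblock) - 33) % 32 == 0
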
diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py
index 9c877aaeae..4d5666f414 100755
--- a/test/functional/interface_rpc.py
+++ b/test/functional/interface_rpc.py
@@ -8,6 +8,9 @@ import os
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than_or_equal
+from threading import Thread
+import subprocess
+
def expect_http_status(expected_http_status, expected_rpc_code,
fcn, *args):
@@ -18,6 +21,16 @@ def expect_http_status(expected_http_status, expected_rpc_code,
assert_equal(exc.error["code"], expected_rpc_code)
assert_equal(exc.http_status, expected_http_status)
+
+def test_work_queue_getblock(node, got_exceeded_error):
+ while not got_exceeded_error:
+ try:
+ node.cli('getrpcinfo').send_cli()
+ except subprocess.CalledProcessError as e:
+ assert_equal(e.output, 'error: Server response: Work queue depth exceeded\n')
+ got_exceeded_error.append(True)
+
+
class RPCInterfaceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -67,10 +80,23 @@ class RPCInterfaceTest(BitcoinTestFramework):
expect_http_status(404, -32601, self.nodes[0].invalidmethod)
expect_http_status(500, -8, self.nodes[0].getblockhash, 42)
+ def test_work_queue_exceeded(self):
+ self.log.info("Testing work queue exceeded...")
+ self.restart_node(0, ['-rpcworkqueue=1', '-rpcthreads=1'])
+ got_exceeded_error = []
+ threads = []
+ for _ in range(3):
+ t = Thread(target=test_work_queue_getblock, args=(self.nodes[0], got_exceeded_error))
+ t.start()
+ threads.append(t)
+ for t in threads:
+ t.join()
+
def run_test(self):
self.test_getrpcinfo()
self.test_batch_request()
self.test_http_status_codes()
+ self.test_work_queue_exceeded()
if __name__ == '__main__':
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index cca7390ae3..8783c244c3 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -154,7 +154,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
orphan_tx_pool[i].vin.append(CTxIn(outpoint=COutPoint(i, 333)))
orphan_tx_pool[i].vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
- with node.assert_debug_log(['mapOrphan overflow, removed 1 tx']):
+ with node.assert_debug_log(['orphanage overflow, removed 1 tx']):
node.p2ps[0].send_txs_and_test(orphan_tx_pool, node, success=False)
rejected_parent = CTransaction()
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index 12b8b7baff..71d5ca92b3 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -4,8 +4,8 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
-A node should never send anything other than VERSION/VERACK until it's
-received a VERACK.
+Before receiving a VERACK, a node should not send anything but VERSION/VERACK
+and feature negotiation messages (WTXIDRELAY, SENDADDRV2).
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
@@ -35,10 +35,12 @@ class LazyPeer(P2PInterface):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
+ self.got_wtxidrelay = False
+ self.got_sendaddrv2 = False
def bad_message(self, message):
self.unexpected_msg = True
- self.log.info("should not have received message: %s" % message.msgtype)
+ print("should not have received message: %s" % message.msgtype)
def on_open(self):
self.ever_connected = True
@@ -64,6 +66,8 @@ class LazyPeer(P2PInterface):
def on_cmpctblock(self, message): self.bad_message(message)
def on_getblocktxn(self, message): self.bad_message(message)
def on_blocktxn(self, message): self.bad_message(message)
+ def on_wtxidrelay(self, message): self.got_wtxidrelay = True
+ def on_sendaddrv2(self, message): self.got_sendaddrv2 = True
# Peer that sends a version but not a verack.
@@ -94,32 +98,61 @@ class P2PVersionStore(P2PInterface):
class P2PLeakTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
+ self.extra_args = [['-peertimeout=4']]
+
+ def create_old_version(self, nversion):
+ old_version_msg = msg_version()
+ old_version_msg.nVersion = nversion
+ old_version_msg.strSubVer = P2P_SUBVERSION
+ old_version_msg.nServices = P2P_SERVICES
+ old_version_msg.relay = P2P_VERSION_RELAY
+ return old_version_msg
def run_test(self):
- # Another peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
+ self.log.info('Check that the node doesn\'t send unexpected messages before handshake completion')
+ # Peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
no_version_idle_peer = self.nodes[0].add_p2p_connection(LazyPeer(), send_version=False, wait_for_verack=False)
# Peer that sends a version but not a verack.
no_verack_idle_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), wait_for_verack=False)
- # Wait until we got the verack in response to the version. Though, don't wait for the node to receive the
- # verack, since we never sent one
+ # Pre-wtxidRelay peer that sends a version but not a verack and does not support feature negotiation
+ # messages which start at nVersion == 70016
+ pre_wtxidrelay_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), send_version=False, wait_for_verack=False)
+ pre_wtxidrelay_peer.send_message(self.create_old_version(70015))
+
+ # Wait until the peer gets the verack in response to the version. Though, don't wait for the node to receive the
+ # verack, since the peer never sent one
no_verack_idle_peer.wait_for_verack()
+ pre_wtxidrelay_peer.wait_for_verack()
no_version_idle_peer.wait_until(lambda: no_version_idle_peer.ever_connected)
no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
+ pre_wtxidrelay_peer.wait_until(lambda: pre_wtxidrelay_peer.version_received)
# Mine a block and make sure that it's not sent to the connected peers
self.nodes[0].generate(nblocks=1)
- #Give the node enough time to possibly leak out a message
+ # Give the node enough time to possibly leak out a message
time.sleep(5)
- self.nodes[0].disconnect_p2ps()
+ # Make sure only expected messages came in
+ assert not no_version_idle_peer.unexpected_msg
+ assert not no_version_idle_peer.got_wtxidrelay
+ assert not no_version_idle_peer.got_sendaddrv2
- # Make sure no unexpected messages came in
- assert no_version_idle_peer.unexpected_msg == False
- assert no_verack_idle_peer.unexpected_msg == False
+ assert not no_verack_idle_peer.unexpected_msg
+ assert no_verack_idle_peer.got_wtxidrelay
+ assert no_verack_idle_peer.got_sendaddrv2
+
+ assert not pre_wtxidrelay_peer.unexpected_msg
+ assert not pre_wtxidrelay_peer.got_wtxidrelay
+ assert not pre_wtxidrelay_peer.got_sendaddrv2
+
+ # Expect peers to be disconnected due to timeout
+ assert not no_version_idle_peer.is_connected
+ assert not no_verack_idle_peer.is_connected
+ assert not pre_wtxidrelay_peer.is_connected
self.log.info('Check that the version message does not leak the local address of the node')
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
@@ -134,13 +167,8 @@ class P2PLeakTest(BitcoinTestFramework):
self.log.info('Check that old peers are disconnected')
p2p_old_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
- old_version_msg = msg_version()
- old_version_msg.nVersion = 31799
- old_version_msg.strSubVer = P2P_SUBVERSION
- old_version_msg.nServices = P2P_SERVICES
- old_version_msg.relay = P2P_VERSION_RELAY
- with self.nodes[0].assert_debug_log(['peer=3 using obsolete version 31799; disconnecting']):
- p2p_old_peer.send_message(old_version_msg)
+ with self.nodes[0].assert_debug_log(['peer=4 using obsolete version 31799; disconnecting']):
+ p2p_old_peer.send_message(self.create_old_version(31799))
p2p_old_peer.wait_for_disconnect()
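
A compact summary sketch (test-side reasoning only, not node code) of what each idle peer above may receive before the handshake completes; the constant name is illustrative, the value 70016 comes from the test.

WTXID_RELAY_VERSION = 70016  # assumed name for the protocol version gating feature negotiation

def allowed_pre_verack_msgs(peer_sent_version, peer_nversion=70016):
    """Messages the node may send to a peer that has not yet completed the handshake."""
    if not peer_sent_version:
        return set()                        # no_version_idle_peer: node stays silent
    allowed = {'version', 'verack'}
    if peer_nversion >= WTXID_RELAY_VERSION:
        # Feature negotiation is only attempted with peers that are new enough.
        allowed |= {'wtxidrelay', 'sendaddrv2'}
    return allowed

assert 'wtxidrelay' in allowed_pre_verack_msgs(True, 70016)       # no_verack_idle_peer
assert 'wtxidrelay' not in allowed_pre_verack_msgs(True, 70015)   # pre_wtxidrelay_peer
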
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 569471dc87..8c9755cc8c 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -32,6 +32,7 @@ class RawTransactionsTest(BitcoinTestFramework):
# This test isn't testing tx relay. Set whitelist on the peers for
# instant tx relay.
self.extra_args = [['-whitelist=noban@127.0.0.1']] * self.num_nodes
+ self.rpc_timeout = 90 # to prevent timeouts in `test_transaction_too_large`
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -94,6 +95,7 @@ class RawTransactionsTest(BitcoinTestFramework):
self.test_address_reuse()
self.test_option_subtract_fee_from_outputs()
self.test_subtract_fee_with_presets()
+ self.test_transaction_too_large()
def test_change_position(self):
"""Ensure setting changePosition in fundraw with an exact match is handled properly."""
@@ -907,5 +909,25 @@ class RawTransactionsTest(BitcoinTestFramework):
signedtx = self.nodes[0].signrawtransactionwithwallet(fundedtx['hex'])
self.nodes[0].sendrawtransaction(signedtx['hex'])
+ def test_transaction_too_large(self):
+ self.log.info("Test fundrawtx where BnB solution would result in a too large transaction, but Knapsack would not")
+ self.nodes[0].createwallet("large")
+ wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+ recipient = self.nodes[0].get_wallet_rpc("large")
+ outputs = {}
+ rawtx = recipient.createrawtransaction([], {wallet.getnewaddress(): 147.99899260})
+
+ # Make 1500 0.1 BTC outputs. The amount that we target for funding is in
+ # the BnB range when these outputs are used. However if these outputs
+ # are selected, the transaction will end up being too large, so it
+ # shouldn't use BnB and should instead fall back to Knapsack. That behavior
+ # is not implemented yet, so for now we just check that we get an error.
+ for _ in range(1500):
+ outputs[recipient.getnewaddress()] = 0.1
+ wallet.sendmany("", outputs)
+ self.nodes[0].generate(10)
+ assert_raises_rpc_error(-4, "Transaction too large", recipient.fundrawtransaction, rawtx)
+
+
if __name__ == '__main__':
RawTransactionsTest().main()
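
A rough back-of-the-envelope sketch (not part of the patch) of why the funding attempt above must fail once the 0.1 BTC coins are selected; the per-input size is an assumption, roughly that of a P2WPKH input.

target_btc = 147.99899260       # amount requested from the "large" wallet
utxo_btc = 0.1                  # value of each of the 1500 coins
inputs_needed = int(target_btc / utxo_btc) + 1   # about 1480 inputs
approx_vbytes = inputs_needed * 68               # assumption: ~68 vbytes per input
# The standard transaction weight limit corresponds to 100,000 vbytes, so a
# selection that sweeps these small coins cannot be built and the wallet
# reports "Transaction too large".
assert approx_vbytes > 100_000
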
diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py
index 044dbd35bf..a99e50f29f 100755
--- a/test/functional/rpc_getblockfilter.py
+++ b/test/functional/rpc_getblockfilter.py
@@ -54,5 +54,11 @@ class GetBlockFilterTest(BitcoinTestFramework):
genesis_hash = self.nodes[0].getblockhash(0)
assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")
+ # Test getblockfilter fails on node without compact block filter index
+ self.restart_node(0, extra_args=["-blockfilterindex=0"])
+ for filter_type in FILTER_TYPES:
+ assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
+ self.nodes[0].getblockfilter, genesis_hash, filter_type)
+
if __name__ == '__main__':
GetBlockFilterTest().main()
diff --git a/test/functional/rpc_invalid_address_message.py b/test/functional/rpc_invalid_address_message.py
index 469d6bdb05..e362642f0f 100755
--- a/test/functional/rpc_invalid_address_message.py
+++ b/test/functional/rpc_invalid_address_message.py
@@ -12,8 +12,12 @@ from test_framework.util import (
)
BECH32_VALID = 'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv'
-BECH32_INVALID_SIZE = 'bcrt1sqqpl9r5c'
-BECH32_INVALID_PREFIX = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'
+BECH32_INVALID_BECH32 = 'bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqdmchcc'
+BECH32_INVALID_BECH32M = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7k35mrzd'
+BECH32_INVALID_VERSION = 'bcrt130xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqynjegk'
+BECH32_INVALID_SIZE = 'bcrt1s0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7v8n0nx0muaewav25430mtr'
+BECH32_INVALID_V0_SIZE = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7kqqq5k3my'
+BECH32_INVALID_PREFIX = 'bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx'
BASE58_VALID = 'mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn'
BASE58_INVALID_PREFIX = '17VZNX1SN5NtKa8UQFxwQbFeFc3iqRYhem'
@@ -40,6 +44,18 @@ class InvalidAddressErrorMessageTest(BitcoinTestFramework):
assert not info['isvalid']
assert_equal(info['error'], 'Invalid prefix for Bech32 address')
+ info = node.validateaddress(BECH32_INVALID_BECH32)
+ assert not info['isvalid']
+ assert_equal(info['error'], 'Version 1+ witness address must use Bech32m checksum')
+
+ info = node.validateaddress(BECH32_INVALID_BECH32M)
+ assert not info['isvalid']
+ assert_equal(info['error'], 'Version 0 witness address must use Bech32 checksum')
+
+ info = node.validateaddress(BECH32_INVALID_V0_SIZE)
+ assert not info['isvalid']
+ assert_equal(info['error'], 'Invalid Bech32 v0 address data size')
+
info = node.validateaddress(BECH32_VALID)
assert info['isvalid']
assert 'error' not in info
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index c35533698c..3c9b8a6e69 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -826,11 +826,11 @@ def taproot_tree_helper(scripts):
# A TaprootInfo object has the following fields:
# - scriptPubKey: the scriptPubKey (witness v1 CScript)
-# - inner_pubkey: the inner pubkey (32 bytes)
-# - negflag: whether the pubkey in the scriptPubKey was negated from inner_pubkey+tweak*G (bool).
+# - internal_pubkey: the internal pubkey (32 bytes)
+# - negflag: whether the pubkey in the scriptPubKey was negated from internal_pubkey+tweak*G (bool).
# - tweak: the tweak (32 bytes)
# - leaves: a dict of name -> TaprootLeafInfo objects for all known leaves
-TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,inner_pubkey,negflag,tweak,leaves")
+TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,internal_pubkey,negflag,tweak,leaves")
# A TaprootLeafInfo object has the following fields:
# - script: the leaf script (CScript or bytes)
diff --git a/test/functional/test_framework/segwit_addr.py b/test/functional/test_framework/segwit_addr.py
index 00c0d8a919..861ca2b949 100644
--- a/test/functional/test_framework/segwit_addr.py
+++ b/test/functional/test_framework/segwit_addr.py
@@ -2,10 +2,18 @@
# Copyright (c) 2017 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Reference implementation for Bech32 and segwit addresses."""
+"""Reference implementation for Bech32/Bech32m and segwit addresses."""
import unittest
+from enum import Enum
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
+BECH32_CONST = 1
+BECH32M_CONST = 0x2bc830a3
+
+class Encoding(Enum):
+ """Enumeration type to list the various supported encodings."""
+ BECH32 = 1
+ BECH32M = 2
def bech32_polymod(values):
@@ -27,38 +35,45 @@ def bech32_hrp_expand(hrp):
def bech32_verify_checksum(hrp, data):
"""Verify a checksum given HRP and converted data characters."""
- return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1
-
+ check = bech32_polymod(bech32_hrp_expand(hrp) + data)
+ if check == BECH32_CONST:
+ return Encoding.BECH32
+ elif check == BECH32M_CONST:
+ return Encoding.BECH32M
+ else:
+ return None
-def bech32_create_checksum(hrp, data):
+def bech32_create_checksum(encoding, hrp, data):
"""Compute the checksum values given HRP and data."""
values = bech32_hrp_expand(hrp) + data
- polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1
+ const = BECH32M_CONST if encoding == Encoding.BECH32M else BECH32_CONST
+ polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ const
return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)]
-def bech32_encode(hrp, data):
- """Compute a Bech32 string given HRP and data values."""
- combined = data + bech32_create_checksum(hrp, data)
+def bech32_encode(encoding, hrp, data):
+ """Compute a Bech32 or Bech32m string given HRP and data values."""
+ combined = data + bech32_create_checksum(encoding, hrp, data)
return hrp + '1' + ''.join([CHARSET[d] for d in combined])
def bech32_decode(bech):
- """Validate a Bech32 string, and determine HRP and data."""
+ """Validate a Bech32/Bech32m string, and determine HRP and data."""
if ((any(ord(x) < 33 or ord(x) > 126 for x in bech)) or
(bech.lower() != bech and bech.upper() != bech)):
- return (None, None)
+ return (None, None, None)
bech = bech.lower()
pos = bech.rfind('1')
if pos < 1 or pos + 7 > len(bech) or len(bech) > 90:
- return (None, None)
+ return (None, None, None)
if not all(x in CHARSET for x in bech[pos+1:]):
- return (None, None)
+ return (None, None, None)
hrp = bech[:pos]
data = [CHARSET.find(x) for x in bech[pos+1:]]
- if not bech32_verify_checksum(hrp, data):
- return (None, None)
- return (hrp, data[:-6])
+ encoding = bech32_verify_checksum(hrp, data)
+ if encoding is None:
+ return (None, None, None)
+ return (encoding, hrp, data[:-6])
def convertbits(data, frombits, tobits, pad=True):
@@ -86,7 +101,7 @@ def convertbits(data, frombits, tobits, pad=True):
def decode_segwit_address(hrp, addr):
"""Decode a segwit address."""
- hrpgot, data = bech32_decode(addr)
+ encoding, hrpgot, data = bech32_decode(addr)
if hrpgot != hrp:
return (None, None)
decoded = convertbits(data[1:], 5, 8, False)
@@ -96,12 +111,15 @@ def decode_segwit_address(hrp, addr):
return (None, None)
if data[0] == 0 and len(decoded) != 20 and len(decoded) != 32:
return (None, None)
+ if (data[0] == 0 and encoding != Encoding.BECH32) or (data[0] != 0 and encoding != Encoding.BECH32M):
+ return (None, None)
return (data[0], decoded)
def encode_segwit_address(hrp, witver, witprog):
"""Encode a segwit address."""
- ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
+ encoding = Encoding.BECH32 if witver == 0 else Encoding.BECH32M
+ ret = bech32_encode(encoding, hrp, [witver] + convertbits(witprog, 8, 5))
if decode_segwit_address(hrp, ret) == (None, None):
return None
return ret
@@ -119,3 +137,5 @@ class TestFrameworkScript(unittest.TestCase):
# P2WSH
test_python_bech32('bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj')
test_python_bech32('bcrt1qft5p2uhsdcdc3l2ua4ap5qqfg4pjaqlp250x7us7a8qqhrxrxfsqseac85')
+ # P2TR
+ test_python_bech32('bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqc8gma6')
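
A brief usage sketch (not part of the patch) showing the checksum selection by witness version in the updated module; the import assumes the test/functional directory is on sys.path, and the program bytes are placeholders.

from test_framework.segwit_addr import decode_segwit_address, encode_segwit_address

witprog_v0 = bytes(20)   # placeholder 20-byte program (P2WPKH-sized)
witprog_v1 = bytes(32)   # placeholder 32-byte program (P2TR-sized)

addr_v0 = encode_segwit_address('bcrt', 0, witprog_v0)   # encoded with Bech32
addr_v1 = encode_segwit_address('bcrt', 1, witprog_v1)   # encoded with Bech32m

# decode_segwit_address() round-trips and enforces the version/checksum pairing.
assert decode_segwit_address('bcrt', addr_v0) == (0, list(witprog_v0))
assert decode_segwit_address('bcrt', addr_v1) == (1, list(witprog_v1))
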
diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py
index e5c4f12f20..c0b76d960f 100755
--- a/test/functional/wallet_groups.py
+++ b/test/functional/wallet_groups.py
@@ -29,8 +29,9 @@ class WalletGroupTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def run_test(self):
+ self.log.info("Setting up")
# Mine some coins
- self.nodes[0].generate(110)
+ self.nodes[0].generate(101)
# Get some addresses from the two nodes
addr1 = [self.nodes[1].getnewaddress() for _ in range(3)]
@@ -48,6 +49,7 @@ class WalletGroupTest(BitcoinTestFramework):
# - node[1] should pick one 0.5 UTXO and leave the rest
# - node[2] should pick one (1.0 + 0.5) UTXO group corresponding to a
# given address, and leave the rest
+ self.log.info("Test sending transactions picks one UTXO group and leaves the rest")
txid1 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
tx1 = self.nodes[1].getrawtransaction(txid1, True)
# txid1 should have 1 input and 2 outputs
@@ -70,7 +72,7 @@ class WalletGroupTest(BitcoinTestFramework):
assert_approx(v[0], vexp=0.2, vspan=0.0001)
assert_approx(v[1], vexp=1.3, vspan=0.0001)
- # Test 'avoid partial if warranted, even if disabled'
+ self.log.info("Test avoiding partial spends if warranted, even if avoidpartialspends is disabled")
self.sync_all()
self.nodes[0].generate(1)
# Nodes 1-2 now have confirmed UTXOs (letters denote destinations):
@@ -104,7 +106,7 @@ class WalletGroupTest(BitcoinTestFramework):
assert_equal(input_addrs[0], input_addrs[1])
# Node 2 enforces avoidpartialspends so needs no checking here
- # Test wallet option maxapsfee with Node 3
+ self.log.info("Test wallet option maxapsfee")
addr_aps = self.nodes[3].getnewaddress()
self.nodes[0].sendtoaddress(addr_aps, 1.0)
self.nodes[0].sendtoaddress(addr_aps, 1.0)
@@ -131,6 +133,7 @@ class WalletGroupTest(BitcoinTestFramework):
# Test wallet option maxapsfee with node 4, which sets maxapsfee
# 1 sat higher, crossing the threshold from non-grouped to grouped.
+ self.log.info("Test wallet option maxapsfee threshold from non-grouped to grouped")
addr_aps3 = self.nodes[4].getnewaddress()
[self.nodes[0].sendtoaddress(addr_aps3, 1.0) for _ in range(5)]
self.nodes[0].generate(1)
@@ -147,8 +150,7 @@ class WalletGroupTest(BitcoinTestFramework):
self.sync_all()
self.nodes[0].generate(1)
- # Fill node2's wallet with 10000 outputs corresponding to the same
- # scriptPubKey
+ self.log.info("Fill a wallet with 10,000 outputs corresponding to the same scriptPubKey")
for _ in range(5):
raw_tx = self.nodes[0].createrawtransaction([{"txid":"0"*64, "vout":0}], [{addr2[0]: 0.05}])
tx = FromHex(CTransaction(), raw_tx)
@@ -158,12 +160,12 @@ class WalletGroupTest(BitcoinTestFramework):
signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex'])
self.nodes[0].sendrawtransaction(signed_tx['hex'])
self.nodes[0].generate(1)
-
- self.sync_all()
+ self.sync_all()
# Check that we can create a transaction that only requires ~100 of our
# utxos, without pulling in all outputs and creating a transaction that
# is way too big.
+ self.log.info("Test creating txn that only requires ~100 of our UTXOs without pulling in all outputs")
assert self.nodes[2].sendtoaddress(address=addr2[0], amount=5)
diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py
index 883b97561e..551eb72720 100755
--- a/test/functional/wallet_labels.py
+++ b/test/functional/wallet_labels.py
@@ -138,13 +138,13 @@ class WalletLabelsTest(BitcoinTestFramework):
node.createwallet(wallet_name='watch_only', disable_private_keys=True)
wallet_watch_only = node.get_wallet_rpc('watch_only')
BECH32_VALID = {
- '✔️_VER15_PROG40': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqn2cjv3',
- '✔️_VER16_PROG03': 'bcrt1sqqqqqjq8pdp',
- '✔️_VER16_PROB02': 'bcrt1sqqqqqjq8pv',
+ '✔️_VER15_PROG40': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqxkg7fn',
+ '✔️_VER16_PROG03': 'bcrt1sqqqqq8uhdgr',
+ '✔️_VER16_PROB02': 'bcrt1sqqqq4wstyw',
}
BECH32_INVALID = {
- '❌_VER15_PROG41': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzc7xyq',
- '❌_VER16_PROB01': 'bcrt1sqqpl9r5c',
+ '❌_VER15_PROG41': 'bcrt1sqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqajlxj8',
+ '❌_VER16_PROB01': 'bcrt1sqq5r4036',
}
for l in BECH32_VALID:
ad = BECH32_VALID[l]
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index bf24b9c7b3..71d1b96a95 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -34,7 +34,7 @@ def test_load_unload(node, name):
node.loadwallet(name)
node.unloadwallet(name)
except JSONRPCException as e:
- if e.error['code'] == -4 and 'Wallet already being loading' in e.error['message']:
+ if e.error['code'] == -4 and 'Wallet already loading' in e.error['message']:
got_loading_error = True
return
diff --git a/test/fuzz/test_runner.py b/test/fuzz/test_runner.py
index 611061072f..eeff7a4515 100755
--- a/test/fuzz/test_runner.py
+++ b/test/fuzz/test_runner.py
@@ -27,7 +27,7 @@ def get_fuzz_env(*, target, source_dir):
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
- description='''Run the fuzz targets with all inputs from the seed_dir once.''',
+ description='''Run the fuzz targets with all inputs from the corpus_dir once.''',
)
parser.add_argument(
"-l",
@@ -54,8 +54,8 @@ def main():
help='How many targets to merge or execute in parallel.',
)
parser.add_argument(
- 'seed_dir',
- help='The seed corpus to run on (must contain subfolders for each fuzz target).',
+ 'corpus_dir',
+ help='The corpus to run on (must contain subfolders for each fuzz target).',
)
parser.add_argument(
'target',
@@ -64,15 +64,15 @@ def main():
)
parser.add_argument(
'--m_dir',
- help='Merge inputs from this directory into the seed_dir.',
+ help='Merge inputs from this directory into the corpus_dir.',
)
parser.add_argument(
'-g',
'--generate',
action='store_true',
- help='Create new corpus seeds (or extend the existing ones) by running'
+ help='Create a new corpus (or extend an existing one) by running'
' the given targets for a finite number of times. Outputs them to'
- ' the passed seed_dir.'
+ ' the passed corpus_dir.'
)
args = parser.parse_args()
@@ -119,19 +119,19 @@ def main():
logging.info("{} of {} detected fuzz target(s) selected: {}".format(len(test_list_selection), len(test_list_all), " ".join(test_list_selection)))
if not args.generate:
- test_list_seedless = []
+ test_list_missing_corpus = []
for t in test_list_selection:
- corpus_path = os.path.join(args.seed_dir, t)
+ corpus_path = os.path.join(args.corpus_dir, t)
if not os.path.exists(corpus_path) or len(os.listdir(corpus_path)) == 0:
- test_list_seedless.append(t)
- test_list_seedless.sort()
- if test_list_seedless:
+ test_list_missing_corpus.append(t)
+ test_list_missing_corpus.sort()
+ if test_list_missing_corpus:
logging.info(
- "Fuzzing harnesses lacking a seed corpus: {}".format(
- " ".join(test_list_seedless)
+ "Fuzzing harnesses lacking a corpus: {}".format(
+ " ".join(test_list_missing_corpus)
)
)
- logging.info("Please consider adding a fuzz seed corpus at https://github.com/bitcoin-core/qa-assets")
+ logging.info("Please consider adding a fuzz corpus at https://github.com/bitcoin-core/qa-assets")
try:
help_output = subprocess.run(
@@ -154,18 +154,18 @@ def main():
with ThreadPoolExecutor(max_workers=args.par) as fuzz_pool:
if args.generate:
- return generate_corpus_seeds(
+ return generate_corpus(
fuzz_pool=fuzz_pool,
src_dir=config['environment']['SRCDIR'],
build_dir=config["environment"]["BUILDDIR"],
- seed_dir=args.seed_dir,
+ corpus_dir=args.corpus_dir,
targets=test_list_selection,
)
if args.m_dir:
merge_inputs(
fuzz_pool=fuzz_pool,
- corpus=args.seed_dir,
+ corpus=args.corpus_dir,
test_list=test_list_selection,
src_dir=config['environment']['SRCDIR'],
build_dir=config["environment"]["BUILDDIR"],
@@ -175,7 +175,7 @@ def main():
run_once(
fuzz_pool=fuzz_pool,
- corpus=args.seed_dir,
+ corpus=args.corpus_dir,
test_list=test_list_selection,
src_dir=config['environment']['SRCDIR'],
build_dir=config["environment"]["BUILDDIR"],
@@ -183,13 +183,13 @@ def main():
)
-def generate_corpus_seeds(*, fuzz_pool, src_dir, build_dir, seed_dir, targets):
- """Generates new corpus seeds.
+def generate_corpus(*, fuzz_pool, src_dir, build_dir, corpus_dir, targets):
+ """Generates new corpus.
- Run {targets} without input, and outputs the generated corpus seeds to
- {seed_dir}.
+ Run {targets} without input, and output the generated corpus to
+ {corpus_dir}.
"""
- logging.info("Generating corpus seeds to {}".format(seed_dir))
+ logging.info("Generating corpus to {}".format(corpus_dir))
def job(command, t):
logging.debug("Running '{}'\n".format(" ".join(command)))
@@ -205,12 +205,12 @@ def generate_corpus_seeds(*, fuzz_pool, src_dir, build_dir, seed_dir, targets):
futures = []
for target in targets:
- target_seed_dir = os.path.join(seed_dir, target)
- os.makedirs(target_seed_dir, exist_ok=True)
+ target_corpus_dir = os.path.join(corpus_dir, target)
+ os.makedirs(target_corpus_dir, exist_ok=True)
command = [
os.path.join(build_dir, 'src', 'test', 'fuzz', 'fuzz'),
"-runs=100000",
- target_seed_dir,
+ target_corpus_dir,
]
futures.append(fuzz_pool.submit(job, command, target))
@@ -219,7 +219,7 @@ def generate_corpus_seeds(*, fuzz_pool, src_dir, build_dir, seed_dir, targets):
def merge_inputs(*, fuzz_pool, corpus, test_list, src_dir, build_dir, merge_dir):
- logging.info("Merge the inputs from the passed dir into the seed_dir. Passed dir {}".format(merge_dir))
+ logging.info("Merge the inputs from the passed dir into the corpus_dir. Passed dir {}".format(merge_dir))
jobs = []
for t in test_list:
args = [